/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif
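/* Illustrative note (not part of the original source): on a typical target
   where STACK_GROWS_DOWNWARD is defined but ARGS_GROW_DOWNWARD is not, the
   two defined() tests differ, so PUSH_ARGS_REVERSED is defined and argument
   pushes are emitted last to first; when both grow the same way it stays
   undefined and arguments are processed first to last.  */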
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};
static rtx enqueue_insn PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
	PARAMS ((unsigned HOST_WIDE_INT, unsigned int));
static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
				      struct move_by_pieces *));
static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
static tree emit_block_move_libcall_fn PARAMS ((int));
static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
				      enum machine_mode));
static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
				     unsigned int));
static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
				       unsigned int));
static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
				       enum machine_mode,
				       struct store_by_pieces *));
static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
static tree clear_storage_libcall_fn PARAMS ((int));
static rtx compress_float_constant PARAMS ((rtx, rtx));
static rtx get_subtarget PARAMS ((rtx));
static int is_zeros_p PARAMS ((tree));
static int mostly_zeros_p PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, int, int));
static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
				HOST_WIDE_INT, enum machine_mode,
				tree, enum machine_mode, int, tree,
				int));
static rtx var_rtx PARAMS ((tree));

static unsigned HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static unsigned HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree,
								    tree));

static int is_aligning_offset PARAMS ((tree, tree));
static rtx expand_increment PARAMS ((tree, int, int));
static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
#ifdef PUSH_ROUNDING
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
#endif
static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
static rtx const_vector_from_tree PARAMS ((tree));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif

/* If a clear memory operation would take CLEAR_RATIO or more simple
   move-instruction sequences, we will do a clrstr or libcall instead.  */

#ifndef CLEAR_RATIO
#if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
#define CLEAR_RATIO 2
#else
/* If we are optimizing for space, cut down the default clear ratio.  */
#define CLEAR_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif
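/* Illustrative example (not part of the original source): with the default
   MOVE_RATIO of 15 (not optimizing for size) and a 4-byte word, copying a
   16-byte word-aligned structure needs move_by_pieces_ninsns == 4 simple
   moves, so MOVE_BY_PIECES_P is true and the copy is expanded inline; a
   copy that would need 15 or more moves falls back to a movstr pattern or
   a library call instead.  */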
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
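/* Illustrative usage sketch (not part of the original source): a caller
   expanding something like "target = a++" would typically do

     rtx op = protect_from_queue (a_rtx, 0);	/* pre-increment value */
     emit_move_insn (target, op);
     emit_queue ();				/* now emit the queued a++ */

   where a_rtx and target are hypothetical operands; holding the protected
   value across an intervening queue flush would be incorrect, as the
   comment above warns.  */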
rtx
protect_from_queue (x, modify)
     rtx x;
     int modify;
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
	{
	case INSN:
	case JUMP_INSN:
	case CALL_INSN:
	case CODE_LABEL:
	case BARRIER:
	case NOTE:
	  QUEUED_INSN (p) = body;
	  emit_insn (body);
	  break;

#ifdef ENABLE_CHECKING
	case SEQUENCE:
	  abort ();
	  break;
#endif

	default:
	  QUEUED_INSN (p) = emit_insn (body);
	  break;
	}

      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
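/* Illustrative example (not part of the original source): converting an
   SImode register into a DImode register with UNSIGNEDP nonzero emits a
   zero_extend (or an equivalent multi-insn sequence); with UNSIGNEDP zero
   a sign_extend is emitted instead.  */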
void
convert_move (to, from, unsignedp)
     rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}
627 #ifdef HAVE_trunchfqf2
628 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
630 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
631 return;
633 #endif
634 #ifdef HAVE_trunctqfqf2
635 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
637 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
638 return;
640 #endif
641 #ifdef HAVE_truncsfqf2
642 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
644 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
645 return;
647 #endif
648 #ifdef HAVE_truncdfqf2
649 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
651 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
652 return;
654 #endif
655 #ifdef HAVE_truncxfqf2
656 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
658 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
659 return;
661 #endif
662 #ifdef HAVE_trunctfqf2
663 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
665 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
666 return;
668 #endif
670 #ifdef HAVE_trunctqfhf2
671 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
673 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
674 return;
676 #endif
677 #ifdef HAVE_truncsfhf2
678 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
680 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
681 return;
683 #endif
684 #ifdef HAVE_truncdfhf2
685 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
687 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
688 return;
690 #endif
691 #ifdef HAVE_truncxfhf2
692 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
694 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
695 return;
697 #endif
698 #ifdef HAVE_trunctfhf2
699 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
701 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
702 return;
704 #endif
706 #ifdef HAVE_truncsftqf2
707 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
709 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
710 return;
712 #endif
713 #ifdef HAVE_truncdftqf2
714 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
716 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
717 return;
719 #endif
720 #ifdef HAVE_truncxftqf2
721 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
723 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
724 return;
726 #endif
727 #ifdef HAVE_trunctftqf2
728 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
730 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
731 return;
733 #endif
735 #ifdef HAVE_truncdfsf2
736 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
738 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
739 return;
741 #endif
742 #ifdef HAVE_truncxfsf2
743 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
745 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
746 return;
748 #endif
749 #ifdef HAVE_trunctfsf2
750 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
752 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
753 return;
755 #endif
756 #ifdef HAVE_truncxfdf2
757 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
759 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
760 return;
762 #endif
763 #ifdef HAVE_trunctfdf2
764 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
766 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
767 return;
769 #endif
771 libcall = (rtx) 0;
772 switch (from_mode)
774 case SFmode:
775 switch (to_mode)
777 case DFmode:
778 libcall = extendsfdf2_libfunc;
779 break;
781 case XFmode:
782 libcall = extendsfxf2_libfunc;
783 break;
785 case TFmode:
786 libcall = extendsftf2_libfunc;
787 break;
789 default:
790 break;
792 break;
794 case DFmode:
795 switch (to_mode)
797 case SFmode:
798 libcall = truncdfsf2_libfunc;
799 break;
801 case XFmode:
802 libcall = extenddfxf2_libfunc;
803 break;
805 case TFmode:
806 libcall = extenddftf2_libfunc;
807 break;
809 default:
810 break;
812 break;
814 case XFmode:
815 switch (to_mode)
817 case SFmode:
818 libcall = truncxfsf2_libfunc;
819 break;
821 case DFmode:
822 libcall = truncxfdf2_libfunc;
823 break;
825 default:
826 break;
828 break;
830 case TFmode:
831 switch (to_mode)
833 case SFmode:
834 libcall = trunctfsf2_libfunc;
835 break;
837 case DFmode:
838 libcall = trunctfdf2_libfunc;
839 break;
841 default:
842 break;
844 break;
846 default:
847 break;
850 if (libcall == (rtx) 0)
851 /* This conversion is not implemented yet. */
852 abort ();
854 start_sequence ();
855 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
856 1, from, from_mode);
857 insns = get_insns ();
858 end_sequence ();
859 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
860 from));
861 return;
864 /* Now both modes are integers. */
866 /* Handle expanding beyond a word. */
867 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
868 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
870 rtx insns;
871 rtx lowpart;
872 rtx fill_value;
873 rtx lowfrom;
874 int i;
875 enum machine_mode lowpart_mode;
876 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
878 /* Try converting directly if the insn is supported. */
879 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
880 != CODE_FOR_nothing)
882 /* If FROM is a SUBREG, put it into a register. Do this
883 so that we always generate the same set of insns for
884 better cse'ing; if an intermediate assignment occurred,
885 we won't be doing the operation directly on the SUBREG. */
886 if (optimize > 0 && GET_CODE (from) == SUBREG)
887 from = force_reg (from_mode, from);
888 emit_unop_insn (code, to, from, equiv_code);
889 return;
891 /* Next, try converting via full word. */
892 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
893 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
894 != CODE_FOR_nothing))
896 if (GET_CODE (to) == REG)
897 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
898 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
899 emit_unop_insn (code, to,
900 gen_lowpart (word_mode, to), equiv_code);
901 return;
904 /* No special multiword conversion insn; do it by hand. */
905 start_sequence ();
907 /* Since we will turn this into a no conflict block, we must ensure
908 that the source does not overlap the target. */
910 if (reg_overlap_mentioned_p (to, from))
911 from = force_reg (from_mode, from);
913 /* Get a copy of FROM widened to a word, if necessary. */
914 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
915 lowpart_mode = word_mode;
916 else
917 lowpart_mode = from_mode;
919 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
921 lowpart = gen_lowpart (lowpart_mode, to);
922 emit_move_insn (lowpart, lowfrom);
924 /* Compute the value to put in each remaining word. */
925 if (unsignedp)
926 fill_value = const0_rtx;
927 else
929 #ifdef HAVE_slt
930 if (HAVE_slt
931 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
932 && STORE_FLAG_VALUE == -1)
934 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
935 lowpart_mode, 0);
936 fill_value = gen_reg_rtx (word_mode);
937 emit_insn (gen_slt (fill_value));
939 else
940 #endif
942 fill_value
943 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
944 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
945 NULL_RTX, 0);
946 fill_value = convert_to_mode (word_mode, fill_value, 1);
950 /* Fill the remaining words. */
951 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
953 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
954 rtx subword = operand_subword (to, index, 1, to_mode);
956 if (subword == 0)
957 abort ();
959 if (fill_value != subword)
960 emit_move_insn (subword, fill_value);
963 insns = get_insns ();
964 end_sequence ();
966 emit_no_conflict_block (insns, to, from, NULL_RTX,
967 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
968 return;
971 /* Truncating multi-word to a word or less. */
972 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
973 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
975 if (!((GET_CODE (from) == MEM
976 && ! MEM_VOLATILE_P (from)
977 && direct_load[(int) to_mode]
978 && ! mode_dependent_address_p (XEXP (from, 0)))
979 || GET_CODE (from) == REG
980 || GET_CODE (from) == SUBREG))
981 from = force_reg (from_mode, from);
982 convert_move (to, gen_lowpart (word_mode, from), 0);
983 return;
986 /* Handle pointer conversion. */ /* SPEE 900220. */
987 if (to_mode == PQImode)
989 if (from_mode != QImode)
990 from = convert_to_mode (QImode, from, unsignedp);
992 #ifdef HAVE_truncqipqi2
993 if (HAVE_truncqipqi2)
995 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
996 return;
998 #endif /* HAVE_truncqipqi2 */
999 abort ();
1002 if (from_mode == PQImode)
1004 if (to_mode != QImode)
1006 from = convert_to_mode (QImode, from, unsignedp);
1007 from_mode = QImode;
1009 else
1011 #ifdef HAVE_extendpqiqi2
1012 if (HAVE_extendpqiqi2)
1014 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
1015 return;
1017 #endif /* HAVE_extendpqiqi2 */
1018 abort ();
1022 if (to_mode == PSImode)
1024 if (from_mode != SImode)
1025 from = convert_to_mode (SImode, from, unsignedp);
1027 #ifdef HAVE_truncsipsi2
1028 if (HAVE_truncsipsi2)
1030 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1031 return;
1033 #endif /* HAVE_truncsipsi2 */
1034 abort ();
1037 if (from_mode == PSImode)
1039 if (to_mode != SImode)
1041 from = convert_to_mode (SImode, from, unsignedp);
1042 from_mode = SImode;
1044 else
1046 #ifdef HAVE_extendpsisi2
1047 if (! unsignedp && HAVE_extendpsisi2)
1049 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1050 return;
1052 #endif /* HAVE_extendpsisi2 */
1053 #ifdef HAVE_zero_extendpsisi2
1054 if (unsignedp && HAVE_zero_extendpsisi2)
1056 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1057 return;
1059 #endif /* HAVE_zero_extendpsisi2 */
1060 abort ();
1064 if (to_mode == PDImode)
1066 if (from_mode != DImode)
1067 from = convert_to_mode (DImode, from, unsignedp);
1069 #ifdef HAVE_truncdipdi2
1070 if (HAVE_truncdipdi2)
1072 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1073 return;
1075 #endif /* HAVE_truncdipdi2 */
1076 abort ();
1079 if (from_mode == PDImode)
1081 if (to_mode != DImode)
1083 from = convert_to_mode (DImode, from, unsignedp);
1084 from_mode = DImode;
1086 else
1088 #ifdef HAVE_extendpdidi2
1089 if (HAVE_extendpdidi2)
1091 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1092 return;
1094 #endif /* HAVE_extendpdidi2 */
1095 abort ();
1099 /* Now follow all the conversions between integers
1100 no more than a word long. */
1102 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1103 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1104 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1105 GET_MODE_BITSIZE (from_mode)))
1107 if (!((GET_CODE (from) == MEM
1108 && ! MEM_VOLATILE_P (from)
1109 && direct_load[(int) to_mode]
1110 && ! mode_dependent_address_p (XEXP (from, 0)))
1111 || GET_CODE (from) == REG
1112 || GET_CODE (from) == SUBREG))
1113 from = force_reg (from_mode, from);
1114 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1115 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1116 from = copy_to_reg (from);
1117 emit_move_insn (to, gen_lowpart (to_mode, from));
1118 return;
1121 /* Handle extension. */
1122 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1124 /* Convert directly if that works. */
1125 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1126 != CODE_FOR_nothing)
1128 if (flag_force_mem)
1129 from = force_not_mem (from);
1131 emit_unop_insn (code, to, from, equiv_code);
1132 return;
1134 else
1136 enum machine_mode intermediate;
1137 rtx tmp;
1138 tree shift_amount;
1140 /* Search for a mode to convert via. */
1141 for (intermediate = from_mode; intermediate != VOIDmode;
1142 intermediate = GET_MODE_WIDER_MODE (intermediate))
1143 if (((can_extend_p (to_mode, intermediate, unsignedp)
1144 != CODE_FOR_nothing)
1145 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1146 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1147 GET_MODE_BITSIZE (intermediate))))
1148 && (can_extend_p (intermediate, from_mode, unsignedp)
1149 != CODE_FOR_nothing))
1151 convert_move (to, convert_to_mode (intermediate, from,
1152 unsignedp), unsignedp);
1153 return;
1156 /* No suitable intermediate mode.
1157 Generate what we need with shifts. */
1158 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1159 - GET_MODE_BITSIZE (from_mode), 0);
1160 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1161 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1162 to, unsignedp);
1163 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1164 to, unsignedp);
1165 if (tmp != to)
1166 emit_move_insn (to, tmp);
1167 return;
1171 /* Support special truncate insns for certain modes. */
1173 if (from_mode == DImode && to_mode == SImode)
1175 #ifdef HAVE_truncdisi2
1176 if (HAVE_truncdisi2)
1178 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1179 return;
1181 #endif
1182 convert_move (to, force_reg (from_mode, from), unsignedp);
1183 return;
1186 if (from_mode == DImode && to_mode == HImode)
1188 #ifdef HAVE_truncdihi2
1189 if (HAVE_truncdihi2)
1191 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1192 return;
1194 #endif
1195 convert_move (to, force_reg (from_mode, from), unsignedp);
1196 return;
1199 if (from_mode == DImode && to_mode == QImode)
1201 #ifdef HAVE_truncdiqi2
1202 if (HAVE_truncdiqi2)
1204 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1205 return;
1207 #endif
1208 convert_move (to, force_reg (from_mode, from), unsignedp);
1209 return;
1212 if (from_mode == SImode && to_mode == HImode)
1214 #ifdef HAVE_truncsihi2
1215 if (HAVE_truncsihi2)
1217 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1218 return;
1220 #endif
1221 convert_move (to, force_reg (from_mode, from), unsignedp);
1222 return;
1225 if (from_mode == SImode && to_mode == QImode)
1227 #ifdef HAVE_truncsiqi2
1228 if (HAVE_truncsiqi2)
1230 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1231 return;
1233 #endif
1234 convert_move (to, force_reg (from_mode, from), unsignedp);
1235 return;
1238 if (from_mode == HImode && to_mode == QImode)
1240 #ifdef HAVE_trunchiqi2
1241 if (HAVE_trunchiqi2)
1243 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1244 return;
1246 #endif
1247 convert_move (to, force_reg (from_mode, from), unsignedp);
1248 return;
1251 if (from_mode == TImode && to_mode == DImode)
1253 #ifdef HAVE_trunctidi2
1254 if (HAVE_trunctidi2)
1256 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1257 return;
1259 #endif
1260 convert_move (to, force_reg (from_mode, from), unsignedp);
1261 return;
1264 if (from_mode == TImode && to_mode == SImode)
1266 #ifdef HAVE_trunctisi2
1267 if (HAVE_trunctisi2)
1269 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1270 return;
1272 #endif
1273 convert_move (to, force_reg (from_mode, from), unsignedp);
1274 return;
1277 if (from_mode == TImode && to_mode == HImode)
1279 #ifdef HAVE_trunctihi2
1280 if (HAVE_trunctihi2)
1282 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1283 return;
1285 #endif
1286 convert_move (to, force_reg (from_mode, from), unsignedp);
1287 return;
1290 if (from_mode == TImode && to_mode == QImode)
1292 #ifdef HAVE_trunctiqi2
1293 if (HAVE_trunctiqi2)
1295 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1296 return;
1298 #endif
1299 convert_move (to, force_reg (from_mode, from), unsignedp);
1300 return;
1303 /* Handle truncation of volatile memrefs, and so on;
1304 the things that couldn't be truncated directly,
1305 and for which there was no special instruction. */
1306 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1308 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1309 emit_move_insn (to, temp);
1310 return;
1313 /* Mode combination is not recognized. */
1314 abort ();
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
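/* Illustrative example (not part of the original source): a caller widening
   a QImode value X to SImode as unsigned can write

     rtx wide = convert_to_mode (SImode, x, 1);

   which either reuses part of X in place or copies it into a fresh SImode
   temporary via convert_move.  */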
rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;
  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */
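  /* Illustrative example (not part of the original source): with a 64-bit
     HOST_WIDE_INT, converting (const_int -1) to an unsigned 128-bit integer
     mode should yield a value whose low word is all ones and whose high
     word is zero; simply sign-extending through gen_lowpart would instead
     produce all ones in both words.  */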
1377 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1378 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1379 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1381 HOST_WIDE_INT val = INTVAL (x);
1383 if (oldmode != VOIDmode
1384 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1386 int width = GET_MODE_BITSIZE (oldmode);
1388 /* We need to zero extend VAL. */
1389 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1392 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1395 /* We can do this with a gen_lowpart if both desired and current modes
1396 are integer, and this is either a constant integer, a register, or a
1397 non-volatile MEM. Except for the constant case where MODE is no
1398 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1400 if ((GET_CODE (x) == CONST_INT
1401 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1402 || (GET_MODE_CLASS (mode) == MODE_INT
1403 && GET_MODE_CLASS (oldmode) == MODE_INT
1404 && (GET_CODE (x) == CONST_DOUBLE
1405 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1406 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1407 && direct_load[(int) mode])
1408 || (GET_CODE (x) == REG
1409 && (! HARD_REGISTER_P (x)
1410 || HARD_REGNO_MODE_OK (REGNO (x), mode))
1411 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1412 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1414 /* ?? If we don't know OLDMODE, we have to assume here that
1415 X does not need sign- or zero-extension. This may not be
1416 the case, but it's the best we can do. */
1417 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1418 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1420 HOST_WIDE_INT val = INTVAL (x);
1421 int width = GET_MODE_BITSIZE (oldmode);
1423 /* We must sign or zero-extend in this case. Start by
1424 zero-extending, then sign extend if we need to. */
1425 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1426 if (! unsignedp
1427 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1428 val |= (HOST_WIDE_INT) (-1) << width;
1430 return gen_int_mode (val, mode);
1433 return gen_lowpart (mode, x);
1436 temp = gen_reg_rtx (mode);
1437 convert_move (temp, x, unsignedp);
1438 return temp;
/* This macro is used to determine what the largest unit size that
   move_by_pieces can use is.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
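/* Illustrative example (not part of the original source): on a target where
   MOVE_MAX_PIECES is 8 and HOST_WIDE_INT is 64 bits, STORE_MAX_PIECES is
   MIN (8, 2 * 8) == 8, so stores by pieces are limited to 8-byte chunks.  */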
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.  */
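/* Illustrative call sketch (not part of the original source):

     move_by_pieces (dest_mem, src_mem, 16, 32);

   copies 16 bytes assuming at most 32-bit alignment, where dest_mem and
   src_mem are hypothetical BLKmode MEMs already passed through
   protect_from_queue.  */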
1468 void
1469 move_by_pieces (to, from, len, align)
1470 rtx to, from;
1471 unsigned HOST_WIDE_INT len;
1472 unsigned int align;
1474 struct move_by_pieces data;
1475 rtx to_addr, from_addr = XEXP (from, 0);
1476 unsigned int max_size = MOVE_MAX_PIECES + 1;
1477 enum machine_mode mode = VOIDmode, tmode;
1478 enum insn_code icode;
1480 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
1482 data.offset = 0;
1483 data.from_addr = from_addr;
1484 if (to)
1486 to_addr = XEXP (to, 0);
1487 data.to = to;
1488 data.autinc_to
1489 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1490 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1491 data.reverse
1492 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1494 else
1496 to_addr = NULL_RTX;
1497 data.to = NULL_RTX;
1498 data.autinc_to = 1;
1499 #ifdef STACK_GROWS_DOWNWARD
1500 data.reverse = 1;
1501 #else
1502 data.reverse = 0;
1503 #endif
1505 data.to_addr = to_addr;
1506 data.from = from;
1507 data.autinc_from
1508 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1509 || GET_CODE (from_addr) == POST_INC
1510 || GET_CODE (from_addr) == POST_DEC);
1512 data.explicit_inc_from = 0;
1513 data.explicit_inc_to = 0;
1514 if (data.reverse) data.offset = len;
1515 data.len = len;
1517 /* If copying requires more than two move insns,
1518 copy addresses to registers (to make displacements shorter)
1519 and use post-increment if available. */
1520 if (!(data.autinc_from && data.autinc_to)
1521 && move_by_pieces_ninsns (len, align) > 2)
1523 /* Find the mode of the largest move... */
1524 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1525 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1526 if (GET_MODE_SIZE (tmode) < max_size)
1527 mode = tmode;
1529 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1531 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1532 data.autinc_from = 1;
1533 data.explicit_inc_from = -1;
1535 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1537 data.from_addr = copy_addr_to_reg (from_addr);
1538 data.autinc_from = 1;
1539 data.explicit_inc_from = 1;
1541 if (!data.autinc_from && CONSTANT_P (from_addr))
1542 data.from_addr = copy_addr_to_reg (from_addr);
1543 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1545 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1546 data.autinc_to = 1;
1547 data.explicit_inc_to = -1;
1549 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1551 data.to_addr = copy_addr_to_reg (to_addr);
1552 data.autinc_to = 1;
1553 data.explicit_inc_to = 1;
1555 if (!data.autinc_to && CONSTANT_P (to_addr))
1556 data.to_addr = copy_addr_to_reg (to_addr);
1559 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1560 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1561 align = MOVE_MAX * BITS_PER_UNIT;
1563 /* First move what we can in the largest integer mode, then go to
1564 successively smaller modes. */
1566 while (max_size > 1)
1568 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1569 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1570 if (GET_MODE_SIZE (tmode) < max_size)
1571 mode = tmode;
1573 if (mode == VOIDmode)
1574 break;
1576 icode = mov_optab->handlers[(int) mode].insn_code;
1577 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1578 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1580 max_size = GET_MODE_SIZE (mode);
1583 /* The code above should have handled everything. */
1584 if (data.len > 0)
1585 abort ();
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */
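/* Illustrative example (not part of the original source): assuming MOVE_MAX
   is 4 and the block is 32-bit aligned, moving 10 bytes costs 2 SImode
   moves plus 1 HImode move, i.e. 3 insns; with only byte alignment on a
   strict-alignment target, the same 10 bytes cost 10 QImode moves.  */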
1591 static unsigned HOST_WIDE_INT
1592 move_by_pieces_ninsns (l, align)
1593 unsigned HOST_WIDE_INT l;
1594 unsigned int align;
1596 unsigned HOST_WIDE_INT n_insns = 0;
1597 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1599 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1600 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1601 align = MOVE_MAX * BITS_PER_UNIT;
1603 while (max_size > 1)
1605 enum machine_mode mode = VOIDmode, tmode;
1606 enum insn_code icode;
1608 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1609 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1610 if (GET_MODE_SIZE (tmode) < max_size)
1611 mode = tmode;
1613 if (mode == VOIDmode)
1614 break;
1616 icode = mov_optab->handlers[(int) mode].insn_code;
1617 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1618 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1620 max_size = GET_MODE_SIZE (mode);
1623 if (l)
1624 abort ();
1625 return n_insns;
1628 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1629 with move instructions for mode MODE. GENFUN is the gen_... function
1630 to make a move insn for that mode. DATA has all the other info. */
1632 static void
1633 move_by_pieces_1 (genfun, mode, data)
1634 rtx (*genfun) PARAMS ((rtx, ...));
1635 enum machine_mode mode;
1636 struct move_by_pieces *data;
1638 unsigned int size = GET_MODE_SIZE (mode);
1639 rtx to1 = NULL_RTX, from1;
1641 while (data->len >= size)
1643 if (data->reverse)
1644 data->offset -= size;
1646 if (data->to)
1648 if (data->autinc_to)
1649 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1650 data->offset);
1651 else
1652 to1 = adjust_address (data->to, mode, data->offset);
1655 if (data->autinc_from)
1656 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1657 data->offset);
1658 else
1659 from1 = adjust_address (data->from, mode, data->offset);
1661 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1662 emit_insn (gen_add2_insn (data->to_addr,
1663 GEN_INT (-(HOST_WIDE_INT)size)));
1664 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1665 emit_insn (gen_add2_insn (data->from_addr,
1666 GEN_INT (-(HOST_WIDE_INT)size)));
1668 if (data->to)
1669 emit_insn ((*genfun) (to1, from1));
1670 else
1672 #ifdef PUSH_ROUNDING
1673 emit_single_push_insn (mode, from1, NULL);
1674 #else
1675 abort ();
1676 #endif
1679 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1680 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1681 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1682 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1684 if (! data->reverse)
1685 data->offset += size;
1687 data->len -= size;
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
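/* Illustrative call sketch (not part of the original source): a caller
   copying a 64-byte aggregate might emit

     emit_block_move (dest_mem, src_mem, GEN_INT (64), BLOCK_OP_NORMAL);

   where dest_mem and src_mem are hypothetical BLKmode MEMs;
   BLOCK_OP_CALL_PARM and BLOCK_OP_NO_LIBCALL restrict which expansion
   strategies may be used.  */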
1704 emit_block_move (x, y, size, method)
1705 rtx x, y, size;
1706 enum block_op_methods method;
1708 bool may_use_call;
1709 rtx retval = 0;
1710 unsigned int align;
1712 switch (method)
1714 case BLOCK_OP_NORMAL:
1715 may_use_call = true;
1716 break;
1718 case BLOCK_OP_CALL_PARM:
1719 may_use_call = block_move_libcall_safe_for_call_parm ();
1721 /* Make inhibit_defer_pop nonzero around the library call
1722 to force it to pop the arguments right away. */
1723 NO_DEFER_POP;
1724 break;
1726 case BLOCK_OP_NO_LIBCALL:
1727 may_use_call = false;
1728 break;
1730 default:
1731 abort ();
1734 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1736 if (GET_MODE (x) != BLKmode)
1737 abort ();
1738 if (GET_MODE (y) != BLKmode)
1739 abort ();
1741 x = protect_from_queue (x, 1);
1742 y = protect_from_queue (y, 0);
1743 size = protect_from_queue (size, 0);
1745 if (GET_CODE (x) != MEM)
1746 abort ();
1747 if (GET_CODE (y) != MEM)
1748 abort ();
1749 if (size == 0)
1750 abort ();
1752 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1753 can be incorrect is coming from __builtin_memcpy. */
1754 if (GET_CODE (size) == CONST_INT)
1756 x = shallow_copy_rtx (x);
1757 y = shallow_copy_rtx (y);
1758 set_mem_size (x, size);
1759 set_mem_size (y, size);
1762 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1763 move_by_pieces (x, y, INTVAL (size), align);
1764 else if (emit_block_move_via_movstr (x, y, size, align))
1766 else if (may_use_call)
1767 retval = emit_block_move_via_libcall (x, y, size);
1768 else
1769 emit_block_move_via_loop (x, y, size, align);
1771 if (method == BLOCK_OP_CALL_PARM)
1772 OK_DEFER_POP;
1774 return retval;
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */
1781 static bool
1782 block_move_libcall_safe_for_call_parm ()
1784 if (PUSH_ARGS)
1785 return true;
1786 else
1788 /* Check to see whether memcpy takes all register arguments. */
1789 static enum {
1790 takes_regs_uninit, takes_regs_no, takes_regs_yes
1791 } takes_regs = takes_regs_uninit;
1793 switch (takes_regs)
1795 case takes_regs_uninit:
1797 CUMULATIVE_ARGS args_so_far;
1798 tree fn, arg;
1800 fn = emit_block_move_libcall_fn (false);
1801 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1803 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1804 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1806 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1807 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1808 if (!tmp || !REG_P (tmp))
1809 goto fail_takes_regs;
1810 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1811 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1812 NULL_TREE, 1))
1813 goto fail_takes_regs;
1814 #endif
1815 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1818 takes_regs = takes_regs_yes;
1819 /* FALLTHRU */
1821 case takes_regs_yes:
1822 return true;
1824 fail_takes_regs:
1825 takes_regs = takes_regs_no;
1826 /* FALLTHRU */
1827 case takes_regs_no:
1828 return false;
1830 default:
1831 abort ();
/* A subroutine of emit_block_move.  Expand a movstr pattern;
   return true if successful.  */
1839 static bool
1840 emit_block_move_via_movstr (x, y, size, align)
1841 rtx x, y, size;
1842 unsigned int align;
1844 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1845 enum machine_mode mode;
1847 /* Since this is a move insn, we don't care about volatility. */
1848 volatile_ok = 1;
1850 /* Try the most limited insn first, because there's no point
1851 including more than one in the machine description unless
1852 the more limited one has some advantage. */
1854 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1855 mode = GET_MODE_WIDER_MODE (mode))
1857 enum insn_code code = movstr_optab[(int) mode];
1858 insn_operand_predicate_fn pred;
1860 if (code != CODE_FOR_nothing
1861 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1862 here because if SIZE is less than the mode mask, as it is
1863 returned by the macro, it will definitely be less than the
1864 actual mode mask. */
1865 && ((GET_CODE (size) == CONST_INT
1866 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1867 <= (GET_MODE_MASK (mode) >> 1)))
1868 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1869 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1870 || (*pred) (x, BLKmode))
1871 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1872 || (*pred) (y, BLKmode))
1873 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1874 || (*pred) (opalign, VOIDmode)))
1876 rtx op2;
1877 rtx last = get_last_insn ();
1878 rtx pat;
1880 op2 = convert_to_mode (mode, size, 1);
1881 pred = insn_data[(int) code].operand[2].predicate;
1882 if (pred != 0 && ! (*pred) (op2, mode))
1883 op2 = copy_to_mode_reg (mode, op2);
1885 /* ??? When called via emit_block_move_for_call, it'd be
1886 nice if there were some way to inform the backend, so
1887 that it doesn't fail the expansion because it thinks
1888 emitting the libcall would be more efficient. */
1890 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1891 if (pat)
1893 emit_insn (pat);
1894 volatile_ok = 0;
1895 return true;
1897 else
1898 delete_insns_since (last);
1902 volatile_ok = 0;
1903 return false;
/* A subroutine of emit_block_move.  Expand a call to memcpy or bcopy.
   Return the return value from memcpy, 0 otherwise.  */
1909 static rtx
1910 emit_block_move_via_libcall (dst, src, size)
1911 rtx dst, src, size;
1913 rtx dst_addr, src_addr;
1914 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1915 enum machine_mode size_mode;
1916 rtx retval;
  /* DST, SRC, or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue and reuse
     it later.  Consider what happens if emit_queue is called before the
     return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before we are
     finished emitting RTL for argument setup.  So if we are not careful we
     could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy the addresses of
     DST and SRC and SIZE into new pseudos.  We can then place those new
     pseudos into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they do not call
     emit_queue before loading their arguments.  However, we may need to have
     library calls call emit_queue in the future since failing to do so could
     cause problems for targets which define SMALL_REGISTER_CLASSES and pass
     arguments in registers.  */
1939 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1940 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1942 #ifdef POINTERS_EXTEND_UNSIGNED
1943 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1944 src_addr = convert_memory_address (ptr_mode, src_addr);
1945 #endif
1947 dst_tree = make_tree (ptr_type_node, dst_addr);
1948 src_tree = make_tree (ptr_type_node, src_addr);
1950 if (TARGET_MEM_FUNCTIONS)
1951 size_mode = TYPE_MODE (sizetype);
1952 else
1953 size_mode = TYPE_MODE (unsigned_type_node);
1955 size = convert_to_mode (size_mode, size, 1);
1956 size = copy_to_mode_reg (size_mode, size);
1958 /* It is incorrect to use the libcall calling conventions to call
1959 memcpy in this context. This could be a user call to memcpy and
1960 the user may wish to examine the return value from memcpy. For
1961 targets where libcalls and normal calls have different conventions
1962 for returning pointers, we could end up generating incorrect code.
1964 For convenience, we generate the call to bcopy this way as well. */
1966 if (TARGET_MEM_FUNCTIONS)
1967 size_tree = make_tree (sizetype, size);
1968 else
1969 size_tree = make_tree (unsigned_type_node, size);
1971 fn = emit_block_move_libcall_fn (true);
1972 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1973 if (TARGET_MEM_FUNCTIONS)
1975 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1976 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1978 else
1980 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1981 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1984 /* Now we have to build up the CALL_EXPR itself. */
1985 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1986 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1987 call_expr, arg_list, NULL_TREE);
1988 TREE_SIDE_EFFECTS (call_expr) = 1;
1990 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1992 /* If we are initializing a readonly value, show the above call clobbered
1993 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1994 the delay slot scheduler might overlook conflicts and take nasty
1995 decisions. */
1996 if (RTX_UNCHANGING_P (dst))
1997 add_function_usage_to
1998 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
1999 gen_rtx_CLOBBER (VOIDmode, dst),
2000 NULL_RTX));
2002 return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */
2009 static GTY(()) tree block_move_fn;
2011 void
2012 init_block_move_fn (asmspec)
2013 const char *asmspec;
2015 if (!block_move_fn)
2017 tree fn, args;
2019 if (TARGET_MEM_FUNCTIONS)
2021 fn = get_identifier ("memcpy");
2022 args = build_function_type_list (ptr_type_node, ptr_type_node,
2023 const_ptr_type_node, sizetype,
2024 NULL_TREE);
2026 else
2028 fn = get_identifier ("bcopy");
2029 args = build_function_type_list (void_type_node, const_ptr_type_node,
2030 ptr_type_node, unsigned_type_node,
2031 NULL_TREE);
2034 fn = build_decl (FUNCTION_DECL, fn, args);
2035 DECL_EXTERNAL (fn) = 1;
2036 TREE_PUBLIC (fn) = 1;
2037 DECL_ARTIFICIAL (fn) = 1;
2038 TREE_NOTHROW (fn) = 1;
2040 block_move_fn = fn;
2043 if (asmspec)
2045 SET_DECL_RTL (block_move_fn, NULL_RTX);
2046 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
2050 static tree
2051 emit_block_move_libcall_fn (for_call)
2052 int for_call;
2054 static bool emitted_extern;
2056 if (!block_move_fn)
2057 init_block_move_fn (NULL);
2059 if (for_call && !emitted_extern)
2061 emitted_extern = true;
2062 make_decl_rtl (block_move_fn, NULL);
2063 assemble_external (block_move_fn);
2066 return block_move_fn;
2069 /* A subroutine of emit_block_move. Copy the data via an explicit
2070 loop. This is used only when libcalls are forbidden. */
2071 /* ??? It'd be nice to copy in hunks larger than QImode. */
2073 static void
2074 emit_block_move_via_loop (x, y, size, align)
2075 rtx x, y, size;
2076 unsigned int align ATTRIBUTE_UNUSED;
2078 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
2079 enum machine_mode iter_mode;
2081 iter_mode = GET_MODE (size);
2082 if (iter_mode == VOIDmode)
2083 iter_mode = word_mode;
2085 top_label = gen_label_rtx ();
2086 cmp_label = gen_label_rtx ();
2087 iter = gen_reg_rtx (iter_mode);
2089 emit_move_insn (iter, const0_rtx);
2091 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
2092 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
2093 do_pending_stack_adjust ();
2095 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2097 emit_jump (cmp_label);
2098 emit_label (top_label);
2100 tmp = convert_modes (Pmode, iter_mode, iter, true);
2101 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
2102 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
2103 x = change_address (x, QImode, x_addr);
2104 y = change_address (y, QImode, y_addr);
2106 emit_move_insn (x, y);
2108 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
2109 true, OPTAB_LIB_WIDEN);
2110 if (tmp != iter)
2111 emit_move_insn (iter, tmp);
2113 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2114 emit_label (cmp_label);
2116 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
2117 true, top_label);
2119 emit_note (NULL, NOTE_INSN_LOOP_END);
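/* Editorial sketch, not part of GCC: the RTL emitted by
   emit_block_move_via_loop above corresponds to the byte-at-a-time copy
   loop below (initialize the counter, jump to the bottom-of-loop
   comparison, copy one QImode unit per iteration).  The helper name is
   hypothetical; the ??? remark about copying in larger hunks applies to
   this sketch as well.  */

static void
block_move_loop_sketch (x, y, size)
     char *x;
     const char *y;
     unsigned long size;
{
  unsigned long iter;

  /* Because the comparison sits at the bottom label, a size of zero
     copies nothing, exactly as in the emitted RTL.  */
  for (iter = 0; iter < size; iter++)
    x[iter] = y[iter];
}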
2122 /* Copy all or part of a value X into registers starting at REGNO.
2123 The number of registers to be filled is NREGS. */
2125 void
2126 move_block_to_reg (regno, x, nregs, mode)
2127 int regno;
2128 rtx x;
2129 int nregs;
2130 enum machine_mode mode;
2132 int i;
2133 #ifdef HAVE_load_multiple
2134 rtx pat;
2135 rtx last;
2136 #endif
2138 if (nregs == 0)
2139 return;
2141 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2142 x = validize_mem (force_const_mem (mode, x));
2144 /* See if the machine can do this with a load multiple insn. */
2145 #ifdef HAVE_load_multiple
2146 if (HAVE_load_multiple)
2148 last = get_last_insn ();
2149 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
2150 GEN_INT (nregs));
2151 if (pat)
2153 emit_insn (pat);
2154 return;
2156 else
2157 delete_insns_since (last);
2159 #endif
2161 for (i = 0; i < nregs; i++)
2162 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
2163 operand_subword_force (x, i, mode));
2166 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2167 The number of registers to be filled is NREGS. */
2169 void
2170 move_block_from_reg (regno, x, nregs)
2171 int regno;
2172 rtx x;
2173 int nregs;
2175 int i;
2177 if (nregs == 0)
2178 return;
2180 /* See if the machine can do this with a store multiple insn. */
2181 #ifdef HAVE_store_multiple
2182 if (HAVE_store_multiple)
2184 rtx last = get_last_insn ();
2185 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
2186 GEN_INT (nregs));
2187 if (pat)
2189 emit_insn (pat);
2190 return;
2192 else
2193 delete_insns_since (last);
2195 #endif
2197 for (i = 0; i < nregs; i++)
2199 rtx tem = operand_subword (x, i, 1, BLKmode);
2201 if (tem == 0)
2202 abort ();
2204 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
2208 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2209 ORIG, where ORIG is a non-consecutive group of registers represented by
2210 a PARALLEL. The clone is identical to the original except in that the
2211 original set of registers is replaced by a new set of pseudo registers.
2212 The new set has the same modes as the original set. */
2214 rtx
2215 gen_group_rtx (orig)
2216 rtx orig;
2218 int i, length;
2219 rtx *tmps;
2221 if (GET_CODE (orig) != PARALLEL)
2222 abort ();
2224 length = XVECLEN (orig, 0);
2225 tmps = (rtx *) alloca (sizeof (rtx) * length);
2227 /* Skip a NULL entry in first slot. */
2228 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
2230 if (i)
2231 tmps[0] = 0;
2233 for (; i < length; i++)
2235 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
2236 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
2238 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
2241 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
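/* Editorial sketch, not part of GCC: a register group PARALLEL, as
   handled by gen_group_rtx above and by emit_group_load/emit_group_store
   below, can be pictured as an array of (register, byte offset) pairs,
   possibly with a null first entry meaning that part of the value also
   lives on the stack.  The struct and routine below are a hypothetical
   model of that shape, not the real rtx representation.  */

struct group_piece_sketch
{
  int regno;		/* register holding this piece (model only) */
  long bytepos;		/* byte offset of the piece within the value */
};

static void
clone_group_sketch (dst, src, length, next_new_regno)
     struct group_piece_sketch *dst;
     const struct group_piece_sketch *src;
     int length;
     int *next_new_regno;
{
  int i;

  /* Keep each offset but hand out a fresh register number, just as
     gen_group_rtx keeps the offsets and replaces every register with a
     newly allocated pseudo of the same mode.  */
  for (i = 0; i < length; i++)
    {
      dst[i].regno = (*next_new_regno)++;
      dst[i].bytepos = src[i].bytepos;
    }
}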
2244 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
2245 registers represented by a PARALLEL. SSIZE represents the total size of
2246 block SRC in bytes, or -1 if not known. */
2247 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
2248 the balance will be in what would be the low-order memory addresses, i.e.
2249 left justified for big endian, right justified for little endian. This
2250 happens to be true for the targets currently using this support. If this
2251 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
2252 would be needed. */
2254 void
2255 emit_group_load (dst, orig_src, ssize)
2256 rtx dst, orig_src;
2257 int ssize;
2259 rtx *tmps, src;
2260 int start, i;
2262 if (GET_CODE (dst) != PARALLEL)
2263 abort ();
2265 /* Check for a NULL entry, used to indicate that the parameter goes
2266 both on the stack and in registers. */
2267 if (XEXP (XVECEXP (dst, 0, 0), 0))
2268 start = 0;
2269 else
2270 start = 1;
2272 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
2274 /* Process the pieces. */
2275 for (i = start; i < XVECLEN (dst, 0); i++)
2277 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2278 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2279 unsigned int bytelen = GET_MODE_SIZE (mode);
2280 int shift = 0;
2282 /* Handle trailing fragments that run over the size of the struct. */
2283 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2285 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2286 bytelen = ssize - bytepos;
2287 if (bytelen <= 0)
2288 abort ();
2291 /* If we won't be loading directly from memory, protect the real source
2292 from strange tricks we might play; but make sure that the source can
2293 be loaded directly into the destination. */
2294 src = orig_src;
2295 if (GET_CODE (orig_src) != MEM
2296 && (!CONSTANT_P (orig_src)
2297 || (GET_MODE (orig_src) != mode
2298 && GET_MODE (orig_src) != VOIDmode)))
2300 if (GET_MODE (orig_src) == VOIDmode)
2301 src = gen_reg_rtx (mode);
2302 else
2303 src = gen_reg_rtx (GET_MODE (orig_src));
2305 emit_move_insn (src, orig_src);
2308 /* Optimize the access just a bit. */
2309 if (GET_CODE (src) == MEM
2310 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2311 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2312 && bytelen == GET_MODE_SIZE (mode))
2314 tmps[i] = gen_reg_rtx (mode);
2315 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2317 else if (GET_CODE (src) == CONCAT)
2319 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2320 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2322 if ((bytepos == 0 && bytelen == slen0)
2323 || (bytepos != 0 && bytepos + bytelen <= slen))
2325 /* The following assumes that the concatenated objects all
2326 have the same size. In this case, a simple calculation
2327 can be used to determine the object and the bit field
2328 to be extracted. */
2329 tmps[i] = XEXP (src, bytepos / slen0);
2330 if (! CONSTANT_P (tmps[i])
2331 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2332 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2333 (bytepos % slen0) * BITS_PER_UNIT,
2334 1, NULL_RTX, mode, mode, ssize);
2336 else if (bytepos == 0)
2338 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
2339 emit_move_insn (mem, src);
2340 tmps[i] = adjust_address (mem, mode, 0);
2342 else
2343 abort ();
2345 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
2346 SIMD register, which is currently broken. Until we get GCC
2347 to emit proper RTL for these cases, dump to memory. */
2348 else if (VECTOR_MODE_P (GET_MODE (dst))
2349 && GET_CODE (src) == REG)
2351 int slen = GET_MODE_SIZE (GET_MODE (src));
2352 rtx mem;
2354 mem = assign_stack_temp (GET_MODE (src), slen, 0);
2355 emit_move_insn (mem, src);
2356 tmps[i] = adjust_address (mem, mode, (int) bytepos);
2358 else if (CONSTANT_P (src)
2359 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2360 tmps[i] = src;
2361 else
2362 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2363 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2364 mode, mode, ssize);
2366 if (BYTES_BIG_ENDIAN && shift)
2367 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2368 tmps[i], 0, OPTAB_WIDEN);
2371 emit_queue ();
2373 /* Copy the extracted pieces into the proper (probable) hard regs. */
2374 for (i = start; i < XVECLEN (dst, 0); i++)
2375 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
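/* Editorial sketch, not part of GCC: a worked example of the trailing
   fragment handling above.  For a 4-byte piece starting at bytepos 4 of
   a 6-byte block (ssize == 6), only 2 bytes remain, so bytelen is cut to
   2 and shift becomes (4 - 2) * 8 == 16; on a big-endian target the
   extracted bits are then shifted left by 16 so the fragment ends up
   left justified, as the ??? comment before emit_group_load assumes.
   The helper below just restates that arithmetic with BITS_PER_UNIT
   assumed to be 8.  */

static int
trailing_fragment_shift_sketch (bytelen, bytepos, ssize)
     int bytelen;
     int bytepos;
     int ssize;
{
  /* Bits by which the short fragment must be shifted to become
     left justified within its register.  */
  return (bytelen - (ssize - bytepos)) * 8;
}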
2378 /* Emit code to move a block SRC to block DST, where SRC and DST are
2379 non-consecutive groups of registers, each represented by a PARALLEL. */
2381 void
2382 emit_group_move (dst, src)
2383 rtx dst, src;
2385 int i;
2387 if (GET_CODE (src) != PARALLEL
2388 || GET_CODE (dst) != PARALLEL
2389 || XVECLEN (src, 0) != XVECLEN (dst, 0))
2390 abort ();
2392 /* Skip first entry if NULL. */
2393 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2394 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2395 XEXP (XVECEXP (src, 0, i), 0));
2398 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2399 registers represented by a PARALLEL. SSIZE represents the total size of
2400 block DST, or -1 if not known. */
2402 void
2403 emit_group_store (orig_dst, src, ssize)
2404 rtx orig_dst, src;
2405 int ssize;
2407 rtx *tmps, dst;
2408 int start, i;
2410 if (GET_CODE (src) != PARALLEL)
2411 abort ();
2413 /* Check for a NULL entry, used to indicate that the parameter goes
2414 both on the stack and in registers. */
2415 if (XEXP (XVECEXP (src, 0, 0), 0))
2416 start = 0;
2417 else
2418 start = 1;
2420 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2422 /* Copy the (probable) hard regs into pseudos. */
2423 for (i = start; i < XVECLEN (src, 0); i++)
2425 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2426 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2427 emit_move_insn (tmps[i], reg);
2429 emit_queue ();
2431 /* If we won't be storing directly into memory, protect the real destination
2432 from strange tricks we might play. */
2433 dst = orig_dst;
2434 if (GET_CODE (dst) == PARALLEL)
2436 rtx temp;
2438 /* We can get a PARALLEL dst if there is a conditional expression in
2439 a return statement. In that case, the dst and src are the same,
2440 so no action is necessary. */
2441 if (rtx_equal_p (dst, src))
2442 return;
2444 /* It is unclear if we can ever reach here, but we may as well handle
2445 it. Allocate a temporary, and split this into a store/load to/from
2446 the temporary. */
2448 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2449 emit_group_store (temp, src, ssize);
2450 emit_group_load (dst, temp, ssize);
2451 return;
2453 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2455 dst = gen_reg_rtx (GET_MODE (orig_dst));
2456 /* Make life a bit easier for combine. */
2457 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2460 /* Process the pieces. */
2461 for (i = start; i < XVECLEN (src, 0); i++)
2463 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2464 enum machine_mode mode = GET_MODE (tmps[i]);
2465 unsigned int bytelen = GET_MODE_SIZE (mode);
2466 rtx dest = dst;
2468 /* Handle trailing fragments that run over the size of the struct. */
2469 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2471 if (BYTES_BIG_ENDIAN)
2473 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2474 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2475 tmps[i], 0, OPTAB_WIDEN);
2477 bytelen = ssize - bytepos;
2480 if (GET_CODE (dst) == CONCAT)
2482 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2483 dest = XEXP (dst, 0);
2484 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2486 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2487 dest = XEXP (dst, 1);
2489 else if (bytepos == 0 && XVECLEN (src, 0))
2491 dest = assign_stack_temp (GET_MODE (dest),
2492 GET_MODE_SIZE (GET_MODE (dest)), 0);
2493 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2494 tmps[i]);
2495 dst = dest;
2496 break;
2498 else
2499 abort ();
2502 /* Optimize the access just a bit. */
2503 if (GET_CODE (dest) == MEM
2504 && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2505 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2506 && bytelen == GET_MODE_SIZE (mode))
2507 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2508 else
2509 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2510 mode, tmps[i], ssize);
2513 emit_queue ();
2515 /* Copy from the pseudo into the (probable) hard reg. */
2516 if (orig_dst != dst)
2517 emit_move_insn (orig_dst, dst);
2520 /* Generate code to copy a BLKmode object of TYPE out of a
2521 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2522 is null, a stack temporary is created. TGTBLK is returned.
2524 The primary purpose of this routine is to handle functions
2525 that return BLKmode structures in registers. Some machines
2526 (the PA for example) want to return all small structures
2527 in registers regardless of the structure's alignment. */
2529 rtx
2530 copy_blkmode_from_reg (tgtblk, srcreg, type)
2531 rtx tgtblk;
2532 rtx srcreg;
2533 tree type;
2535 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2536 rtx src = NULL, dst = NULL;
2537 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2538 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2540 if (tgtblk == 0)
2542 tgtblk = assign_temp (build_qualified_type (type,
2543 (TYPE_QUALS (type)
2544 | TYPE_QUAL_CONST)),
2545 0, 1, 1);
2546 preserve_temp_slots (tgtblk);
2549 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2550 into a new pseudo which is a full word. */
2552 if (GET_MODE (srcreg) != BLKmode
2553 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2554 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2556 /* Structures whose size is not a multiple of a word are aligned
2557 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2558 machine, this means we must skip the empty high order bytes when
2559 calculating the bit offset. */
2560 if (BYTES_BIG_ENDIAN
2561 && bytes % UNITS_PER_WORD)
2562 big_endian_correction
2563 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2565 /* Copy the structure BITSIZE bits at a time.
2567 We could probably emit more efficient code for machines which do not use
2568 strict alignment, but it doesn't seem worth the effort at the current
2569 time. */
2570 for (bitpos = 0, xbitpos = big_endian_correction;
2571 bitpos < bytes * BITS_PER_UNIT;
2572 bitpos += bitsize, xbitpos += bitsize)
2574 /* We need a new source operand each time xbitpos is on a
2575 word boundary and when xbitpos == big_endian_correction
2576 (the first time through). */
2577 if (xbitpos % BITS_PER_WORD == 0
2578 || xbitpos == big_endian_correction)
2579 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2580 GET_MODE (srcreg));
2582 /* We need a new destination operand each time bitpos is on
2583 a word boundary. */
2584 if (bitpos % BITS_PER_WORD == 0)
2585 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2587 /* Use xbitpos for the source extraction (right justified) and
2588 bitpos for the destination store (left justified). */
2589 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2590 extract_bit_field (src, bitsize,
2591 xbitpos % BITS_PER_WORD, 1,
2592 NULL_RTX, word_mode, word_mode,
2593 BITS_PER_WORD),
2594 BITS_PER_WORD);
2597 return tgtblk;
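/* Editorial sketch, not part of GCC: the big_endian_correction used
   above is BITS_PER_WORD - (bytes % UNITS_PER_WORD) * BITS_PER_UNIT
   whenever the structure size is not a whole number of words.  With
   32-bit words and a 6-byte structure, bytes % UNITS_PER_WORD is 2, so
   the correction is 32 - 16 == 16: sixteen empty high-order bits of the
   source register are skipped before any bits are copied.  The helper
   below restates that formula, assuming BITS_PER_UNIT is 8.  */

static unsigned int
big_endian_correction_sketch (bytes, units_per_word, bits_per_word)
     unsigned int bytes;
     unsigned int units_per_word;
     unsigned int bits_per_word;
{
  if (bytes % units_per_word == 0)
    return 0;			/* whole number of words: nothing to skip */

  return bits_per_word - (bytes % units_per_word) * 8;
}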
2600 /* Add a USE expression for REG to the (possibly empty) list pointed
2601 to by CALL_FUSAGE. REG must denote a hard register. */
2603 void
2604 use_reg (call_fusage, reg)
2605 rtx *call_fusage, reg;
2607 if (GET_CODE (reg) != REG
2608 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2609 abort ();
2611 *call_fusage
2612 = gen_rtx_EXPR_LIST (VOIDmode,
2613 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2616 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2617 starting at REGNO. All of these registers must be hard registers. */
2619 void
2620 use_regs (call_fusage, regno, nregs)
2621 rtx *call_fusage;
2622 int regno;
2623 int nregs;
2625 int i;
2627 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2628 abort ();
2630 for (i = 0; i < nregs; i++)
2631 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2634 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2635 PARALLEL REGS. This is for calls that pass values in multiple
2636 non-contiguous locations. The Irix 6 ABI has examples of this. */
2638 void
2639 use_group_regs (call_fusage, regs)
2640 rtx *call_fusage;
2641 rtx regs;
2643 int i;
2645 for (i = 0; i < XVECLEN (regs, 0); i++)
2647 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2649 /* A NULL entry means the parameter goes both on the stack and in
2650 registers. This can also be a MEM for targets that pass values
2651 partially on the stack and partially in registers. */
2652 if (reg != 0 && GET_CODE (reg) == REG)
2653 use_reg (call_fusage, reg);
2658 /* Determine whether the LEN bytes generated by CONSTFUN can be
2659 stored to memory using several move instructions. CONSTFUNDATA is
2660 a pointer which will be passed as argument in every CONSTFUN call.
2661 ALIGN is maximum alignment we can assume. Return nonzero if a
2662 call to store_by_pieces should succeed. */
2664 int
2665 can_store_by_pieces (len, constfun, constfundata, align)
2666 unsigned HOST_WIDE_INT len;
2667 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2668 PTR constfundata;
2669 unsigned int align;
2671 unsigned HOST_WIDE_INT max_size, l;
2672 HOST_WIDE_INT offset = 0;
2673 enum machine_mode mode, tmode;
2674 enum insn_code icode;
2675 int reverse;
2676 rtx cst;
2678 if (! STORE_BY_PIECES_P (len, align))
2679 return 0;
2681 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2682 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2683 align = MOVE_MAX * BITS_PER_UNIT;
2685 /* We would first store what we can in the largest integer mode, then go to
2686 successively smaller modes. */
2688 for (reverse = 0;
2689 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2690 reverse++)
2692 l = len;
2693 mode = VOIDmode;
2694 max_size = STORE_MAX_PIECES + 1;
2695 while (max_size > 1)
2697 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2698 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2699 if (GET_MODE_SIZE (tmode) < max_size)
2700 mode = tmode;
2702 if (mode == VOIDmode)
2703 break;
2705 icode = mov_optab->handlers[(int) mode].insn_code;
2706 if (icode != CODE_FOR_nothing
2707 && align >= GET_MODE_ALIGNMENT (mode))
2709 unsigned int size = GET_MODE_SIZE (mode);
2711 while (l >= size)
2713 if (reverse)
2714 offset -= size;
2716 cst = (*constfun) (constfundata, offset, mode);
2717 if (!LEGITIMATE_CONSTANT_P (cst))
2718 return 0;
2720 if (!reverse)
2721 offset += size;
2723 l -= size;
2727 max_size = GET_MODE_SIZE (mode);
2730 /* The code above should have handled everything. */
2731 if (l != 0)
2732 abort ();
2735 return 1;
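/* Editorial sketch, not part of GCC: can_store_by_pieces above, like
   store_by_pieces_1 below, walks the integer modes from widest to
   narrowest and peels off as many pieces of each size as still fit; a
   13-byte block with an 8-byte maximum piece therefore decomposes into
   pieces of 8, 4 and 1 bytes.  The helper below models that
   decomposition with plain integers; its name is hypothetical and
   max_piece is assumed to be a power of two.  */

static int
store_pieces_sizes_sketch (len, max_piece, sizes, max_sizes)
     unsigned long len;
     unsigned long max_piece;
     unsigned long *sizes;
     int max_sizes;
{
  int n = 0;
  unsigned long size = max_piece;

  while (size >= 1 && len > 0)
    {
      /* Take as many pieces of this size as fit, then halve the size,
	 mirroring the "largest mode first" walk in the real code.  */
      while (len >= size && n < max_sizes)
	{
	  sizes[n++] = size;
	  len -= size;
	}
      size >>= 1;
    }

  return n;			/* number of piece sizes recorded */
}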
2738 /* Generate several move instructions to store LEN bytes generated by
2739 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2740 pointer which will be passed as argument in every CONSTFUN call.
2741 ALIGN is maximum alignment we can assume. */
2743 void
2744 store_by_pieces (to, len, constfun, constfundata, align)
2745 rtx to;
2746 unsigned HOST_WIDE_INT len;
2747 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2748 PTR constfundata;
2749 unsigned int align;
2751 struct store_by_pieces data;
2753 if (! STORE_BY_PIECES_P (len, align))
2754 abort ();
2755 to = protect_from_queue (to, 1);
2756 data.constfun = constfun;
2757 data.constfundata = constfundata;
2758 data.len = len;
2759 data.to = to;
2760 store_by_pieces_1 (&data, align);
2763 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2764 rtx with BLKmode). The caller must pass TO through protect_from_queue
2765 before calling. ALIGN is maximum alignment we can assume. */
2767 static void
2768 clear_by_pieces (to, len, align)
2769 rtx to;
2770 unsigned HOST_WIDE_INT len;
2771 unsigned int align;
2773 struct store_by_pieces data;
2775 data.constfun = clear_by_pieces_1;
2776 data.constfundata = NULL;
2777 data.len = len;
2778 data.to = to;
2779 store_by_pieces_1 (&data, align);
2782 /* Callback routine for clear_by_pieces.
2783 Return const0_rtx unconditionally. */
2785 static rtx
2786 clear_by_pieces_1 (data, offset, mode)
2787 PTR data ATTRIBUTE_UNUSED;
2788 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2789 enum machine_mode mode ATTRIBUTE_UNUSED;
2791 return const0_rtx;
2794 /* Subroutine of clear_by_pieces and store_by_pieces.
2795 Generate several move instructions to store LEN bytes of block TO. (A MEM
2796 rtx with BLKmode). The caller must pass TO through protect_from_queue
2797 before calling. ALIGN is maximum alignment we can assume. */
2799 static void
2800 store_by_pieces_1 (data, align)
2801 struct store_by_pieces *data;
2802 unsigned int align;
2804 rtx to_addr = XEXP (data->to, 0);
2805 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2806 enum machine_mode mode = VOIDmode, tmode;
2807 enum insn_code icode;
2809 data->offset = 0;
2810 data->to_addr = to_addr;
2811 data->autinc_to
2812 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2813 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2815 data->explicit_inc_to = 0;
2816 data->reverse
2817 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2818 if (data->reverse)
2819 data->offset = data->len;
2821 /* If storing requires more than two move insns,
2822 copy addresses to registers (to make displacements shorter)
2823 and use post-increment if available. */
2824 if (!data->autinc_to
2825 && move_by_pieces_ninsns (data->len, align) > 2)
2827 /* Determine the main mode we'll be using. */
2828 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2829 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2830 if (GET_MODE_SIZE (tmode) < max_size)
2831 mode = tmode;
2833 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2835 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2836 data->autinc_to = 1;
2837 data->explicit_inc_to = -1;
2840 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2841 && ! data->autinc_to)
2843 data->to_addr = copy_addr_to_reg (to_addr);
2844 data->autinc_to = 1;
2845 data->explicit_inc_to = 1;
2848 if ( !data->autinc_to && CONSTANT_P (to_addr))
2849 data->to_addr = copy_addr_to_reg (to_addr);
2852 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2853 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2854 align = MOVE_MAX * BITS_PER_UNIT;
2856 /* First store what we can in the largest integer mode, then go to
2857 successively smaller modes. */
2859 while (max_size > 1)
2861 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2862 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2863 if (GET_MODE_SIZE (tmode) < max_size)
2864 mode = tmode;
2866 if (mode == VOIDmode)
2867 break;
2869 icode = mov_optab->handlers[(int) mode].insn_code;
2870 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2871 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2873 max_size = GET_MODE_SIZE (mode);
2876 /* The code above should have handled everything. */
2877 if (data->len != 0)
2878 abort ();
2881 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2882 with move instructions for mode MODE. GENFUN is the gen_... function
2883 to make a move insn for that mode. DATA has all the other info. */
2885 static void
2886 store_by_pieces_2 (genfun, mode, data)
2887 rtx (*genfun) PARAMS ((rtx, ...));
2888 enum machine_mode mode;
2889 struct store_by_pieces *data;
2891 unsigned int size = GET_MODE_SIZE (mode);
2892 rtx to1, cst;
2894 while (data->len >= size)
2896 if (data->reverse)
2897 data->offset -= size;
2899 if (data->autinc_to)
2900 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2901 data->offset);
2902 else
2903 to1 = adjust_address (data->to, mode, data->offset);
2905 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2906 emit_insn (gen_add2_insn (data->to_addr,
2907 GEN_INT (-(HOST_WIDE_INT) size)));
2909 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2910 emit_insn ((*genfun) (to1, cst));
2912 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2913 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2915 if (! data->reverse)
2916 data->offset += size;
2918 data->len -= size;
2922 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2923 its length in bytes. */
2925 rtx
2926 clear_storage (object, size)
2927 rtx object;
2928 rtx size;
2930 rtx retval = 0;
2931 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2932 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2934 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2935 just move a zero. Otherwise, do this a piece at a time. */
2936 if (GET_MODE (object) != BLKmode
2937 && GET_CODE (size) == CONST_INT
2938 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2939 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2940 else
2942 object = protect_from_queue (object, 1);
2943 size = protect_from_queue (size, 0);
2945 if (GET_CODE (size) == CONST_INT
2946 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2947 clear_by_pieces (object, INTVAL (size), align);
2948 else if (clear_storage_via_clrstr (object, size, align))
2950 else
2951 retval = clear_storage_via_libcall (object, size);
2954 return retval;
2957 /* A subroutine of clear_storage. Expand a clrstr pattern;
2958 return true if successful. */
2960 static bool
2961 clear_storage_via_clrstr (object, size, align)
2962 rtx object, size;
2963 unsigned int align;
2965 /* Try the most limited insn first, because there's no point
2966 including more than one in the machine description unless
2967 the more limited one has some advantage. */
2969 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2970 enum machine_mode mode;
2972 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2973 mode = GET_MODE_WIDER_MODE (mode))
2975 enum insn_code code = clrstr_optab[(int) mode];
2976 insn_operand_predicate_fn pred;
2978 if (code != CODE_FOR_nothing
2979 /* We don't need MODE to be narrower than
2980 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2981 the mode mask, as it is returned by the macro, it will
2982 definitely be less than the actual mode mask. */
2983 && ((GET_CODE (size) == CONST_INT
2984 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2985 <= (GET_MODE_MASK (mode) >> 1)))
2986 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2987 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2988 || (*pred) (object, BLKmode))
2989 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2990 || (*pred) (opalign, VOIDmode)))
2992 rtx op1;
2993 rtx last = get_last_insn ();
2994 rtx pat;
2996 op1 = convert_to_mode (mode, size, 1);
2997 pred = insn_data[(int) code].operand[1].predicate;
2998 if (pred != 0 && ! (*pred) (op1, mode))
2999 op1 = copy_to_mode_reg (mode, op1);
3001 pat = GEN_FCN ((int) code) (object, op1, opalign);
3002 if (pat)
3004 emit_insn (pat);
3005 return true;
3007 else
3008 delete_insns_since (last);
3012 return false;
3015 /* A subroutine of clear_storage. Expand a call to memset or bzero.
3016 Return the return value of memset, 0 otherwise. */
3018 static rtx
3019 clear_storage_via_libcall (object, size)
3020 rtx object, size;
3022 tree call_expr, arg_list, fn, object_tree, size_tree;
3023 enum machine_mode size_mode;
3024 rtx retval;
3026 /* OBJECT or SIZE may have been passed through protect_from_queue.
3028 It is unsafe to save the value generated by protect_from_queue
3029 and reuse it later. Consider what happens if emit_queue is
3030 called before the return value from protect_from_queue is used.
3032 Expansion of the CALL_EXPR below will call emit_queue before
3033 we are finished emitting RTL for argument setup. So if we are
3034 not careful we could get the wrong value for an argument.
3036 To avoid this problem we go ahead and emit code to copy OBJECT
3037 and SIZE into new pseudos. We can then place those new pseudos
3038 into an RTL_EXPR and use them later, even after a call to
3039 emit_queue.
3041 Note this is not strictly needed for library calls since they
3042 do not call emit_queue before loading their arguments. However,
3043 we may need to have library calls call emit_queue in the future
3044 since failing to do so could cause problems for targets which
3045 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
3047 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
3049 if (TARGET_MEM_FUNCTIONS)
3050 size_mode = TYPE_MODE (sizetype);
3051 else
3052 size_mode = TYPE_MODE (unsigned_type_node);
3053 size = convert_to_mode (size_mode, size, 1);
3054 size = copy_to_mode_reg (size_mode, size);
3056 /* It is incorrect to use the libcall calling conventions to call
3057 memset in this context. This could be a user call to memset and
3058 the user may wish to examine the return value from memset. For
3059 targets where libcalls and normal calls have different conventions
3060 for returning pointers, we could end up generating incorrect code.
3062 For convenience, we generate the call to bzero this way as well. */
3064 object_tree = make_tree (ptr_type_node, object);
3065 if (TARGET_MEM_FUNCTIONS)
3066 size_tree = make_tree (sizetype, size);
3067 else
3068 size_tree = make_tree (unsigned_type_node, size);
3070 fn = clear_storage_libcall_fn (true);
3071 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
3072 if (TARGET_MEM_FUNCTIONS)
3073 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3074 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
3076 /* Now we have to build up the CALL_EXPR itself. */
3077 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3078 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3079 call_expr, arg_list, NULL_TREE);
3080 TREE_SIDE_EFFECTS (call_expr) = 1;
3082 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3084 /* If we are initializing a readonly value, show the above call
3085 clobbered it. Otherwise, a load from it may erroneously be
3086 hoisted from a loop. */
3087 if (RTX_UNCHANGING_P (object))
3088 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3090 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
3093 /* A subroutine of clear_storage_via_libcall. Create the tree node
3094 for the function we use for block clears. The first time FOR_CALL
3095 is true, we call assemble_external. */
3097 static GTY(()) tree block_clear_fn;
3099 void
3100 init_block_clear_fn (asmspec)
3101 const char *asmspec;
3103 if (!block_clear_fn)
3105 tree fn, args;
3107 if (TARGET_MEM_FUNCTIONS)
3109 fn = get_identifier ("memset");
3110 args = build_function_type_list (ptr_type_node, ptr_type_node,
3111 integer_type_node, sizetype,
3112 NULL_TREE);
3114 else
3116 fn = get_identifier ("bzero");
3117 args = build_function_type_list (void_type_node, ptr_type_node,
3118 unsigned_type_node, NULL_TREE);
3121 fn = build_decl (FUNCTION_DECL, fn, args);
3122 DECL_EXTERNAL (fn) = 1;
3123 TREE_PUBLIC (fn) = 1;
3124 DECL_ARTIFICIAL (fn) = 1;
3125 TREE_NOTHROW (fn) = 1;
3127 block_clear_fn = fn;
3130 if (asmspec)
3132 SET_DECL_RTL (block_clear_fn, NULL_RTX);
3133 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
3137 static tree
3138 clear_storage_libcall_fn (for_call)
3139 int for_call;
3141 static bool emitted_extern;
3143 if (!block_clear_fn)
3144 init_block_clear_fn (NULL);
3146 if (for_call && !emitted_extern)
3148 emitted_extern = true;
3149 make_decl_rtl (block_clear_fn, NULL);
3150 assemble_external (block_clear_fn);
3153 return block_clear_fn;
3156 /* Generate code to copy Y into X.
3157 Both Y and X must have the same mode, except that
3158 Y can be a constant with VOIDmode.
3159 This mode cannot be BLKmode; use emit_block_move for that.
3161 Return the last instruction emitted. */
3163 rtx
3164 emit_move_insn (x, y)
3165 rtx x, y;
3167 enum machine_mode mode = GET_MODE (x);
3168 rtx y_cst = NULL_RTX;
3169 rtx last_insn, set;
3171 x = protect_from_queue (x, 1);
3172 y = protect_from_queue (y, 0);
3174 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3175 abort ();
3177 /* Never force constant_p_rtx to memory. */
3178 if (GET_CODE (y) == CONSTANT_P_RTX)
3180 else if (CONSTANT_P (y))
3182 if (optimize
3183 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3184 && (last_insn = compress_float_constant (x, y)))
3185 return last_insn;
3187 y_cst = y;
3189 if (!LEGITIMATE_CONSTANT_P (y))
3191 y = force_const_mem (mode, y);
3193 /* If the target's cannot_force_const_mem prevented the spill,
3194 assume that the target's move expanders will also take care
3195 of the non-legitimate constant. */
3196 if (!y)
3197 y = y_cst;
3201 /* If X or Y are memory references, verify that their addresses are valid
3202 for the machine. */
3203 if (GET_CODE (x) == MEM
3204 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3205 && ! push_operand (x, GET_MODE (x)))
3206 || (flag_force_addr
3207 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3208 x = validize_mem (x);
3210 if (GET_CODE (y) == MEM
3211 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3212 || (flag_force_addr
3213 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3214 y = validize_mem (y);
3216 if (mode == BLKmode)
3217 abort ();
3219 last_insn = emit_move_insn_1 (x, y);
3221 if (y_cst && GET_CODE (x) == REG
3222 && (set = single_set (last_insn)) != NULL_RTX
3223 && SET_DEST (set) == x
3224 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3225 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3227 return last_insn;
3230 /* Low level part of emit_move_insn.
3231 Called just like emit_move_insn, but assumes X and Y
3232 are basically valid. */
3234 rtx
3235 emit_move_insn_1 (x, y)
3236 rtx x, y;
3238 enum machine_mode mode = GET_MODE (x);
3239 enum machine_mode submode;
3240 enum mode_class class = GET_MODE_CLASS (mode);
3242 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3243 abort ();
3245 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3246 return
3247 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3249 /* Expand complex moves by moving real part and imag part, if possible. */
3250 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3251 && BLKmode != (submode = GET_MODE_INNER (mode))
3252 && (mov_optab->handlers[(int) submode].insn_code
3253 != CODE_FOR_nothing))
3255 /* Don't split destination if it is a stack push. */
3256 int stack = push_operand (x, GET_MODE (x));
3258 #ifdef PUSH_ROUNDING
3259 /* In case we output to the stack, but the size is smaller than the
3260 machine can push exactly, we need to use move instructions. */
3261 if (stack
3262 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3263 != GET_MODE_SIZE (submode)))
3265 rtx temp;
3266 HOST_WIDE_INT offset1, offset2;
3268 /* Do not use anti_adjust_stack, since we don't want to update
3269 stack_pointer_delta. */
3270 temp = expand_binop (Pmode,
3271 #ifdef STACK_GROWS_DOWNWARD
3272 sub_optab,
3273 #else
3274 add_optab,
3275 #endif
3276 stack_pointer_rtx,
3277 GEN_INT
3278 (PUSH_ROUNDING
3279 (GET_MODE_SIZE (GET_MODE (x)))),
3280 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3282 if (temp != stack_pointer_rtx)
3283 emit_move_insn (stack_pointer_rtx, temp);
3285 #ifdef STACK_GROWS_DOWNWARD
3286 offset1 = 0;
3287 offset2 = GET_MODE_SIZE (submode);
3288 #else
3289 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3290 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3291 + GET_MODE_SIZE (submode));
3292 #endif
3294 emit_move_insn (change_address (x, submode,
3295 gen_rtx_PLUS (Pmode,
3296 stack_pointer_rtx,
3297 GEN_INT (offset1))),
3298 gen_realpart (submode, y));
3299 emit_move_insn (change_address (x, submode,
3300 gen_rtx_PLUS (Pmode,
3301 stack_pointer_rtx,
3302 GEN_INT (offset2))),
3303 gen_imagpart (submode, y));
3305 else
3306 #endif
3307 /* If this is a stack push, emit the highpart first, so it
3308 will be in the argument order.
3310 In that case, change_address is used only to convert
3311 the mode, not to change the address. */
3312 if (stack)
3314 /* Note that the real part always precedes the imag part in memory
3315 regardless of machine's endianness. */
3316 #ifdef STACK_GROWS_DOWNWARD
3317 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3318 (gen_rtx_MEM (submode, XEXP (x, 0)),
3319 gen_imagpart (submode, y)));
3320 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3321 (gen_rtx_MEM (submode, XEXP (x, 0)),
3322 gen_realpart (submode, y)));
3323 #else
3324 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3325 (gen_rtx_MEM (submode, XEXP (x, 0)),
3326 gen_realpart (submode, y)));
3327 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3328 (gen_rtx_MEM (submode, XEXP (x, 0)),
3329 gen_imagpart (submode, y)));
3330 #endif
3332 else
3334 rtx realpart_x, realpart_y;
3335 rtx imagpart_x, imagpart_y;
3337 /* If this is a complex value with each part being smaller than a
3338 word, the usual calling sequence will likely pack the pieces into
3339 a single register. Unfortunately, SUBREG of hard registers only
3340 deals in terms of words, so we have a problem converting input
3341 arguments to the CONCAT of two registers that is used elsewhere
3342 for complex values. If this is before reload, we can copy it into
3343 memory and reload. FIXME, we should see about using extract and
3344 insert on integer registers, but complex short and complex char
3345 variables should be rarely used. */
3346 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3347 && (reload_in_progress | reload_completed) == 0)
3349 int packed_dest_p
3350 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3351 int packed_src_p
3352 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3354 if (packed_dest_p || packed_src_p)
3356 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3357 ? MODE_FLOAT : MODE_INT);
3359 enum machine_mode reg_mode
3360 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3362 if (reg_mode != BLKmode)
3364 rtx mem = assign_stack_temp (reg_mode,
3365 GET_MODE_SIZE (mode), 0);
3366 rtx cmem = adjust_address (mem, mode, 0);
3368 cfun->cannot_inline
3369 = N_("function using short complex types cannot be inline");
3371 if (packed_dest_p)
3373 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3375 emit_move_insn_1 (cmem, y);
3376 return emit_move_insn_1 (sreg, mem);
3378 else
3380 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3382 emit_move_insn_1 (mem, sreg);
3383 return emit_move_insn_1 (x, cmem);
3389 realpart_x = gen_realpart (submode, x);
3390 realpart_y = gen_realpart (submode, y);
3391 imagpart_x = gen_imagpart (submode, x);
3392 imagpart_y = gen_imagpart (submode, y);
3394 /* Show the output dies here. This is necessary for SUBREGs
3395 of pseudos since we cannot track their lifetimes correctly;
3396 hard regs shouldn't appear here except as return values.
3397 We never want to emit such a clobber after reload. */
3398 if (x != y
3399 && ! (reload_in_progress || reload_completed)
3400 && (GET_CODE (realpart_x) == SUBREG
3401 || GET_CODE (imagpart_x) == SUBREG))
3402 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3404 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3405 (realpart_x, realpart_y));
3406 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3407 (imagpart_x, imagpart_y));
3410 return get_last_insn ();
3413 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3414 find a mode to do it in. If we have a movcc, use it. Otherwise,
3415 find the MODE_INT mode of the same width. */
3416 else if (GET_MODE_CLASS (mode) == MODE_CC
3417 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3419 enum insn_code insn_code;
3420 enum machine_mode tmode = VOIDmode;
3421 rtx x1 = x, y1 = y;
3423 if (mode != CCmode
3424 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3425 tmode = CCmode;
3426 else
3427 for (tmode = QImode; tmode != VOIDmode;
3428 tmode = GET_MODE_WIDER_MODE (tmode))
3429 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3430 break;
3432 if (tmode == VOIDmode)
3433 abort ();
3435 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3436 may call change_address which is not appropriate if we were
3437 called when a reload was in progress. We don't have to worry
3438 about changing the address since the size in bytes is supposed to
3439 be the same. Copy the MEM to change the mode and move any
3440 substitutions from the old MEM to the new one. */
3442 if (reload_in_progress)
3444 x = gen_lowpart_common (tmode, x1);
3445 if (x == 0 && GET_CODE (x1) == MEM)
3447 x = adjust_address_nv (x1, tmode, 0);
3448 copy_replacements (x1, x);
3451 y = gen_lowpart_common (tmode, y1);
3452 if (y == 0 && GET_CODE (y1) == MEM)
3454 y = adjust_address_nv (y1, tmode, 0);
3455 copy_replacements (y1, y);
3458 else
3460 x = gen_lowpart (tmode, x);
3461 y = gen_lowpart (tmode, y);
3464 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3465 return emit_insn (GEN_FCN (insn_code) (x, y));
3468 /* This will handle any multi-word or full-word mode that lacks a move_insn
3469 pattern. However, you will get better code if you define such patterns,
3470 even if they must turn into multiple assembler instructions. */
3471 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3473 rtx last_insn = 0;
3474 rtx seq, inner;
3475 int need_clobber;
3476 int i;
3478 #ifdef PUSH_ROUNDING
3480 /* If X is a push on the stack, do the push now and replace
3481 X with a reference to the stack pointer. */
3482 if (push_operand (x, GET_MODE (x)))
3484 rtx temp;
3485 enum rtx_code code;
3487 /* Do not use anti_adjust_stack, since we don't want to update
3488 stack_pointer_delta. */
3489 temp = expand_binop (Pmode,
3490 #ifdef STACK_GROWS_DOWNWARD
3491 sub_optab,
3492 #else
3493 add_optab,
3494 #endif
3495 stack_pointer_rtx,
3496 GEN_INT
3497 (PUSH_ROUNDING
3498 (GET_MODE_SIZE (GET_MODE (x)))),
3499 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3501 if (temp != stack_pointer_rtx)
3502 emit_move_insn (stack_pointer_rtx, temp);
3504 code = GET_CODE (XEXP (x, 0));
3506 /* Just hope that small offsets off SP are OK. */
3507 if (code == POST_INC)
3508 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3509 GEN_INT (-((HOST_WIDE_INT)
3510 GET_MODE_SIZE (GET_MODE (x)))));
3511 else if (code == POST_DEC)
3512 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3513 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3514 else
3515 temp = stack_pointer_rtx;
3517 x = change_address (x, VOIDmode, temp);
3519 #endif
3521 /* If we are in reload, see if either operand is a MEM whose address
3522 is scheduled for replacement. */
3523 if (reload_in_progress && GET_CODE (x) == MEM
3524 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3525 x = replace_equiv_address_nv (x, inner);
3526 if (reload_in_progress && GET_CODE (y) == MEM
3527 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3528 y = replace_equiv_address_nv (y, inner);
3530 start_sequence ();
3532 need_clobber = 0;
3533 for (i = 0;
3534 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3535 i++)
3537 rtx xpart = operand_subword (x, i, 1, mode);
3538 rtx ypart = operand_subword (y, i, 1, mode);
3540 /* If we can't get a part of Y, put Y into memory if it is a
3541 constant. Otherwise, force it into a register. If we still
3542 can't get a part of Y, abort. */
3543 if (ypart == 0 && CONSTANT_P (y))
3545 y = force_const_mem (mode, y);
3546 ypart = operand_subword (y, i, 1, mode);
3548 else if (ypart == 0)
3549 ypart = operand_subword_force (y, i, mode);
3551 if (xpart == 0 || ypart == 0)
3552 abort ();
3554 need_clobber |= (GET_CODE (xpart) == SUBREG);
3556 last_insn = emit_move_insn (xpart, ypart);
3559 seq = get_insns ();
3560 end_sequence ();
3562 /* Show the output dies here. This is necessary for SUBREGs
3563 of pseudos since we cannot track their lifetimes correctly;
3564 hard regs shouldn't appear here except as return values.
3565 We never want to emit such a clobber after reload. */
3566 if (x != y
3567 && ! (reload_in_progress || reload_completed)
3568 && need_clobber != 0)
3569 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3571 emit_insn (seq);
3573 return last_insn;
3575 else
3576 abort ();
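/* Editorial sketch, not part of GCC: when no move pattern exists for a
   complex mode, the code above falls back to moving the real and the
   imaginary halves with two submode moves (and, for other multi-word
   modes, to one move per word).  The struct and helper below are a
   plain C analogue of the complex case; their names are hypothetical.  */

struct complex_float_sketch
{
  float re;			/* real part, always first in memory */
  float im;			/* imaginary part */
};

static void
move_complex_sketch (x, y)
     struct complex_float_sketch *x;
     const struct complex_float_sketch *y;
{
  /* Two SFmode-sized moves stand in for the missing SCmode move.  */
  x->re = y->re;
  x->im = y->im;
}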
3579 /* If Y is representable exactly in a narrower mode, and the target can
3580 perform the extension directly from constant or memory, then emit the
3581 move as an extension. */
3583 static rtx
3584 compress_float_constant (x, y)
3585 rtx x, y;
3587 enum machine_mode dstmode = GET_MODE (x);
3588 enum machine_mode orig_srcmode = GET_MODE (y);
3589 enum machine_mode srcmode;
3590 REAL_VALUE_TYPE r;
3592 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3594 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3595 srcmode != orig_srcmode;
3596 srcmode = GET_MODE_WIDER_MODE (srcmode))
3598 enum insn_code ic;
3599 rtx trunc_y, last_insn;
3601 /* Skip if the target can't extend this way. */
3602 ic = can_extend_p (dstmode, srcmode, 0);
3603 if (ic == CODE_FOR_nothing)
3604 continue;
3606 /* Skip if the narrowed value isn't exact. */
3607 if (! exact_real_truncate (srcmode, &r))
3608 continue;
3610 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3612 if (LEGITIMATE_CONSTANT_P (trunc_y))
3614 /* Skip if the target needs extra instructions to perform
3615 the extension. */
3616 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3617 continue;
3619 else if (float_extend_from_mem[dstmode][srcmode])
3620 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3621 else
3622 continue;
3624 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3625 last_insn = get_last_insn ();
3627 if (GET_CODE (x) == REG)
3628 set_unique_reg_note (last_insn, REG_EQUAL, y);
3630 return last_insn;
3633 return NULL_RTX;
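/* Editorial sketch, not part of GCC: the "narrowed value isn't exact"
   test above amounts to checking that rounding the constant to the
   narrower format and widening it back reproduces the original value.
   The predicate below states that check for double -> float under IEEE
   semantics: 1.0 passes (so a DFmode 1.0 can be emitted as an extension
   of SFmode 1.0f), while 0.1 does not.  The helper name is
   hypothetical.  */

static int
exact_truncation_sketch (value)
     double value;
{
  float narrowed = (float) value;

  /* Exact if and only if no information was lost in the narrowing.  */
  return (double) narrowed == value;
}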
3636 /* Pushing data onto the stack. */
3638 /* Push a block of length SIZE (perhaps variable)
3639 and return an rtx to address the beginning of the block.
3640 Note that it is not possible for the value returned to be a QUEUED.
3641 The value may be virtual_outgoing_args_rtx.
3643 EXTRA is the number of bytes of padding to push in addition to SIZE.
3644 BELOW nonzero means this padding comes at low addresses;
3645 otherwise, the padding comes at high addresses. */
3647 rtx
3648 push_block (size, extra, below)
3649 rtx size;
3650 int extra, below;
3652 rtx temp;
3654 size = convert_modes (Pmode, ptr_mode, size, 1);
3655 if (CONSTANT_P (size))
3656 anti_adjust_stack (plus_constant (size, extra));
3657 else if (GET_CODE (size) == REG && extra == 0)
3658 anti_adjust_stack (size);
3659 else
3661 temp = copy_to_mode_reg (Pmode, size);
3662 if (extra != 0)
3663 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3664 temp, 0, OPTAB_LIB_WIDEN);
3665 anti_adjust_stack (temp);
3668 #ifndef STACK_GROWS_DOWNWARD
3669 if (0)
3670 #else
3671 if (1)
3672 #endif
3674 temp = virtual_outgoing_args_rtx;
3675 if (extra != 0 && below)
3676 temp = plus_constant (temp, extra);
3678 else
3680 if (GET_CODE (size) == CONST_INT)
3681 temp = plus_constant (virtual_outgoing_args_rtx,
3682 -INTVAL (size) - (below ? 0 : extra));
3683 else if (extra != 0 && !below)
3684 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3685 negate_rtx (Pmode, plus_constant (size, extra)));
3686 else
3687 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3688 negate_rtx (Pmode, size));
3691 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
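/* Editorial sketch, not part of GCC: once the stack has been adjusted
   by SIZE + EXTRA bytes, the address push_block returns depends on the
   direction of stack growth.  Modelling virtual_outgoing_args_rtx as a
   plain integer address, the constant-size case above reduces to the
   helper below; its name and its use of long for addresses are
   assumptions of the illustration.  */

static long
push_block_address_sketch (outgoing_args, size, extra, below,
			   stack_grows_downward)
     long outgoing_args;
     long size;
     long extra;
     int below;
     int stack_grows_downward;
{
  if (stack_grows_downward)
    /* The block starts at the outgoing-args area; padding placed below
       the data just moves the data up by EXTRA bytes.  */
    return outgoing_args + ((extra != 0 && below) ? extra : 0);

  /* Upward-growing stack: the block ends at the outgoing-args area, so
     back up over the data, and over the padding too if it is above.  */
  return outgoing_args - size - (below ? 0 : extra);
}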
3694 #ifdef PUSH_ROUNDING
3696 /* Emit single push insn. */
3698 static void
3699 emit_single_push_insn (mode, x, type)
3700 rtx x;
3701 enum machine_mode mode;
3702 tree type;
3704 rtx dest_addr;
3705 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3706 rtx dest;
3707 enum insn_code icode;
3708 insn_operand_predicate_fn pred;
3710 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3711 /* If there is a push pattern, use it. Otherwise fall back to the old
3712 way of handing a MEM representing the push operation to the move expander. */
3713 icode = push_optab->handlers[(int) mode].insn_code;
3714 if (icode != CODE_FOR_nothing)
3716 if (((pred = insn_data[(int) icode].operand[0].predicate)
3717 && !((*pred) (x, mode))))
3718 x = force_reg (mode, x);
3719 emit_insn (GEN_FCN (icode) (x));
3720 return;
3722 if (GET_MODE_SIZE (mode) == rounded_size)
3723 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3724 else
3726 #ifdef STACK_GROWS_DOWNWARD
3727 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3728 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3729 #else
3730 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3731 GEN_INT (rounded_size));
3732 #endif
3733 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3736 dest = gen_rtx_MEM (mode, dest_addr);
3738 if (type != 0)
3740 set_mem_attributes (dest, type, 1);
3742 if (flag_optimize_sibling_calls)
3743 /* Function incoming arguments may overlap with sibling call
3744 outgoing arguments and we cannot allow reordering of reads
3745 from function arguments with stores to outgoing arguments
3746 of sibling calls. */
3747 set_mem_alias_set (dest, 0);
3749 emit_move_insn (dest, x);
3751 #endif
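/* Editorial sketch, not part of GCC: PUSH_ROUNDING describes how far a
   single push really moves the stack pointer.  On a target that rounds
   pushes up to 4 bytes, pushing a 1-byte QImode value still moves the
   pointer by 4, which is why the code above switches from the plain
   STACK_PUSH_CODE address to an explicit PRE_MODIFY when the rounded
   size differs from the mode size.  The helper below models the
   downward-growing case with a hypothetical round-up-to-4 rule.  */

static long
single_push_sp_sketch (sp, mode_size)
     long sp;
     long mode_size;
{
  long rounded_size = (mode_size + 3) & ~3L;	/* stand-in for PUSH_ROUNDING */

  /* The stack pointer moves by the rounded size even when the pushed
     value itself is smaller.  */
  return sp - rounded_size;
}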
3753 /* Generate code to push X onto the stack, assuming it has mode MODE and
3754 type TYPE.
3755 MODE is redundant except when X is a CONST_INT (since they don't
3756 carry mode info).
3757 SIZE is an rtx for the size of data to be copied (in bytes),
3758 needed only if X is BLKmode.
3760 ALIGN (in bits) is maximum alignment we can assume.
3762 If PARTIAL and REG are both nonzero, then copy that many of the first
3763 words of X into registers starting with REG, and push the rest of X.
3764 The amount of space pushed is decreased by PARTIAL words,
3765 rounded *down* to a multiple of PARM_BOUNDARY.
3766 REG must be a hard register in this case.
3767 If REG is zero but PARTIAL is not, take all other actions for an
3768 argument partially in registers, but do not actually load any
3769 registers.
3771 EXTRA is the amount in bytes of extra space to leave next to this arg.
3772 This is ignored if an argument block has already been allocated.
3774 On a machine that lacks real push insns, ARGS_ADDR is the address of
3775 the bottom of the argument block for this call. We use indexing off there
3776 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3777 argument block has not been preallocated.
3779 ARGS_SO_FAR is the size of args previously pushed for this call.
3781 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3782 for arguments passed in registers. If nonzero, it will be the number
3783 of bytes required. */
3785 void
3786 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3787 args_addr, args_so_far, reg_parm_stack_space,
3788 alignment_pad)
3789 rtx x;
3790 enum machine_mode mode;
3791 tree type;
3792 rtx size;
3793 unsigned int align;
3794 int partial;
3795 rtx reg;
3796 int extra;
3797 rtx args_addr;
3798 rtx args_so_far;
3799 int reg_parm_stack_space;
3800 rtx alignment_pad;
3802 rtx xinner;
3803 enum direction stack_direction
3804 #ifdef STACK_GROWS_DOWNWARD
3805 = downward;
3806 #else
3807 = upward;
3808 #endif
3810 /* Decide where to pad the argument: `downward' for below,
3811 `upward' for above, or `none' for don't pad it.
3812 Default is below for small data on big-endian machines; else above. */
3813 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3815 /* Invert direction if stack is post-decrement.
3816 FIXME: why? */
3817 if (STACK_PUSH_CODE == POST_DEC)
3818 if (where_pad != none)
3819 where_pad = (where_pad == downward ? upward : downward);
3821 xinner = x = protect_from_queue (x, 0);
3823 if (mode == BLKmode)
3825 /* Copy a block into the stack, entirely or partially. */
3827 rtx temp;
3828 int used = partial * UNITS_PER_WORD;
3829 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3830 int skip;
3832 if (size == 0)
3833 abort ();
3835 used -= offset;
3837 /* USED is now the # of bytes we need not copy to the stack
3838 because registers will take care of them. */
3840 if (partial != 0)
3841 xinner = adjust_address (xinner, BLKmode, used);
3843 /* If the partial register-part of the arg counts in its stack size,
3844 skip the part of stack space corresponding to the registers.
3845 Otherwise, start copying to the beginning of the stack space,
3846 by setting SKIP to 0. */
3847 skip = (reg_parm_stack_space == 0) ? 0 : used;
3849 #ifdef PUSH_ROUNDING
3850 /* Do it with several push insns if that doesn't take lots of insns
3851 and if there is no difficulty with push insns that skip bytes
3852 on the stack for alignment purposes. */
3853 if (args_addr == 0
3854 && PUSH_ARGS
3855 && GET_CODE (size) == CONST_INT
3856 && skip == 0
3857 && MEM_ALIGN (xinner) >= align
3858 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3859 /* Here we avoid the case of a structure whose weak alignment
3860 forces many pushes of a small amount of data,
3861 and such small pushes do rounding that causes trouble. */
3862 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3863 || align >= BIGGEST_ALIGNMENT
3864 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3865 == (align / BITS_PER_UNIT)))
3866 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3868 /* Push padding now if padding above and stack grows down,
3869 or if padding below and stack grows up.
3870 But if space already allocated, this has already been done. */
3871 if (extra && args_addr == 0
3872 && where_pad != none && where_pad != stack_direction)
3873 anti_adjust_stack (GEN_INT (extra));
3875 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3877 else
3878 #endif /* PUSH_ROUNDING */
3880 rtx target;
3882 /* Otherwise make space on the stack and copy the data
3883 to the address of that space. */
3885 /* Deduct words put into registers from the size we must copy. */
3886 if (partial != 0)
3888 if (GET_CODE (size) == CONST_INT)
3889 size = GEN_INT (INTVAL (size) - used);
3890 else
3891 size = expand_binop (GET_MODE (size), sub_optab, size,
3892 GEN_INT (used), NULL_RTX, 0,
3893 OPTAB_LIB_WIDEN);
3896 /* Get the address of the stack space.
3897 In this case, we do not deal with EXTRA separately.
3898 A single stack adjust will do. */
3899 if (! args_addr)
3901 temp = push_block (size, extra, where_pad == downward);
3902 extra = 0;
3904 else if (GET_CODE (args_so_far) == CONST_INT)
3905 temp = memory_address (BLKmode,
3906 plus_constant (args_addr,
3907 skip + INTVAL (args_so_far)));
3908 else
3909 temp = memory_address (BLKmode,
3910 plus_constant (gen_rtx_PLUS (Pmode,
3911 args_addr,
3912 args_so_far),
3913 skip));
3915 if (!ACCUMULATE_OUTGOING_ARGS)
3917 /* If the source is referenced relative to the stack pointer,
3918 copy it to another register to stabilize it. We do not need
3919 to do this if we know that we won't be changing sp. */
3921 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3922 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3923 temp = copy_to_reg (temp);
3926 target = gen_rtx_MEM (BLKmode, temp);
3928 if (type != 0)
3930 set_mem_attributes (target, type, 1);
3931 /* Function incoming arguments may overlap with sibling call
3932 outgoing arguments and we cannot allow reordering of reads
3933 from function arguments with stores to outgoing arguments
3934 of sibling calls. */
3935 set_mem_alias_set (target, 0);
3938 /* ALIGN may well be better aligned than TYPE, e.g. due to
3939 PARM_BOUNDARY. Assume the caller isn't lying. */
3940 set_mem_align (target, align);
3942 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3945 else if (partial > 0)
3947 /* Scalar partly in registers. */
3949 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3950 int i;
3951 int not_stack;
3952 /* # words of start of argument
3953 that we must make space for but need not store. */
3954 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3955 int args_offset = INTVAL (args_so_far);
3956 int skip;
3958 /* Push padding now if padding above and stack grows down,
3959 or if padding below and stack grows up.
3960 But if space already allocated, this has already been done. */
3961 if (extra && args_addr == 0
3962 && where_pad != none && where_pad != stack_direction)
3963 anti_adjust_stack (GEN_INT (extra));
3965 /* If we make space by pushing it, we might as well push
3966 the real data. Otherwise, we can leave OFFSET nonzero
3967 and leave the space uninitialized. */
3968 if (args_addr == 0)
3969 offset = 0;
3971 /* Now NOT_STACK gets the number of words that we don't need to
3972 allocate on the stack. */
3973 not_stack = partial - offset;
3975 /* If the partial register-part of the arg counts in its stack size,
3976 skip the part of stack space corresponding to the registers.
3977 Otherwise, start copying to the beginning of the stack space,
3978 by setting SKIP to 0. */
3979 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3981 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3982 x = validize_mem (force_const_mem (mode, x));
3984 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3985 SUBREGs of such registers are not allowed. */
3986 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3987 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3988 x = copy_to_reg (x);
3990 /* Loop over all the words allocated on the stack for this arg. */
3991 /* We can do it by words, because any scalar bigger than a word
3992 has a size a multiple of a word. */
3993 #ifndef PUSH_ARGS_REVERSED
3994 for (i = not_stack; i < size; i++)
3995 #else
3996 for (i = size - 1; i >= not_stack; i--)
3997 #endif
3998 if (i >= not_stack + offset)
3999 emit_push_insn (operand_subword_force (x, i, mode),
4000 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4001 0, args_addr,
4002 GEN_INT (args_offset + ((i - not_stack + skip)
4003 * UNITS_PER_WORD)),
4004 reg_parm_stack_space, alignment_pad);
4006 else
4008 rtx addr;
4009 rtx dest;
4011 /* Push padding now if padding above and stack grows down,
4012 or if padding below and stack grows up.
4013 But if space already allocated, this has already been done. */
4014 if (extra && args_addr == 0
4015 && where_pad != none && where_pad != stack_direction)
4016 anti_adjust_stack (GEN_INT (extra));
4018 #ifdef PUSH_ROUNDING
4019 if (args_addr == 0 && PUSH_ARGS)
4020 emit_single_push_insn (mode, x, type);
4021 else
4022 #endif
4024 if (GET_CODE (args_so_far) == CONST_INT)
4025 addr
4026 = memory_address (mode,
4027 plus_constant (args_addr,
4028 INTVAL (args_so_far)));
4029 else
4030 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4031 args_so_far));
4032 dest = gen_rtx_MEM (mode, addr);
4033 if (type != 0)
4035 set_mem_attributes (dest, type, 1);
4036 /* Function incoming arguments may overlap with sibling call
4037 outgoing arguments and we cannot allow reordering of reads
4038 from function arguments with stores to outgoing arguments
4039 of sibling calls. */
4040 set_mem_alias_set (dest, 0);
4043 emit_move_insn (dest, x);
4047 /* If part should go in registers, copy that part
4048 into the appropriate registers. Do this now, at the end,
4049 since mem-to-mem copies above may do function calls. */
4050 if (partial > 0 && reg != 0)
4052 /* Handle calls that pass values in multiple non-contiguous locations.
4053 The Irix 6 ABI has examples of this. */
4054 if (GET_CODE (reg) == PARALLEL)
4055 emit_group_load (reg, x, -1); /* ??? size? */
4056 else
4057 move_block_to_reg (REGNO (reg), x, partial, mode);
4060 if (extra && args_addr == 0 && where_pad == stack_direction)
4061 anti_adjust_stack (GEN_INT (extra));
4063 if (alignment_pad && args_addr == 0)
4064 anti_adjust_stack (alignment_pad);
4067 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4068 operations. */
4070 static rtx
4071 get_subtarget (x)
4072 rtx x;
4074 return ((x == 0
4075 /* Only registers can be subtargets. */
4076 || GET_CODE (x) != REG
4077 /* If the register is readonly, it can't be set more than once. */
4078 || RTX_UNCHANGING_P (x)
4079 /* Don't use hard regs to avoid extending their life. */
4080 || REGNO (x) < FIRST_PSEUDO_REGISTER
4081 /* Avoid subtargets inside loops,
4082 since they hide some invariant expressions. */
4083 || preserve_subexpressions_p ())
4084 ? 0 : x);
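/* In other words, get_subtarget yields X only when X is an ordinary,
modifiable pseudo register and no loop is being expanded; for a hard
register, a MEM, or a null X it returns 0 so the caller lets expand_expr
pick its own temporary. */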
4087 /* Expand an assignment that stores the value of FROM into TO.
4088 If WANT_VALUE is nonzero, return an rtx for the value of TO.
4089 (This may contain a QUEUED rtx;
4090 if the value is constant, this rtx is a constant.)
4091 Otherwise, the returned value is NULL_RTX.
4093 SUGGEST_REG is no longer actually used.
4094 It used to mean, copy the value through a register
4095 and return that register, if that is possible.
4096 We now use WANT_VALUE to decide whether to do this. */
4098 rtx
4099 expand_assignment (to, from, want_value, suggest_reg)
4100 tree to, from;
4101 int want_value;
4102 int suggest_reg ATTRIBUTE_UNUSED;
4104 rtx to_rtx = 0;
4105 rtx result;
4107 /* Don't crash if the lhs of the assignment was erroneous. */
4109 if (TREE_CODE (to) == ERROR_MARK)
4111 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
4112 return want_value ? result : NULL_RTX;
4115 /* Assignment of a structure component needs special treatment
4116 if the structure component's rtx is not simply a MEM.
4117 Assignment of an array element at a constant index, and assignment of
4118 an array element in an unaligned packed structure field, has the same
4119 problem. */
4121 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
4122 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
4123 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4125 enum machine_mode mode1;
4126 HOST_WIDE_INT bitsize, bitpos;
4127 rtx orig_to_rtx;
4128 tree offset;
4129 int unsignedp;
4130 int volatilep = 0;
4131 tree tem;
4133 push_temp_slots ();
4134 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4135 &unsignedp, &volatilep);
4137 /* If we are going to use store_bit_field and extract_bit_field,
4138 make sure to_rtx will be safe for multiple use. */
4140 if (mode1 == VOIDmode && want_value)
4141 tem = stabilize_reference (tem);
4143 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
4145 if (offset != 0)
4147 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4149 if (GET_CODE (to_rtx) != MEM)
4150 abort ();
4152 #ifdef POINTERS_EXTEND_UNSIGNED
4153 if (GET_MODE (offset_rtx) != Pmode)
4154 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4155 #else
4156 if (GET_MODE (offset_rtx) != ptr_mode)
4157 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4158 #endif
4160 /* A constant address in TO_RTX can have VOIDmode, we must not try
4161 to call force_reg for that case. Avoid that case. */
4162 if (GET_CODE (to_rtx) == MEM
4163 && GET_MODE (to_rtx) == BLKmode
4164 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4165 && bitsize > 0
4166 && (bitpos % bitsize) == 0
4167 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4168 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4170 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4171 bitpos = 0;
4174 to_rtx = offset_address (to_rtx, offset_rtx,
4175 highest_pow2_factor_for_type (TREE_TYPE (to),
4176 offset));
4179 if (GET_CODE (to_rtx) == MEM)
4181 /* If the field is at offset zero, we could have been given the
4182 DECL_RTX of the parent struct. Don't munge it. */
4183 to_rtx = shallow_copy_rtx (to_rtx);
4185 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4188 /* Deal with volatile and readonly fields. The former is only done
4189 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4190 if (volatilep && GET_CODE (to_rtx) == MEM)
4192 if (to_rtx == orig_to_rtx)
4193 to_rtx = copy_rtx (to_rtx);
4194 MEM_VOLATILE_P (to_rtx) = 1;
4197 if (TREE_CODE (to) == COMPONENT_REF
4198 && TREE_READONLY (TREE_OPERAND (to, 1)))
4200 if (to_rtx == orig_to_rtx)
4201 to_rtx = copy_rtx (to_rtx);
4202 RTX_UNCHANGING_P (to_rtx) = 1;
4205 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4207 if (to_rtx == orig_to_rtx)
4208 to_rtx = copy_rtx (to_rtx);
4209 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4212 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4213 (want_value
4214 /* Spurious cast for HPUX compiler. */
4215 ? ((enum machine_mode)
4216 TYPE_MODE (TREE_TYPE (to)))
4217 : VOIDmode),
4218 unsignedp, TREE_TYPE (tem), get_alias_set (to));
4220 preserve_temp_slots (result);
4221 free_temp_slots ();
4222 pop_temp_slots ();
4224 /* If the value is meaningful, convert RESULT to the proper mode.
4225 Otherwise, return nothing. */
4226 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4227 TYPE_MODE (TREE_TYPE (from)),
4228 result,
4229 TREE_UNSIGNED (TREE_TYPE (to)))
4230 : NULL_RTX);
4233 /* If the rhs is a function call and its value is not an aggregate,
4234 call the function before we start to compute the lhs.
4235 This is needed for correct code for cases such as
4236 val = setjmp (buf) on machines where reference to val
4237 requires loading up part of an address in a separate insn.
4239 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4240 since it might be a promoted variable where the zero- or sign-extension
4241 needs to be done. Handling this in the normal way is safe because no
4242 computation is done before the call. */
4243 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
4244 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4245 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4246 && GET_CODE (DECL_RTL (to)) == REG))
4248 rtx value;
4250 push_temp_slots ();
4251 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4252 if (to_rtx == 0)
4253 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4255 /* Handle calls that return values in multiple non-contiguous locations.
4256 The Irix 6 ABI has examples of this. */
4257 if (GET_CODE (to_rtx) == PARALLEL)
4258 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
4259 else if (GET_MODE (to_rtx) == BLKmode)
4260 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4261 else
4263 #ifdef POINTERS_EXTEND_UNSIGNED
4264 if (POINTER_TYPE_P (TREE_TYPE (to))
4265 && GET_MODE (to_rtx) != GET_MODE (value))
4266 value = convert_memory_address (GET_MODE (to_rtx), value);
4267 #endif
4268 emit_move_insn (to_rtx, value);
4270 preserve_temp_slots (to_rtx);
4271 free_temp_slots ();
4272 pop_temp_slots ();
4273 return want_value ? to_rtx : NULL_RTX;
4276 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4277 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4279 if (to_rtx == 0)
4280 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4282 /* Don't move directly into a return register. */
4283 if (TREE_CODE (to) == RESULT_DECL
4284 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
4286 rtx temp;
4288 push_temp_slots ();
4289 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4291 if (GET_CODE (to_rtx) == PARALLEL)
4292 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
4293 else
4294 emit_move_insn (to_rtx, temp);
4296 preserve_temp_slots (to_rtx);
4297 free_temp_slots ();
4298 pop_temp_slots ();
4299 return want_value ? to_rtx : NULL_RTX;
4302 /* In case we are returning the contents of an object which overlaps
4303 the place the value is being stored, use a safe function when copying
4304 a value through a pointer into a structure value return block. */
4305 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4306 && current_function_returns_struct
4307 && !current_function_returns_pcc_struct)
4309 rtx from_rtx, size;
4311 push_temp_slots ();
4312 size = expr_size (from);
4313 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4315 if (TARGET_MEM_FUNCTIONS)
4316 emit_library_call (memmove_libfunc, LCT_NORMAL,
4317 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4318 XEXP (from_rtx, 0), Pmode,
4319 convert_to_mode (TYPE_MODE (sizetype),
4320 size, TREE_UNSIGNED (sizetype)),
4321 TYPE_MODE (sizetype));
4322 else
4323 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4324 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4325 XEXP (to_rtx, 0), Pmode,
4326 convert_to_mode (TYPE_MODE (integer_type_node),
4327 size,
4328 TREE_UNSIGNED (integer_type_node)),
4329 TYPE_MODE (integer_type_node));
4331 preserve_temp_slots (to_rtx);
4332 free_temp_slots ();
4333 pop_temp_slots ();
4334 return want_value ? to_rtx : NULL_RTX;
4337 /* Compute FROM and store the value in the rtx we got. */
4339 push_temp_slots ();
4340 result = store_expr (from, to_rtx, want_value);
4341 preserve_temp_slots (result);
4342 free_temp_slots ();
4343 pop_temp_slots ();
4344 return want_value ? result : NULL_RTX;
4347 /* Generate code for computing expression EXP,
4348 and storing the value into TARGET.
4349 TARGET may contain a QUEUED rtx.
4351 If WANT_VALUE & 1 is nonzero, return a copy of the value
4352 not in TARGET, so that we can be sure to use the proper
4353 value in a containing expression even if TARGET has something
4354 else stored in it. If possible, we copy the value through a pseudo
4355 and return that pseudo. Or, if the value is constant, we try to
4356 return the constant. In some cases, we return a pseudo
4357 copied *from* TARGET.
4359 If the mode is BLKmode then we may return TARGET itself.
4360 It turns out that in BLKmode it doesn't cause a problem,
4361 because C has no operators that could combine two different
4362 assignments into the same BLKmode object with different values
4363 with no sequence point. Will other languages need this to
4364 be more thorough?
4366 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4367 to catch quickly any cases where the caller uses the value
4368 and fails to set WANT_VALUE.
4370 If WANT_VALUE & 2 is set, this is a store into a call param on the
4371 stack, and block moves may need to be treated specially. */
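/* Roughly, WANT_VALUE is a pair of flag bits: bit 0 asks for the stored
value back (as for the inner assignment in "a = b = c"), and bit 1 marks
a store into a call parameter on the stack, which selects
EXPAND_STACK_PARM expansion and BLOCK_OP_CALL_PARM block moves below. */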
4373 rtx
4374 store_expr (exp, target, want_value)
4375 tree exp;
4376 rtx target;
4377 int want_value;
4379 rtx temp;
4380 int dont_return_target = 0;
4381 int dont_store_target = 0;
4383 if (VOID_TYPE_P (TREE_TYPE (exp)))
4385 /* C++ can generate ?: expressions with a throw expression in one
4386 branch and an rvalue in the other. Here, we resolve attempts to
4387 store the throw expression's nonexistent result. */
4388 if (want_value)
4389 abort ();
4390 expand_expr (exp, const0_rtx, VOIDmode, 0);
4391 return NULL_RTX;
4393 if (TREE_CODE (exp) == COMPOUND_EXPR)
4395 /* Perform first part of compound expression, then assign from second
4396 part. */
4397 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4398 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4399 emit_queue ();
4400 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4402 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4404 /* For conditional expression, get safe form of the target. Then
4405 test the condition, doing the appropriate assignment on either
4406 side. This avoids the creation of unnecessary temporaries.
4407 For non-BLKmode, it is more efficient not to do this. */
4409 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4411 emit_queue ();
4412 target = protect_from_queue (target, 1);
4414 do_pending_stack_adjust ();
4415 NO_DEFER_POP;
4416 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4417 start_cleanup_deferral ();
4418 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4419 end_cleanup_deferral ();
4420 emit_queue ();
4421 emit_jump_insn (gen_jump (lab2));
4422 emit_barrier ();
4423 emit_label (lab1);
4424 start_cleanup_deferral ();
4425 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4426 end_cleanup_deferral ();
4427 emit_queue ();
4428 emit_label (lab2);
4429 OK_DEFER_POP;
4431 return want_value & 1 ? target : NULL_RTX;
4433 else if (queued_subexp_p (target))
4434 /* If target contains a postincrement, let's not risk
4435 using it as the place to generate the rhs. */
4437 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4439 /* Expand EXP into a new pseudo. */
4440 temp = gen_reg_rtx (GET_MODE (target));
4441 temp = expand_expr (exp, temp, GET_MODE (target),
4442 (want_value & 2
4443 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4445 else
4446 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4447 (want_value & 2
4448 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4450 /* If target is volatile, ANSI requires accessing the value
4451 *from* the target, if it is accessed. So make that happen.
4452 In no case return the target itself. */
4453 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4454 dont_return_target = 1;
4456 else if ((want_value & 1) != 0
4457 && GET_CODE (target) == MEM
4458 && ! MEM_VOLATILE_P (target)
4459 && GET_MODE (target) != BLKmode)
4460 /* If target is in memory and caller wants value in a register instead,
4461 arrange that. Pass TARGET as target for expand_expr so that,
4462 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4463 We know expand_expr will not use the target in that case.
4464 Don't do this if TARGET is volatile because we are supposed
4465 to write it and then read it. */
4467 temp = expand_expr (exp, target, GET_MODE (target),
4468 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4469 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4471 /* If TEMP is already in the desired TARGET, only copy it from
4472 memory and don't store it there again. */
4473 if (temp == target
4474 || (rtx_equal_p (temp, target)
4475 && ! side_effects_p (temp) && ! side_effects_p (target)))
4476 dont_store_target = 1;
4477 temp = copy_to_reg (temp);
4479 dont_return_target = 1;
4481 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4482 /* If this is a scalar in a register that is stored in a wider mode
4483 than the declared mode, compute the result into its declared mode
4484 and then convert to the wider mode. Our value is the computed
4485 expression. */
4487 rtx inner_target = 0;
4489 /* If we don't want a value, we can do the conversion inside EXP,
4490 which will often result in some optimizations. Do the conversion
4491 in two steps: first change the signedness, if needed, then
4492 the extend. But don't do this if the type of EXP is a subtype
4493 of something else since then the conversion might involve
4494 more than just converting modes. */
4495 if ((want_value & 1) == 0
4496 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4497 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4499 if (TREE_UNSIGNED (TREE_TYPE (exp))
4500 != SUBREG_PROMOTED_UNSIGNED_P (target))
4501 exp = convert
4502 ((*lang_hooks.types.signed_or_unsigned_type)
4503 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4505 exp = convert ((*lang_hooks.types.type_for_mode)
4506 (GET_MODE (SUBREG_REG (target)),
4507 SUBREG_PROMOTED_UNSIGNED_P (target)),
4508 exp);
4510 inner_target = SUBREG_REG (target);
4513 temp = expand_expr (exp, inner_target, VOIDmode,
4514 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4516 /* If TEMP is a MEM and we want a result value, make the access
4517 now so it gets done only once. Strictly speaking, this is
4518 only necessary if the MEM is volatile, or if the address
4519 overlaps TARGET. But not performing the load twice also
4520 reduces the amount of rtl we generate and then have to CSE. */
4521 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4522 temp = copy_to_reg (temp);
4524 /* If TEMP is a VOIDmode constant, use convert_modes to make
4525 sure that we properly convert it. */
4526 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4528 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4529 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4530 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4531 GET_MODE (target), temp,
4532 SUBREG_PROMOTED_UNSIGNED_P (target));
4535 convert_move (SUBREG_REG (target), temp,
4536 SUBREG_PROMOTED_UNSIGNED_P (target));
4538 /* If we promoted a constant, change the mode back down to match
4539 target. Otherwise, the caller might get confused by a result whose
4540 mode is larger than expected. */
4542 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4544 if (GET_MODE (temp) != VOIDmode)
4546 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4547 SUBREG_PROMOTED_VAR_P (temp) = 1;
4548 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4549 SUBREG_PROMOTED_UNSIGNED_P (target));
4551 else
4552 temp = convert_modes (GET_MODE (target),
4553 GET_MODE (SUBREG_REG (target)),
4554 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4557 return want_value & 1 ? temp : NULL_RTX;
4559 else
4561 temp = expand_expr (exp, target, GET_MODE (target),
4562 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4563 /* Return TARGET if it's a specified hardware register.
4564 If TARGET is a volatile mem ref, either return TARGET
4565 or return a reg copied *from* TARGET; ANSI requires this.
4567 Otherwise, if TEMP is not TARGET, return TEMP
4568 if it is constant (for efficiency),
4569 or if we really want the correct value. */
4570 if (!(target && GET_CODE (target) == REG
4571 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4572 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4573 && ! rtx_equal_p (temp, target)
4574 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4575 dont_return_target = 1;
4578 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4579 the same as that of TARGET, adjust the constant. This is needed, for
4580 example, in case it is a CONST_DOUBLE and we want only a word-sized
4581 value. */
4582 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4583 && TREE_CODE (exp) != ERROR_MARK
4584 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4585 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4586 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4588 /* If value was not generated in the target, store it there.
4589 Convert the value to TARGET's type first if necessary.
4590 If TEMP and TARGET compare equal according to rtx_equal_p, but
4591 one or both of them are volatile memory refs, we have to distinguish
4592 two cases:
4593 - expand_expr has used TARGET. In this case, we must not generate
4594 another copy. This can be detected because TEMP and TARGET compare
4595 equal according to ==.
4596 - expand_expr has not used TARGET - that means that the source just
4597 happens to have the same RTX form. Since TEMP will have been created
4598 by expand_expr, it will compare unequal to TARGET according to ==.
4599 We must generate a copy in this case, to reach the correct number
4600 of volatile memory references. */
4602 if ((! rtx_equal_p (temp, target)
4603 || (temp != target && (side_effects_p (temp)
4604 || side_effects_p (target))))
4605 && TREE_CODE (exp) != ERROR_MARK
4606 && ! dont_store_target
4607 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4608 but TARGET is not valid memory reference, TEMP will differ
4609 from TARGET although it is really the same location. */
4610 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4611 || target != DECL_RTL_IF_SET (exp))
4612 /* If there's nothing to copy, don't bother. Don't call expr_size
4613 unless necessary, because some front ends' (C++) expr_size hook
4614 aborts on objects that are not supposed to be bit-copied or
4615 bit-initialized. */
4616 && expr_size (exp) != const0_rtx)
4618 target = protect_from_queue (target, 1);
4619 if (GET_MODE (temp) != GET_MODE (target)
4620 && GET_MODE (temp) != VOIDmode)
4622 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4623 if (dont_return_target)
4625 /* In this case, we will return TEMP,
4626 so make sure it has the proper mode.
4627 But don't forget to store the value into TARGET. */
4628 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4629 emit_move_insn (target, temp);
4631 else
4632 convert_move (target, temp, unsignedp);
4635 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4637 /* Handle copying a string constant into an array. The string
4638 constant may be shorter than the array. So copy just the string's
4639 actual length, and clear the rest. First get the size of the data
4640 type of the string, which is actually the size of the target. */
4641 rtx size = expr_size (exp);
4643 if (GET_CODE (size) == CONST_INT
4644 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4645 emit_block_move (target, temp, size,
4646 (want_value & 2
4647 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4648 else
4650 /* Compute the size of the data to copy from the string. */
4651 tree copy_size
4652 = size_binop (MIN_EXPR,
4653 make_tree (sizetype, size),
4654 size_int (TREE_STRING_LENGTH (exp)));
4655 rtx copy_size_rtx
4656 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4657 (want_value & 2
4658 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4659 rtx label = 0;
4661 /* Copy that much. */
4662 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4663 TREE_UNSIGNED (sizetype));
4664 emit_block_move (target, temp, copy_size_rtx,
4665 (want_value & 2
4666 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4668 /* Figure out how much is left in TARGET that we have to clear.
4669 Do all calculations in ptr_mode. */
4670 if (GET_CODE (copy_size_rtx) == CONST_INT)
4672 size = plus_constant (size, -INTVAL (copy_size_rtx));
4673 target = adjust_address (target, BLKmode,
4674 INTVAL (copy_size_rtx));
4676 else
4678 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4679 copy_size_rtx, NULL_RTX, 0,
4680 OPTAB_LIB_WIDEN);
4682 #ifdef POINTERS_EXTEND_UNSIGNED
4683 if (GET_MODE (copy_size_rtx) != Pmode)
4684 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4685 TREE_UNSIGNED (sizetype));
4686 #endif
4688 target = offset_address (target, copy_size_rtx,
4689 highest_pow2_factor (copy_size));
4690 label = gen_label_rtx ();
4691 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4692 GET_MODE (size), 0, label);
4695 if (size != const0_rtx)
4696 clear_storage (target, size);
4698 if (label)
4699 emit_label (label);
4702 /* Handle calls that return values in multiple non-contiguous locations.
4703 The Irix 6 ABI has examples of this. */
4704 else if (GET_CODE (target) == PARALLEL)
4705 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4706 else if (GET_MODE (temp) == BLKmode)
4707 emit_block_move (target, temp, expr_size (exp),
4708 (want_value & 2
4709 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4710 else
4711 emit_move_insn (target, temp);
4714 /* If we don't want a value, return NULL_RTX. */
4715 if ((want_value & 1) == 0)
4716 return NULL_RTX;
4718 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4719 ??? The latter test doesn't seem to make sense. */
4720 else if (dont_return_target && GET_CODE (temp) != MEM)
4721 return temp;
4723 /* Return TARGET itself if it is a hard register. */
4724 else if ((want_value & 1) != 0
4725 && GET_MODE (target) != BLKmode
4726 && ! (GET_CODE (target) == REG
4727 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4728 return copy_to_reg (target);
4730 else
4731 return target;
4734 /* Return 1 if EXP just contains zeros. */
4736 static int
4737 is_zeros_p (exp)
4738 tree exp;
4740 tree elt;
4742 switch (TREE_CODE (exp))
4744 case CONVERT_EXPR:
4745 case NOP_EXPR:
4746 case NON_LVALUE_EXPR:
4747 case VIEW_CONVERT_EXPR:
4748 return is_zeros_p (TREE_OPERAND (exp, 0));
4750 case INTEGER_CST:
4751 return integer_zerop (exp);
4753 case COMPLEX_CST:
4754 return
4755 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4757 case REAL_CST:
4758 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4760 case VECTOR_CST:
4761 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4762 elt = TREE_CHAIN (elt))
4763 if (!is_zeros_p (TREE_VALUE (elt)))
4764 return 0;
4766 return 1;
4768 case CONSTRUCTOR:
4769 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4770 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4771 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4772 if (! is_zeros_p (TREE_VALUE (elt)))
4773 return 0;
4775 return 1;
4777 default:
4778 return 0;
4782 /* Return 1 if EXP contains mostly (3/4) zeros. */
4784 static int
4785 mostly_zeros_p (exp)
4786 tree exp;
4788 if (TREE_CODE (exp) == CONSTRUCTOR)
4790 int elts = 0, zeros = 0;
4791 tree elt = CONSTRUCTOR_ELTS (exp);
4792 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4794 /* If there are no ranges of true bits, it is all zero. */
4795 return elt == NULL_TREE;
4797 for (; elt; elt = TREE_CHAIN (elt))
4799 /* We do not handle the case where the index is a RANGE_EXPR,
4800 so the statistic will be somewhat inaccurate.
4801 We do make a more accurate count in store_constructor itself,
4802 and since this function is only used for nested array elements,
4803 this should be close enough. */
4804 if (mostly_zeros_p (TREE_VALUE (elt)))
4805 zeros++;
4806 elts++;
4809 return 4 * zeros >= 3 * elts;
4812 return is_zeros_p (exp);
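/* For example, the constructor of
int a[4] = { 0, 0, 0, 5 };
has zeros = 3 and elts = 4, so 4 * 3 >= 3 * 4 holds and mostly_zeros_p
returns 1; store_constructor then clears the whole array first and stores
only the final element. */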
4815 /* Helper function for store_constructor.
4816 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4817 TYPE is the type of the CONSTRUCTOR, not the element type.
4818 CLEARED is as for store_constructor.
4819 ALIAS_SET is the alias set to use for any stores.
4821 This provides a recursive shortcut back to store_constructor when it isn't
4822 necessary to go through store_field. This is so that we can pass through
4823 the cleared field to let store_constructor know that we may not have to
4824 clear a substructure if the outer structure has already been cleared. */
4826 static void
4827 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4828 alias_set)
4829 rtx target;
4830 unsigned HOST_WIDE_INT bitsize;
4831 HOST_WIDE_INT bitpos;
4832 enum machine_mode mode;
4833 tree exp, type;
4834 int cleared;
4835 int alias_set;
4837 if (TREE_CODE (exp) == CONSTRUCTOR
4838 && bitpos % BITS_PER_UNIT == 0
4839 /* If we have a nonzero bitpos for a register target, then we just
4840 let store_field do the bitfield handling. This is unlikely to
4841 generate unnecessary clear instructions anyway. */
4842 && (bitpos == 0 || GET_CODE (target) == MEM))
4844 if (GET_CODE (target) == MEM)
4845 target
4846 = adjust_address (target,
4847 GET_MODE (target) == BLKmode
4848 || 0 != (bitpos
4849 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4850 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4853 /* Update the alias set, if required. */
4854 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4855 && MEM_ALIAS_SET (target) != 0)
4857 target = copy_rtx (target);
4858 set_mem_alias_set (target, alias_set);
4861 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4863 else
4864 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4865 alias_set);
4868 /* Store the value of constructor EXP into the rtx TARGET.
4869 TARGET is either a REG or a MEM; we know it cannot conflict, since
4870 safe_from_p has been called.
4871 CLEARED is true if TARGET is known to have been zeroed.
4872 SIZE is the number of bytes of TARGET we are allowed to modify: this
4873 may not be the same as the size of EXP if we are assigning to a field
4874 which has been packed to exclude padding bits. */
4876 static void
4877 store_constructor (exp, target, cleared, size)
4878 tree exp;
4879 rtx target;
4880 int cleared;
4881 HOST_WIDE_INT size;
4883 tree type = TREE_TYPE (exp);
4884 #ifdef WORD_REGISTER_OPERATIONS
4885 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4886 #endif
4888 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4889 || TREE_CODE (type) == QUAL_UNION_TYPE)
4891 tree elt;
4893 /* We either clear the aggregate or indicate the value is dead. */
4894 if ((TREE_CODE (type) == UNION_TYPE
4895 || TREE_CODE (type) == QUAL_UNION_TYPE)
4896 && ! cleared
4897 && ! CONSTRUCTOR_ELTS (exp))
4898 /* If the constructor is empty, clear the union. */
4900 clear_storage (target, expr_size (exp));
4901 cleared = 1;
4904 /* If we are building a static constructor into a register,
4905 set the initial value as zero so we can fold the value into
4906 a constant. But if more than one register is involved,
4907 this probably loses. */
4908 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4909 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4911 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4912 cleared = 1;
4915 /* If the constructor has fewer fields than the structure
4916 or if we are initializing the structure to mostly zeros,
4917 clear the whole structure first. Don't do this if TARGET is a
4918 register whose mode size isn't equal to SIZE since clear_storage
4919 can't handle this case. */
4920 else if (! cleared && size > 0
4921 && ((list_length (CONSTRUCTOR_ELTS (exp))
4922 != fields_length (type))
4923 || mostly_zeros_p (exp))
4924 && (GET_CODE (target) != REG
4925 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4926 == size)))
4928 rtx xtarget = target;
4930 if (readonly_fields_p (type))
4932 xtarget = copy_rtx (xtarget);
4933 RTX_UNCHANGING_P (xtarget) = 1;
4936 clear_storage (xtarget, GEN_INT (size));
4937 cleared = 1;
4940 if (! cleared)
4941 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4943 /* Store each element of the constructor into
4944 the corresponding field of TARGET. */
4946 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4948 tree field = TREE_PURPOSE (elt);
4949 tree value = TREE_VALUE (elt);
4950 enum machine_mode mode;
4951 HOST_WIDE_INT bitsize;
4952 HOST_WIDE_INT bitpos = 0;
4953 tree offset;
4954 rtx to_rtx = target;
4956 /* Just ignore missing fields.
4957 We cleared the whole structure, above,
4958 if any fields are missing. */
4959 if (field == 0)
4960 continue;
4962 if (cleared && is_zeros_p (value))
4963 continue;
4965 if (host_integerp (DECL_SIZE (field), 1))
4966 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4967 else
4968 bitsize = -1;
4970 mode = DECL_MODE (field);
4971 if (DECL_BIT_FIELD (field))
4972 mode = VOIDmode;
4974 offset = DECL_FIELD_OFFSET (field);
4975 if (host_integerp (offset, 0)
4976 && host_integerp (bit_position (field), 0))
4978 bitpos = int_bit_position (field);
4979 offset = 0;
4981 else
4982 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4984 if (offset)
4986 rtx offset_rtx;
4988 if (CONTAINS_PLACEHOLDER_P (offset))
4989 offset = build (WITH_RECORD_EXPR, sizetype,
4990 offset, make_tree (TREE_TYPE (exp), target));
4992 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4993 if (GET_CODE (to_rtx) != MEM)
4994 abort ();
4996 #ifdef POINTERS_EXTEND_UNSIGNED
4997 if (GET_MODE (offset_rtx) != Pmode)
4998 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4999 #else
5000 if (GET_MODE (offset_rtx) != ptr_mode)
5001 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5002 #endif
5004 to_rtx = offset_address (to_rtx, offset_rtx,
5005 highest_pow2_factor (offset));
5008 if (TREE_READONLY (field))
5010 if (GET_CODE (to_rtx) == MEM)
5011 to_rtx = copy_rtx (to_rtx);
5013 RTX_UNCHANGING_P (to_rtx) = 1;
5016 #ifdef WORD_REGISTER_OPERATIONS
5017 /* If this initializes a field that is smaller than a word, at the
5018 start of a word, try to widen it to a full word.
5019 This special case allows us to output C++ member function
5020 initializations in a form that the optimizers can understand. */
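/* For instance, an initializer that puts the 8-bit constant 5 at bit 0 of
a register-held structure is widened here to a full-word store of 5
(shifted up by BITS_PER_WORD - 8 on a BYTES_BIG_ENDIAN target), so later
passes see a plain word move instead of a bit-field insertion. */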
5021 if (GET_CODE (target) == REG
5022 && bitsize < BITS_PER_WORD
5023 && bitpos % BITS_PER_WORD == 0
5024 && GET_MODE_CLASS (mode) == MODE_INT
5025 && TREE_CODE (value) == INTEGER_CST
5026 && exp_size >= 0
5027 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5029 tree type = TREE_TYPE (value);
5031 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5033 type = (*lang_hooks.types.type_for_size)
5034 (BITS_PER_WORD, TREE_UNSIGNED (type));
5035 value = convert (type, value);
5038 if (BYTES_BIG_ENDIAN)
5039 value
5040 = fold (build (LSHIFT_EXPR, type, value,
5041 build_int_2 (BITS_PER_WORD - bitsize, 0)));
5042 bitsize = BITS_PER_WORD;
5043 mode = word_mode;
5045 #endif
5047 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5048 && DECL_NONADDRESSABLE_P (field))
5050 to_rtx = copy_rtx (to_rtx);
5051 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5054 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5055 value, type, cleared,
5056 get_alias_set (TREE_TYPE (field)));
5059 else if (TREE_CODE (type) == ARRAY_TYPE
5060 || TREE_CODE (type) == VECTOR_TYPE)
5062 tree elt;
5063 int i;
5064 int need_to_clear;
5065 tree domain = TYPE_DOMAIN (type);
5066 tree elttype = TREE_TYPE (type);
5067 int const_bounds_p;
5068 HOST_WIDE_INT minelt = 0;
5069 HOST_WIDE_INT maxelt = 0;
5071 /* Vectors are like arrays, but the domain is stored via an array
5072 type indirectly. */
5073 if (TREE_CODE (type) == VECTOR_TYPE)
5075 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
5076 the same field as TYPE_DOMAIN, we are not guaranteed that
5077 it always will. */
5078 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
5079 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
5082 const_bounds_p = (TYPE_MIN_VALUE (domain)
5083 && TYPE_MAX_VALUE (domain)
5084 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5085 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5087 /* If we have constant bounds for the range of the type, get them. */
5088 if (const_bounds_p)
5090 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5091 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5094 /* If the constructor has fewer elements than the array,
5095 clear the whole array first. Similarly if this is
5096 a static constructor of a non-BLKmode object. */
5097 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
5098 need_to_clear = 1;
5099 else
5101 HOST_WIDE_INT count = 0, zero_count = 0;
5102 need_to_clear = ! const_bounds_p;
5104 /* This loop is a more accurate version of the loop in
5105 mostly_zeros_p (it handles RANGE_EXPR in an index).
5106 It is also needed to check for missing elements. */
5107 for (elt = CONSTRUCTOR_ELTS (exp);
5108 elt != NULL_TREE && ! need_to_clear;
5109 elt = TREE_CHAIN (elt))
5111 tree index = TREE_PURPOSE (elt);
5112 HOST_WIDE_INT this_node_count;
5114 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5116 tree lo_index = TREE_OPERAND (index, 0);
5117 tree hi_index = TREE_OPERAND (index, 1);
5119 if (! host_integerp (lo_index, 1)
5120 || ! host_integerp (hi_index, 1))
5122 need_to_clear = 1;
5123 break;
5126 this_node_count = (tree_low_cst (hi_index, 1)
5127 - tree_low_cst (lo_index, 1) + 1);
5129 else
5130 this_node_count = 1;
5132 count += this_node_count;
5133 if (mostly_zeros_p (TREE_VALUE (elt)))
5134 zero_count += this_node_count;
5137 /* Clear the entire array first if there are any missing elements,
5138 or if the incidence of zero elements is >= 75%. */
5139 if (! need_to_clear
5140 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
5141 need_to_clear = 1;
5144 if (need_to_clear && size > 0)
5146 if (! cleared)
5148 if (REG_P (target))
5149 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5150 else
5151 clear_storage (target, GEN_INT (size));
5153 cleared = 1;
5155 else if (REG_P (target))
5156 /* Inform later passes that the old value is dead. */
5157 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5159 /* Store each element of the constructor into
5160 the corresponding element of TARGET, determined
5161 by counting the elements. */
5162 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5163 elt;
5164 elt = TREE_CHAIN (elt), i++)
5166 enum machine_mode mode;
5167 HOST_WIDE_INT bitsize;
5168 HOST_WIDE_INT bitpos;
5169 int unsignedp;
5170 tree value = TREE_VALUE (elt);
5171 tree index = TREE_PURPOSE (elt);
5172 rtx xtarget = target;
5174 if (cleared && is_zeros_p (value))
5175 continue;
5177 unsignedp = TREE_UNSIGNED (elttype);
5178 mode = TYPE_MODE (elttype);
5179 if (mode == BLKmode)
5180 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5181 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5182 : -1);
5183 else
5184 bitsize = GET_MODE_BITSIZE (mode);
5186 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5188 tree lo_index = TREE_OPERAND (index, 0);
5189 tree hi_index = TREE_OPERAND (index, 1);
5190 rtx index_r, pos_rtx, loop_end;
5191 struct nesting *loop;
5192 HOST_WIDE_INT lo, hi, count;
5193 tree position;
5195 /* If the range is constant and "small", unroll the loop. */
5196 if (const_bounds_p
5197 && host_integerp (lo_index, 0)
5198 && host_integerp (hi_index, 0)
5199 && (lo = tree_low_cst (lo_index, 0),
5200 hi = tree_low_cst (hi_index, 0),
5201 count = hi - lo + 1,
5202 (GET_CODE (target) != MEM
5203 || count <= 2
5204 || (host_integerp (TYPE_SIZE (elttype), 1)
5205 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5206 <= 40 * 8)))))
5208 lo -= minelt; hi -= minelt;
5209 for (; lo <= hi; lo++)
5211 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5213 if (GET_CODE (target) == MEM
5214 && !MEM_KEEP_ALIAS_SET_P (target)
5215 && TREE_CODE (type) == ARRAY_TYPE
5216 && TYPE_NONALIASED_COMPONENT (type))
5218 target = copy_rtx (target);
5219 MEM_KEEP_ALIAS_SET_P (target) = 1;
5222 store_constructor_field
5223 (target, bitsize, bitpos, mode, value, type, cleared,
5224 get_alias_set (elttype));
5227 else
5229 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5230 loop_end = gen_label_rtx ();
5232 unsignedp = TREE_UNSIGNED (domain);
5234 index = build_decl (VAR_DECL, NULL_TREE, domain);
5236 index_r
5237 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5238 &unsignedp, 0));
5239 SET_DECL_RTL (index, index_r);
5240 if (TREE_CODE (value) == SAVE_EXPR
5241 && SAVE_EXPR_RTL (value) == 0)
5243 /* Make sure value gets expanded once before the
5244 loop. */
5245 expand_expr (value, const0_rtx, VOIDmode, 0);
5246 emit_queue ();
5248 store_expr (lo_index, index_r, 0);
5249 loop = expand_start_loop (0);
5251 /* Assign value to element index. */
5252 position
5253 = convert (ssizetype,
5254 fold (build (MINUS_EXPR, TREE_TYPE (index),
5255 index, TYPE_MIN_VALUE (domain))));
5256 position = size_binop (MULT_EXPR, position,
5257 convert (ssizetype,
5258 TYPE_SIZE_UNIT (elttype)));
5260 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5261 xtarget = offset_address (target, pos_rtx,
5262 highest_pow2_factor (position));
5263 xtarget = adjust_address (xtarget, mode, 0);
5264 if (TREE_CODE (value) == CONSTRUCTOR)
5265 store_constructor (value, xtarget, cleared,
5266 bitsize / BITS_PER_UNIT);
5267 else
5268 store_expr (value, xtarget, 0);
5270 expand_exit_loop_if_false (loop,
5271 build (LT_EXPR, integer_type_node,
5272 index, hi_index));
5274 expand_increment (build (PREINCREMENT_EXPR,
5275 TREE_TYPE (index),
5276 index, integer_one_node), 0, 0);
5277 expand_end_loop ();
5278 emit_label (loop_end);
5281 else if ((index != 0 && ! host_integerp (index, 0))
5282 || ! host_integerp (TYPE_SIZE (elttype), 1))
5284 tree position;
5286 if (index == 0)
5287 index = ssize_int (1);
5289 if (minelt)
5290 index = convert (ssizetype,
5291 fold (build (MINUS_EXPR, TREE_TYPE (index), index,
5292 TYPE_MIN_VALUE (domain))));
5294 position = size_binop (MULT_EXPR, index,
5295 convert (ssizetype,
5296 TYPE_SIZE_UNIT (elttype)));
5297 xtarget = offset_address (target,
5298 expand_expr (position, 0, VOIDmode, 0),
5299 highest_pow2_factor (position));
5300 xtarget = adjust_address (xtarget, mode, 0);
5301 store_expr (value, xtarget, 0);
5303 else
5305 if (index != 0)
5306 bitpos = ((tree_low_cst (index, 0) - minelt)
5307 * tree_low_cst (TYPE_SIZE (elttype), 1));
5308 else
5309 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5311 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5312 && TREE_CODE (type) == ARRAY_TYPE
5313 && TYPE_NONALIASED_COMPONENT (type))
5315 target = copy_rtx (target);
5316 MEM_KEEP_ALIAS_SET_P (target) = 1;
5319 store_constructor_field (target, bitsize, bitpos, mode, value,
5320 type, cleared, get_alias_set (elttype));
5326 /* Set constructor assignments. */
5327 else if (TREE_CODE (type) == SET_TYPE)
5329 tree elt = CONSTRUCTOR_ELTS (exp);
5330 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5331 tree domain = TYPE_DOMAIN (type);
5332 tree domain_min, domain_max, bitlength;
5334 /* The default implementation strategy is to extract the constant
5335 parts of the constructor, use that to initialize the target,
5336 and then "or" in whatever non-constant ranges we need in addition.
5338 If a large set is all zero or all ones, it is
5339 probably better to set it using memset (if available) or bzero.
5340 Also, if a large set has just a single range, it may also be
5341 better to first clear the whole set (using
5342 bzero/memset), and then set the bits we want. */
5344 /* Check for all zeros. */
5345 if (elt == NULL_TREE && size > 0)
5347 if (!cleared)
5348 clear_storage (target, GEN_INT (size));
5349 return;
5352 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5353 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5354 bitlength = size_binop (PLUS_EXPR,
5355 size_diffop (domain_max, domain_min),
5356 ssize_int (1));
5358 nbits = tree_low_cst (bitlength, 1);
5360 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5361 are "complicated" (more than one range), initialize (the
5362 constant parts) by copying from a constant. */
5363 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5364 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5366 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5367 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5368 char *bit_buffer = (char *) alloca (nbits);
5369 HOST_WIDE_INT word = 0;
5370 unsigned int bit_pos = 0;
5371 unsigned int ibit = 0;
5372 unsigned int offset = 0; /* In bytes from beginning of set. */
5374 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5375 for (;;)
5377 if (bit_buffer[ibit])
5379 if (BYTES_BIG_ENDIAN)
5380 word |= (1 << (set_word_size - 1 - bit_pos));
5381 else
5382 word |= 1 << bit_pos;
5385 bit_pos++; ibit++;
5386 if (bit_pos >= set_word_size || ibit == nbits)
5388 if (word != 0 || ! cleared)
5390 rtx datum = GEN_INT (word);
5391 rtx to_rtx;
5393 /* The assumption here is that it is safe to use
5394 XEXP if the set is multi-word, but not if
5395 it's single-word. */
5396 if (GET_CODE (target) == MEM)
5397 to_rtx = adjust_address (target, mode, offset);
5398 else if (offset == 0)
5399 to_rtx = target;
5400 else
5401 abort ();
5402 emit_move_insn (to_rtx, datum);
5405 if (ibit == nbits)
5406 break;
5407 word = 0;
5408 bit_pos = 0;
5409 offset += set_word_size / BITS_PER_UNIT;
5413 else if (!cleared)
5414 /* Don't bother clearing storage if the set is all ones. */
5415 if (TREE_CHAIN (elt) != NULL_TREE
5416 || (TREE_PURPOSE (elt) == NULL_TREE
5417 ? nbits != 1
5418 : ( ! host_integerp (TREE_VALUE (elt), 0)
5419 || ! host_integerp (TREE_PURPOSE (elt), 0)
5420 || (tree_low_cst (TREE_VALUE (elt), 0)
5421 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5422 != (HOST_WIDE_INT) nbits))))
5423 clear_storage (target, expr_size (exp));
5425 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5427 /* Start of range of element or NULL. */
5428 tree startbit = TREE_PURPOSE (elt);
5429 /* End of range of element, or element value. */
5430 tree endbit = TREE_VALUE (elt);
5431 HOST_WIDE_INT startb, endb;
5432 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5434 bitlength_rtx = expand_expr (bitlength,
5435 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5437 /* Handle non-range tuple element like [ expr ]. */
5438 if (startbit == NULL_TREE)
5440 startbit = save_expr (endbit);
5441 endbit = startbit;
5444 startbit = convert (sizetype, startbit);
5445 endbit = convert (sizetype, endbit);
5446 if (! integer_zerop (domain_min))
5448 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5449 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5451 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5452 EXPAND_CONST_ADDRESS);
5453 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5454 EXPAND_CONST_ADDRESS);
5456 if (REG_P (target))
5458 targetx
5459 = assign_temp
5460 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5461 (GET_MODE (target), 0),
5462 TYPE_QUAL_CONST)),
5463 0, 1, 1);
5464 emit_move_insn (targetx, target);
5467 else if (GET_CODE (target) == MEM)
5468 targetx = target;
5469 else
5470 abort ();
5472 /* Optimization: If startbit and endbit are constants divisible
5473 by BITS_PER_UNIT, call memset instead. */
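/* For instance, with a domain starting at 0, a range covering bits 8..23
yields startb = 8 and endb = 24 below; both are multiples of
BITS_PER_UNIT, so when TARGET_MEM_FUNCTIONS is set those two bytes are
filled with all-ones by memset rather than by calling setbits_libfunc. */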
5474 if (TARGET_MEM_FUNCTIONS
5475 && TREE_CODE (startbit) == INTEGER_CST
5476 && TREE_CODE (endbit) == INTEGER_CST
5477 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5478 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5480 emit_library_call (memset_libfunc, LCT_NORMAL,
5481 VOIDmode, 3,
5482 plus_constant (XEXP (targetx, 0),
5483 startb / BITS_PER_UNIT),
5484 Pmode,
5485 constm1_rtx, TYPE_MODE (integer_type_node),
5486 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5487 TYPE_MODE (sizetype));
5489 else
5490 emit_library_call (setbits_libfunc, LCT_NORMAL,
5491 VOIDmode, 4, XEXP (targetx, 0),
5492 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5493 startbit_rtx, TYPE_MODE (sizetype),
5494 endbit_rtx, TYPE_MODE (sizetype));
5496 if (REG_P (target))
5497 emit_move_insn (target, targetx);
5501 else
5502 abort ();
5505 /* Store the value of EXP (an expression tree)
5506 into a subfield of TARGET which has mode MODE and occupies
5507 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5508 If MODE is VOIDmode, it means that we are storing into a bit-field.
5510 If VALUE_MODE is VOIDmode, return nothing in particular.
5511 UNSIGNEDP is not used in this case.
5513 Otherwise, return an rtx for the value stored. This rtx
5514 has mode VALUE_MODE if that is convenient to do.
5515 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5517 TYPE is the type of the underlying object,
5519 ALIAS_SET is the alias set for the destination. This value will
5520 (in general) be different from that for TARGET, since TARGET is a
5521 reference to the containing structure. */
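/* As a concrete case, storing into an unsigned 3-bit field that begins
5 bits into a structure in memory takes the store_bit_field path below
with BITSIZE = 3 and BITPOS = 5; if the caller wants the value back, it
is recovered by masking with width_mask = (1 << 3) - 1 = 7 rather than
refetching the bit-field. */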
5523 static rtx
5524 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5525 alias_set)
5526 rtx target;
5527 HOST_WIDE_INT bitsize;
5528 HOST_WIDE_INT bitpos;
5529 enum machine_mode mode;
5530 tree exp;
5531 enum machine_mode value_mode;
5532 int unsignedp;
5533 tree type;
5534 int alias_set;
5536 HOST_WIDE_INT width_mask = 0;
5538 if (TREE_CODE (exp) == ERROR_MARK)
5539 return const0_rtx;
5541 /* If we have nothing to store, do nothing unless the expression has
5542 side-effects. */
5543 if (bitsize == 0)
5544 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5545 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5546 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5548 /* If we are storing into an unaligned field of an aligned union that is
5549 in a register, we may have the mode of TARGET being an integer mode but
5550 MODE == BLKmode. In that case, get an aligned object whose size and
5551 alignment are the same as TARGET and store TARGET into it (we can avoid
5552 the store if the field being stored is the entire width of TARGET). Then
5553 call ourselves recursively to store the field into a BLKmode version of
5554 that object. Finally, load from the object into TARGET. This is not
5555 very efficient in general, but should only be slightly more expensive
5556 than the otherwise-required unaligned accesses. Perhaps this can be
5557 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5558 twice, once with emit_move_insn and once via store_field. */
5560 if (mode == BLKmode
5561 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5563 rtx object = assign_temp (type, 0, 1, 1);
5564 rtx blk_object = adjust_address (object, BLKmode, 0);
5566 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5567 emit_move_insn (object, target);
5569 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5570 alias_set);
5572 emit_move_insn (target, object);
5574 /* We want to return the BLKmode version of the data. */
5575 return blk_object;
5578 if (GET_CODE (target) == CONCAT)
5580 /* We're storing into a struct containing a single __complex. */
5582 if (bitpos != 0)
5583 abort ();
5584 return store_expr (exp, target, 0);
5587 /* If the structure is in a register or if the component
5588 is a bit field, we cannot use addressing to access it.
5589 Use bit-field techniques or SUBREG to store in it. */
5591 if (mode == VOIDmode
5592 || (mode != BLKmode && ! direct_store[(int) mode]
5593 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5594 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5595 || GET_CODE (target) == REG
5596 || GET_CODE (target) == SUBREG
5597 /* If the field isn't aligned enough to store as an ordinary memref,
5598 store it as a bit field. */
5599 || (mode != BLKmode
5600 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5601 || bitpos % GET_MODE_ALIGNMENT (mode))
5602 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5603 || (bitpos % BITS_PER_UNIT != 0)))
5604 /* If the RHS and field are a constant size and the size of the
5605 RHS isn't the same size as the bitfield, we must use bitfield
5606 operations. */
5607 || (bitsize >= 0
5608 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5609 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5611 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5613 /* If BITSIZE is narrower than the size of the type of EXP
5614 we will be narrowing TEMP. Normally, what's wanted are the
5615 low-order bits. However, if EXP's type is a record and this is a
5616 big-endian machine, we want the upper BITSIZE bits. */
5617 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5618 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5619 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5620 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5621 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5622 - bitsize),
5623 NULL_RTX, 1);
5625 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5626 MODE. */
5627 if (mode != VOIDmode && mode != BLKmode
5628 && mode != TYPE_MODE (TREE_TYPE (exp)))
5629 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5631 /* If the modes of TARGET and TEMP are both BLKmode, both
5632 must be in memory and BITPOS must be aligned on a byte
5633 boundary. If so, we simply do a block copy. */
5634 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5636 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5637 || bitpos % BITS_PER_UNIT != 0)
5638 abort ();
5640 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5641 emit_block_move (target, temp,
5642 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5643 / BITS_PER_UNIT),
5644 BLOCK_OP_NORMAL);
5646 return value_mode == VOIDmode ? const0_rtx : target;
5649 /* Store the value in the bitfield. */
5650 store_bit_field (target, bitsize, bitpos, mode, temp,
5651 int_size_in_bytes (type));
5653 if (value_mode != VOIDmode)
5655 /* The caller wants an rtx for the value.
5656 If possible, avoid refetching from the bitfield itself. */
5657 if (width_mask != 0
5658 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5660 tree count;
5661 enum machine_mode tmode;
5663 tmode = GET_MODE (temp);
5664 if (tmode == VOIDmode)
5665 tmode = value_mode;
5667 if (unsignedp)
5668 return expand_and (tmode, temp,
5669 gen_int_mode (width_mask, tmode),
5670 NULL_RTX);
5672 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5673 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5674 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5677 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5678 NULL_RTX, value_mode, VOIDmode,
5679 int_size_in_bytes (type));
5681 return const0_rtx;
5683 else
5685 rtx addr = XEXP (target, 0);
5686 rtx to_rtx = target;
5688 /* If a value is wanted, it must be the lhs;
5689 so make the address stable for multiple use. */
5691 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5692 && ! CONSTANT_ADDRESS_P (addr)
5693 /* A frame-pointer reference is already stable. */
5694 && ! (GET_CODE (addr) == PLUS
5695 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5696 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5697 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5698 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5700 /* Now build a reference to just the desired component. */
5702 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5704 if (to_rtx == target)
5705 to_rtx = copy_rtx (to_rtx);
5707 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5708 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5709 set_mem_alias_set (to_rtx, alias_set);
5711 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5715 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5716 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5717 codes and find the ultimate containing object, which we return.
5719 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5720 bit position, and *PUNSIGNEDP to the signedness of the field.
5721 If the position of the field is variable, we store a tree
5722 giving the variable offset (in units) in *POFFSET.
5723 This offset is in addition to the bit position.
5724 If the position is not variable, we store 0 in *POFFSET.
5726 If any of the extraction expressions is volatile,
5727 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5729 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5730 is a mode that can be used to access the field. In that case, *PBITSIZE
5731 is redundant.
5733 If the field describes a variable-sized object, *PMODE is set to
5734 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5735 this case, but the address of the object can be found. */
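/* For example, given a COMPONENT_REF such as `obj.f', where F is a
   non-bit-field 32-bit `int' member placed 4 bytes into OBJ, the
   expression for OBJ is returned and *PBITSIZE is 32, *PBITPOS is 32,
   *POFFSET is 0, and *PMODE is the mode of the field (typically SImode).  */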
5737 tree
5738 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5739 punsignedp, pvolatilep)
5740 tree exp;
5741 HOST_WIDE_INT *pbitsize;
5742 HOST_WIDE_INT *pbitpos;
5743 tree *poffset;
5744 enum machine_mode *pmode;
5745 int *punsignedp;
5746 int *pvolatilep;
5748 tree size_tree = 0;
5749 enum machine_mode mode = VOIDmode;
5750 tree offset = size_zero_node;
5751 tree bit_offset = bitsize_zero_node;
5752 tree placeholder_ptr = 0;
5753 tree tem;
5755 /* First get the mode, signedness, and size. We do this from just the
5756 outermost expression. */
5757 if (TREE_CODE (exp) == COMPONENT_REF)
5759 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5760 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5761 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5763 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5765 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5767 size_tree = TREE_OPERAND (exp, 1);
5768 *punsignedp = TREE_UNSIGNED (exp);
5770 else
5772 mode = TYPE_MODE (TREE_TYPE (exp));
5773 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5775 if (mode == BLKmode)
5776 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5777 else
5778 *pbitsize = GET_MODE_BITSIZE (mode);
5781 if (size_tree != 0)
5783 if (! host_integerp (size_tree, 1))
5784 mode = BLKmode, *pbitsize = -1;
5785 else
5786 *pbitsize = tree_low_cst (size_tree, 1);
5789 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5790 and find the ultimate containing object. */
5791 while (1)
5793 if (TREE_CODE (exp) == BIT_FIELD_REF)
5794 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5795 else if (TREE_CODE (exp) == COMPONENT_REF)
5797 tree field = TREE_OPERAND (exp, 1);
5798 tree this_offset = DECL_FIELD_OFFSET (field);
5800 /* If this field hasn't been filled in yet, don't go
5801 past it. This should only happen when folding expressions
5802 made during type construction. */
5803 if (this_offset == 0)
5804 break;
5805 else if (CONTAINS_PLACEHOLDER_P (this_offset))
5806 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5808 offset = size_binop (PLUS_EXPR, offset, this_offset);
5809 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5810 DECL_FIELD_BIT_OFFSET (field));
5812 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5815 else if (TREE_CODE (exp) == ARRAY_REF
5816 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5818 tree index = TREE_OPERAND (exp, 1);
5819 tree array = TREE_OPERAND (exp, 0);
5820 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5821 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5822 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5824 /* We assume all arrays have sizes that are a multiple of a byte.
5825 First subtract the lower bound, if any, in the type of the
5826 index, then convert to sizetype and multiply by the size of the
5827 array element. */
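      /* E.g. for `a[i]' with 4-byte elements and a zero lower bound, the
	 offset contribution computed below is (sizetype) i * 4.  */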
5828 if (low_bound != 0 && ! integer_zerop (low_bound))
5829 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5830 index, low_bound));
5832 /* If the index has a self-referential type, pass it to a
5833 WITH_RECORD_EXPR; if the component size does, pass our
5834 component to one. */
5835 if (CONTAINS_PLACEHOLDER_P (index))
5836 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5837 if (CONTAINS_PLACEHOLDER_P (unit_size))
5838 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5840 offset = size_binop (PLUS_EXPR, offset,
5841 size_binop (MULT_EXPR,
5842 convert (sizetype, index),
5843 unit_size));
5846 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5848 tree new = find_placeholder (exp, &placeholder_ptr);
5850 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5851 We might have been called from tree optimization where we
5852 haven't set up an object yet. */
5853 if (new == 0)
5854 break;
5855 else
5856 exp = new;
5858 continue;
5861 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5862 conversions that don't change the mode, and all view conversions
5863 except those that need to "step up" the alignment. */
5864 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5865 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5866 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5867 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5868 && STRICT_ALIGNMENT
5869 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5870 < BIGGEST_ALIGNMENT)
5871 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5872 || TYPE_ALIGN_OK (TREE_TYPE
5873 (TREE_OPERAND (exp, 0))))))
5874 && ! ((TREE_CODE (exp) == NOP_EXPR
5875 || TREE_CODE (exp) == CONVERT_EXPR)
5876 && (TYPE_MODE (TREE_TYPE (exp))
5877 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5878 break;
5880 /* If any reference in the chain is volatile, the effect is volatile. */
5881 if (TREE_THIS_VOLATILE (exp))
5882 *pvolatilep = 1;
5884 exp = TREE_OPERAND (exp, 0);
5887 /* If OFFSET is constant, see if we can return the whole thing as a
5888 constant bit position. Otherwise, split it up. */
5889 if (host_integerp (offset, 0)
5890 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5891 bitsize_unit_node))
5892 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5893 && host_integerp (tem, 0))
5894 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5895 else
5896 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5898 *pmode = mode;
5899 return exp;
5902 /* Return 1 if T is an expression that get_inner_reference handles. */
5904 int
5905 handled_component_p (t)
5906 tree t;
5908 switch (TREE_CODE (t))
5910 case BIT_FIELD_REF:
5911 case COMPONENT_REF:
5912 case ARRAY_REF:
5913 case ARRAY_RANGE_REF:
5914 case NON_LVALUE_EXPR:
5915 case VIEW_CONVERT_EXPR:
5916 return 1;
5918 case NOP_EXPR:
5919 case CONVERT_EXPR:
5920 return (TYPE_MODE (TREE_TYPE (t))
5921 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5923 default:
5924 return 0;
5928 /* Given an rtx VALUE that may contain additions and multiplications, return
5929 an equivalent value that just refers to a register, memory, or constant.
5930 This is done by generating instructions to perform the arithmetic and
5931 returning a pseudo-register containing the value.
5933 The returned value may be a REG, SUBREG, MEM or constant. */
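/* For instance, given (plus (reg) (const_int 4)) and a null TARGET, this
   emits an add insn and returns a pseudo register holding the sum, while
   a plain REG, MEM or constant is returned unchanged.  */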
5935 rtx
5936 force_operand (value, target)
5937 rtx value, target;
5939 rtx op1, op2;
5940 /* Use subtarget as the target for operand 0 of a binary operation. */
5941 rtx subtarget = get_subtarget (target);
5942 enum rtx_code code = GET_CODE (value);
5944 /* Check for a PIC address load. */
5945 if ((code == PLUS || code == MINUS)
5946 && XEXP (value, 0) == pic_offset_table_rtx
5947 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5948 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5949 || GET_CODE (XEXP (value, 1)) == CONST))
5951 if (!subtarget)
5952 subtarget = gen_reg_rtx (GET_MODE (value));
5953 emit_move_insn (subtarget, value);
5954 return subtarget;
5957 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5959 if (!target)
5960 target = gen_reg_rtx (GET_MODE (value));
5961 convert_move (target, force_operand (XEXP (value, 0), NULL),
5962 code == ZERO_EXTEND);
5963 return target;
5966 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5968 op2 = XEXP (value, 1);
5969 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5970 subtarget = 0;
5971 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5973 code = PLUS;
5974 op2 = negate_rtx (GET_MODE (value), op2);
5977 /* Check for an addition with OP2 a constant integer and our first
5978 operand a PLUS of a virtual register and something else. In that
5979 case, we want to emit the sum of the virtual register and the
5980 constant first and then add the other value. This allows virtual
5981 register instantiation to simply modify the constant rather than
5982 creating another one around this addition. */
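      /* E.g. for (plus (plus (reg virtual-stack-vars) (const_int 16))
	 (const_int 4)) we first form virtual-stack-vars + 4 and then add
	 the remaining constant 16 to that result.  */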
5983 if (code == PLUS && GET_CODE (op2) == CONST_INT
5984 && GET_CODE (XEXP (value, 0)) == PLUS
5985 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5986 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5987 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5989 rtx temp = expand_simple_binop (GET_MODE (value), code,
5990 XEXP (XEXP (value, 0), 0), op2,
5991 subtarget, 0, OPTAB_LIB_WIDEN);
5992 return expand_simple_binop (GET_MODE (value), code, temp,
5993 force_operand (XEXP (XEXP (value,
5994 0), 1), 0),
5995 target, 0, OPTAB_LIB_WIDEN);
5998 op1 = force_operand (XEXP (value, 0), subtarget);
5999 op2 = force_operand (op2, NULL_RTX);
6000 switch (code)
6002 case MULT:
6003 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6004 case DIV:
6005 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6006 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6007 target, 1, OPTAB_LIB_WIDEN);
6008 else
6009 return expand_divmod (0,
6010 FLOAT_MODE_P (GET_MODE (value))
6011 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6012 GET_MODE (value), op1, op2, target, 0);
6013 break;
6014 case MOD:
6015 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6016 target, 0);
6017 break;
6018 case UDIV:
6019 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6020 target, 1);
6021 break;
6022 case UMOD:
6023 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6024 target, 1);
6025 break;
6026 case ASHIFTRT:
6027 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6028 target, 0, OPTAB_LIB_WIDEN);
6029 break;
6030 default:
6031 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6032 target, 1, OPTAB_LIB_WIDEN);
6035 if (GET_RTX_CLASS (code) == '1')
6037 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6038 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6041 #ifdef INSN_SCHEDULING
6042 /* On machines that have insn scheduling, we want all memory references to be
6043 explicit, so we need to deal with such paradoxical SUBREGs. */
6044 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
6045 && (GET_MODE_SIZE (GET_MODE (value))
6046 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6047 value
6048 = simplify_gen_subreg (GET_MODE (value),
6049 force_reg (GET_MODE (SUBREG_REG (value)),
6050 force_operand (SUBREG_REG (value),
6051 NULL_RTX)),
6052 GET_MODE (SUBREG_REG (value)),
6053 SUBREG_BYTE (value));
6054 #endif
6056 return value;
6059 /* Subroutine of expand_expr: return nonzero iff there is no way that
6060 EXP can reference X, which is being modified. TOP_P is nonzero if this
6061 call is going to be used to determine whether we need a temporary
6062 for EXP, as opposed to a recursive call to this function.
6064 It is always safe for this routine to return zero since it merely
6065 searches for optimization opportunities. */
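/* For example, if EXP is a constant the result is 1, while if EXP is a
   CALL_EXPR and X is a hard register or a MEM the result is 0, since the
   call is assumed to clobber all hard registers and all of memory.  */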
6067 static int
6068 safe_from_p (x, exp, top_p)
6069 rtx x;
6070 tree exp;
6071 int top_p;
6073 rtx exp_rtl = 0;
6074 int i, nops;
6075 static tree save_expr_list;
6077 if (x == 0
6078 /* If EXP has varying size, we MUST use a target since we currently
6079 have no way of allocating temporaries of variable size
6080 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6081 So we assume here that something at a higher level has prevented a
6082 clash. This is somewhat bogus, but the best we can do. Only
6083 do this when X is BLKmode and when we are at the top level. */
6084 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6085 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6086 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6087 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6088 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6089 != INTEGER_CST)
6090 && GET_MODE (x) == BLKmode)
6091 /* If X is in the outgoing argument area, it is always safe. */
6092 || (GET_CODE (x) == MEM
6093 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6094 || (GET_CODE (XEXP (x, 0)) == PLUS
6095 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6096 return 1;
6098 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6099 find the underlying pseudo. */
6100 if (GET_CODE (x) == SUBREG)
6102 x = SUBREG_REG (x);
6103 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6104 return 0;
6107 /* A SAVE_EXPR might appear many times in the expression passed to the
6108 top-level safe_from_p call, and if it has a complex subexpression,
6109 examining it multiple times could result in a combinatorial explosion.
6110 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
6111 with optimization took about 28 minutes to compile -- even though it was
6112 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
6113 and turn that off when we are done. We keep a list of the SAVE_EXPRs
6114 we have processed. Note that the only test of top_p was above. */
6116 if (top_p)
6118 int rtn;
6119 tree t;
6121 save_expr_list = 0;
6123 rtn = safe_from_p (x, exp, 0);
6125 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
6126 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
6128 return rtn;
6131 /* Now look at our tree code and possibly recurse. */
6132 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6134 case 'd':
6135 exp_rtl = DECL_RTL_IF_SET (exp);
6136 break;
6138 case 'c':
6139 return 1;
6141 case 'x':
6142 if (TREE_CODE (exp) == TREE_LIST)
6144 while (1)
6146 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6147 return 0;
6148 exp = TREE_CHAIN (exp);
6149 if (!exp)
6150 return 1;
6151 if (TREE_CODE (exp) != TREE_LIST)
6152 return safe_from_p (x, exp, 0);
6155 else if (TREE_CODE (exp) == ERROR_MARK)
6156 return 1; /* An already-visited SAVE_EXPR? */
6157 else
6158 return 0;
6160 case '2':
6161 case '<':
6162 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6163 return 0;
6164 /* FALLTHRU */
6166 case '1':
6167 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6169 case 'e':
6170 case 'r':
6171 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6172 the expression. If it is set, we conflict iff we are that rtx or
6173 both are in memory. Otherwise, we check all operands of the
6174 expression recursively. */
6176 switch (TREE_CODE (exp))
6178 case ADDR_EXPR:
6179 /* If the operand is static or we are static, we can't conflict.
6180 Likewise if we don't conflict with the operand at all. */
6181 if (staticp (TREE_OPERAND (exp, 0))
6182 || TREE_STATIC (exp)
6183 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6184 return 1;
6186 /* Otherwise, the only way this can conflict is if we are taking
6187 the address of a DECL whose address is part of X, which is
6188 very rare. */
6189 exp = TREE_OPERAND (exp, 0);
6190 if (DECL_P (exp))
6192 if (!DECL_RTL_SET_P (exp)
6193 || GET_CODE (DECL_RTL (exp)) != MEM)
6194 return 0;
6195 else
6196 exp_rtl = XEXP (DECL_RTL (exp), 0);
6198 break;
6200 case INDIRECT_REF:
6201 if (GET_CODE (x) == MEM
6202 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6203 get_alias_set (exp)))
6204 return 0;
6205 break;
6207 case CALL_EXPR:
6208 /* Assume that the call will clobber all hard registers and
6209 all of memory. */
6210 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6211 || GET_CODE (x) == MEM)
6212 return 0;
6213 break;
6215 case RTL_EXPR:
6216 /* If a sequence exists, we would have to scan every instruction
6217 in the sequence to see if it was safe. This is probably not
6218 worthwhile. */
6219 if (RTL_EXPR_SEQUENCE (exp))
6220 return 0;
6222 exp_rtl = RTL_EXPR_RTL (exp);
6223 break;
6225 case WITH_CLEANUP_EXPR:
6226 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6227 break;
6229 case CLEANUP_POINT_EXPR:
6230 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6232 case SAVE_EXPR:
6233 exp_rtl = SAVE_EXPR_RTL (exp);
6234 if (exp_rtl)
6235 break;
6237 /* If we've already scanned this, don't do it again. Otherwise,
6238 show we've scanned it and record for clearing the flag if we're
6239 going on. */
6240 if (TREE_PRIVATE (exp))
6241 return 1;
6243 TREE_PRIVATE (exp) = 1;
6244 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6246 TREE_PRIVATE (exp) = 0;
6247 return 0;
6250 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6251 return 1;
6253 case BIND_EXPR:
6254 /* The only operand we look at is operand 1. The rest aren't
6255 part of the expression. */
6256 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6258 case METHOD_CALL_EXPR:
6259 /* This takes an rtx argument, but shouldn't appear here. */
6260 abort ();
6262 default:
6263 break;
6266 /* If we have an rtx, we do not need to scan our operands. */
6267 if (exp_rtl)
6268 break;
6270 nops = first_rtl_op (TREE_CODE (exp));
6271 for (i = 0; i < nops; i++)
6272 if (TREE_OPERAND (exp, i) != 0
6273 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6274 return 0;
6276 /* If this is a language-specific tree code, it may require
6277 special handling. */
6278 if ((unsigned int) TREE_CODE (exp)
6279 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6280 && !(*lang_hooks.safe_from_p) (x, exp))
6281 return 0;
6284 /* If we have an rtl, find any enclosed object. Then see if we conflict
6285 with it. */
6286 if (exp_rtl)
6288 if (GET_CODE (exp_rtl) == SUBREG)
6290 exp_rtl = SUBREG_REG (exp_rtl);
6291 if (GET_CODE (exp_rtl) == REG
6292 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6293 return 0;
6296 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6297 are memory and they conflict. */
6298 return ! (rtx_equal_p (x, exp_rtl)
6299 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6300 && true_dependence (exp_rtl, VOIDmode, x,
6301 rtx_addr_varies_p)));
6304 /* If we reach here, it is safe. */
6305 return 1;
6308 /* Subroutine of expand_expr: return rtx if EXP is a
6309 variable or parameter; else return 0. */
6311 static rtx
6312 var_rtx (exp)
6313 tree exp;
6315 STRIP_NOPS (exp);
6316 switch (TREE_CODE (exp))
6318 case PARM_DECL:
6319 case VAR_DECL:
6320 return DECL_RTL (exp);
6321 default:
6322 return 0;
6326 #ifdef MAX_INTEGER_COMPUTATION_MODE
6328 void
6329 check_max_integer_computation_mode (exp)
6330 tree exp;
6332 enum tree_code code;
6333 enum machine_mode mode;
6335 /* Strip any NOPs that don't change the mode. */
6336 STRIP_NOPS (exp);
6337 code = TREE_CODE (exp);
6339 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6340 if (code == NOP_EXPR
6341 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6342 return;
6344 /* First check the type of the overall operation. We need only look at
6345 unary, binary and relational operations. */
6346 if (TREE_CODE_CLASS (code) == '1'
6347 || TREE_CODE_CLASS (code) == '2'
6348 || TREE_CODE_CLASS (code) == '<')
6350 mode = TYPE_MODE (TREE_TYPE (exp));
6351 if (GET_MODE_CLASS (mode) == MODE_INT
6352 && mode > MAX_INTEGER_COMPUTATION_MODE)
6353 internal_error ("unsupported wide integer operation");
6356 /* Check operand of a unary op. */
6357 if (TREE_CODE_CLASS (code) == '1')
6359 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6360 if (GET_MODE_CLASS (mode) == MODE_INT
6361 && mode > MAX_INTEGER_COMPUTATION_MODE)
6362 internal_error ("unsupported wide integer operation");
6365 /* Check operands of a binary/comparison op. */
6366 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6368 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6369 if (GET_MODE_CLASS (mode) == MODE_INT
6370 && mode > MAX_INTEGER_COMPUTATION_MODE)
6371 internal_error ("unsupported wide integer operation");
6373 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6374 if (GET_MODE_CLASS (mode) == MODE_INT
6375 && mode > MAX_INTEGER_COMPUTATION_MODE)
6376 internal_error ("unsupported wide integer operation");
6379 #endif
6381 /* Return the highest power of two that EXP is known to be a multiple of.
6382 This is used in updating alignment of MEMs in array references. */
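/* E.g. for the expression `i * 12' this returns 4: nothing is known about
   I (a factor of 1), and 4 is the largest power of two dividing 12.  */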
6384 static unsigned HOST_WIDE_INT
6385 highest_pow2_factor (exp)
6386 tree exp;
6388 unsigned HOST_WIDE_INT c0, c1;
6390 switch (TREE_CODE (exp))
6392 case INTEGER_CST:
6393 /* We can find the lowest bit that's a one. If the low
6394 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6395 We need to handle this case since we can find it in a COND_EXPR,
6396 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6397 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6398 later ICE. */
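      /* E.g. for the constant 24 the computation below (24 & -24) yields 8.  */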
6399 if (TREE_CONSTANT_OVERFLOW (exp))
6400 return BIGGEST_ALIGNMENT;
6401 else
6403 /* Note: tree_low_cst is intentionally not used here,
6404 we don't care about the upper bits. */
6405 c0 = TREE_INT_CST_LOW (exp);
6406 c0 &= -c0;
6407 return c0 ? c0 : BIGGEST_ALIGNMENT;
6409 break;
6411 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6412 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6413 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6414 return MIN (c0, c1);
6416 case MULT_EXPR:
6417 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6418 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6419 return c0 * c1;
6421 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6422 case CEIL_DIV_EXPR:
6423 if (integer_pow2p (TREE_OPERAND (exp, 1))
6424 && host_integerp (TREE_OPERAND (exp, 1), 1))
6426 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6427 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6428 return MAX (1, c0 / c1);
6430 break;
6432 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6433 case SAVE_EXPR: case WITH_RECORD_EXPR:
6434 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6436 case COMPOUND_EXPR:
6437 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6439 case COND_EXPR:
6440 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6441 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6442 return MIN (c0, c1);
6444 default:
6445 break;
6448 return 1;
6451 /* Similar, except that it is known that the expression must be a multiple
6452 of the alignment of TYPE. */
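/* E.g. if TYPE is aligned to 8 bytes, the result is at least 8 even when
   nothing is known about EXP itself.  */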
6454 static unsigned HOST_WIDE_INT
6455 highest_pow2_factor_for_type (type, exp)
6456 tree type;
6457 tree exp;
6459 unsigned HOST_WIDE_INT type_align, factor;
6461 factor = highest_pow2_factor (exp);
6462 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6463 return MAX (factor, type_align);
6466 /* Return an object on the placeholder list that matches EXP, a
6467 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6468 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6469 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6470 is a location which initially points to a starting location in the
6471 placeholder list (zero means start of the list) and where a pointer into
6472 the placeholder list at which the object is found is placed. */
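/* For instance, expanding a WITH_RECORD_EXPR pushes its record object onto
   placeholder_list; a PLACEHOLDER_EXPR of that record's type appearing in
   a size expression is then resolved to that object here.  */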
6474 tree
6475 find_placeholder (exp, plist)
6476 tree exp;
6477 tree *plist;
6479 tree type = TREE_TYPE (exp);
6480 tree placeholder_expr;
6482 for (placeholder_expr
6483 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6484 placeholder_expr != 0;
6485 placeholder_expr = TREE_CHAIN (placeholder_expr))
6487 tree need_type = TYPE_MAIN_VARIANT (type);
6488 tree elt;
6490 /* Find the outermost reference that is of the type we want. If none,
6491 see if any object has a type that is a pointer to the type we
6492 want. */
6493 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6494 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6495 || TREE_CODE (elt) == COND_EXPR)
6496 ? TREE_OPERAND (elt, 1)
6497 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6498 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6499 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6500 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6501 ? TREE_OPERAND (elt, 0) : 0))
6502 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6504 if (plist)
6505 *plist = placeholder_expr;
6506 return elt;
6509 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6510 elt
6511 = ((TREE_CODE (elt) == COMPOUND_EXPR
6512 || TREE_CODE (elt) == COND_EXPR)
6513 ? TREE_OPERAND (elt, 1)
6514 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6515 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6516 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6517 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6518 ? TREE_OPERAND (elt, 0) : 0))
6519 if (POINTER_TYPE_P (TREE_TYPE (elt))
6520 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6521 == need_type))
6523 if (plist)
6524 *plist = placeholder_expr;
6525 return build1 (INDIRECT_REF, need_type, elt);
6529 return 0;
6532 /* expand_expr: generate code for computing expression EXP.
6533 An rtx for the computed value is returned. The value is never null.
6534 In the case of a void EXP, const0_rtx is returned.
6536 The value may be stored in TARGET if TARGET is nonzero.
6537 TARGET is just a suggestion; callers must assume that
6538 the rtx returned may not be the same as TARGET.
6540 If TARGET is CONST0_RTX, it means that the value will be ignored.
6542 If TMODE is not VOIDmode, it suggests generating the
6543 result in mode TMODE. But this is done only when convenient.
6544 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6545 TMODE is just a suggestion; callers must assume that
6546 the rtx returned may not have mode TMODE.
6548 Note that TARGET may have neither TMODE nor MODE. In that case, it
6549 probably will not be used.
6551 If MODIFIER is EXPAND_SUM then when EXP is an addition
6552 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6553 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6554 products as above, or REG or MEM, or constant.
6555 Ordinarily in such cases we would output mul or add instructions
6556 and then return a pseudo reg containing the sum.
6558 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6559 it also marks a label as absolutely required (it can't be dead).
6560 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6561 This is used for outputting expressions used in initializers.
6563 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6564 with a constant address even if that address is not normally legitimate.
6565 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6567 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6568 a call parameter. Such targets require special care as we haven't yet
6569 marked TARGET so that it's safe from being trashed by libcalls. We
6570 don't want to use TARGET for anything but the final result;
6571 intermediate values must go elsewhere. Additionally, calls to
6572 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
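/* A typical caller simply uses `expand_expr (exp, NULL_RTX, VOIDmode, 0)'
   and accepts whatever rtx and mode come back; EXPAND_SUM and
   EXPAND_INITIALIZER are reserved for address arithmetic and static
   initializers as described above.  */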
6574 rtx
6575 expand_expr (exp, target, tmode, modifier)
6576 tree exp;
6577 rtx target;
6578 enum machine_mode tmode;
6579 enum expand_modifier modifier;
6581 rtx op0, op1, temp;
6582 tree type = TREE_TYPE (exp);
6583 int unsignedp = TREE_UNSIGNED (type);
6584 enum machine_mode mode;
6585 enum tree_code code = TREE_CODE (exp);
6586 optab this_optab;
6587 rtx subtarget, original_target;
6588 int ignore;
6589 tree context;
6591 /* Handle ERROR_MARK before anybody tries to access its type. */
6592 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6594 op0 = CONST0_RTX (tmode);
6595 if (op0 != 0)
6596 return op0;
6597 return const0_rtx;
6600 mode = TYPE_MODE (type);
6601 /* Use subtarget as the target for operand 0 of a binary operation. */
6602 subtarget = get_subtarget (target);
6603 original_target = target;
6604 ignore = (target == const0_rtx
6605 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6606 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6607 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6608 && TREE_CODE (type) == VOID_TYPE));
6610 /* If we are going to ignore this result, we need only do something
6611 if there is a side-effect somewhere in the expression. If there
6612 is, short-circuit the most common cases here. Note that we must
6613 not call expand_expr with anything but const0_rtx in case this
6614 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6616 if (ignore)
6618 if (! TREE_SIDE_EFFECTS (exp))
6619 return const0_rtx;
6621 /* Ensure we reference a volatile object even if value is ignored, but
6622 don't do this if all we are doing is taking its address. */
6623 if (TREE_THIS_VOLATILE (exp)
6624 && TREE_CODE (exp) != FUNCTION_DECL
6625 && mode != VOIDmode && mode != BLKmode
6626 && modifier != EXPAND_CONST_ADDRESS)
6628 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6629 if (GET_CODE (temp) == MEM)
6630 temp = copy_to_reg (temp);
6631 return const0_rtx;
6634 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6635 || code == INDIRECT_REF || code == BUFFER_REF)
6636 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6637 modifier);
6639 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6640 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6642 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6643 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6644 return const0_rtx;
6646 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6647 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6648 /* If the second operand has no side effects, just evaluate
6649 the first. */
6650 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6651 modifier);
6652 else if (code == BIT_FIELD_REF)
6654 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6655 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6656 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6657 return const0_rtx;
6660 target = 0;
6663 #ifdef MAX_INTEGER_COMPUTATION_MODE
6664 /* Only check stuff here if the mode we want is different from the mode
6665 of the expression; if it's the same, check_max_integer_computation_mode
6666 will handle it. Do we really need to check this stuff at all? */
6668 if (target
6669 && GET_MODE (target) != mode
6670 && TREE_CODE (exp) != INTEGER_CST
6671 && TREE_CODE (exp) != PARM_DECL
6672 && TREE_CODE (exp) != ARRAY_REF
6673 && TREE_CODE (exp) != ARRAY_RANGE_REF
6674 && TREE_CODE (exp) != COMPONENT_REF
6675 && TREE_CODE (exp) != BIT_FIELD_REF
6676 && TREE_CODE (exp) != INDIRECT_REF
6677 && TREE_CODE (exp) != CALL_EXPR
6678 && TREE_CODE (exp) != VAR_DECL
6679 && TREE_CODE (exp) != RTL_EXPR)
6681 enum machine_mode mode = GET_MODE (target);
6683 if (GET_MODE_CLASS (mode) == MODE_INT
6684 && mode > MAX_INTEGER_COMPUTATION_MODE)
6685 internal_error ("unsupported wide integer operation");
6688 if (tmode != mode
6689 && TREE_CODE (exp) != INTEGER_CST
6690 && TREE_CODE (exp) != PARM_DECL
6691 && TREE_CODE (exp) != ARRAY_REF
6692 && TREE_CODE (exp) != ARRAY_RANGE_REF
6693 && TREE_CODE (exp) != COMPONENT_REF
6694 && TREE_CODE (exp) != BIT_FIELD_REF
6695 && TREE_CODE (exp) != INDIRECT_REF
6696 && TREE_CODE (exp) != VAR_DECL
6697 && TREE_CODE (exp) != CALL_EXPR
6698 && TREE_CODE (exp) != RTL_EXPR
6699 && GET_MODE_CLASS (tmode) == MODE_INT
6700 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6701 internal_error ("unsupported wide integer operation");
6703 check_max_integer_computation_mode (exp);
6704 #endif
6706 /* If will do cse, generate all results into pseudo registers
6707 since 1) that allows cse to find more things
6708 and 2) otherwise cse could produce an insn the machine
6709 cannot support. An exception is a CONSTRUCTOR into a multi-word
6710 MEM: that's much more likely to be most efficient into the MEM.
6711 Another is a CALL_EXPR which must return in memory. */
6713 if (! cse_not_expected && mode != BLKmode && target
6714 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6715 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6716 && ! (code == CALL_EXPR && aggregate_value_p (exp)))
6717 target = 0;
6719 switch (code)
6721 case LABEL_DECL:
6723 tree function = decl_function_context (exp);
6724 /* Labels in containing functions, or labels used from initializers,
6725 must be forced. */
6726 if (modifier == EXPAND_INITIALIZER
6727 || (function != current_function_decl
6728 && function != inline_function_decl
6729 && function != 0))
6730 temp = force_label_rtx (exp);
6731 else
6732 temp = label_rtx (exp);
6734 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6735 if (function != current_function_decl
6736 && function != inline_function_decl && function != 0)
6737 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6738 return temp;
6741 case PARM_DECL:
6742 if (!DECL_RTL_SET_P (exp))
6744 error_with_decl (exp, "prior parameter's size depends on `%s'");
6745 return CONST0_RTX (mode);
6748 /* ... fall through ... */
6750 case VAR_DECL:
6751 /* If a static var's type was incomplete when the decl was written,
6752 but the type is complete now, lay out the decl now. */
6753 if (DECL_SIZE (exp) == 0
6754 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6755 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6756 layout_decl (exp, 0);
6758 /* ... fall through ... */
6760 case FUNCTION_DECL:
6761 case RESULT_DECL:
6762 if (DECL_RTL (exp) == 0)
6763 abort ();
6765 /* Ensure the variable is marked as used even if it doesn't go through
6766 a parser. If it hasn't been used yet, write out an external
6767 definition. */
6768 if (! TREE_USED (exp))
6770 assemble_external (exp);
6771 TREE_USED (exp) = 1;
6774 /* Show we haven't gotten RTL for this yet. */
6775 temp = 0;
6777 /* Handle variables inherited from containing functions. */
6778 context = decl_function_context (exp);
6780 /* We treat inline_function_decl as an alias for the current function
6781 because that is the inline function whose vars, types, etc.
6782 are being merged into the current function.
6783 See expand_inline_function. */
6785 if (context != 0 && context != current_function_decl
6786 && context != inline_function_decl
6787 /* If var is static, we don't need a static chain to access it. */
6788 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6789 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6791 rtx addr;
6793 /* Mark as non-local and addressable. */
6794 DECL_NONLOCAL (exp) = 1;
6795 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6796 abort ();
6797 (*lang_hooks.mark_addressable) (exp);
6798 if (GET_CODE (DECL_RTL (exp)) != MEM)
6799 abort ();
6800 addr = XEXP (DECL_RTL (exp), 0);
6801 if (GET_CODE (addr) == MEM)
6802 addr
6803 = replace_equiv_address (addr,
6804 fix_lexical_addr (XEXP (addr, 0), exp));
6805 else
6806 addr = fix_lexical_addr (addr, exp);
6808 temp = replace_equiv_address (DECL_RTL (exp), addr);
6811 /* This is the case of an array whose size is to be determined
6812 from its initializer, while the initializer is still being parsed.
6813 See expand_decl. */
6815 else if (GET_CODE (DECL_RTL (exp)) == MEM
6816 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6817 temp = validize_mem (DECL_RTL (exp));
6819 /* If DECL_RTL is memory, we are in the normal case and either
6820 the address is not valid or it is not a register and -fforce-addr
6821 is specified, get the address into a register. */
6823 else if (GET_CODE (DECL_RTL (exp)) == MEM
6824 && modifier != EXPAND_CONST_ADDRESS
6825 && modifier != EXPAND_SUM
6826 && modifier != EXPAND_INITIALIZER
6827 && (! memory_address_p (DECL_MODE (exp),
6828 XEXP (DECL_RTL (exp), 0))
6829 || (flag_force_addr
6830 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6831 temp = replace_equiv_address (DECL_RTL (exp),
6832 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6834 /* If we got something, return it. But first, set the alignment
6835 if the address is a register. */
6836 if (temp != 0)
6838 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6839 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6841 return temp;
6844 /* If the mode of DECL_RTL does not match that of the decl, it
6845 must be a promoted value. We return a SUBREG of the wanted mode,
6846 but mark it so that we know that it was already extended. */
6848 if (GET_CODE (DECL_RTL (exp)) == REG
6849 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6851 /* Get the signedness used for this variable. Ensure we get the
6852 same mode we got when the variable was declared. */
6853 if (GET_MODE (DECL_RTL (exp))
6854 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6855 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6856 abort ();
6858 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6859 SUBREG_PROMOTED_VAR_P (temp) = 1;
6860 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6861 return temp;
6864 return DECL_RTL (exp);
6866 case INTEGER_CST:
6867 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6868 TREE_INT_CST_HIGH (exp), mode);
6870 /* ??? If overflow is set, fold will have done an incomplete job,
6871 which can result in (plus xx (const_int 0)), which can get
6872 simplified by validate_replace_rtx during virtual register
6873 instantiation, which can result in unrecognizable insns.
6874 Avoid this by forcing all overflows into registers. */
6875 if (TREE_CONSTANT_OVERFLOW (exp)
6876 && modifier != EXPAND_INITIALIZER)
6877 temp = force_reg (mode, temp);
6879 return temp;
6881 case VECTOR_CST:
6882 return const_vector_from_tree (exp);
6884 case CONST_DECL:
6885 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6887 case REAL_CST:
6888 /* If optimized, generate immediate CONST_DOUBLE
6889 which will be turned into memory by reload if necessary.
6891 We used to force a register so that loop.c could see it. But
6892 this does not allow gen_* patterns to perform optimizations with
6893 the constants. It also produces two insns in cases like "x = 1.0;".
6894 On most machines, floating-point constants are not permitted in
6895 many insns, so we'd end up copying it to a register in any case.
6897 Now, we do the copying in expand_binop, if appropriate. */
6898 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6899 TYPE_MODE (TREE_TYPE (exp)));
6901 case COMPLEX_CST:
6902 case STRING_CST:
6903 temp = output_constant_def (exp, 1);
6905 /* temp contains a constant address.
6906 On RISC machines where a constant address isn't valid,
6907 make some insns to get that address into a register. */
6908 if (modifier != EXPAND_CONST_ADDRESS
6909 && modifier != EXPAND_INITIALIZER
6910 && modifier != EXPAND_SUM
6911 && (! memory_address_p (mode, XEXP (temp, 0))
6912 || flag_force_addr))
6913 return replace_equiv_address (temp,
6914 copy_rtx (XEXP (temp, 0)));
6915 return temp;
6917 case EXPR_WITH_FILE_LOCATION:
6919 rtx to_return;
6920 location_t saved_loc = input_location;
6921 input_filename = EXPR_WFL_FILENAME (exp);
6922 input_line = EXPR_WFL_LINENO (exp);
6923 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6924 emit_line_note (input_filename, input_line);
6925 /* Possibly avoid switching back and forth here. */
6926 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6927 input_location = saved_loc;
6928 return to_return;
6931 case SAVE_EXPR:
6932 context = decl_function_context (exp);
6934 /* If this SAVE_EXPR was at global context, assume we are an
6935 initialization function and move it into our context. */
6936 if (context == 0)
6937 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6939 /* We treat inline_function_decl as an alias for the current function
6940 because that is the inline function whose vars, types, etc.
6941 are being merged into the current function.
6942 See expand_inline_function. */
6943 if (context == current_function_decl || context == inline_function_decl)
6944 context = 0;
6946 /* If this is non-local, handle it. */
6947 if (context)
6949 /* The following call just exists to abort if the context is
6950 not of a containing function. */
6951 find_function_data (context);
6953 temp = SAVE_EXPR_RTL (exp);
6954 if (temp && GET_CODE (temp) == REG)
6956 put_var_into_stack (exp, /*rescan=*/true);
6957 temp = SAVE_EXPR_RTL (exp);
6959 if (temp == 0 || GET_CODE (temp) != MEM)
6960 abort ();
6961 return
6962 replace_equiv_address (temp,
6963 fix_lexical_addr (XEXP (temp, 0), exp));
6965 if (SAVE_EXPR_RTL (exp) == 0)
6967 if (mode == VOIDmode)
6968 temp = const0_rtx;
6969 else
6970 temp = assign_temp (build_qualified_type (type,
6971 (TYPE_QUALS (type)
6972 | TYPE_QUAL_CONST)),
6973 3, 0, 0);
6975 SAVE_EXPR_RTL (exp) = temp;
6976 if (!optimize && GET_CODE (temp) == REG)
6977 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6978 save_expr_regs);
6980 /* If the mode of TEMP does not match that of the expression, it
6981 must be a promoted value. We pass store_expr a SUBREG of the
6982 wanted mode but mark it so that we know that it was already
6983 extended. */
6985 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6987 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6988 promote_mode (type, mode, &unsignedp, 0);
6989 SUBREG_PROMOTED_VAR_P (temp) = 1;
6990 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6993 if (temp == const0_rtx)
6994 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6995 else
6996 store_expr (TREE_OPERAND (exp, 0), temp,
6997 modifier == EXPAND_STACK_PARM ? 2 : 0);
6999 TREE_USED (exp) = 1;
7002 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
7003 must be a promoted value. We return a SUBREG of the wanted mode,
7004 but mark it so that we know that it was already extended. */
7006 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
7007 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
7009 /* Compute the signedness and make the proper SUBREG. */
7010 promote_mode (type, mode, &unsignedp, 0);
7011 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
7012 SUBREG_PROMOTED_VAR_P (temp) = 1;
7013 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7014 return temp;
7017 return SAVE_EXPR_RTL (exp);
7019 case UNSAVE_EXPR:
7021 rtx temp;
7022 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7023 TREE_OPERAND (exp, 0)
7024 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
7025 return temp;
7028 case PLACEHOLDER_EXPR:
7030 tree old_list = placeholder_list;
7031 tree placeholder_expr = 0;
7033 exp = find_placeholder (exp, &placeholder_expr);
7034 if (exp == 0)
7035 abort ();
7037 placeholder_list = TREE_CHAIN (placeholder_expr);
7038 temp = expand_expr (exp, original_target, tmode, modifier);
7039 placeholder_list = old_list;
7040 return temp;
7043 case WITH_RECORD_EXPR:
7044 /* Put the object on the placeholder list, expand our first operand,
7045 and pop the list. */
7046 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
7047 placeholder_list);
7048 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
7049 modifier);
7050 placeholder_list = TREE_CHAIN (placeholder_list);
7051 return target;
7053 case GOTO_EXPR:
7054 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7055 expand_goto (TREE_OPERAND (exp, 0));
7056 else
7057 expand_computed_goto (TREE_OPERAND (exp, 0));
7058 return const0_rtx;
7060 case EXIT_EXPR:
7061 expand_exit_loop_if_false (NULL,
7062 invert_truthvalue (TREE_OPERAND (exp, 0)));
7063 return const0_rtx;
7065 case LABELED_BLOCK_EXPR:
7066 if (LABELED_BLOCK_BODY (exp))
7067 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
7068 /* Should perhaps use expand_label, but this is simpler and safer. */
7069 do_pending_stack_adjust ();
7070 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
7071 return const0_rtx;
7073 case EXIT_BLOCK_EXPR:
7074 if (EXIT_BLOCK_RETURN (exp))
7075 sorry ("returned value in block_exit_expr");
7076 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
7077 return const0_rtx;
7079 case LOOP_EXPR:
7080 push_temp_slots ();
7081 expand_start_loop (1);
7082 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
7083 expand_end_loop ();
7084 pop_temp_slots ();
7086 return const0_rtx;
7088 case BIND_EXPR:
7090 tree vars = TREE_OPERAND (exp, 0);
7092 /* Need to open a binding contour here because
7093 if there are any cleanups they must be contained here. */
7094 expand_start_bindings (2);
7096 /* Mark the corresponding BLOCK for output in its proper place. */
7097 if (TREE_OPERAND (exp, 2) != 0
7098 && ! TREE_USED (TREE_OPERAND (exp, 2)))
7099 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
7101 /* If VARS have not yet been expanded, expand them now. */
7102 while (vars)
7104 if (!DECL_RTL_SET_P (vars))
7105 expand_decl (vars);
7106 expand_decl_init (vars);
7107 vars = TREE_CHAIN (vars);
7110 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
7112 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7114 return temp;
7117 case RTL_EXPR:
7118 if (RTL_EXPR_SEQUENCE (exp))
7120 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
7121 abort ();
7122 emit_insn (RTL_EXPR_SEQUENCE (exp));
7123 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
7125 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
7126 free_temps_for_rtl_expr (exp);
7127 return RTL_EXPR_RTL (exp);
7129 case CONSTRUCTOR:
7130 /* If we don't need the result, just ensure we evaluate any
7131 subexpressions. */
7132 if (ignore)
7134 tree elt;
7136 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
7137 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
7139 return const0_rtx;
7142 /* All elts simple constants => refer to a constant in memory. But
7143 if this is a non-BLKmode mode, let it store a field at a time
7144 since that should make a CONST_INT or CONST_DOUBLE when we
7145 fold. Likewise, if we have a target we can use, it is best to
7146 store directly into the target unless the type is large enough
7147 that memcpy will be used. If we are making an initializer and
7148 all operands are constant, put it in memory as well.
7150 FIXME: Avoid trying to fill vector constructors piece-meal.
7151 Output them with output_constant_def below unless we're sure
7152 they're zeros. This should go away when vector initializers
7153 are treated like VECTOR_CST instead of arrays.
7154 */
7155 else if ((TREE_STATIC (exp)
7156 && ((mode == BLKmode
7157 && ! (target != 0 && safe_from_p (target, exp, 1)))
7158 || TREE_ADDRESSABLE (exp)
7159 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7160 && (! MOVE_BY_PIECES_P
7161 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7162 TYPE_ALIGN (type)))
7163 && ((TREE_CODE (type) == VECTOR_TYPE
7164 && !is_zeros_p (exp))
7165 || ! mostly_zeros_p (exp)))))
7166 || ((modifier == EXPAND_INITIALIZER
7167 || modifier == EXPAND_CONST_ADDRESS)
7168 && TREE_CONSTANT (exp)))
7170 rtx constructor = output_constant_def (exp, 1);
7172 if (modifier != EXPAND_CONST_ADDRESS
7173 && modifier != EXPAND_INITIALIZER
7174 && modifier != EXPAND_SUM)
7175 constructor = validize_mem (constructor);
7177 return constructor;
7179 else
7181 /* Handle calls that pass values in multiple non-contiguous
7182 locations. The Irix 6 ABI has examples of this. */
7183 if (target == 0 || ! safe_from_p (target, exp, 1)
7184 || GET_CODE (target) == PARALLEL
7185 || modifier == EXPAND_STACK_PARM)
7186 target
7187 = assign_temp (build_qualified_type (type,
7188 (TYPE_QUALS (type)
7189 | (TREE_READONLY (exp)
7190 * TYPE_QUAL_CONST))),
7191 0, TREE_ADDRESSABLE (exp), 1);
7193 store_constructor (exp, target, 0, int_expr_size (exp));
7194 return target;
7197 case INDIRECT_REF:
7199 tree exp1 = TREE_OPERAND (exp, 0);
7200 tree index;
7201 tree string = string_constant (exp1, &index);
7203 /* Try to optimize reads from const strings. */
7204 if (string
7205 && TREE_CODE (string) == STRING_CST
7206 && TREE_CODE (index) == INTEGER_CST
7207 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7208 && GET_MODE_CLASS (mode) == MODE_INT
7209 && GET_MODE_SIZE (mode) == 1
7210 && modifier != EXPAND_WRITE)
7211 return gen_int_mode (TREE_STRING_POINTER (string)
7212 [TREE_INT_CST_LOW (index)], mode);
7214 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7215 op0 = memory_address (mode, op0);
7216 temp = gen_rtx_MEM (mode, op0);
7217 set_mem_attributes (temp, exp, 0);
7219 /* If we are writing to this object and its type is a record with
7220 readonly fields, we must mark it as readonly so it will
7221 conflict with readonly references to those fields. */
7222 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7223 RTX_UNCHANGING_P (temp) = 1;
7225 return temp;
7228 case ARRAY_REF:
7229 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7230 abort ();
7233 tree array = TREE_OPERAND (exp, 0);
7234 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7235 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7236 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7237 HOST_WIDE_INT i;
7239 /* Optimize the special-case of a zero lower bound.
7241 We convert the low_bound to sizetype to avoid some problems
7242 with constant folding. (E.g. suppose the lower bound is 1,
7243 and its mode is QI. Without the conversion, (ARRAY
7244 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7245 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7247 if (! integer_zerop (low_bound))
7248 index = size_diffop (index, convert (sizetype, low_bound));
7250 /* Fold an expression like: "foo"[2].
7251 This is not done in fold so it won't happen inside &.
7252 Don't fold if this is for wide characters since it's too
7253 difficult to do correctly and this is a very rare case. */
7255 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7256 && TREE_CODE (array) == STRING_CST
7257 && TREE_CODE (index) == INTEGER_CST
7258 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7259 && GET_MODE_CLASS (mode) == MODE_INT
7260 && GET_MODE_SIZE (mode) == 1)
7261 return gen_int_mode (TREE_STRING_POINTER (array)
7262 [TREE_INT_CST_LOW (index)], mode);
7264 /* If this is a constant index into a constant array,
7265 just get the value from the array. Handle both the cases when
7266 we have an explicit constructor and when our operand is a variable
7267 that was declared const. */
7269 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7270 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
7271 && TREE_CODE (index) == INTEGER_CST
7272 && 0 > compare_tree_int (index,
7273 list_length (CONSTRUCTOR_ELTS
7274 (TREE_OPERAND (exp, 0)))))
7276 tree elem;
7278 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7279 i = TREE_INT_CST_LOW (index);
7280 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7281 ;
7283 if (elem)
7284 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7285 modifier);
7288 else if (optimize >= 1
7289 && modifier != EXPAND_CONST_ADDRESS
7290 && modifier != EXPAND_INITIALIZER
7291 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7292 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7293 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7295 if (TREE_CODE (index) == INTEGER_CST)
7297 tree init = DECL_INITIAL (array);
7299 if (TREE_CODE (init) == CONSTRUCTOR)
7301 tree elem;
7303 for (elem = CONSTRUCTOR_ELTS (init);
7304 (elem
7305 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7306 elem = TREE_CHAIN (elem))
7307 ;
7309 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7310 return expand_expr (fold (TREE_VALUE (elem)), target,
7311 tmode, modifier);
7313 else if (TREE_CODE (init) == STRING_CST
7314 && 0 > compare_tree_int (index,
7315 TREE_STRING_LENGTH (init)))
7317 tree type = TREE_TYPE (TREE_TYPE (init));
7318 enum machine_mode mode = TYPE_MODE (type);
7320 if (GET_MODE_CLASS (mode) == MODE_INT
7321 && GET_MODE_SIZE (mode) == 1)
7322 return gen_int_mode (TREE_STRING_POINTER (init)
7323 [TREE_INT_CST_LOW (index)], mode);
7328 goto normal_inner_ref;
7330 case COMPONENT_REF:
7331 /* If the operand is a CONSTRUCTOR, we can just extract the
7332 appropriate field if it is present. */
7333 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7335 tree elt;
7337 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7338 elt = TREE_CHAIN (elt))
7339 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7340 /* We can normally use the value of the field in the
7341 CONSTRUCTOR. However, if this is a bitfield in
7342 an integral mode that we can fit in a HOST_WIDE_INT,
7343 we must mask only the number of bits in the bitfield,
7344 since this is done implicitly by the constructor. If
7345 the bitfield does not meet either of those conditions,
7346 we can't do this optimization. */
7347 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7348 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7349 == MODE_INT)
7350 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7351 <= HOST_BITS_PER_WIDE_INT))))
7353 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7354 && modifier == EXPAND_STACK_PARM)
7355 target = 0;
7356 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7357 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7359 HOST_WIDE_INT bitsize
7360 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7361 enum machine_mode imode
7362 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7364 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7366 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7367 op0 = expand_and (imode, op0, op1, target);
7369 else
7371 tree count
7372 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7373 0);
7375 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7376 target, 0);
7377 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7378 target, 0);
7382 return op0;
7385 goto normal_inner_ref;
7387 case BIT_FIELD_REF:
7388 case ARRAY_RANGE_REF:
7389 normal_inner_ref:
7391 enum machine_mode mode1;
7392 HOST_WIDE_INT bitsize, bitpos;
7393 tree offset;
7394 int volatilep = 0;
7395 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7396 &mode1, &unsignedp, &volatilep);
7397 rtx orig_op0;
7399 /* If we got back the original object, something is wrong. Perhaps
7400 we are evaluating an expression too early. In any event, don't
7401 infinitely recurse. */
7402 if (tem == exp)
7403 abort ();
7405 /* If TEM's type is a union of variable size, pass TARGET to the inner
7406 computation, since it will need a temporary and TARGET is known
7407 to suffice. This occurs in unchecked conversion in Ada. */
7409 orig_op0 = op0
7410 = expand_expr (tem,
7411 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7412 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7413 != INTEGER_CST)
7414 && modifier != EXPAND_STACK_PARM
7415 ? target : NULL_RTX),
7416 VOIDmode,
7417 (modifier == EXPAND_INITIALIZER
7418 || modifier == EXPAND_CONST_ADDRESS
7419 || modifier == EXPAND_STACK_PARM)
7420 ? modifier : EXPAND_NORMAL);
7422 /* If this is a constant, put it into a register if it is a
7423 legitimate constant and OFFSET is 0 and memory if it isn't. */
7424 if (CONSTANT_P (op0))
7426 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7427 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7428 && offset == 0)
7429 op0 = force_reg (mode, op0);
7430 else
7431 op0 = validize_mem (force_const_mem (mode, op0));
7434 if (offset != 0)
7436 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7437 EXPAND_SUM);
7439 /* If this object is in a register, put it into memory.
7440 This case can't occur in C, but can in Ada if we have
7441 unchecked conversion of an expression from a scalar type to
7442 an array or record type. */
7443 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7444 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7446 /* If the operand is a SAVE_EXPR, we can deal with this by
7447 forcing the SAVE_EXPR into memory. */
7448 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7450 put_var_into_stack (TREE_OPERAND (exp, 0),
7451 /*rescan=*/true);
7452 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7454 else
7456 tree nt
7457 = build_qualified_type (TREE_TYPE (tem),
7458 (TYPE_QUALS (TREE_TYPE (tem))
7459 | TYPE_QUAL_CONST));
7460 rtx memloc = assign_temp (nt, 1, 1, 1);
7462 emit_move_insn (memloc, op0);
7463 op0 = memloc;
7467 if (GET_CODE (op0) != MEM)
7468 abort ();
7470 #ifdef POINTERS_EXTEND_UNSIGNED
7471 if (GET_MODE (offset_rtx) != Pmode)
7472 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7473 #else
7474 if (GET_MODE (offset_rtx) != ptr_mode)
7475 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7476 #endif
7478 /* A constant address in OP0 can have VOIDmode; we must not try
7479 to call force_reg in that case, so avoid it. */
7480 if (GET_CODE (op0) == MEM
7481 && GET_MODE (op0) == BLKmode
7482 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7483 && bitsize != 0
7484 && (bitpos % bitsize) == 0
7485 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7486 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7488 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7489 bitpos = 0;
7492 op0 = offset_address (op0, offset_rtx,
7493 highest_pow2_factor (offset));
7496 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7497 record its alignment as BIGGEST_ALIGNMENT. */
7498 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7499 && is_aligning_offset (offset, tem))
7500 set_mem_align (op0, BIGGEST_ALIGNMENT);
7502 /* Don't forget about volatility even if this is a bitfield. */
7503 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7505 if (op0 == orig_op0)
7506 op0 = copy_rtx (op0);
7508 MEM_VOLATILE_P (op0) = 1;
7511 /* The following code doesn't handle CONCAT.
7512 Assume only bitpos == 0 can be used for CONCAT, since a
7513 one-element array has the same mode as its element. */
7514 if (GET_CODE (op0) == CONCAT)
7516 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7517 abort ();
7518 return op0;
7521 /* In cases where an aligned union has an unaligned object
7522 as a field, we might be extracting a BLKmode value from
7523 an integer-mode (e.g., SImode) object. Handle this case
7524 by doing the extract into an object as wide as the field
7525 (which we know to be the width of a basic mode), then
7526 storing into memory, and changing the mode to BLKmode. */
7527 if (mode1 == VOIDmode
7528 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7529 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7530 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7531 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7532 && modifier != EXPAND_CONST_ADDRESS
7533 && modifier != EXPAND_INITIALIZER)
7534 /* If the field isn't aligned enough to fetch as a memref,
7535 fetch it as a bit field. */
7536 || (mode1 != BLKmode
7537 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7538 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
7539 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0)))
7540 || (bitpos % BITS_PER_UNIT != 0)))
7541 /* If the type and the field are a constant size and the
7542 size of the type isn't the same size as the bitfield,
7543 we must use bitfield operations. */
7544 || (bitsize >= 0
7545 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7546 == INTEGER_CST)
7547 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7548 bitsize)))
7550 enum machine_mode ext_mode = mode;
7552 if (ext_mode == BLKmode
7553 && ! (target != 0 && GET_CODE (op0) == MEM
7554 && GET_CODE (target) == MEM
7555 && bitpos % BITS_PER_UNIT == 0))
7556 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7558 if (ext_mode == BLKmode)
7560 /* In this case, BITPOS must start at a byte boundary and
7561 TARGET, if specified, must be a MEM. */
7562 if (GET_CODE (op0) != MEM
7563 || (target != 0 && GET_CODE (target) != MEM)
7564 || bitpos % BITS_PER_UNIT != 0)
7565 abort ();
7567 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7568 if (target == 0)
7569 target = assign_temp (type, 0, 1, 1);
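/* Copy the field with a block move; BITSIZE is rounded up to a whole
   number of bytes for the length argument.  */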
7571 emit_block_move (target, op0,
7572 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7573 / BITS_PER_UNIT),
7574 (modifier == EXPAND_STACK_PARM
7575 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7577 return target;
7580 op0 = validize_mem (op0);
7582 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7583 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7585 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7586 (modifier == EXPAND_STACK_PARM
7587 ? NULL_RTX : target),
7588 ext_mode, ext_mode,
7589 int_size_in_bytes (TREE_TYPE (tem)));
7591 /* If the result is a record type and BITSIZE is narrower than
7592 the mode of OP0, an integral mode, and this is a big endian
7593 machine, we must put the field into the high-order bits. */
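/* That is, shift the extracted value left so its top bit lines up with
   the top bit of OP0's mode.  */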
7594 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7595 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7596 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7597 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7598 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7599 - bitsize),
7600 op0, 1);
7602 if (mode == BLKmode)
7604 rtx new = assign_temp (build_qualified_type
7605 ((*lang_hooks.types.type_for_mode)
7606 (ext_mode, 0),
7607 TYPE_QUAL_CONST), 0, 1, 1);
7609 emit_move_insn (new, op0);
7610 op0 = copy_rtx (new);
7611 PUT_MODE (op0, BLKmode);
7612 set_mem_attributes (op0, exp, 1);
7615 return op0;
7618 /* If the result is BLKmode, use that to access the object
7619 now as well. */
7620 if (mode == BLKmode)
7621 mode1 = BLKmode;
7623 /* Get a reference to just this component. */
7624 if (modifier == EXPAND_CONST_ADDRESS
7625 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7626 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7627 else
7628 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7630 if (op0 == orig_op0)
7631 op0 = copy_rtx (op0);
7633 set_mem_attributes (op0, exp, 0);
7634 if (GET_CODE (XEXP (op0, 0)) == REG)
7635 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7637 MEM_VOLATILE_P (op0) |= volatilep;
7638 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7639 || modifier == EXPAND_CONST_ADDRESS
7640 || modifier == EXPAND_INITIALIZER)
7641 return op0;
7642 else if (target == 0)
7643 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7645 convert_move (target, op0, unsignedp);
7646 return target;
7649 case VTABLE_REF:
7651 rtx insn, before = get_last_insn (), vtbl_ref;
7653 /* Evaluate the interior expression. */
7654 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7655 tmode, modifier);
7657 /* Get or create an instruction off which to hang a note. */
7658 if (REG_P (subtarget))
7660 target = subtarget;
7661 insn = get_last_insn ();
7662 if (insn == before)
7663 abort ();
7664 if (! INSN_P (insn))
7665 insn = prev_nonnote_insn (insn);
7667 else
7669 target = gen_reg_rtx (GET_MODE (subtarget));
7670 insn = emit_move_insn (target, subtarget);
7673 /* Collect the data for the note. */
7674 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7675 vtbl_ref = plus_constant (vtbl_ref,
7676 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7677 /* Discard the initial CONST that was added. */
7678 vtbl_ref = XEXP (vtbl_ref, 0);
7680 REG_NOTES (insn)
7681 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7683 return target;
7686 /* Intended for a reference to a buffer of a file-object in Pascal.
7687 But it's not certain that a special tree code will really be
7688 necessary for these. INDIRECT_REF might work for them. */
7689 case BUFFER_REF:
7690 abort ();
7692 case IN_EXPR:
7694 /* Pascal set IN expression.
7696 Algorithm:
7697 rlo = set_low - (set_low%bits_per_word);
7698 the_word = set [ (index - rlo)/bits_per_word ];
7699 bit_index = index % bits_per_word;
7700 bitmask = 1 << bit_index;
7701 return !!(the_word & bitmask); */
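/* In addition to the algorithm above, the code below range-checks INDEX
   against the set bounds (statically when both are constant, otherwise
   with compare-and-jump insns) and yields 0 for an out-of-range index.  */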
7703 tree set = TREE_OPERAND (exp, 0);
7704 tree index = TREE_OPERAND (exp, 1);
7705 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7706 tree set_type = TREE_TYPE (set);
7707 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7708 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7709 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7710 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7711 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7712 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7713 rtx setaddr = XEXP (setval, 0);
7714 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7715 rtx rlow;
7716 rtx diff, quo, rem, addr, bit, result;
7718 /* If domain is empty, answer is no. Likewise if index is constant
7719 and out of bounds. */
7720 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7721 && TREE_CODE (set_low_bound) == INTEGER_CST
7722 && tree_int_cst_lt (set_high_bound, set_low_bound))
7723 || (TREE_CODE (index) == INTEGER_CST
7724 && TREE_CODE (set_low_bound) == INTEGER_CST
7725 && tree_int_cst_lt (index, set_low_bound))
7726 || (TREE_CODE (set_high_bound) == INTEGER_CST
7727 && TREE_CODE (index) == INTEGER_CST
7728 && tree_int_cst_lt (set_high_bound, index))))
7729 return const0_rtx;
7731 if (target == 0)
7732 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7734 /* If we get here, we have to generate the code for both cases
7735 (in range and out of range). */
7737 op0 = gen_label_rtx ();
7738 op1 = gen_label_rtx ();
7740 if (! (GET_CODE (index_val) == CONST_INT
7741 && GET_CODE (lo_r) == CONST_INT))
7742 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7743 GET_MODE (index_val), iunsignedp, op1);
7745 if (! (GET_CODE (index_val) == CONST_INT
7746 && GET_CODE (hi_r) == CONST_INT))
7747 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7748 GET_MODE (index_val), iunsignedp, op1);
7750 /* Calculate the element number of bit zero in the first word
7751 of the set. */
7752 if (GET_CODE (lo_r) == CONST_INT)
7753 rlow = GEN_INT (INTVAL (lo_r)
7754 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7755 else
7756 rlow = expand_binop (index_mode, and_optab, lo_r,
7757 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7758 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7760 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7761 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7763 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7764 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7765 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7766 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7768 addr = memory_address (byte_mode,
7769 expand_binop (index_mode, add_optab, diff,
7770 setaddr, NULL_RTX, iunsignedp,
7771 OPTAB_LIB_WIDEN));
7773 /* Extract the bit we want to examine. */
7774 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7775 gen_rtx_MEM (byte_mode, addr),
7776 make_tree (TREE_TYPE (index), rem),
7777 NULL_RTX, 1);
7778 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7779 GET_MODE (target) == byte_mode ? target : 0,
7780 1, OPTAB_LIB_WIDEN);
7782 if (result != target)
7783 convert_move (target, result, 1);
7785 /* Output the code to handle the out-of-range case. */
7786 emit_jump (op0);
7787 emit_label (op1);
7788 emit_move_insn (target, const0_rtx);
7789 emit_label (op0);
7790 return target;
7793 case WITH_CLEANUP_EXPR:
7794 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7796 WITH_CLEANUP_EXPR_RTL (exp)
7797 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7798 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7799 CLEANUP_EH_ONLY (exp));
7801 /* That's it for this cleanup. */
7802 TREE_OPERAND (exp, 1) = 0;
7804 return WITH_CLEANUP_EXPR_RTL (exp);
7806 case CLEANUP_POINT_EXPR:
7808 /* Start a new binding layer that will keep track of all cleanup
7809 actions to be performed. */
7810 expand_start_bindings (2);
7812 target_temp_slot_level = temp_slot_level;
7814 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7815 /* If we're going to use this value, load it up now. */
7816 if (! ignore)
7817 op0 = force_not_mem (op0);
7818 preserve_temp_slots (op0);
7819 expand_end_bindings (NULL_TREE, 0, 0);
7821 return op0;
7823 case CALL_EXPR:
7824 /* Check for a built-in function. */
7825 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7826 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7827 == FUNCTION_DECL)
7828 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7830 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7831 == BUILT_IN_FRONTEND)
7832 return (*lang_hooks.expand_expr) (exp, original_target,
7833 tmode, modifier);
7834 else
7835 return expand_builtin (exp, target, subtarget, tmode, ignore);
7838 return expand_call (exp, target, ignore);
7840 case NON_LVALUE_EXPR:
7841 case NOP_EXPR:
7842 case CONVERT_EXPR:
7843 case REFERENCE_EXPR:
7844 if (TREE_OPERAND (exp, 0) == error_mark_node)
7845 return const0_rtx;
7847 if (TREE_CODE (type) == UNION_TYPE)
7849 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7851 /* If both input and output are BLKmode, this conversion isn't doing
7852 anything except possibly changing memory attributes. */
7853 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7855 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7856 modifier);
7858 result = copy_rtx (result);
7859 set_mem_attributes (result, exp, 0);
7860 return result;
7863 if (target == 0)
7864 target = assign_temp (type, 0, 1, 1);
7866 if (GET_CODE (target) == MEM)
7867 /* Store data into beginning of memory target. */
7868 store_expr (TREE_OPERAND (exp, 0),
7869 adjust_address (target, TYPE_MODE (valtype), 0),
7870 modifier == EXPAND_STACK_PARM ? 2 : 0);
7872 else if (GET_CODE (target) == REG)
7873 /* Store this field into a union of the proper type. */
7874 store_field (target,
7875 MIN ((int_size_in_bytes (TREE_TYPE
7876 (TREE_OPERAND (exp, 0)))
7877 * BITS_PER_UNIT),
7878 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7879 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7880 VOIDmode, 0, type, 0);
7881 else
7882 abort ();
7884 /* Return the entire union. */
7885 return target;
7888 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7890 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7891 modifier);
7893 /* If the signedness of the conversion differs and OP0 is
7894 a promoted SUBREG, clear that indication since we now
7895 have to do the proper extension. */
7896 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7897 && GET_CODE (op0) == SUBREG)
7898 SUBREG_PROMOTED_VAR_P (op0) = 0;
7900 return op0;
7903 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7904 if (GET_MODE (op0) == mode)
7905 return op0;
7907 /* If OP0 is a constant, just convert it into the proper mode. */
7908 if (CONSTANT_P (op0))
7910 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7911 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7913 if (modifier == EXPAND_INITIALIZER)
7914 return simplify_gen_subreg (mode, op0, inner_mode,
7915 subreg_lowpart_offset (mode,
7916 inner_mode));
7917 else
7918 return convert_modes (mode, inner_mode, op0,
7919 TREE_UNSIGNED (inner_type));
7922 if (modifier == EXPAND_INITIALIZER)
7923 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7925 if (target == 0)
7926 return
7927 convert_to_mode (mode, op0,
7928 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7929 else
7930 convert_move (target, op0,
7931 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7932 return target;
7934 case VIEW_CONVERT_EXPR:
7935 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7937 /* If the input and output modes are both the same, we are done.
7938 Otherwise, if neither mode is BLKmode and both are integral and within
7939 a word, we can use gen_lowpart. If neither is true, make sure the
7940 operand is in memory and convert the MEM to the new mode. */
7941 if (TYPE_MODE (type) == GET_MODE (op0))
7942 ;
7943 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7944 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7945 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7946 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7947 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7948 op0 = gen_lowpart (TYPE_MODE (type), op0);
7949 else if (GET_CODE (op0) != MEM)
7951 /* If the operand is not a MEM, force it into memory. Since we
7952 are going to be changing the mode of the MEM, don't call
7953 force_const_mem for constants because we don't allow pool
7954 constants to change mode. */
7955 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7957 if (TREE_ADDRESSABLE (exp))
7958 abort ();
7960 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7961 target
7962 = assign_stack_temp_for_type
7963 (TYPE_MODE (inner_type),
7964 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7966 emit_move_insn (target, op0);
7967 op0 = target;
7970 /* At this point, OP0 is in the correct mode. If the output type is such
7971 that the operand is known to be aligned, indicate that it is.
7972 Otherwise, we need only be concerned about alignment for non-BLKmode
7973 results. */
7974 if (GET_CODE (op0) == MEM)
7976 op0 = copy_rtx (op0);
7978 if (TYPE_ALIGN_OK (type))
7979 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7980 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7981 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7983 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7984 HOST_WIDE_INT temp_size
7985 = MAX (int_size_in_bytes (inner_type),
7986 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7987 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7988 temp_size, 0, type);
7989 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7991 if (TREE_ADDRESSABLE (exp))
7992 abort ();
7994 if (GET_MODE (op0) == BLKmode)
7995 emit_block_move (new_with_op0_mode, op0,
7996 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7997 (modifier == EXPAND_STACK_PARM
7998 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7999 else
8000 emit_move_insn (new_with_op0_mode, op0);
8002 op0 = new;
8005 op0 = adjust_address (op0, TYPE_MODE (type), 0);
8008 return op0;
8010 case PLUS_EXPR:
8011 this_optab = ! unsignedp && flag_trapv
8012 && (GET_MODE_CLASS (mode) == MODE_INT)
8013 ? addv_optab : add_optab;
8015 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
8016 something else, make sure we add the register to the constant and
8017 then to the other thing. This case can occur during strength
8018 reduction and doing it this way will produce better code if the
8019 frame pointer or argument pointer is eliminated.
8021 fold-const.c will ensure that the constant is always in the inner
8022 PLUS_EXPR, so the only case we need to do anything about is if
8023 sp, ap, or fp is our second argument, in which case we must swap
8024 the innermost first argument and our second argument. */
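/* For example, (X + C) + FP is rearranged here so that FP is paired
   with the constant, giving (FP + C) + X.  */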
8026 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8027 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8028 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
8029 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8030 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8031 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8033 tree t = TREE_OPERAND (exp, 1);
8035 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8036 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8039 /* If the result is to be ptr_mode and we are adding an integer to
8040 something, we might be forming a constant. So try to use
8041 plus_constant. If it produces a sum and we can't accept it,
8042 use force_operand. This allows P = &ARR[const] to generate
8043 efficient code on machines where a SYMBOL_REF is not a valid
8044 address.
8046 If this is an EXPAND_SUM call, always return the sum. */
8047 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8048 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8050 if (modifier == EXPAND_STACK_PARM)
8051 target = 0;
8052 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8053 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8054 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8056 rtx constant_part;
8058 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8059 EXPAND_SUM);
8060 /* Use immed_double_const to ensure that the constant is
8061 truncated according to the mode of OP1, then sign extended
8062 to a HOST_WIDE_INT. Using the constant directly can result
8063 in non-canonical RTL in a 64x32 cross compile. */
8064 constant_part
8065 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8066 (HOST_WIDE_INT) 0,
8067 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8068 op1 = plus_constant (op1, INTVAL (constant_part));
8069 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8070 op1 = force_operand (op1, target);
8071 return op1;
8074 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8075 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
8076 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8078 rtx constant_part;
8080 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8081 (modifier == EXPAND_INITIALIZER
8082 ? EXPAND_INITIALIZER : EXPAND_SUM));
8083 if (! CONSTANT_P (op0))
8085 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8086 VOIDmode, modifier);
8087 /* Don't go to both_summands if modifier
8088 says it's not right to return a PLUS. */
8089 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8090 goto binop2;
8091 goto both_summands;
8093 /* Use immed_double_const to ensure that the constant is
8094 truncated according to the mode of OP0, then sign extended
8095 to a HOST_WIDE_INT. Using the constant directly can result
8096 in non-canonical RTL in a 64x32 cross compile. */
8097 constant_part
8098 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8099 (HOST_WIDE_INT) 0,
8100 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8101 op0 = plus_constant (op0, INTVAL (constant_part));
8102 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8103 op0 = force_operand (op0, target);
8104 return op0;
8108 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8109 subtarget = 0;
8111 /* No sense saving up arithmetic to be done
8112 if it's all in the wrong mode to form part of an address.
8113 And force_operand won't know whether to sign-extend or
8114 zero-extend. */
8115 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8116 || mode != ptr_mode)
8118 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8119 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8120 if (op0 == const0_rtx)
8121 return op1;
8122 if (op1 == const0_rtx)
8123 return op0;
8124 goto binop2;
8127 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8128 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8130 /* We come here from MINUS_EXPR when the second operand is a
8131 constant. */
8132 both_summands:
8133 /* Make sure any term that's a sum with a constant comes last. */
8134 if (GET_CODE (op0) == PLUS
8135 && CONSTANT_P (XEXP (op0, 1)))
8137 temp = op0;
8138 op0 = op1;
8139 op1 = temp;
8141 /* If adding to a sum including a constant,
8142 associate it to put the constant outside. */
8143 if (GET_CODE (op1) == PLUS
8144 && CONSTANT_P (XEXP (op1, 1)))
8146 rtx constant_term = const0_rtx;
8148 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
8149 if (temp != 0)
8150 op0 = temp;
8151 /* Ensure that MULT comes first if there is one. */
8152 else if (GET_CODE (op0) == MULT)
8153 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
8154 else
8155 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
8157 /* Let's also eliminate constants from op0 if possible. */
8158 op0 = eliminate_constant_term (op0, &constant_term);
8160 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
8161 their sum should be a constant. Form it into OP1, since the
8162 result we want will then be OP0 + OP1. */
8164 temp = simplify_binary_operation (PLUS, mode, constant_term,
8165 XEXP (op1, 1));
8166 if (temp != 0)
8167 op1 = temp;
8168 else
8169 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
8172 /* Put a constant term last and put a multiplication first. */
8173 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
8174 temp = op1, op1 = op0, op0 = temp;
8176 temp = simplify_binary_operation (PLUS, mode, op0, op1);
8177 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
8179 case MINUS_EXPR:
8180 /* For initializers, we are allowed to return a MINUS of two
8181 symbolic constants. Here we handle all cases when both operands
8182 are constant. */
8183 /* Handle difference of two symbolic constants,
8184 for the sake of an initializer. */
8185 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8186 && really_constant_p (TREE_OPERAND (exp, 0))
8187 && really_constant_p (TREE_OPERAND (exp, 1)))
8189 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
8190 modifier);
8191 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8192 modifier);
8194 /* If the last operand is a CONST_INT, use plus_constant of
8195 the negated constant. Else make the MINUS. */
8196 if (GET_CODE (op1) == CONST_INT)
8197 return plus_constant (op0, - INTVAL (op1));
8198 else
8199 return gen_rtx_MINUS (mode, op0, op1);
8202 this_optab = ! unsignedp && flag_trapv
8203 && (GET_MODE_CLASS(mode) == MODE_INT)
8204 ? subv_optab : sub_optab;
8206 /* No sense saving up arithmetic to be done
8207 if it's all in the wrong mode to form part of an address.
8208 And force_operand won't know whether to sign-extend or
8209 zero-extend. */
8210 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8211 || mode != ptr_mode)
8212 goto binop;
8214 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8215 subtarget = 0;
8217 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8218 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8220 /* Convert A - const to A + (-const). */
8221 if (GET_CODE (op1) == CONST_INT)
8223 op1 = negate_rtx (mode, op1);
8224 goto both_summands;
8227 goto binop2;
8229 case MULT_EXPR:
8230 /* If first operand is constant, swap them.
8231 Thus the following special case checks need only
8232 check the second operand. */
8233 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8235 tree t1 = TREE_OPERAND (exp, 0);
8236 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8237 TREE_OPERAND (exp, 1) = t1;
8240 /* Attempt to return something suitable for generating an
8241 indexed address, for machines that support that. */
8243 if (modifier == EXPAND_SUM && mode == ptr_mode
8244 && host_integerp (TREE_OPERAND (exp, 1), 0))
8246 tree exp1 = TREE_OPERAND (exp, 1);
8248 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8249 EXPAND_SUM);
8251 /* If we knew for certain that this is arithmetic for an array
8252 reference, and we knew the bounds of the array, then we could
8253 apply the distributive law across (PLUS X C) for constant C.
8254 Without such knowledge, we risk overflowing the computation
8255 when both X and C are large, but X+C isn't. */
8256 /* ??? Could perhaps special-case EXP being unsigned and C being
8257 positive. In that case we are certain that X+C is no smaller
8258 than X and so the transformed expression will overflow iff the
8259 original would have. */
8261 if (GET_CODE (op0) != REG)
8262 op0 = force_operand (op0, NULL_RTX);
8263 if (GET_CODE (op0) != REG)
8264 op0 = copy_to_mode_reg (mode, op0);
8266 return gen_rtx_MULT (mode, op0,
8267 gen_int_mode (tree_low_cst (exp1, 0),
8268 TYPE_MODE (TREE_TYPE (exp1))));
8271 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8272 subtarget = 0;
8274 if (modifier == EXPAND_STACK_PARM)
8275 target = 0;
8277 /* Check for multiplying things that have been extended
8278 from a narrower type. If this machine supports multiplying
8279 in that narrower type with a result in the desired type,
8280 do it that way, and avoid the explicit type-conversion. */
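/* For instance, (int) (short) a * (int) (short) b can use a single
   short-by-short widening multiply when the target provides one.  */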
8281 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8282 && TREE_CODE (type) == INTEGER_TYPE
8283 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8284 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8285 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8286 && int_fits_type_p (TREE_OPERAND (exp, 1),
8287 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8288 /* Don't use a widening multiply if a shift will do. */
8289 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8290 > HOST_BITS_PER_WIDE_INT)
8291 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8292 ||
8293 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8294 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8295 ==
8296 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8297 /* If both operands are extended, they must either both
8298 be zero-extended or both be sign-extended. */
8299 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8300 ==
8301 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
8303 enum machine_mode innermode
8304 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
8305 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8306 ? smul_widen_optab : umul_widen_optab);
8307 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8308 ? umul_widen_optab : smul_widen_optab);
8309 if (mode == GET_MODE_WIDER_MODE (innermode))
8311 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8313 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8314 NULL_RTX, VOIDmode, 0);
8315 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8316 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8317 VOIDmode, 0);
8318 else
8319 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8320 NULL_RTX, VOIDmode, 0);
8321 goto binop2;
8323 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8324 && innermode == word_mode)
8326 rtx htem;
8327 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8328 NULL_RTX, VOIDmode, 0);
8329 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8330 op1 = convert_modes (innermode, mode,
8331 expand_expr (TREE_OPERAND (exp, 1),
8332 NULL_RTX, VOIDmode, 0),
8333 unsignedp);
8334 else
8335 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8336 NULL_RTX, VOIDmode, 0);
8337 temp = expand_binop (mode, other_optab, op0, op1, target,
8338 unsignedp, OPTAB_LIB_WIDEN);
8339 htem = expand_mult_highpart_adjust (innermode,
8340 gen_highpart (innermode, temp),
8341 op0, op1,
8342 gen_highpart (innermode, temp),
8343 unsignedp);
8344 emit_move_insn (gen_highpart (innermode, temp), htem);
8345 return temp;
8349 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8350 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8351 return expand_mult (mode, op0, op1, target, unsignedp);
8353 case TRUNC_DIV_EXPR:
8354 case FLOOR_DIV_EXPR:
8355 case CEIL_DIV_EXPR:
8356 case ROUND_DIV_EXPR:
8357 case EXACT_DIV_EXPR:
8358 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8359 subtarget = 0;
8360 if (modifier == EXPAND_STACK_PARM)
8361 target = 0;
8362 /* Possible optimization: compute the dividend with EXPAND_SUM;
8363 then, if the divisor is constant, we can optimize the case
8364 where some terms of the dividend have coefficients divisible by it. */
8365 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8366 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8367 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8369 case RDIV_EXPR:
8370 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal,
8371 saving an expensive divide. If not, combine will rebuild the original
8372 computation. */
8373 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8374 && TREE_CODE (type) == REAL_TYPE
8375 && !real_onep (TREE_OPERAND (exp, 0)))
8376 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8377 build (RDIV_EXPR, type,
8378 build_real (type, dconst1),
8379 TREE_OPERAND (exp, 1))),
8380 target, tmode, modifier);
8381 this_optab = sdiv_optab;
8382 goto binop;
8384 case TRUNC_MOD_EXPR:
8385 case FLOOR_MOD_EXPR:
8386 case CEIL_MOD_EXPR:
8387 case ROUND_MOD_EXPR:
8388 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8389 subtarget = 0;
8390 if (modifier == EXPAND_STACK_PARM)
8391 target = 0;
8392 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8393 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8394 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8396 case FIX_ROUND_EXPR:
8397 case FIX_FLOOR_EXPR:
8398 case FIX_CEIL_EXPR:
8399 abort (); /* Not used for C. */
8401 case FIX_TRUNC_EXPR:
8402 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8403 if (target == 0 || modifier == EXPAND_STACK_PARM)
8404 target = gen_reg_rtx (mode);
8405 expand_fix (target, op0, unsignedp);
8406 return target;
8408 case FLOAT_EXPR:
8409 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8410 if (target == 0 || modifier == EXPAND_STACK_PARM)
8411 target = gen_reg_rtx (mode);
8412 /* expand_float can't figure out what to do if FROM has VOIDmode.
8413 So give it the correct mode. With -O, cse will optimize this. */
8414 if (GET_MODE (op0) == VOIDmode)
8415 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8416 op0);
8417 expand_float (target, op0,
8418 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8419 return target;
8421 case NEGATE_EXPR:
8422 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8423 if (modifier == EXPAND_STACK_PARM)
8424 target = 0;
8425 temp = expand_unop (mode,
8426 ! unsignedp && flag_trapv
8427 && (GET_MODE_CLASS(mode) == MODE_INT)
8428 ? negv_optab : neg_optab, op0, target, 0);
8429 if (temp == 0)
8430 abort ();
8431 return temp;
8433 case ABS_EXPR:
8434 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8435 if (modifier == EXPAND_STACK_PARM)
8436 target = 0;
8438 /* Handle complex values specially. */
8439 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8440 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8441 return expand_complex_abs (mode, op0, target, unsignedp);
8443 /* Unsigned abs is simply the operand. Testing here means we don't
8444 risk generating incorrect code below. */
8445 if (TREE_UNSIGNED (type))
8446 return op0;
8448 return expand_abs (mode, op0, target, unsignedp,
8449 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8451 case MAX_EXPR:
8452 case MIN_EXPR:
8453 target = original_target;
8454 if (target == 0
8455 || modifier == EXPAND_STACK_PARM
8456 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8457 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8458 || GET_MODE (target) != mode
8459 || (GET_CODE (target) == REG
8460 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8461 target = gen_reg_rtx (mode);
8462 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8463 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8465 /* First try to do it with a special MIN or MAX instruction.
8466 If that does not win, use a conditional jump to select the proper
8467 value. */
8468 this_optab = (TREE_UNSIGNED (type)
8469 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8470 : (code == MIN_EXPR ? smin_optab : smax_optab));
8472 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8473 OPTAB_WIDEN);
8474 if (temp != 0)
8475 return temp;
8477 /* At this point, a MEM target is no longer useful; we will get better
8478 code without it. */
8480 if (GET_CODE (target) == MEM)
8481 target = gen_reg_rtx (mode);
8483 if (target != op0)
8484 emit_move_insn (target, op0);
8486 op0 = gen_label_rtx ();
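/* At this point TARGET holds operand 0 and OP0 has been reused as the
   join label; the comparisons below jump to it when TARGET already holds
   the desired extreme, skipping the move of OP1 into TARGET.  */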
8488 /* If this mode is an integer too wide to compare properly,
8489 compare word by word. Rely on cse to optimize constant cases. */
8490 if (GET_MODE_CLASS (mode) == MODE_INT
8491 && ! can_compare_p (GE, mode, ccp_jump))
8493 if (code == MAX_EXPR)
8494 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8495 target, op1, NULL_RTX, op0);
8496 else
8497 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8498 op1, target, NULL_RTX, op0);
8500 else
8502 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8503 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8504 unsignedp, mode, NULL_RTX, NULL_RTX,
8505 op0);
8507 emit_move_insn (target, op1);
8508 emit_label (op0);
8509 return target;
8511 case BIT_NOT_EXPR:
8512 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8513 if (modifier == EXPAND_STACK_PARM)
8514 target = 0;
8515 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8516 if (temp == 0)
8517 abort ();
8518 return temp;
8520 case FFS_EXPR:
8521 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8522 if (modifier == EXPAND_STACK_PARM)
8523 target = 0;
8524 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8525 if (temp == 0)
8526 abort ();
8527 return temp;
8529 case CLZ_EXPR:
8530 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8531 temp = expand_unop (mode, clz_optab, op0, target, 1);
8532 if (temp == 0)
8533 abort ();
8534 return temp;
8536 case CTZ_EXPR:
8537 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8538 temp = expand_unop (mode, ctz_optab, op0, target, 1);
8539 if (temp == 0)
8540 abort ();
8541 return temp;
8543 case POPCOUNT_EXPR:
8544 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8545 temp = expand_unop (mode, popcount_optab, op0, target, 1);
8546 if (temp == 0)
8547 abort ();
8548 return temp;
8550 case PARITY_EXPR:
8551 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8552 temp = expand_unop (mode, parity_optab, op0, target, 1);
8553 if (temp == 0)
8554 abort ();
8555 return temp;
8557 /* ??? Can optimize bitwise operations with one arg constant.
8558 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8559 and (a bitwise1 b) bitwise2 b (etc)
8560 but that is probably not worthwhile. */
8562 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8563 boolean values when we want in all cases to compute both of them. In
8564 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8565 as actual zero-or-1 values and then bitwise anding. In cases where
8566 there cannot be any side effects, better code would be made by
8567 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8568 how to recognize those cases. */
8570 case TRUTH_AND_EXPR:
8571 case BIT_AND_EXPR:
8572 this_optab = and_optab;
8573 goto binop;
8575 case TRUTH_OR_EXPR:
8576 case BIT_IOR_EXPR:
8577 this_optab = ior_optab;
8578 goto binop;
8580 case TRUTH_XOR_EXPR:
8581 case BIT_XOR_EXPR:
8582 this_optab = xor_optab;
8583 goto binop;
8585 case LSHIFT_EXPR:
8586 case RSHIFT_EXPR:
8587 case LROTATE_EXPR:
8588 case RROTATE_EXPR:
8589 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8590 subtarget = 0;
8591 if (modifier == EXPAND_STACK_PARM)
8592 target = 0;
8593 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8594 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8595 unsignedp);
8597 /* Could determine the answer when only additive constants differ. Also,
8598 the addition of one can be handled by changing the condition. */
8599 case LT_EXPR:
8600 case LE_EXPR:
8601 case GT_EXPR:
8602 case GE_EXPR:
8603 case EQ_EXPR:
8604 case NE_EXPR:
8605 case UNORDERED_EXPR:
8606 case ORDERED_EXPR:
8607 case UNLT_EXPR:
8608 case UNLE_EXPR:
8609 case UNGT_EXPR:
8610 case UNGE_EXPR:
8611 case UNEQ_EXPR:
8612 temp = do_store_flag (exp,
8613 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8614 tmode != VOIDmode ? tmode : mode, 0);
8615 if (temp != 0)
8616 return temp;
8618 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8619 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8620 && original_target
8621 && GET_CODE (original_target) == REG
8622 && (GET_MODE (original_target)
8623 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8625 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8626 VOIDmode, 0);
8628 /* If temp is constant, we can just compute the result. */
8629 if (GET_CODE (temp) == CONST_INT)
8631 if (INTVAL (temp) != 0)
8632 emit_move_insn (target, const1_rtx);
8633 else
8634 emit_move_insn (target, const0_rtx);
8636 return target;
8639 if (temp != original_target)
8641 enum machine_mode mode1 = GET_MODE (temp);
8642 if (mode1 == VOIDmode)
8643 mode1 = tmode != VOIDmode ? tmode : mode;
8645 temp = copy_to_mode_reg (mode1, temp);
8648 op1 = gen_label_rtx ();
8649 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8650 GET_MODE (temp), unsignedp, op1);
8651 emit_move_insn (temp, const1_rtx);
8652 emit_label (op1);
8653 return temp;
8656 /* If no set-flag instruction, must generate a conditional
8657 store into a temporary variable. Drop through
8658 and handle this like && and ||. */
8660 case TRUTH_ANDIF_EXPR:
8661 case TRUTH_ORIF_EXPR:
8662 if (! ignore
8663 && (target == 0
8664 || modifier == EXPAND_STACK_PARM
8665 || ! safe_from_p (target, exp, 1)
8666 /* Make sure we don't have a hard reg (such as function's return
8667 value) live across basic blocks, if not optimizing. */
8668 || (!optimize && GET_CODE (target) == REG
8669 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8670 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8672 if (target)
8673 emit_clr_insn (target);
8675 op1 = gen_label_rtx ();
8676 jumpifnot (exp, op1);
8678 if (target)
8679 emit_0_to_1_insn (target);
8681 emit_label (op1);
8682 return ignore ? const0_rtx : target;
8684 case TRUTH_NOT_EXPR:
8685 if (modifier == EXPAND_STACK_PARM)
8686 target = 0;
8687 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8688 /* The parser is careful to generate TRUTH_NOT_EXPR
8689 only with operands that are always zero or one. */
8690 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8691 target, 1, OPTAB_LIB_WIDEN);
8692 if (temp == 0)
8693 abort ();
8694 return temp;
8696 case COMPOUND_EXPR:
8697 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8698 emit_queue ();
8699 return expand_expr (TREE_OPERAND (exp, 1),
8700 (ignore ? const0_rtx : target),
8701 VOIDmode, modifier);
8703 case COND_EXPR:
8704 /* If we would have a "singleton" (see below) were it not for a
8705 conversion in each arm, bring that conversion back out. */
8706 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8707 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8708 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8709 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8711 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8712 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8714 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8715 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8716 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8717 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8718 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8719 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8720 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8721 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8722 return expand_expr (build1 (NOP_EXPR, type,
8723 build (COND_EXPR, TREE_TYPE (iftrue),
8724 TREE_OPERAND (exp, 0),
8725 iftrue, iffalse)),
8726 target, tmode, modifier);
8730 /* Note that COND_EXPRs whose type is a structure or union
8731 are required to be constructed to contain assignments of
8732 a temporary variable, so that we can evaluate them here
8733 for side effect only. If type is void, we must do likewise. */
8735 /* If an arm of the branch requires a cleanup,
8736 only that cleanup is performed. */
8738 tree singleton = 0;
8739 tree binary_op = 0, unary_op = 0;
8741 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8742 convert it to our mode, if necessary. */
8743 if (integer_onep (TREE_OPERAND (exp, 1))
8744 && integer_zerop (TREE_OPERAND (exp, 2))
8745 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8747 if (ignore)
8749 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8750 modifier);
8751 return const0_rtx;
8754 if (modifier == EXPAND_STACK_PARM)
8755 target = 0;
8756 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8757 if (GET_MODE (op0) == mode)
8758 return op0;
8760 if (target == 0)
8761 target = gen_reg_rtx (mode);
8762 convert_move (target, op0, unsignedp);
8763 return target;
8766 /* Check for X ? A + B : A. If we have this, we can copy A to the
8767 output and conditionally add B. Similarly for unary operations.
8768 Don't do this if X has side-effects because those side effects
8769 might affect A or B and the "?" operation is a sequence point in
8770 ANSI. (operand_equal_p tests for side effects.) */
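/* E.g. for "X ? A + B : A", SINGLETON is A and BINARY_OP is A + B;
   for "X ? -A : A", SINGLETON is A and UNARY_OP is -A.  */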
8772 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8773 && operand_equal_p (TREE_OPERAND (exp, 2),
8774 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8775 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8776 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8777 && operand_equal_p (TREE_OPERAND (exp, 1),
8778 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8779 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8780 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8781 && operand_equal_p (TREE_OPERAND (exp, 2),
8782 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8783 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8784 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8785 && operand_equal_p (TREE_OPERAND (exp, 1),
8786 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8787 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8789 /* If we are not to produce a result, we have no target. Otherwise,
8790 if a target was specified use it; it will not be used as an
8791 intermediate target unless it is safe. If no target, use a
8792 temporary. */
8794 if (ignore)
8795 temp = 0;
8796 else if (modifier == EXPAND_STACK_PARM)
8797 temp = assign_temp (type, 0, 0, 1);
8798 else if (original_target
8799 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8800 || (singleton && GET_CODE (original_target) == REG
8801 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8802 && original_target == var_rtx (singleton)))
8803 && GET_MODE (original_target) == mode
8804 #ifdef HAVE_conditional_move
8805 && (! can_conditionally_move_p (mode)
8806 || GET_CODE (original_target) == REG
8807 || TREE_ADDRESSABLE (type))
8808 #endif
8809 && (GET_CODE (original_target) != MEM
8810 || TREE_ADDRESSABLE (type)))
8811 temp = original_target;
8812 else if (TREE_ADDRESSABLE (type))
8813 abort ();
8814 else
8815 temp = assign_temp (type, 0, 0, 1);
8817 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8818 do the test of X as a store-flag operation, do this as
8819 A + ((X != 0) << log C). Similarly for other simple binary
8820 operators. Only do for C == 1 if BRANCH_COST is low. */
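/* E.g. "X ? A + 4 : A" becomes A + ((X != 0) << 2), provided the
   store-flag expansion of X succeeds.  */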
8821 if (temp && singleton && binary_op
8822 && (TREE_CODE (binary_op) == PLUS_EXPR
8823 || TREE_CODE (binary_op) == MINUS_EXPR
8824 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8825 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8826 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8827 : integer_onep (TREE_OPERAND (binary_op, 1)))
8828 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8830 rtx result;
8831 tree cond;
8832 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8833 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8834 ? addv_optab : add_optab)
8835 : TREE_CODE (binary_op) == MINUS_EXPR
8836 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8837 ? subv_optab : sub_optab)
8838 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8839 : xor_optab);
8841 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8842 if (singleton == TREE_OPERAND (exp, 1))
8843 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8844 else
8845 cond = TREE_OPERAND (exp, 0);
8847 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8848 ? temp : NULL_RTX),
8849 mode, BRANCH_COST <= 1);
8851 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8852 result = expand_shift (LSHIFT_EXPR, mode, result,
8853 build_int_2 (tree_log2
8854 (TREE_OPERAND
8855 (binary_op, 1)),
8856 0),
8857 (safe_from_p (temp, singleton, 1)
8858 ? temp : NULL_RTX), 0);
8860 if (result)
8862 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8863 return expand_binop (mode, boptab, op1, result, temp,
8864 unsignedp, OPTAB_LIB_WIDEN);
8868 do_pending_stack_adjust ();
8869 NO_DEFER_POP;
8870 op0 = gen_label_rtx ();
8872 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8874 if (temp != 0)
8876 /* If the target conflicts with the other operand of the
8877 binary op, we can't use it. Also, we can't use the target
8878 if it is a hard register, because evaluating the condition
8879 might clobber it. */
8880 if ((binary_op
8881 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8882 || (GET_CODE (temp) == REG
8883 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8884 temp = gen_reg_rtx (mode);
8885 store_expr (singleton, temp,
8886 modifier == EXPAND_STACK_PARM ? 2 : 0);
8888 else
8889 expand_expr (singleton,
8890 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8891 if (singleton == TREE_OPERAND (exp, 1))
8892 jumpif (TREE_OPERAND (exp, 0), op0);
8893 else
8894 jumpifnot (TREE_OPERAND (exp, 0), op0);
8896 start_cleanup_deferral ();
8897 if (binary_op && temp == 0)
8898 /* Just touch the other operand. */
8899 expand_expr (TREE_OPERAND (binary_op, 1),
8900 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8901 else if (binary_op)
8902 store_expr (build (TREE_CODE (binary_op), type,
8903 make_tree (type, temp),
8904 TREE_OPERAND (binary_op, 1)),
8905 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8906 else
8907 store_expr (build1 (TREE_CODE (unary_op), type,
8908 make_tree (type, temp)),
8909 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8910 op1 = op0;
8912 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8913 comparison operator. If we have one of these cases, set the
8914 output to A, branch on A (cse will merge these two references),
8915 then set the output to FOO. */
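/* E.g. "X != 0 ? X : FOO" is emitted as: copy X into the output,
   jump past the else-arm when X != 0, otherwise store FOO.  */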
8916 else if (temp
8917 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8918 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8919 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8920 TREE_OPERAND (exp, 1), 0)
8921 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8922 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8923 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8925 if (GET_CODE (temp) == REG
8926 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8927 temp = gen_reg_rtx (mode);
8928 store_expr (TREE_OPERAND (exp, 1), temp,
8929 modifier == EXPAND_STACK_PARM ? 2 : 0);
8930 jumpif (TREE_OPERAND (exp, 0), op0);
8932 start_cleanup_deferral ();
8933 store_expr (TREE_OPERAND (exp, 2), temp,
8934 modifier == EXPAND_STACK_PARM ? 2 : 0);
8935 op1 = op0;
8937 else if (temp
8938 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8939 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8940 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8941 TREE_OPERAND (exp, 2), 0)
8942 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8943 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8944 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8946 if (GET_CODE (temp) == REG
8947 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8948 temp = gen_reg_rtx (mode);
8949 store_expr (TREE_OPERAND (exp, 2), temp,
8950 modifier == EXPAND_STACK_PARM ? 2 : 0);
8951 jumpifnot (TREE_OPERAND (exp, 0), op0);
8953 start_cleanup_deferral ();
8954 store_expr (TREE_OPERAND (exp, 1), temp,
8955 modifier == EXPAND_STACK_PARM ? 2 : 0);
8956 op1 = op0;
8958 else
8960 op1 = gen_label_rtx ();
8961 jumpifnot (TREE_OPERAND (exp, 0), op0);
8963 start_cleanup_deferral ();
8965 /* One branch of the cond can be void, if it never returns. For
8966 example A ? throw : E */
8967 if (temp != 0
8968 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8969 store_expr (TREE_OPERAND (exp, 1), temp,
8970 modifier == EXPAND_STACK_PARM ? 2 : 0);
8971 else
8972 expand_expr (TREE_OPERAND (exp, 1),
8973 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8974 end_cleanup_deferral ();
8975 emit_queue ();
8976 emit_jump_insn (gen_jump (op1));
8977 emit_barrier ();
8978 emit_label (op0);
8979 start_cleanup_deferral ();
8980 if (temp != 0
8981 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8982 store_expr (TREE_OPERAND (exp, 2), temp,
8983 modifier == EXPAND_STACK_PARM ? 2 : 0);
8984 else
8985 expand_expr (TREE_OPERAND (exp, 2),
8986 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8989 end_cleanup_deferral ();
8991 emit_queue ();
8992 emit_label (op1);
8993 OK_DEFER_POP;
8995 return temp;
8998 case TARGET_EXPR:
9000 /* Something needs to be initialized, but we didn't know
9001 where that thing was when building the tree. For example,
9002 it could be the return value of a function, or a parameter
9003 to a function which is laid out on the stack, or a temporary
9004 variable which must be passed by reference.
9006 We guarantee that the expression will either be constructed
9007 or copied into our original target. */
9009 tree slot = TREE_OPERAND (exp, 0);
9010 tree cleanups = NULL_TREE;
9011 tree exp1;
9013 if (TREE_CODE (slot) != VAR_DECL)
9014 abort ();
9016 if (! ignore)
9017 target = original_target;
9019 /* Set this here so that if we get a target that refers to a
9020 register variable that's already been used, put_reg_into_stack
9021 knows that it should fix up those uses. */
9022 TREE_USED (slot) = 1;
9024 if (target == 0)
9026 if (DECL_RTL_SET_P (slot))
9028 target = DECL_RTL (slot);
9029 /* If we have already expanded the slot, don't do
9030 it again. (mrs) */
9031 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9032 return target;
9034 else
9036 target = assign_temp (type, 2, 0, 1);
9037 /* All temp slots at this level must not conflict. */
9038 preserve_temp_slots (target);
9039 SET_DECL_RTL (slot, target);
9040 if (TREE_ADDRESSABLE (slot))
9041 put_var_into_stack (slot, /*rescan=*/false);
9043 /* Since SLOT is not known to the called function
9044 to belong to its stack frame, we must build an explicit
9045 cleanup. This case occurs when we must build up a reference
9046 to pass the reference as an argument. In this case,
9047 it is very likely that such a reference need not be
9048 built here. */
9050 if (TREE_OPERAND (exp, 2) == 0)
9051 TREE_OPERAND (exp, 2)
9052 = (*lang_hooks.maybe_build_cleanup) (slot);
9053 cleanups = TREE_OPERAND (exp, 2);
9056 else
9058 /* This case does occur when expanding a parameter which
9059 needs to be constructed on the stack. The target
9060 is the actual stack address that we want to initialize.
9061 The function we call will perform the cleanup in this case. */
9063 /* If we have already assigned it space, use that space,
9064 not the target that we were passed in, since our target
9065 parameter is only a hint. */
9066 if (DECL_RTL_SET_P (slot))
9068 target = DECL_RTL (slot);
9069 /* If we have already expanded the slot, don't do
9070 it again. (mrs) */
9071 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9072 return target;
9074 else
9076 SET_DECL_RTL (slot, target);
9077 /* If we must have an addressable slot, then make sure that
9078 the RTL that we just stored in slot is OK. */
9079 if (TREE_ADDRESSABLE (slot))
9080 put_var_into_stack (slot, /*rescan=*/true);
9084 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
9085 /* Mark it as expanded. */
9086 TREE_OPERAND (exp, 1) = NULL_TREE;
9088 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
9090 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
9092 return target;
9095 case INIT_EXPR:
9097 tree lhs = TREE_OPERAND (exp, 0);
9098 tree rhs = TREE_OPERAND (exp, 1);
9100 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9101 return temp;
9104 case MODIFY_EXPR:
9106 /* If lhs is complex, expand calls in rhs before computing it.
9107 That's so we don't compute a pointer and save it over a
9108 call. If lhs is simple, compute it first so we can give it
9109 as a target if the rhs is just a call. This avoids an
9110 extra temp and copy, and prevents a partial subsumption
9111 which makes bad code. Actually we could treat
9112 component_ref's of vars like vars. */
9114 tree lhs = TREE_OPERAND (exp, 0);
9115 tree rhs = TREE_OPERAND (exp, 1);
9117 temp = 0;
9119 /* Check for |= or &= of a bitfield of size one into another bitfield
9120 of size one. In this case (unless we need the result of the
9121 assignment) we can do this more efficiently with a
9122 test followed by an assignment, if necessary.
9124 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9125 things change so we do, this code should be enhanced to
9126 support it. */
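/* For example, given `struct s { unsigned a : 1, b : 1; } x;', the
   statement `x.a |= x.b;' (whose value is not used) can be expanded as
   a test of x.b, a conditional jump, and a store of the constant 1
   into x.a, instead of a read-modify-write of x.a.  */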
9127 if (ignore
9128 && TREE_CODE (lhs) == COMPONENT_REF
9129 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9130 || TREE_CODE (rhs) == BIT_AND_EXPR)
9131 && TREE_OPERAND (rhs, 0) == lhs
9132 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9133 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9134 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9136 rtx label = gen_label_rtx ();
9138 do_jump (TREE_OPERAND (rhs, 1),
9139 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
9140 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
9141 expand_assignment (lhs, convert (TREE_TYPE (rhs),
9142 (TREE_CODE (rhs) == BIT_IOR_EXPR
9143 ? integer_one_node
9144 : integer_zero_node)),
9145 0, 0);
9146 do_pending_stack_adjust ();
9147 emit_label (label);
9148 return const0_rtx;
9151 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9153 return temp;
9156 case RETURN_EXPR:
9157 if (!TREE_OPERAND (exp, 0))
9158 expand_null_return ();
9159 else
9160 expand_return (TREE_OPERAND (exp, 0));
9161 return const0_rtx;
9163 case PREINCREMENT_EXPR:
9164 case PREDECREMENT_EXPR:
9165 return expand_increment (exp, 0, ignore);
9167 case POSTINCREMENT_EXPR:
9168 case POSTDECREMENT_EXPR:
9169 /* Faster to treat as pre-increment if result is not used. */
9170 return expand_increment (exp, ! ignore, ignore);
9172 case ADDR_EXPR:
9173 if (modifier == EXPAND_STACK_PARM)
9174 target = 0;
9175 /* Are we taking the address of a nested function? */
9176 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
9177 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
9178 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
9179 && ! TREE_STATIC (exp))
9181 op0 = trampoline_address (TREE_OPERAND (exp, 0));
9182 op0 = force_operand (op0, target);
9184 /* If we are taking the address of something erroneous, just
9185 return a zero. */
9186 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9187 return const0_rtx;
9188 /* If we are taking the address of a constant and are at the
9189 top level, we have to use output_constant_def since we can't
9190 call force_const_mem at top level. */
9191 else if (cfun == 0
9192 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9193 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
9194 == 'c')))
9195 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
9196 else
9198 /* We make sure to pass const0_rtx down if we came in with
9199 ignore set, to avoid doing the cleanups twice.
9200 op0 = expand_expr (TREE_OPERAND (exp, 0),
9201 ignore ? const0_rtx : NULL_RTX, VOIDmode,
9202 (modifier == EXPAND_INITIALIZER
9203 ? modifier : EXPAND_CONST_ADDRESS));
9205 /* If we are going to ignore the result, OP0 will have been set
9206 to const0_rtx, so just return it. Don't get confused and
9207 think we are taking the address of the constant. */
9208 if (ignore)
9209 return op0;
9211 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
9212 clever and return a REG when given a MEM. */
9213 op0 = protect_from_queue (op0, 1);
9215 /* We would like the object in memory. If it is a constant, we can
9216 have it be statically allocated into memory. For a non-constant,
9217 we need to allocate some memory and store the value into it. */
9219 if (CONSTANT_P (op0))
9220 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
9221 op0);
9222 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9223 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
9224 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
9226 /* If the operand is a SAVE_EXPR, we can deal with this by
9227 forcing the SAVE_EXPR into memory. */
9228 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
9230 put_var_into_stack (TREE_OPERAND (exp, 0),
9231 /*rescan=*/true);
9232 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
9234 else
9236 /* If this object is in a register, it can't be BLKmode. */
9237 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9238 rtx memloc = assign_temp (inner_type, 1, 1, 1);
9240 if (GET_CODE (op0) == PARALLEL)
9241 /* Handle calls that pass values in multiple
9242 non-contiguous locations. The Irix 6 ABI has examples
9243 of this. */
9244 emit_group_store (memloc, op0,
9245 int_size_in_bytes (inner_type));
9246 else
9247 emit_move_insn (memloc, op0);
9249 op0 = memloc;
9253 if (GET_CODE (op0) != MEM)
9254 abort ();
9256 mark_temp_addr_taken (op0);
9257 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9259 op0 = XEXP (op0, 0);
9260 #ifdef POINTERS_EXTEND_UNSIGNED
9261 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9262 && mode == ptr_mode)
9263 op0 = convert_memory_address (ptr_mode, op0);
9264 #endif
9265 return op0;
9268 /* If OP0 is not aligned at least as much as the type requires, we
9269 need to make a temporary, copy OP0 to it, and take the address of
9270 the temporary. We want to use the alignment of the type, not of
9271 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9272 the test for BLKmode means that can't happen. The test for
9273 BLKmode is because we never make mis-aligned MEMs with
9274 non-BLKmode.
9276 We don't need to do this at all if the machine doesn't have
9277 strict alignment. */
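/* For example, taking the address of a BLKmode member of a packed
   structure on a strict-alignment target can land here: the member's
   type may require more alignment than its MEM provides, so we copy
   the value to a suitably aligned temporary and return that
   temporary's address instead.  */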
9278 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9279 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9280 > MEM_ALIGN (op0))
9281 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9283 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9284 rtx new;
9286 if (TYPE_ALIGN_OK (inner_type))
9287 abort ();
9289 if (TREE_ADDRESSABLE (inner_type))
9291 /* We can't make a bitwise copy of this object, so fail. */
9292 error ("cannot take the address of an unaligned member");
9293 return const0_rtx;
9296 new = assign_stack_temp_for_type
9297 (TYPE_MODE (inner_type),
9298 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9299 : int_size_in_bytes (inner_type),
9300 1, build_qualified_type (inner_type,
9301 (TYPE_QUALS (inner_type)
9302 | TYPE_QUAL_CONST)));
9304 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9305 (modifier == EXPAND_STACK_PARM
9306 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9308 op0 = new;
9311 op0 = force_operand (XEXP (op0, 0), target);
9314 if (flag_force_addr
9315 && GET_CODE (op0) != REG
9316 && modifier != EXPAND_CONST_ADDRESS
9317 && modifier != EXPAND_INITIALIZER
9318 && modifier != EXPAND_SUM)
9319 op0 = force_reg (Pmode, op0);
9321 if (GET_CODE (op0) == REG
9322 && ! REG_USERVAR_P (op0))
9323 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9325 #ifdef POINTERS_EXTEND_UNSIGNED
9326 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9327 && mode == ptr_mode)
9328 op0 = convert_memory_address (ptr_mode, op0);
9329 #endif
9331 return op0;
9333 case ENTRY_VALUE_EXPR:
9334 abort ();
9336 /* COMPLEX type for Extended Pascal & Fortran */
9337 case COMPLEX_EXPR:
9339 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9340 rtx insns;
9342 /* Get the rtx code of the operands. */
9343 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9344 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9346 if (! target)
9347 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9349 start_sequence ();
9351 /* Move the real (op0) and imaginary (op1) parts to their location. */
9352 emit_move_insn (gen_realpart (mode, target), op0);
9353 emit_move_insn (gen_imagpart (mode, target), op1);
9355 insns = get_insns ();
9356 end_sequence ();
9358 /* Complex construction should appear as a single unit. */
9359 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9360 each with a separate pseudo as destination.
9361 It's not correct for flow to treat them as a unit. */
9362 if (GET_CODE (target) != CONCAT)
9363 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9364 else
9365 emit_insn (insns);
9367 return target;
9370 case REALPART_EXPR:
9371 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9372 return gen_realpart (mode, op0);
9374 case IMAGPART_EXPR:
9375 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9376 return gen_imagpart (mode, op0);
9378 case CONJ_EXPR:
9380 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9381 rtx imag_t;
9382 rtx insns;
9384 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9386 if (! target)
9387 target = gen_reg_rtx (mode);
9389 start_sequence ();
9391 /* Store the realpart and the negated imagpart to target. */
9392 emit_move_insn (gen_realpart (partmode, target),
9393 gen_realpart (partmode, op0));
9395 imag_t = gen_imagpart (partmode, target);
9396 temp = expand_unop (partmode,
9397 ! unsignedp && flag_trapv
9398 && (GET_MODE_CLASS(partmode) == MODE_INT)
9399 ? negv_optab : neg_optab,
9400 gen_imagpart (partmode, op0), imag_t, 0);
9401 if (temp != imag_t)
9402 emit_move_insn (imag_t, temp);
9404 insns = get_insns ();
9405 end_sequence ();
9407 /* Conjugate should appear as a single unit.
9408 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9409 each with a separate pseudo as destination.
9410 It's not correct for flow to treat them as a unit. */
9411 if (GET_CODE (target) != CONCAT)
9412 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9413 else
9414 emit_insn (insns);
9416 return target;
9419 case TRY_CATCH_EXPR:
9421 tree handler = TREE_OPERAND (exp, 1);
9423 expand_eh_region_start ();
9425 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9427 expand_eh_region_end_cleanup (handler);
9429 return op0;
9432 case TRY_FINALLY_EXPR:
9434 tree try_block = TREE_OPERAND (exp, 0);
9435 tree finally_block = TREE_OPERAND (exp, 1);
9437 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9439 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9440 is not sufficient, so we cannot expand the block twice.
9441 Instead we play games with GOTO_SUBROUTINE_EXPR to let us
9442 expand the block only once. */
9443 /* When not optimizing, we go ahead with this form since
9444 (1) user breakpoints operate more predictably without
9445 code duplication, and
9446 (2) we're not running any of the global optimizers
9447 that would explode in time/space with the highly
9448 connected CFG created by the indirect branching. */
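/* Roughly, in GNU C computed-goto notation, the code emitted for this
   form looks like

       <try block>
       return_link = &&resume_1; goto finally; resume_1:
       goto done;
     finally:
       <finally block>
       goto *return_link;
     done:

   where every exit path out of the binding contour runs the
   GOTO_SUBROUTINE_EXPR cleanup with its own resume label, so the
   finally block itself is emitted only once.  */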
9450 rtx finally_label = gen_label_rtx ();
9451 rtx done_label = gen_label_rtx ();
9452 rtx return_link = gen_reg_rtx (Pmode);
9453 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9454 (tree) finally_label, (tree) return_link);
9455 TREE_SIDE_EFFECTS (cleanup) = 1;
9457 /* Start a new binding layer that will keep track of all cleanup
9458 actions to be performed. */
9459 expand_start_bindings (2);
9460 target_temp_slot_level = temp_slot_level;
9462 expand_decl_cleanup (NULL_TREE, cleanup);
9463 op0 = expand_expr (try_block, target, tmode, modifier);
9465 preserve_temp_slots (op0);
9466 expand_end_bindings (NULL_TREE, 0, 0);
9467 emit_jump (done_label);
9468 emit_label (finally_label);
9469 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9470 emit_indirect_jump (return_link);
9471 emit_label (done_label);
9473 else
9475 expand_start_bindings (2);
9476 target_temp_slot_level = temp_slot_level;
9478 expand_decl_cleanup (NULL_TREE, finally_block);
9479 op0 = expand_expr (try_block, target, tmode, modifier);
9481 preserve_temp_slots (op0);
9482 expand_end_bindings (NULL_TREE, 0, 0);
9485 return op0;
9488 case GOTO_SUBROUTINE_EXPR:
9490 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9491 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9492 rtx return_address = gen_label_rtx ();
9493 emit_move_insn (return_link,
9494 gen_rtx_LABEL_REF (Pmode, return_address));
9495 emit_jump (subr);
9496 emit_label (return_address);
9497 return const0_rtx;
9500 case VA_ARG_EXPR:
9501 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9503 case EXC_PTR_EXPR:
9504 return get_exception_pointer (cfun);
9506 case FDESC_EXPR:
9507 /* Function descriptors are not valid except as
9508 initialization constants, and should not be expanded. */
9509 abort ();
9511 default:
9512 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9515 /* Here to do an ordinary binary operator, generating an instruction
9516 from the optab already placed in `this_optab'. */
9517 binop:
9518 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9519 subtarget = 0;
9520 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9521 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9522 binop2:
9523 if (modifier == EXPAND_STACK_PARM)
9524 target = 0;
9525 temp = expand_binop (mode, this_optab, op0, op1, target,
9526 unsignedp, OPTAB_LIB_WIDEN);
9527 if (temp == 0)
9528 abort ();
9529 return temp;
9532 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9533 when applied to the address of EXP produces an address known to be
9534 aligned more than BIGGEST_ALIGNMENT. */
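/* Such an offset typically comes from source that rounds an address up
   to a large boundary, e.g. an offset of the form
   `(-(long) &exp) & (ALIGN - 1)' where ALIGN is a power of 2 larger
   than BIGGEST_ALIGNMENT.  */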
9536 static int
9537 is_aligning_offset (offset, exp)
9538 tree offset;
9539 tree exp;
9541 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9542 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9543 || TREE_CODE (offset) == NOP_EXPR
9544 || TREE_CODE (offset) == CONVERT_EXPR
9545 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9546 offset = TREE_OPERAND (offset, 0);
9548 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9549 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9550 if (TREE_CODE (offset) != BIT_AND_EXPR
9551 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9552 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9553 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9554 return 0;
9556 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9557 It must be NEGATE_EXPR. Then strip any more conversions. */
9558 offset = TREE_OPERAND (offset, 0);
9559 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9560 || TREE_CODE (offset) == NOP_EXPR
9561 || TREE_CODE (offset) == CONVERT_EXPR)
9562 offset = TREE_OPERAND (offset, 0);
9564 if (TREE_CODE (offset) != NEGATE_EXPR)
9565 return 0;
9567 offset = TREE_OPERAND (offset, 0);
9568 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9569 || TREE_CODE (offset) == NOP_EXPR
9570 || TREE_CODE (offset) == CONVERT_EXPR)
9571 offset = TREE_OPERAND (offset, 0);
9573 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9574 whose type is the same as EXP. */
9575 return (TREE_CODE (offset) == ADDR_EXPR
9576 && (TREE_OPERAND (offset, 0) == exp
9577 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9578 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9579 == TREE_TYPE (exp)))));
9582 /* Return the STRING_CST node if ARG corresponds to a string constant,
9583 or zero if it does not. If we return nonzero, set *PTR_OFFSET to the offset
9584 in bytes within the string that ARG is accessing. The type of the
9585 offset will be `sizetype'. */
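/* For example, for the argument `"hello" + 2' this returns the
   STRING_CST "hello" and sets *PTR_OFFSET to 2; for plain `"hello"'
   the offset is zero.  */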
9587 tree
9588 string_constant (arg, ptr_offset)
9589 tree arg;
9590 tree *ptr_offset;
9592 STRIP_NOPS (arg);
9594 if (TREE_CODE (arg) == ADDR_EXPR
9595 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9597 *ptr_offset = size_zero_node;
9598 return TREE_OPERAND (arg, 0);
9600 else if (TREE_CODE (arg) == PLUS_EXPR)
9602 tree arg0 = TREE_OPERAND (arg, 0);
9603 tree arg1 = TREE_OPERAND (arg, 1);
9605 STRIP_NOPS (arg0);
9606 STRIP_NOPS (arg1);
9608 if (TREE_CODE (arg0) == ADDR_EXPR
9609 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9611 *ptr_offset = convert (sizetype, arg1);
9612 return TREE_OPERAND (arg0, 0);
9614 else if (TREE_CODE (arg1) == ADDR_EXPR
9615 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9617 *ptr_offset = convert (sizetype, arg0);
9618 return TREE_OPERAND (arg1, 0);
9622 return 0;
9625 /* Expand code for a post- or pre- increment or decrement
9626 and return the RTX for the result.
9627 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
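/* IGNORE is nonzero if the value of the expression is not needed,
   only its side effect.  */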
9629 static rtx
9630 expand_increment (exp, post, ignore)
9631 tree exp;
9632 int post, ignore;
9634 rtx op0, op1;
9635 rtx temp, value;
9636 tree incremented = TREE_OPERAND (exp, 0);
9637 optab this_optab = add_optab;
9638 int icode;
9639 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9640 int op0_is_copy = 0;
9641 int single_insn = 0;
9642 /* 1 means we can't store into OP0 directly,
9643 because it is a subreg narrower than a word,
9644 and we don't dare clobber the rest of the word. */
9645 int bad_subreg = 0;
9647 /* Stabilize any component ref that might need to be
9648 evaluated more than once below. */
9649 if (!post
9650 || TREE_CODE (incremented) == BIT_FIELD_REF
9651 || (TREE_CODE (incremented) == COMPONENT_REF
9652 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9653 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9654 incremented = stabilize_reference (incremented);
9655 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9656 ones into save exprs so that they don't accidentally get evaluated
9657 more than once by the code below. */
9658 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9659 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9660 incremented = save_expr (incremented);
9662 /* Compute the operands as RTX.
9663 Note whether OP0 is the actual lvalue or a copy of it:
9664 I believe it is a copy iff it is a register or subreg
9665 and insns were generated in computing it. */
9667 temp = get_last_insn ();
9668 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9670 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9671 in place but instead must do sign- or zero-extension during assignment,
9672 so we copy it into a new register and let the code below use it as
9673 a copy.
9675 Note that we can safely modify this SUBREG since it is known not to be
9676 shared (it was made by the expand_expr call above). */
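/* For example, on a target that promotes an `int' variable to a wider
   register, OP0 may be a (subreg:SI (reg:DI ...)) with
   SUBREG_PROMOTED_VAR_P set; incrementing the narrow part alone would
   leave the extended copy of the value stale.  */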
9678 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9680 if (post)
9681 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9682 else
9683 bad_subreg = 1;
9685 else if (GET_CODE (op0) == SUBREG
9686 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9688 /* We cannot increment this SUBREG in place. If we are
9689 post-incrementing, get a copy of the old value. Otherwise,
9690 just mark that we cannot increment in place. */
9691 if (post)
9692 op0 = copy_to_reg (op0);
9693 else
9694 bad_subreg = 1;
9697 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9698 && temp != get_last_insn ());
9699 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9701 /* Decide whether incrementing or decrementing. */
9702 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9703 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9704 this_optab = sub_optab;
9706 /* Convert decrement by a constant into a negative increment. */
9707 if (this_optab == sub_optab
9708 && GET_CODE (op1) == CONST_INT)
9710 op1 = GEN_INT (-INTVAL (op1));
9711 this_optab = add_optab;
9714 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9715 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9717 /* For a preincrement, see if we can do this with a single instruction. */
9718 if (!post)
9720 icode = (int) this_optab->handlers[(int) mode].insn_code;
9721 if (icode != (int) CODE_FOR_nothing
9722 /* Make sure that OP0 is valid for operands 0 and 1
9723 of the insn we want to queue. */
9724 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9725 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9726 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9727 single_insn = 1;
9730 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9731 then we cannot just increment OP0. We must therefore contrive to
9732 increment the original value. Then, for postincrement, we can return
9733 OP0 since it is a copy of the old value. For preincrement, expand here
9734 unless we can do it with a single insn.
9736 Likewise if storing directly into OP0 would clobber high bits
9737 we need to preserve (bad_subreg). */
9738 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9740 /* This is the easiest way to increment the value wherever it is.
9741 Problems with multiple evaluation of INCREMENTED are prevented
9742 because either (1) it is a component_ref or preincrement,
9743 in which case it was stabilized above, or (2) it is an array_ref
9744 with constant index in an array in a register, which is
9745 safe to reevaluate. */
9746 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9747 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9748 ? MINUS_EXPR : PLUS_EXPR),
9749 TREE_TYPE (exp),
9750 incremented,
9751 TREE_OPERAND (exp, 1));
9753 while (TREE_CODE (incremented) == NOP_EXPR
9754 || TREE_CODE (incremented) == CONVERT_EXPR)
9756 newexp = convert (TREE_TYPE (incremented), newexp);
9757 incremented = TREE_OPERAND (incremented, 0);
9760 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9761 return post ? op0 : temp;
9764 if (post)
9766 /* We have a true reference to the value in OP0.
9767 If there is an insn to add or subtract in this mode, queue it.
9768 Queueing the increment insn avoids the register shuffling
9769 that often results if we must increment now and first save
9770 the old value for subsequent use. */
9772 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9773 op0 = stabilize (op0);
9774 #endif
9776 icode = (int) this_optab->handlers[(int) mode].insn_code;
9777 if (icode != (int) CODE_FOR_nothing
9778 /* Make sure that OP0 is valid for operands 0 and 1
9779 of the insn we want to queue. */
9780 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9781 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9783 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9784 op1 = force_reg (mode, op1);
9786 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9788 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9790 rtx addr = (general_operand (XEXP (op0, 0), mode)
9791 ? force_reg (Pmode, XEXP (op0, 0))
9792 : copy_to_reg (XEXP (op0, 0)));
9793 rtx temp, result;
9795 op0 = replace_equiv_address (op0, addr);
9796 temp = force_reg (GET_MODE (op0), op0);
9797 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9798 op1 = force_reg (mode, op1);
9800 /* The increment queue is LIFO, thus we have to `queue'
9801 the instructions in reverse order. */
9802 enqueue_insn (op0, gen_move_insn (op0, temp));
9803 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9804 return result;
9808 /* Preincrement, or we can't increment with one simple insn. */
9809 if (post)
9810 /* Save a copy of the value before inc or dec, to return it later. */
9811 temp = value = copy_to_reg (op0);
9812 else
9813 /* Arrange to return the incremented value. */
9814 /* Copy the rtx because expand_binop will protect from the queue,
9815 and the results of that would be invalid for us to return
9816 if our caller does emit_queue before using our result. */
9817 temp = copy_rtx (value = op0);
9819 /* Increment however we can. */
9820 op1 = expand_binop (mode, this_optab, value, op1, op0,
9821 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9823 /* Make sure the value is stored into OP0. */
9824 if (op1 != op0)
9825 emit_move_insn (op0, op1);
9827 return temp;
9830 /* Generate code to calculate EXP using a store-flag instruction
9831 and return an rtx for the result. EXP is either a comparison
9832 or a TRUTH_NOT_EXPR whose operand is a comparison.
9834 If TARGET is nonzero, store the result there if convenient.
9836 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9837 cheap.
9839 Return zero if there is no suitable set-flag instruction
9840 available on this machine.
9842 Once expand_expr has been called on the arguments of the comparison,
9843 we are committed to doing the store flag, since it is not safe to
9844 re-evaluate the expression. We emit the store-flag insn by calling
9845 emit_store_flag, but only expand the arguments if we have a reason
9846 to believe that emit_store_flag will be successful. If we think that
9847 it will, but it isn't, we have to simulate the store-flag with a
9848 set/jump/set sequence. */
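/* For example, on a machine with a store-flag (scc) instruction,
   `flag = (a == b);' can be expanded to a compare followed by a single
   scc insn, instead of a compare, a conditional branch, and two
   moves.  */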
9850 static rtx
9851 do_store_flag (exp, target, mode, only_cheap)
9852 tree exp;
9853 rtx target;
9854 enum machine_mode mode;
9855 int only_cheap;
9857 enum rtx_code code;
9858 tree arg0, arg1, type;
9859 tree tem;
9860 enum machine_mode operand_mode;
9861 int invert = 0;
9862 int unsignedp;
9863 rtx op0, op1;
9864 enum insn_code icode;
9865 rtx subtarget = target;
9866 rtx result, label;
9868 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9869 result at the end. We can't simply invert the test since it would
9870 have already been inverted if it were valid. This case occurs for
9871 some floating-point comparisons. */
9873 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9874 invert = 1, exp = TREE_OPERAND (exp, 0);
9876 arg0 = TREE_OPERAND (exp, 0);
9877 arg1 = TREE_OPERAND (exp, 1);
9879 /* Don't crash if the comparison was erroneous. */
9880 if (arg0 == error_mark_node || arg1 == error_mark_node)
9881 return const0_rtx;
9883 type = TREE_TYPE (arg0);
9884 operand_mode = TYPE_MODE (type);
9885 unsignedp = TREE_UNSIGNED (type);
9887 /* We won't bother with BLKmode store-flag operations because it would mean
9888 passing a lot of information to emit_store_flag. */
9889 if (operand_mode == BLKmode)
9890 return 0;
9892 /* We won't bother with store-flag operations involving function pointers
9893 when function pointers must be canonicalized before comparisons. */
9894 #ifdef HAVE_canonicalize_funcptr_for_compare
9895 if (HAVE_canonicalize_funcptr_for_compare
9896 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9897 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9898 == FUNCTION_TYPE))
9899 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9900 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9901 == FUNCTION_TYPE))))
9902 return 0;
9903 #endif
9905 STRIP_NOPS (arg0);
9906 STRIP_NOPS (arg1);
9908 /* Get the rtx comparison code to use. We know that EXP is a comparison
9909 operation of some type. Some comparisons against 1 and -1 can be
9910 converted to comparisons with zero. Do so here so that the tests
9911 below will be aware that we have a comparison with zero. These
9912 tests will not catch constants in the first operand, but constants
9913 are rarely passed as the first operand. */
9915 switch (TREE_CODE (exp))
9917 case EQ_EXPR:
9918 code = EQ;
9919 break;
9920 case NE_EXPR:
9921 code = NE;
9922 break;
9923 case LT_EXPR:
9924 if (integer_onep (arg1))
9925 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9926 else
9927 code = unsignedp ? LTU : LT;
9928 break;
9929 case LE_EXPR:
9930 if (! unsignedp && integer_all_onesp (arg1))
9931 arg1 = integer_zero_node, code = LT;
9932 else
9933 code = unsignedp ? LEU : LE;
9934 break;
9935 case GT_EXPR:
9936 if (! unsignedp && integer_all_onesp (arg1))
9937 arg1 = integer_zero_node, code = GE;
9938 else
9939 code = unsignedp ? GTU : GT;
9940 break;
9941 case GE_EXPR:
9942 if (integer_onep (arg1))
9943 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9944 else
9945 code = unsignedp ? GEU : GE;
9946 break;
9948 case UNORDERED_EXPR:
9949 code = UNORDERED;
9950 break;
9951 case ORDERED_EXPR:
9952 code = ORDERED;
9953 break;
9954 case UNLT_EXPR:
9955 code = UNLT;
9956 break;
9957 case UNLE_EXPR:
9958 code = UNLE;
9959 break;
9960 case UNGT_EXPR:
9961 code = UNGT;
9962 break;
9963 case UNGE_EXPR:
9964 code = UNGE;
9965 break;
9966 case UNEQ_EXPR:
9967 code = UNEQ;
9968 break;
9970 default:
9971 abort ();
9974 /* Put a constant second. */
9975 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9977 tem = arg0; arg0 = arg1; arg1 = tem;
9978 code = swap_condition (code);
9981 /* If this is an equality or inequality test of a single bit, we can
9982 do this by shifting the bit being tested to the low-order bit and
9983 masking the result with the constant 1. If the condition was EQ,
9984 we xor it with 1. This does not require an scc insn and is faster
9985 than an scc insn even if we have it. */
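/* For example, `(x & 0x20) != 0' becomes roughly `(x >> 5) & 1', and
   the EQ form gets an additional XOR with 1; no store-flag instruction
   is needed.  */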
9987 if ((code == NE || code == EQ)
9988 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9989 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9991 tree inner = TREE_OPERAND (arg0, 0);
9992 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
9993 int ops_unsignedp;
9995 /* If INNER is a right shift of a constant and it plus BITNUM does
9996 not overflow, adjust BITNUM and INNER. */
9998 if (TREE_CODE (inner) == RSHIFT_EXPR
9999 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10000 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10001 && bitnum < TYPE_PRECISION (type)
10002 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10003 bitnum - TYPE_PRECISION (type)))
10005 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10006 inner = TREE_OPERAND (inner, 0);
10009 /* If we are going to be able to omit the AND below, we must do our
10010 operations as unsigned. If we must use the AND, we have a choice.
10011 Normally unsigned is faster, but for some machines signed is. */
10012 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10013 #ifdef LOAD_EXTEND_OP
10014 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10015 #else
10017 #endif
10020 if (! get_subtarget (subtarget)
10021 || GET_MODE (subtarget) != operand_mode
10022 || ! safe_from_p (subtarget, inner, 1))
10023 subtarget = 0;
10025 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10027 if (bitnum != 0)
10028 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10029 size_int (bitnum), subtarget, ops_unsignedp);
10031 if (GET_MODE (op0) != mode)
10032 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10034 if ((code == EQ && ! invert) || (code == NE && invert))
10035 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10036 ops_unsignedp, OPTAB_LIB_WIDEN);
10038 /* Put the AND last so it can combine with more things. */
10039 if (bitnum != TYPE_PRECISION (type) - 1)
10040 op0 = expand_and (mode, op0, const1_rtx, subtarget);
10042 return op0;
10045 /* Now see if we are likely to be able to do this. Return if not. */
10046 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10047 return 0;
10049 icode = setcc_gen_code[(int) code];
10050 if (icode == CODE_FOR_nothing
10051 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10053 /* We can only do this if it is one of the special cases that
10054 can be handled without an scc insn. */
10055 if ((code == LT && integer_zerop (arg1))
10056 || (! only_cheap && code == GE && integer_zerop (arg1)))
10058 else if (BRANCH_COST >= 0
10059 && ! only_cheap && (code == NE || code == EQ)
10060 && TREE_CODE (type) != REAL_TYPE
10061 && ((abs_optab->handlers[(int) operand_mode].insn_code
10062 != CODE_FOR_nothing)
10063 || (ffs_optab->handlers[(int) operand_mode].insn_code
10064 != CODE_FOR_nothing)))
10066 else
10067 return 0;
10070 if (! get_subtarget (target)
10071 || GET_MODE (subtarget) != operand_mode
10072 || ! safe_from_p (subtarget, arg1, 1))
10073 subtarget = 0;
10075 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10076 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10078 if (target == 0)
10079 target = gen_reg_rtx (mode);
10081 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10082 because, if emit_store_flag does anything, it will succeed and
10083 OP0 and OP1 will not be used subsequently. */
10085 result = emit_store_flag (target, code,
10086 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10087 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10088 operand_mode, unsignedp, 1);
10090 if (result)
10092 if (invert)
10093 result = expand_binop (mode, xor_optab, result, const1_rtx,
10094 result, 0, OPTAB_LIB_WIDEN);
10095 return result;
10098 /* If this failed, we have to do this with set/compare/jump/set code. */
10099 if (GET_CODE (target) != REG
10100 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10101 target = gen_reg_rtx (GET_MODE (target));
10103 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10104 result = compare_from_rtx (op0, op1, code, unsignedp,
10105 operand_mode, NULL_RTX);
10106 if (GET_CODE (result) == CONST_INT)
10107 return (((result == const0_rtx && ! invert)
10108 || (result != const0_rtx && invert))
10109 ? const0_rtx : const1_rtx);
10111 /* The code of RESULT may not match CODE if compare_from_rtx
10112 decided to swap its operands and reverse the original code.
10114 We know that compare_from_rtx returns either a CONST_INT or
10115 a new comparison code, so it is safe to just extract the
10116 code from RESULT. */
10117 code = GET_CODE (result);
10119 label = gen_label_rtx ();
10120 if (bcc_gen_fctn[(int) code] == 0)
10121 abort ();
10123 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10124 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10125 emit_label (label);
10127 return target;
10131 /* Stubs in case we haven't got a casesi insn. */
10132 #ifndef HAVE_casesi
10133 # define HAVE_casesi 0
10134 # define gen_casesi(a, b, c, d, e) (0)
10135 # define CODE_FOR_casesi CODE_FOR_nothing
10136 #endif
10138 /* If the machine does not have a case insn that compares the bounds,
10139 this means extra overhead for dispatch tables, which raises the
10140 threshold for using them. */
10141 #ifndef CASE_VALUES_THRESHOLD
10142 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10143 #endif /* CASE_VALUES_THRESHOLD */
10145 unsigned int
10146 case_values_threshold ()
10148 return CASE_VALUES_THRESHOLD;
10151 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10152 0 otherwise (i.e. if there is no casesi instruction). */
10153 int
10154 try_casesi (index_type, index_expr, minval, range,
10155 table_label, default_label)
10156 tree index_type, index_expr, minval, range;
10157 rtx table_label ATTRIBUTE_UNUSED;
10158 rtx default_label;
10160 enum machine_mode index_mode = SImode;
10161 int index_bits = GET_MODE_BITSIZE (index_mode);
10162 rtx op1, op2, index;
10163 enum machine_mode op_mode;
10165 if (! HAVE_casesi)
10166 return 0;
10168 /* Convert the index to SImode. */
10169 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10171 enum machine_mode omode = TYPE_MODE (index_type);
10172 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10174 /* We must handle the endpoints in the original mode. */
10175 index_expr = build (MINUS_EXPR, index_type,
10176 index_expr, minval);
10177 minval = integer_zero_node;
10178 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10179 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10180 omode, 1, default_label);
10181 /* Now we can safely truncate. */
10182 index = convert_to_mode (index_mode, index, 0);
10184 else
10186 if (TYPE_MODE (index_type) != index_mode)
10188 index_expr = convert ((*lang_hooks.types.type_for_size)
10189 (index_bits, 0), index_expr);
10190 index_type = TREE_TYPE (index_expr);
10193 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10195 emit_queue ();
10196 index = protect_from_queue (index, 0);
10197 do_pending_stack_adjust ();
10199 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10200 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10201 (index, op_mode))
10202 index = copy_to_mode_reg (op_mode, index);
10204 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10206 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10207 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10208 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10209 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10210 (op1, op_mode))
10211 op1 = copy_to_mode_reg (op_mode, op1);
10213 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10215 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10216 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10217 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10218 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10219 (op2, op_mode))
10220 op2 = copy_to_mode_reg (op_mode, op2);
10222 emit_jump_insn (gen_casesi (index, op1, op2,
10223 table_label, default_label));
10224 return 1;
10227 /* Attempt to generate a tablejump instruction; same concept. */
10228 #ifndef HAVE_tablejump
10229 #define HAVE_tablejump 0
10230 #define gen_tablejump(x, y) (0)
10231 #endif
10233 /* Subroutine of the next function.
10235 INDEX is the value being switched on, with the lowest value
10236 in the table already subtracted.
10237 MODE is its expected mode (needed if INDEX is constant).
10238 RANGE is the length of the jump table.
10239 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10241 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10242 index value is out of range. */
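/* The emitted sequence is, roughly: an unsigned bounds check that
   branches to DEFAULT_LABEL, a load of the vector element at
   TABLE_LABEL + INDEX * GET_MODE_SIZE (CASE_VECTOR_MODE), and an
   indirect jump through that element.  */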
10244 static void
10245 do_tablejump (index, mode, range, table_label, default_label)
10246 rtx index, range, table_label, default_label;
10247 enum machine_mode mode;
10249 rtx temp, vector;
10251 if (INTVAL (range) > cfun->max_jumptable_ents)
10252 cfun->max_jumptable_ents = INTVAL (range);
10254 /* Do an unsigned comparison (in the proper mode) between the index
10255 expression and the value which represents the length of the range.
10256 Since we just finished subtracting the lower bound of the range
10257 from the index expression, this comparison allows us to simultaneously
10258 check that the original index expression value is both greater than
10259 or equal to the minimum value of the range and less than or equal to
10260 the maximum value of the range. */
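/* For example, with switch cases 3 through 7 the caller has already
   subtracted 3, so in-range indexes become 0..4; any other original
   value, viewed as unsigned, exceeds RANGE (values below 3 wrap around
   to very large numbers), and the single GTU branch catches both
   out-of-range directions.  */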
10262 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10263 default_label);
10265 /* If index is in range, it must fit in Pmode.
10266 Convert to Pmode so we can index with it. */
10267 if (mode != Pmode)
10268 index = convert_to_mode (Pmode, index, 1);
10270 /* Don't let a MEM slip through, because then INDEX that comes
10271 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10272 and break_out_memory_refs will go to work on it and mess it up. */
10273 #ifdef PIC_CASE_VECTOR_ADDRESS
10274 if (flag_pic && GET_CODE (index) != REG)
10275 index = copy_to_mode_reg (Pmode, index);
10276 #endif
10278 /* If flag_force_addr were to affect this address
10279 it could interfere with the tricky assumptions made
10280 about addresses that contain label-refs,
10281 which may be valid only very near the tablejump itself. */
10282 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10283 GET_MODE_SIZE, because this indicates how large insns are. The other
10284 uses should all be Pmode, because they are addresses. This code
10285 could fail if addresses and insns are not the same size. */
10286 index = gen_rtx_PLUS (Pmode,
10287 gen_rtx_MULT (Pmode, index,
10288 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10289 gen_rtx_LABEL_REF (Pmode, table_label));
10290 #ifdef PIC_CASE_VECTOR_ADDRESS
10291 if (flag_pic)
10292 index = PIC_CASE_VECTOR_ADDRESS (index);
10293 else
10294 #endif
10295 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10296 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10297 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10298 RTX_UNCHANGING_P (vector) = 1;
10299 MEM_NOTRAP_P (vector) = 1;
10300 convert_move (temp, vector, 0);
10302 emit_jump_insn (gen_tablejump (temp, table_label));
10304 /* If we are generating PIC code or if the table is PC-relative, the
10305 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10306 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10307 emit_barrier ();
10310 int
10311 try_tablejump (index_type, index_expr, minval, range,
10312 table_label, default_label)
10313 tree index_type, index_expr, minval, range;
10314 rtx table_label, default_label;
10316 rtx index;
10318 if (! HAVE_tablejump)
10319 return 0;
10321 index_expr = fold (build (MINUS_EXPR, index_type,
10322 convert (index_type, index_expr),
10323 convert (index_type, minval)));
10324 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10325 emit_queue ();
10326 index = protect_from_queue (index, 0);
10327 do_pending_stack_adjust ();
10329 do_tablejump (index, TYPE_MODE (index_type),
10330 convert_modes (TYPE_MODE (index_type),
10331 TYPE_MODE (TREE_TYPE (range)),
10332 expand_expr (range, NULL_RTX,
10333 VOIDmode, 0),
10334 TREE_UNSIGNED (TREE_TYPE (range))),
10335 table_label, default_label);
10336 return 1;
10339 /* Nonzero if the mode is a valid vector mode for this architecture.
10340 This returns nonzero even if there is no hardware support for the
10341 vector mode, but we can emulate with narrower modes. */
10343 int
10344 vector_mode_valid_p (mode)
10345 enum machine_mode mode;
10347 enum mode_class class = GET_MODE_CLASS (mode);
10348 enum machine_mode innermode;
10350 /* Doh! What's going on? */
10351 if (class != MODE_VECTOR_INT
10352 && class != MODE_VECTOR_FLOAT)
10353 return 0;
10355 /* Hardware support. Woo hoo! */
10356 if (VECTOR_MODE_SUPPORTED_P (mode))
10357 return 1;
10359 innermode = GET_MODE_INNER (mode);
10361 /* We should probably return 1 if requesting V4DI and we have no DI,
10362 but do have V2DI, though that case is probably very unlikely. */
10364 /* If we have support for the inner mode, we can safely emulate it.
10365 We may not have V2DI, but we can emulate with a pair of DIs. */
10366 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
10369 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10370 static rtx
10371 const_vector_from_tree (exp)
10372 tree exp;
10374 rtvec v;
10375 int units, i;
10376 tree link, elt;
10377 enum machine_mode inner, mode;
10379 mode = TYPE_MODE (TREE_TYPE (exp));
10381 if (is_zeros_p (exp))
10382 return CONST0_RTX (mode);
10384 units = GET_MODE_NUNITS (mode);
10385 inner = GET_MODE_INNER (mode);
10387 v = rtvec_alloc (units);
10389 link = TREE_VECTOR_CST_ELTS (exp);
10390 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10392 elt = TREE_VALUE (link);
10394 if (TREE_CODE (elt) == REAL_CST)
10395 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10396 inner);
10397 else
10398 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
10399 TREE_INT_CST_HIGH (elt),
10400 inner);
10403 return gen_rtx_raw_CONST_VECTOR (mode, v);
10406 #include "gt-expr.h"