expr.c (expand_assignment): Extend offset_rtx with convert_to_mode not with convert_m...
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
51 /* Decide whether a function's arguments should be processed
52 from first to last or from last to first.
54 They should if the stack and args grow in opposite directions, but
55 only if we have push insns. */
57 #ifdef PUSH_ROUNDING
59 #ifndef PUSH_ARGS_REVERSED
60 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
61 #define PUSH_ARGS_REVERSED /* If it's last to first. */
62 #endif
63 #endif
65 #endif
67 #ifndef STACK_PUSH_CODE
68 #ifdef STACK_GROWS_DOWNWARD
69 #define STACK_PUSH_CODE PRE_DEC
70 #else
71 #define STACK_PUSH_CODE PRE_INC
72 #endif
73 #endif
75 /* Assume that case vectors are not pc-relative. */
76 #ifndef CASE_VECTOR_PC_RELATIVE
77 #define CASE_VECTOR_PC_RELATIVE 0
78 #endif
80 /* Convert defined/undefined to boolean. */
81 #ifdef TARGET_MEM_FUNCTIONS
82 #undef TARGET_MEM_FUNCTIONS
83 #define TARGET_MEM_FUNCTIONS 1
84 #else
85 #define TARGET_MEM_FUNCTIONS 0
86 #endif
89 /* If this is nonzero, we do not bother generating VOLATILE
90 around volatile memory references, and we are willing to
91 output indirect addresses. If cse is to follow, we reject
92 indirect addresses so a useful potential cse is generated;
93 if it is used only once, instruction combination will produce
94 the same indirect address eventually. */
95 int cse_not_expected;
97 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
98 tree placeholder_list = 0;
100 /* This structure is used by move_by_pieces to describe the move to
101 be performed. */
102 struct move_by_pieces
104 rtx to;
105 rtx to_addr;
106 int autinc_to;
107 int explicit_inc_to;
108 rtx from;
109 rtx from_addr;
110 int autinc_from;
111 int explicit_inc_from;
112 unsigned HOST_WIDE_INT len;
113 HOST_WIDE_INT offset;
114 int reverse;
117 /* This structure is used by store_by_pieces to describe the clear to
118 be performed. */
120 struct store_by_pieces
122 rtx to;
123 rtx to_addr;
124 int autinc_to;
125 int explicit_inc_to;
126 unsigned HOST_WIDE_INT len;
127 HOST_WIDE_INT offset;
128 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
129 PTR constfundata;
130 int reverse;
133 static rtx enqueue_insn PARAMS ((rtx, rtx));
134 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
135 PARAMS ((unsigned HOST_WIDE_INT,
136 unsigned int));
137 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
138 struct move_by_pieces *));
139 static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
140 static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
141 static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
142 static tree emit_block_move_libcall_fn PARAMS ((int));
143 static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
144 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
145 enum machine_mode));
146 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
147 unsigned int));
148 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
149 unsigned int));
150 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
151 enum machine_mode,
152 struct store_by_pieces *));
153 static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
154 static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
155 static tree clear_storage_libcall_fn PARAMS ((int));
156 static rtx compress_float_constant PARAMS ((rtx, rtx));
157 static rtx get_subtarget PARAMS ((rtx));
158 static int is_zeros_p PARAMS ((tree));
159 static int mostly_zeros_p PARAMS ((tree));
160 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
161 HOST_WIDE_INT, enum machine_mode,
162 tree, tree, int, int));
163 static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
164 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
165 HOST_WIDE_INT, enum machine_mode,
166 tree, enum machine_mode, int, tree,
167 int));
168 static rtx var_rtx PARAMS ((tree));
169 static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
170 static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree));
171 static int is_aligning_offset PARAMS ((tree, tree));
172 static rtx expand_increment PARAMS ((tree, int, int));
173 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
174 #ifdef PUSH_ROUNDING
175 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
176 #endif
177 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
178 static rtx const_vector_from_tree PARAMS ((tree));
180 /* Record for each mode whether we can move a register directly to or
181 from an object of that mode in memory. If we can't, we won't try
182 to use that mode directly when accessing a field of that mode. */
184 static char direct_load[NUM_MACHINE_MODES];
185 static char direct_store[NUM_MACHINE_MODES];
187 /* Record for each mode whether we can float-extend from memory. */
189 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
191 /* If a memory-to-memory move would take MOVE_RATIO or more simple
192 move-instruction sequences, we will do a movstr or libcall instead. */
194 #ifndef MOVE_RATIO
195 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
196 #define MOVE_RATIO 2
197 #else
198 /* If we are optimizing for space (-Os), cut down the default move ratio. */
199 #define MOVE_RATIO (optimize_size ? 3 : 15)
200 #endif
201 #endif
203 /* This macro is used to determine whether move_by_pieces should be called
204 to perform a structure copy. */
205 #ifndef MOVE_BY_PIECES_P
206 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
207 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
208 #endif
210 /* If a clear memory operation would take CLEAR_RATIO or more simple
211 move-instruction sequences, we will do a clrstr or libcall instead. */
213 #ifndef CLEAR_RATIO
214 #if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
215 #define CLEAR_RATIO 2
216 #else
217 /* If we are optimizing for space, cut down the default clear ratio. */
218 #define CLEAR_RATIO (optimize_size ? 3 : 15)
219 #endif
220 #endif
222 /* This macro is used to determine whether clear_by_pieces should be
223 called to clear storage. */
224 #ifndef CLEAR_BY_PIECES_P
225 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
226 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
227 #endif
229 /* This macro is used to determine whether store_by_pieces should be
230 called to "memset" storage with byte values other than zero, or
231 to "memcpy" storage when the source is a constant string. */
232 #ifndef STORE_BY_PIECES_P
233 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
234 #endif
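
/* A minimal sketch of how the ratio heuristics above are meant to be
   consulted, assuming a caller that already has two BLKmode MEMs and a
   compile-time byte count.  The helper name copy_block_sketch is
   hypothetical; emit_block_move further down in this file makes the same
   MOVE_BY_PIECES_P decision for real.  Guarded out as illustration only.  */
#if 0
static void
copy_block_sketch (rtx dst, rtx src, unsigned HOST_WIDE_INT nbytes,
                   unsigned int align)
{
  if (MOVE_BY_PIECES_P (nbytes, align))
    /* Cheap enough: expand into individual move insns.  */
    move_by_pieces (dst, src, nbytes, align);
  else
    /* Otherwise let emit_block_move pick a movstr pattern or a
       memcpy/bcopy libcall.  */
    emit_block_move (dst, src, GEN_INT (nbytes), BLOCK_OP_NORMAL);
}
#endif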
236 /* This array records the insn_code of insns to perform block moves. */
237 enum insn_code movstr_optab[NUM_MACHINE_MODES];
239 /* This array records the insn_code of insns to perform block clears. */
240 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
242 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
244 #ifndef SLOW_UNALIGNED_ACCESS
245 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
246 #endif
248 /* This is run once per compilation to set up which modes can be used
249 directly in memory and to initialize the block move optab. */
251 void
252 init_expr_once ()
254 rtx insn, pat;
255 enum machine_mode mode;
256 int num_clobbers;
257 rtx mem, mem1;
258 rtx reg;
260 /* Try indexing by frame ptr and try by stack ptr.
261 It is known that on the Convex the stack ptr isn't a valid index.
262 With luck, one or the other is valid on any machine. */
263 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
264 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
266 /* A scratch register we can modify in-place below to avoid
267 useless RTL allocations. */
268 reg = gen_rtx_REG (VOIDmode, -1);
270 insn = rtx_alloc (INSN);
271 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
272 PATTERN (insn) = pat;
274 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
275 mode = (enum machine_mode) ((int) mode + 1))
277 int regno;
279 direct_load[(int) mode] = direct_store[(int) mode] = 0;
280 PUT_MODE (mem, mode);
281 PUT_MODE (mem1, mode);
282 PUT_MODE (reg, mode);
284 /* See if there is some register that can be used in this mode and
285 directly loaded or stored from memory. */
287 if (mode != VOIDmode && mode != BLKmode)
288 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
289 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
290 regno++)
292 if (! HARD_REGNO_MODE_OK (regno, mode))
293 continue;
295 REGNO (reg) = regno;
297 SET_SRC (pat) = mem;
298 SET_DEST (pat) = reg;
299 if (recog (pat, insn, &num_clobbers) >= 0)
300 direct_load[(int) mode] = 1;
302 SET_SRC (pat) = mem1;
303 SET_DEST (pat) = reg;
304 if (recog (pat, insn, &num_clobbers) >= 0)
305 direct_load[(int) mode] = 1;
307 SET_SRC (pat) = reg;
308 SET_DEST (pat) = mem;
309 if (recog (pat, insn, &num_clobbers) >= 0)
310 direct_store[(int) mode] = 1;
312 SET_SRC (pat) = reg;
313 SET_DEST (pat) = mem1;
314 if (recog (pat, insn, &num_clobbers) >= 0)
315 direct_store[(int) mode] = 1;
319 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
321 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
322 mode = GET_MODE_WIDER_MODE (mode))
324 enum machine_mode srcmode;
325 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
326 srcmode = GET_MODE_WIDER_MODE (srcmode))
328 enum insn_code ic;
330 ic = can_extend_p (mode, srcmode, 0);
331 if (ic == CODE_FOR_nothing)
332 continue;
334 PUT_MODE (mem, srcmode);
336 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
337 float_extend_from_mem[mode][srcmode] = true;
342 /* This is run at the start of compiling a function. */
344 void
345 init_expr ()
347 cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));
349 pending_chain = 0;
350 pending_stack_adjust = 0;
351 stack_pointer_delta = 0;
352 inhibit_defer_pop = 0;
353 saveregs_value = 0;
354 apply_args_value = 0;
355 forced_labels = 0;
358 /* Small sanity check that the queue is empty at the end of a function. */
360 void
361 finish_expr_for_function ()
363 if (pending_chain)
364 abort ();
367 /* Manage the queue of increment instructions to be output
368 for POSTINCREMENT_EXPR expressions, etc. */
370 /* Queue up to increment (or change) VAR later. BODY says how:
371 BODY should be the same thing you would pass to emit_insn
372 to increment right away. It will go to emit_insn later on.
374 The value is a QUEUED expression to be used in place of VAR
375 where you want to guarantee the pre-incrementation value of VAR. */
377 static rtx
378 enqueue_insn (var, body)
379 rtx var, body;
381 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
382 body, pending_chain);
383 return pending_chain;
386 /* Use protect_from_queue to convert a QUEUED expression
387 into something that you can put immediately into an instruction.
388 If the queued incrementation has not happened yet,
389 protect_from_queue returns the variable itself.
390 If the incrementation has happened, protect_from_queue returns a temp
391 that contains a copy of the old value of the variable.
393 Any time an rtx which might possibly be a QUEUED is to be put
394 into an instruction, it must be passed through protect_from_queue first.
395 QUEUED expressions are not meaningful in instructions.
397 Do not pass a value through protect_from_queue and then hold
398 on to it for a while before putting it in an instruction!
399 If the queue is flushed in between, incorrect code will result. */
401 rtx
402 protect_from_queue (x, modify)
403 rtx x;
404 int modify;
406 RTX_CODE code = GET_CODE (x);
408 #if 0 /* A QUEUED can hang around after the queue is forced out. */
409 /* Shortcut for most common case. */
410 if (pending_chain == 0)
411 return x;
412 #endif
414 if (code != QUEUED)
416 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
417 use of autoincrement. Make a copy of the contents of the memory
418 location rather than a copy of the address, but not if the value is
419 of mode BLKmode. Don't modify X in place since it might be
420 shared. */
421 if (code == MEM && GET_MODE (x) != BLKmode
422 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
424 rtx y = XEXP (x, 0);
425 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
427 if (QUEUED_INSN (y))
429 rtx temp = gen_reg_rtx (GET_MODE (x));
431 emit_insn_before (gen_move_insn (temp, new),
432 QUEUED_INSN (y));
433 return temp;
436 /* Copy the address into a pseudo, so that the returned value
437 remains correct across calls to emit_queue. */
438 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
441 /* Otherwise, recursively protect the subexpressions of all
442 the kinds of rtx's that can contain a QUEUED. */
443 if (code == MEM)
445 rtx tem = protect_from_queue (XEXP (x, 0), 0);
446 if (tem != XEXP (x, 0))
448 x = copy_rtx (x);
449 XEXP (x, 0) = tem;
452 else if (code == PLUS || code == MULT)
454 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
455 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
456 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
458 x = copy_rtx (x);
459 XEXP (x, 0) = new0;
460 XEXP (x, 1) = new1;
463 return x;
465 /* If the increment has not happened, use the variable itself. Copy it
466 into a new pseudo so that the value remains correct across calls to
467 emit_queue. */
468 if (QUEUED_INSN (x) == 0)
469 return copy_to_reg (QUEUED_VAR (x));
470 /* If the increment has happened and a pre-increment copy exists,
471 use that copy. */
472 if (QUEUED_COPY (x) != 0)
473 return QUEUED_COPY (x);
474 /* The increment has happened but we haven't set up a pre-increment copy.
475 Set one up now, and use it. */
476 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
477 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
478 QUEUED_INSN (x));
479 return QUEUED_COPY (x);
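
/* A minimal sketch of the queue discipline described above, assuming a
   writable TARGET and a read-only VALUE; the helper name is hypothetical.
   Operands go through protect_from_queue immediately before being placed
   in an insn, and emit_queue flushes the pending increments afterwards
   (convert_move below follows the same pattern).  Illustration only.  */
#if 0
static void
queued_store_sketch (rtx target, rtx value)
{
  /* MODIFY is nonzero for the destination we are about to write.  */
  target = protect_from_queue (target, 1);
  /* MODIFY is zero for an operand that is only read.  */
  value = protect_from_queue (value, 0);
  emit_move_insn (target, value);
  /* Now emit the queued post-increments.  */
  emit_queue ();
}
#endif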
482 /* Return nonzero if X contains a QUEUED expression:
483 if it contains anything that will be altered by a queued increment.
484 We handle only combinations of MEM, PLUS, MINUS and MULT operators
485 since memory addresses generally contain only those. */
487 int
488 queued_subexp_p (x)
489 rtx x;
491 enum rtx_code code = GET_CODE (x);
492 switch (code)
494 case QUEUED:
495 return 1;
496 case MEM:
497 return queued_subexp_p (XEXP (x, 0));
498 case MULT:
499 case PLUS:
500 case MINUS:
501 return (queued_subexp_p (XEXP (x, 0))
502 || queued_subexp_p (XEXP (x, 1)));
503 default:
504 return 0;
508 /* Perform all the pending incrementations. */
510 void
511 emit_queue ()
513 rtx p;
514 while ((p = pending_chain))
516 rtx body = QUEUED_BODY (p);
518 switch (GET_CODE (body))
520 case INSN:
521 case JUMP_INSN:
522 case CALL_INSN:
523 case CODE_LABEL:
524 case BARRIER:
525 case NOTE:
526 QUEUED_INSN (p) = body;
527 emit_insn (body);
528 break;
530 #ifdef ENABLE_CHECKING
531 case SEQUENCE:
532 abort ();
533 break;
534 #endif
536 default:
537 QUEUED_INSN (p) = emit_insn (body);
538 break;
541 pending_chain = QUEUED_NEXT (p);
545 /* Copy data from FROM to TO, where the machine modes are not the same.
546 Both modes may be integer, or both may be floating.
547 UNSIGNEDP should be nonzero if FROM is an unsigned type.
548 This causes zero-extension instead of sign-extension. */
550 void
551 convert_move (to, from, unsignedp)
552 rtx to, from;
553 int unsignedp;
555 enum machine_mode to_mode = GET_MODE (to);
556 enum machine_mode from_mode = GET_MODE (from);
557 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
558 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
559 enum insn_code code;
560 rtx libcall;
562 /* rtx code for making an equivalent value. */
563 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
564 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
566 to = protect_from_queue (to, 1);
567 from = protect_from_queue (from, 0);
569 if (to_real != from_real)
570 abort ();
572 /* If FROM is a SUBREG that indicates that we have already done at least
573 the required extension, strip it. We don't handle such SUBREGs as
574 TO here. */
576 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
577 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
578 >= GET_MODE_SIZE (to_mode))
579 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
580 from = gen_lowpart (to_mode, from), from_mode = to_mode;
582 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
583 abort ();
585 if (to_mode == from_mode
586 || (from_mode == VOIDmode && CONSTANT_P (from)))
588 emit_move_insn (to, from);
589 return;
592 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
594 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
595 abort ();
597 if (VECTOR_MODE_P (to_mode))
598 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
599 else
600 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
602 emit_move_insn (to, from);
603 return;
606 if (to_real != from_real)
607 abort ();
609 if (to_real)
611 rtx value, insns;
613 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
615 /* Try converting directly if the insn is supported. */
616 if ((code = can_extend_p (to_mode, from_mode, 0))
617 != CODE_FOR_nothing)
619 emit_unop_insn (code, to, from, UNKNOWN);
620 return;
624 #ifdef HAVE_trunchfqf2
625 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
627 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
628 return;
630 #endif
631 #ifdef HAVE_trunctqfqf2
632 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
634 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
635 return;
637 #endif
638 #ifdef HAVE_truncsfqf2
639 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
641 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
642 return;
644 #endif
645 #ifdef HAVE_truncdfqf2
646 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
648 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
649 return;
651 #endif
652 #ifdef HAVE_truncxfqf2
653 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
655 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
656 return;
658 #endif
659 #ifdef HAVE_trunctfqf2
660 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
662 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
663 return;
665 #endif
667 #ifdef HAVE_trunctqfhf2
668 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
670 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
671 return;
673 #endif
674 #ifdef HAVE_truncsfhf2
675 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
677 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
678 return;
680 #endif
681 #ifdef HAVE_truncdfhf2
682 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
684 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
685 return;
687 #endif
688 #ifdef HAVE_truncxfhf2
689 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
691 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
692 return;
694 #endif
695 #ifdef HAVE_trunctfhf2
696 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
698 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
699 return;
701 #endif
703 #ifdef HAVE_truncsftqf2
704 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
706 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
707 return;
709 #endif
710 #ifdef HAVE_truncdftqf2
711 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
713 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
714 return;
716 #endif
717 #ifdef HAVE_truncxftqf2
718 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
720 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
721 return;
723 #endif
724 #ifdef HAVE_trunctftqf2
725 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
727 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
728 return;
730 #endif
732 #ifdef HAVE_truncdfsf2
733 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
735 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
736 return;
738 #endif
739 #ifdef HAVE_truncxfsf2
740 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
742 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
743 return;
745 #endif
746 #ifdef HAVE_trunctfsf2
747 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
749 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
750 return;
752 #endif
753 #ifdef HAVE_truncxfdf2
754 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
756 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
757 return;
759 #endif
760 #ifdef HAVE_trunctfdf2
761 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
763 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
764 return;
766 #endif
768 libcall = (rtx) 0;
769 switch (from_mode)
771 case SFmode:
772 switch (to_mode)
774 case DFmode:
775 libcall = extendsfdf2_libfunc;
776 break;
778 case XFmode:
779 libcall = extendsfxf2_libfunc;
780 break;
782 case TFmode:
783 libcall = extendsftf2_libfunc;
784 break;
786 default:
787 break;
789 break;
791 case DFmode:
792 switch (to_mode)
794 case SFmode:
795 libcall = truncdfsf2_libfunc;
796 break;
798 case XFmode:
799 libcall = extenddfxf2_libfunc;
800 break;
802 case TFmode:
803 libcall = extenddftf2_libfunc;
804 break;
806 default:
807 break;
809 break;
811 case XFmode:
812 switch (to_mode)
814 case SFmode:
815 libcall = truncxfsf2_libfunc;
816 break;
818 case DFmode:
819 libcall = truncxfdf2_libfunc;
820 break;
822 default:
823 break;
825 break;
827 case TFmode:
828 switch (to_mode)
830 case SFmode:
831 libcall = trunctfsf2_libfunc;
832 break;
834 case DFmode:
835 libcall = trunctfdf2_libfunc;
836 break;
838 default:
839 break;
841 break;
843 default:
844 break;
847 if (libcall == (rtx) 0)
848 /* This conversion is not implemented yet. */
849 abort ();
851 start_sequence ();
852 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
853 1, from, from_mode);
854 insns = get_insns ();
855 end_sequence ();
856 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
857 from));
858 return;
861 /* Now both modes are integers. */
863 /* Handle expanding beyond a word. */
864 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
865 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
867 rtx insns;
868 rtx lowpart;
869 rtx fill_value;
870 rtx lowfrom;
871 int i;
872 enum machine_mode lowpart_mode;
873 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
875 /* Try converting directly if the insn is supported. */
876 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
877 != CODE_FOR_nothing)
879 /* If FROM is a SUBREG, put it into a register. Do this
880 so that we always generate the same set of insns for
881 better cse'ing; if an intermediate assignment occurred,
882 we won't be doing the operation directly on the SUBREG. */
883 if (optimize > 0 && GET_CODE (from) == SUBREG)
884 from = force_reg (from_mode, from);
885 emit_unop_insn (code, to, from, equiv_code);
886 return;
888 /* Next, try converting via full word. */
889 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
890 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
891 != CODE_FOR_nothing))
893 if (GET_CODE (to) == REG)
894 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
895 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
896 emit_unop_insn (code, to,
897 gen_lowpart (word_mode, to), equiv_code);
898 return;
901 /* No special multiword conversion insn; do it by hand. */
902 start_sequence ();
904 /* Since we will turn this into a no conflict block, we must ensure
905 that the source does not overlap the target. */
907 if (reg_overlap_mentioned_p (to, from))
908 from = force_reg (from_mode, from);
910 /* Get a copy of FROM widened to a word, if necessary. */
911 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
912 lowpart_mode = word_mode;
913 else
914 lowpart_mode = from_mode;
916 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
918 lowpart = gen_lowpart (lowpart_mode, to);
919 emit_move_insn (lowpart, lowfrom);
921 /* Compute the value to put in each remaining word. */
922 if (unsignedp)
923 fill_value = const0_rtx;
924 else
926 #ifdef HAVE_slt
927 if (HAVE_slt
928 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
929 && STORE_FLAG_VALUE == -1)
931 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
932 lowpart_mode, 0);
933 fill_value = gen_reg_rtx (word_mode);
934 emit_insn (gen_slt (fill_value));
936 else
937 #endif
939 fill_value
940 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
941 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
942 NULL_RTX, 0);
943 fill_value = convert_to_mode (word_mode, fill_value, 1);
947 /* Fill the remaining words. */
948 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
950 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
951 rtx subword = operand_subword (to, index, 1, to_mode);
953 if (subword == 0)
954 abort ();
956 if (fill_value != subword)
957 emit_move_insn (subword, fill_value);
960 insns = get_insns ();
961 end_sequence ();
963 emit_no_conflict_block (insns, to, from, NULL_RTX,
964 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
965 return;
968 /* Truncating multi-word to a word or less. */
969 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
970 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
972 if (!((GET_CODE (from) == MEM
973 && ! MEM_VOLATILE_P (from)
974 && direct_load[(int) to_mode]
975 && ! mode_dependent_address_p (XEXP (from, 0)))
976 || GET_CODE (from) == REG
977 || GET_CODE (from) == SUBREG))
978 from = force_reg (from_mode, from);
979 convert_move (to, gen_lowpart (word_mode, from), 0);
980 return;
983 /* Handle pointer conversion. */ /* SPEE 900220. */
984 if (to_mode == PQImode)
986 if (from_mode != QImode)
987 from = convert_to_mode (QImode, from, unsignedp);
989 #ifdef HAVE_truncqipqi2
990 if (HAVE_truncqipqi2)
992 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
993 return;
995 #endif /* HAVE_truncqipqi2 */
996 abort ();
999 if (from_mode == PQImode)
1001 if (to_mode != QImode)
1003 from = convert_to_mode (QImode, from, unsignedp);
1004 from_mode = QImode;
1006 else
1008 #ifdef HAVE_extendpqiqi2
1009 if (HAVE_extendpqiqi2)
1011 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
1012 return;
1014 #endif /* HAVE_extendpqiqi2 */
1015 abort ();
1019 if (to_mode == PSImode)
1021 if (from_mode != SImode)
1022 from = convert_to_mode (SImode, from, unsignedp);
1024 #ifdef HAVE_truncsipsi2
1025 if (HAVE_truncsipsi2)
1027 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1028 return;
1030 #endif /* HAVE_truncsipsi2 */
1031 abort ();
1034 if (from_mode == PSImode)
1036 if (to_mode != SImode)
1038 from = convert_to_mode (SImode, from, unsignedp);
1039 from_mode = SImode;
1041 else
1043 #ifdef HAVE_extendpsisi2
1044 if (! unsignedp && HAVE_extendpsisi2)
1046 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1047 return;
1049 #endif /* HAVE_extendpsisi2 */
1050 #ifdef HAVE_zero_extendpsisi2
1051 if (unsignedp && HAVE_zero_extendpsisi2)
1053 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1054 return;
1056 #endif /* HAVE_zero_extendpsisi2 */
1057 abort ();
1061 if (to_mode == PDImode)
1063 if (from_mode != DImode)
1064 from = convert_to_mode (DImode, from, unsignedp);
1066 #ifdef HAVE_truncdipdi2
1067 if (HAVE_truncdipdi2)
1069 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1070 return;
1072 #endif /* HAVE_truncdipdi2 */
1073 abort ();
1076 if (from_mode == PDImode)
1078 if (to_mode != DImode)
1080 from = convert_to_mode (DImode, from, unsignedp);
1081 from_mode = DImode;
1083 else
1085 #ifdef HAVE_extendpdidi2
1086 if (HAVE_extendpdidi2)
1088 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1089 return;
1091 #endif /* HAVE_extendpdidi2 */
1092 abort ();
1096 /* Now follow all the conversions between integers
1097 no more than a word long. */
1099 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1100 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1101 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1102 GET_MODE_BITSIZE (from_mode)))
1104 if (!((GET_CODE (from) == MEM
1105 && ! MEM_VOLATILE_P (from)
1106 && direct_load[(int) to_mode]
1107 && ! mode_dependent_address_p (XEXP (from, 0)))
1108 || GET_CODE (from) == REG
1109 || GET_CODE (from) == SUBREG))
1110 from = force_reg (from_mode, from);
1111 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1112 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1113 from = copy_to_reg (from);
1114 emit_move_insn (to, gen_lowpart (to_mode, from));
1115 return;
1118 /* Handle extension. */
1119 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1121 /* Convert directly if that works. */
1122 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1123 != CODE_FOR_nothing)
1125 if (flag_force_mem)
1126 from = force_not_mem (from);
1128 emit_unop_insn (code, to, from, equiv_code);
1129 return;
1131 else
1133 enum machine_mode intermediate;
1134 rtx tmp;
1135 tree shift_amount;
1137 /* Search for a mode to convert via. */
1138 for (intermediate = from_mode; intermediate != VOIDmode;
1139 intermediate = GET_MODE_WIDER_MODE (intermediate))
1140 if (((can_extend_p (to_mode, intermediate, unsignedp)
1141 != CODE_FOR_nothing)
1142 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1143 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1144 GET_MODE_BITSIZE (intermediate))))
1145 && (can_extend_p (intermediate, from_mode, unsignedp)
1146 != CODE_FOR_nothing))
1148 convert_move (to, convert_to_mode (intermediate, from,
1149 unsignedp), unsignedp);
1150 return;
1153 /* No suitable intermediate mode.
1154 Generate what we need with shifts. */
1155 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1156 - GET_MODE_BITSIZE (from_mode), 0);
1157 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1158 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1159 to, unsignedp);
1160 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1161 to, unsignedp);
1162 if (tmp != to)
1163 emit_move_insn (to, tmp);
1164 return;
1168 /* Support special truncate insns for certain modes. */
1170 if (from_mode == DImode && to_mode == SImode)
1172 #ifdef HAVE_truncdisi2
1173 if (HAVE_truncdisi2)
1175 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1176 return;
1178 #endif
1179 convert_move (to, force_reg (from_mode, from), unsignedp);
1180 return;
1183 if (from_mode == DImode && to_mode == HImode)
1185 #ifdef HAVE_truncdihi2
1186 if (HAVE_truncdihi2)
1188 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1189 return;
1191 #endif
1192 convert_move (to, force_reg (from_mode, from), unsignedp);
1193 return;
1196 if (from_mode == DImode && to_mode == QImode)
1198 #ifdef HAVE_truncdiqi2
1199 if (HAVE_truncdiqi2)
1201 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1202 return;
1204 #endif
1205 convert_move (to, force_reg (from_mode, from), unsignedp);
1206 return;
1209 if (from_mode == SImode && to_mode == HImode)
1211 #ifdef HAVE_truncsihi2
1212 if (HAVE_truncsihi2)
1214 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1215 return;
1217 #endif
1218 convert_move (to, force_reg (from_mode, from), unsignedp);
1219 return;
1222 if (from_mode == SImode && to_mode == QImode)
1224 #ifdef HAVE_truncsiqi2
1225 if (HAVE_truncsiqi2)
1227 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1228 return;
1230 #endif
1231 convert_move (to, force_reg (from_mode, from), unsignedp);
1232 return;
1235 if (from_mode == HImode && to_mode == QImode)
1237 #ifdef HAVE_trunchiqi2
1238 if (HAVE_trunchiqi2)
1240 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1241 return;
1243 #endif
1244 convert_move (to, force_reg (from_mode, from), unsignedp);
1245 return;
1248 if (from_mode == TImode && to_mode == DImode)
1250 #ifdef HAVE_trunctidi2
1251 if (HAVE_trunctidi2)
1253 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1254 return;
1256 #endif
1257 convert_move (to, force_reg (from_mode, from), unsignedp);
1258 return;
1261 if (from_mode == TImode && to_mode == SImode)
1263 #ifdef HAVE_trunctisi2
1264 if (HAVE_trunctisi2)
1266 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1267 return;
1269 #endif
1270 convert_move (to, force_reg (from_mode, from), unsignedp);
1271 return;
1274 if (from_mode == TImode && to_mode == HImode)
1276 #ifdef HAVE_trunctihi2
1277 if (HAVE_trunctihi2)
1279 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1280 return;
1282 #endif
1283 convert_move (to, force_reg (from_mode, from), unsignedp);
1284 return;
1287 if (from_mode == TImode && to_mode == QImode)
1289 #ifdef HAVE_trunctiqi2
1290 if (HAVE_trunctiqi2)
1292 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1293 return;
1295 #endif
1296 convert_move (to, force_reg (from_mode, from), unsignedp);
1297 return;
1300 /* Handle truncation of volatile memrefs, and so on;
1301 the things that couldn't be truncated directly,
1302 and for which there was no special instruction. */
1303 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1305 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1306 emit_move_insn (to, temp);
1307 return;
1310 /* Mode combination is not recognized. */
1311 abort ();
1314 /* Return an rtx for a value that would result
1315 from converting X to mode MODE.
1316 Both X and MODE may be floating, or both integer.
1317 UNSIGNEDP is nonzero if X is an unsigned value.
1318 This can be done by referring to a part of X in place
1319 or by copying to a new temporary with conversion.
1321 This function *must not* call protect_from_queue
1322 except when putting X into an insn (in which case convert_move does it). */
1324 rtx
1325 convert_to_mode (mode, x, unsignedp)
1326 enum machine_mode mode;
1327 rtx x;
1328 int unsignedp;
1330 return convert_modes (mode, VOIDmode, x, unsignedp);
1333 /* Return an rtx for a value that would result
1334 from converting X from mode OLDMODE to mode MODE.
1335 Both modes may be floating, or both integer.
1336 UNSIGNEDP is nonzero if X is an unsigned value.
1338 This can be done by referring to a part of X in place
1339 or by copying to a new temporary with conversion.
1341 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1343 This function *must not* call protect_from_queue
1344 except when putting X into an insn (in which case convert_move does it). */
1346 rtx
1347 convert_modes (mode, oldmode, x, unsignedp)
1348 enum machine_mode mode, oldmode;
1349 rtx x;
1350 int unsignedp;
1352 rtx temp;
1354 /* If FROM is a SUBREG that indicates that we have already done at least
1355 the required extension, strip it. */
1357 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1358 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1359 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1360 x = gen_lowpart (mode, x);
1362 if (GET_MODE (x) != VOIDmode)
1363 oldmode = GET_MODE (x);
1365 if (mode == oldmode)
1366 return x;
1368 /* There is one case that we must handle specially: If we are converting
1369 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1370 we are to interpret the constant as unsigned, gen_lowpart will do
1371 the wrong thing if the constant appears negative. What we want to do is
1372 make the high-order word of the constant zero, not all ones. */
1374 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1375 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1376 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1378 HOST_WIDE_INT val = INTVAL (x);
1380 if (oldmode != VOIDmode
1381 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1383 int width = GET_MODE_BITSIZE (oldmode);
1385 /* We need to zero extend VAL. */
1386 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1389 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1392 /* We can do this with a gen_lowpart if both desired and current modes
1393 are integer, and this is either a constant integer, a register, or a
1394 non-volatile MEM. Except for the constant case where MODE is no
1395 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1397 if ((GET_CODE (x) == CONST_INT
1398 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1399 || (GET_MODE_CLASS (mode) == MODE_INT
1400 && GET_MODE_CLASS (oldmode) == MODE_INT
1401 && (GET_CODE (x) == CONST_DOUBLE
1402 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1403 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1404 && direct_load[(int) mode])
1405 || (GET_CODE (x) == REG
1406 && (! HARD_REGISTER_P (x)
1407 || HARD_REGNO_MODE_OK (REGNO (x), mode))
1408 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1409 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1411 /* ?? If we don't know OLDMODE, we have to assume here that
1412 X does not need sign- or zero-extension. This may not be
1413 the case, but it's the best we can do. */
1414 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1415 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1417 HOST_WIDE_INT val = INTVAL (x);
1418 int width = GET_MODE_BITSIZE (oldmode);
1420 /* We must sign or zero-extend in this case. Start by
1421 zero-extending, then sign extend if we need to. */
1422 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1423 if (! unsignedp
1424 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1425 val |= (HOST_WIDE_INT) (-1) << width;
1427 return gen_int_mode (val, mode);
1430 return gen_lowpart (mode, x);
1433 temp = gen_reg_rtx (mode);
1434 convert_move (temp, x, unsignedp);
1435 return temp;
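
/* A minimal sketch of the usual callers of the conversion routines above,
   assuming a QImode source being zero-extended to SImode; the helper name
   is hypothetical.  convert_to_mode may return X itself (or a lowpart of
   it), so the result is not necessarily a fresh pseudo, whereas
   convert_move emits the conversion into an existing destination.
   Illustration only.  */
#if 0
static rtx
widen_to_si_sketch (rtx x)
{
  /* UNSIGNEDP nonzero requests zero-extension.  */
  rtx wide = convert_to_mode (SImode, x, 1);

  /* Equivalent effect when the destination already exists.  */
  rtx target = gen_reg_rtx (SImode);
  convert_move (target, x, 1);

  return wide;
}
#endif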
1438 /* This macro is used to determine what the largest unit size that
1439 move_by_pieces can use is. */
1441 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1442 move efficiently, as opposed to MOVE_MAX which is the maximum
1443 number of bytes we can move with a single instruction. */
1445 #ifndef MOVE_MAX_PIECES
1446 #define MOVE_MAX_PIECES MOVE_MAX
1447 #endif
1449 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1450 store efficiently. Due to internal GCC limitations, this is
1451 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1452 for an immediate constant. */
1454 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
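
/* A worked instance of the limit above, under assumed values: with a
   32-bit HOST_WIDE_INT and a hypothetical MOVE_MAX_PIECES of 16, this is
   MIN (16, 2 * 4) == 8, i.e. the ability to represent the constant as an
   immediate is what caps the piece size, not the move width.  The numbers
   are illustrative, not taken from any particular target.  */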
1456 /* Generate several move instructions to copy LEN bytes from block FROM to
1457 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1458 and TO through protect_from_queue before calling.
1460 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1461 used to push FROM to the stack.
1463 ALIGN is maximum alignment we can assume. */
1465 void
1466 move_by_pieces (to, from, len, align)
1467 rtx to, from;
1468 unsigned HOST_WIDE_INT len;
1469 unsigned int align;
1471 struct move_by_pieces data;
1472 rtx to_addr, from_addr = XEXP (from, 0);
1473 unsigned int max_size = MOVE_MAX_PIECES + 1;
1474 enum machine_mode mode = VOIDmode, tmode;
1475 enum insn_code icode;
1477 data.offset = 0;
1478 data.from_addr = from_addr;
1479 if (to)
1481 to_addr = XEXP (to, 0);
1482 data.to = to;
1483 data.autinc_to
1484 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1485 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1486 data.reverse
1487 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1489 else
1491 to_addr = NULL_RTX;
1492 data.to = NULL_RTX;
1493 data.autinc_to = 1;
1494 #ifdef STACK_GROWS_DOWNWARD
1495 data.reverse = 1;
1496 #else
1497 data.reverse = 0;
1498 #endif
1500 data.to_addr = to_addr;
1501 data.from = from;
1502 data.autinc_from
1503 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1504 || GET_CODE (from_addr) == POST_INC
1505 || GET_CODE (from_addr) == POST_DEC);
1507 data.explicit_inc_from = 0;
1508 data.explicit_inc_to = 0;
1509 if (data.reverse) data.offset = len;
1510 data.len = len;
1512 /* If copying requires more than two move insns,
1513 copy addresses to registers (to make displacements shorter)
1514 and use post-increment if available. */
1515 if (!(data.autinc_from && data.autinc_to)
1516 && move_by_pieces_ninsns (len, align) > 2)
1518 /* Find the mode of the largest move... */
1519 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1520 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1521 if (GET_MODE_SIZE (tmode) < max_size)
1522 mode = tmode;
1524 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1526 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1527 data.autinc_from = 1;
1528 data.explicit_inc_from = -1;
1530 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1532 data.from_addr = copy_addr_to_reg (from_addr);
1533 data.autinc_from = 1;
1534 data.explicit_inc_from = 1;
1536 if (!data.autinc_from && CONSTANT_P (from_addr))
1537 data.from_addr = copy_addr_to_reg (from_addr);
1538 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1540 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1541 data.autinc_to = 1;
1542 data.explicit_inc_to = -1;
1544 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1546 data.to_addr = copy_addr_to_reg (to_addr);
1547 data.autinc_to = 1;
1548 data.explicit_inc_to = 1;
1550 if (!data.autinc_to && CONSTANT_P (to_addr))
1551 data.to_addr = copy_addr_to_reg (to_addr);
1554 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1555 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1556 align = MOVE_MAX * BITS_PER_UNIT;
1558 /* First move what we can in the largest integer mode, then go to
1559 successively smaller modes. */
1561 while (max_size > 1)
1563 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1564 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1565 if (GET_MODE_SIZE (tmode) < max_size)
1566 mode = tmode;
1568 if (mode == VOIDmode)
1569 break;
1571 icode = mov_optab->handlers[(int) mode].insn_code;
1572 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1573 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1575 max_size = GET_MODE_SIZE (mode);
1578 /* The code above should have handled everything. */
1579 if (data.len > 0)
1580 abort ();
1583 /* Return number of insns required to move L bytes by pieces.
1584 ALIGN (in bits) is maximum alignment we can assume. */
1586 static unsigned HOST_WIDE_INT
1587 move_by_pieces_ninsns (l, align)
1588 unsigned HOST_WIDE_INT l;
1589 unsigned int align;
1591 unsigned HOST_WIDE_INT n_insns = 0;
1592 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1594 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1595 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1596 align = MOVE_MAX * BITS_PER_UNIT;
1598 while (max_size > 1)
1600 enum machine_mode mode = VOIDmode, tmode;
1601 enum insn_code icode;
1603 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1604 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1605 if (GET_MODE_SIZE (tmode) < max_size)
1606 mode = tmode;
1608 if (mode == VOIDmode)
1609 break;
1611 icode = mov_optab->handlers[(int) mode].insn_code;
1612 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1613 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1615 max_size = GET_MODE_SIZE (mode);
1618 if (l)
1619 abort ();
1620 return n_insns;
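
/* A worked example of the count above, assuming a 32-bit target with
   MOVE_MAX == 4 where SImode, HImode and QImode all have move patterns
   and the alignment tests pass: for L == 10 fully aligned bytes the loop
   charges 10/4 = 2 SImode moves, then 2/2 = 1 HImode move, for a total of
   3 insns, which MOVE_BY_PIECES_P then compares against MOVE_RATIO.  */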
1623 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1624 with move instructions for mode MODE. GENFUN is the gen_... function
1625 to make a move insn for that mode. DATA has all the other info. */
1627 static void
1628 move_by_pieces_1 (genfun, mode, data)
1629 rtx (*genfun) PARAMS ((rtx, ...));
1630 enum machine_mode mode;
1631 struct move_by_pieces *data;
1633 unsigned int size = GET_MODE_SIZE (mode);
1634 rtx to1 = NULL_RTX, from1;
1636 while (data->len >= size)
1638 if (data->reverse)
1639 data->offset -= size;
1641 if (data->to)
1643 if (data->autinc_to)
1644 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1645 data->offset);
1646 else
1647 to1 = adjust_address (data->to, mode, data->offset);
1650 if (data->autinc_from)
1651 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1652 data->offset);
1653 else
1654 from1 = adjust_address (data->from, mode, data->offset);
1656 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1657 emit_insn (gen_add2_insn (data->to_addr,
1658 GEN_INT (-(HOST_WIDE_INT)size)));
1659 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1660 emit_insn (gen_add2_insn (data->from_addr,
1661 GEN_INT (-(HOST_WIDE_INT)size)));
1663 if (data->to)
1664 emit_insn ((*genfun) (to1, from1));
1665 else
1667 #ifdef PUSH_ROUNDING
1668 emit_single_push_insn (mode, from1, NULL);
1669 #else
1670 abort ();
1671 #endif
1674 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1675 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1676 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1677 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1679 if (! data->reverse)
1680 data->offset += size;
1682 data->len -= size;
1686 /* Emit code to move a block Y to a block X. This may be done with
1687 string-move instructions, with multiple scalar move instructions,
1688 or with a library call.
1690 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1691 SIZE is an rtx that says how long they are.
1692 ALIGN is the maximum alignment we can assume they have.
1693 METHOD describes what kind of copy this is, and what mechanisms may be used.
1695 Return the address of the new block, if memcpy is called and returns it,
1696 0 otherwise. */
1698 rtx
1699 emit_block_move (x, y, size, method)
1700 rtx x, y, size;
1701 enum block_op_methods method;
1703 bool may_use_call;
1704 rtx retval = 0;
1705 unsigned int align;
1707 switch (method)
1709 case BLOCK_OP_NORMAL:
1710 may_use_call = true;
1711 break;
1713 case BLOCK_OP_CALL_PARM:
1714 may_use_call = block_move_libcall_safe_for_call_parm ();
1716 /* Make inhibit_defer_pop nonzero around the library call
1717 to force it to pop the arguments right away. */
1718 NO_DEFER_POP;
1719 break;
1721 case BLOCK_OP_NO_LIBCALL:
1722 may_use_call = false;
1723 break;
1725 default:
1726 abort ();
1729 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1731 if (GET_MODE (x) != BLKmode)
1732 abort ();
1733 if (GET_MODE (y) != BLKmode)
1734 abort ();
1736 x = protect_from_queue (x, 1);
1737 y = protect_from_queue (y, 0);
1738 size = protect_from_queue (size, 0);
1740 if (GET_CODE (x) != MEM)
1741 abort ();
1742 if (GET_CODE (y) != MEM)
1743 abort ();
1744 if (size == 0)
1745 abort ();
1747 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1748 can be incorrect is coming from __builtin_memcpy. */
1749 if (GET_CODE (size) == CONST_INT)
1751 x = shallow_copy_rtx (x);
1752 y = shallow_copy_rtx (y);
1753 set_mem_size (x, size);
1754 set_mem_size (y, size);
1757 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1758 move_by_pieces (x, y, INTVAL (size), align);
1759 else if (emit_block_move_via_movstr (x, y, size, align))
1761 else if (may_use_call)
1762 retval = emit_block_move_via_libcall (x, y, size);
1763 else
1764 emit_block_move_via_loop (x, y, size, align);
1766 if (method == BLOCK_OP_CALL_PARM)
1767 OK_DEFER_POP;
1769 return retval;
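
/* A minimal sketch of a typical structure copy through emit_block_move,
   assuming DST_MEM and SRC_MEM are already BLKmode MEM rtxen; the helper
   name is hypothetical.  BLOCK_OP_CALL_PARM or BLOCK_OP_NO_LIBCALL would
   be passed instead when copying an outgoing argument or when a libcall
   must be avoided.  Illustration only.  */
#if 0
static void
copy_aggregate_sketch (rtx dst_mem, rtx src_mem, HOST_WIDE_INT bytes)
{
  emit_block_move (dst_mem, src_mem, GEN_INT (bytes), BLOCK_OP_NORMAL);
}
#endif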
1772 /* A subroutine of emit_block_move. Returns true if calling the
1773 block move libcall will not clobber any parameters which may have
1774 already been placed on the stack. */
1776 static bool
1777 block_move_libcall_safe_for_call_parm ()
1779 if (PUSH_ARGS)
1780 return true;
1781 else
1783 /* Check to see whether memcpy takes all register arguments. */
1784 static enum {
1785 takes_regs_uninit, takes_regs_no, takes_regs_yes
1786 } takes_regs = takes_regs_uninit;
1788 switch (takes_regs)
1790 case takes_regs_uninit:
1792 CUMULATIVE_ARGS args_so_far;
1793 tree fn, arg;
1795 fn = emit_block_move_libcall_fn (false);
1796 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1798 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1799 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1801 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1802 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1803 if (!tmp || !REG_P (tmp))
1804 goto fail_takes_regs;
1805 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1806 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1807 NULL_TREE, 1))
1808 goto fail_takes_regs;
1809 #endif
1810 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1813 takes_regs = takes_regs_yes;
1814 /* FALLTHRU */
1816 case takes_regs_yes:
1817 return true;
1819 fail_takes_regs:
1820 takes_regs = takes_regs_no;
1821 /* FALLTHRU */
1822 case takes_regs_no:
1823 return false;
1825 default:
1826 abort ();
1831 /* A subroutine of emit_block_move. Expand a movstr pattern;
1832 return true if successful. */
1834 static bool
1835 emit_block_move_via_movstr (x, y, size, align)
1836 rtx x, y, size;
1837 unsigned int align;
1839 /* Try the most limited insn first, because there's no point
1840 including more than one in the machine description unless
1841 the more limited one has some advantage. */
1843 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1844 enum machine_mode mode;
1846 /* Since this is a move insn, we don't care about volatility. */
1847 volatile_ok = 1;
1849 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1850 mode = GET_MODE_WIDER_MODE (mode))
1852 enum insn_code code = movstr_optab[(int) mode];
1853 insn_operand_predicate_fn pred;
1855 if (code != CODE_FOR_nothing
1856 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1857 here because if SIZE is less than the mode mask, as it is
1858 returned by the macro, it will definitely be less than the
1859 actual mode mask. */
1860 && ((GET_CODE (size) == CONST_INT
1861 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1862 <= (GET_MODE_MASK (mode) >> 1)))
1863 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1864 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1865 || (*pred) (x, BLKmode))
1866 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1867 || (*pred) (y, BLKmode))
1868 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1869 || (*pred) (opalign, VOIDmode)))
1871 rtx op2;
1872 rtx last = get_last_insn ();
1873 rtx pat;
1875 op2 = convert_to_mode (mode, size, 1);
1876 pred = insn_data[(int) code].operand[2].predicate;
1877 if (pred != 0 && ! (*pred) (op2, mode))
1878 op2 = copy_to_mode_reg (mode, op2);
1880 /* ??? When called via emit_block_move_for_call, it'd be
1881 nice if there were some way to inform the backend, so
1882 that it doesn't fail the expansion because it thinks
1883 emitting the libcall would be more efficient. */
1885 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1886 if (pat)
1888 emit_insn (pat);
1889 volatile_ok = 0;
1890 return true;
1892 else
1893 delete_insns_since (last);
1897 volatile_ok = 0;
1898 return false;
1901 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1902 Return the return value from memcpy, 0 otherwise. */
1904 static rtx
1905 emit_block_move_via_libcall (dst, src, size)
1906 rtx dst, src, size;
1908 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1909 enum machine_mode size_mode;
1910 rtx retval;
1912 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1914 It is unsafe to save the value generated by protect_from_queue
1915 and reuse it later. Consider what happens if emit_queue is
1916 called before the return value from protect_from_queue is used.
1918 Expansion of the CALL_EXPR below will call emit_queue before
1919 we are finished emitting RTL for argument setup. So if we are
1920 not careful we could get the wrong value for an argument.
1922 To avoid this problem we go ahead and emit code to copy X, Y &
1923 SIZE into new pseudos. We can then place those new pseudos
1924 into an RTL_EXPR and use them later, even after a call to
1925 emit_queue.
1927 Note this is not strictly needed for library calls since they
1928 do not call emit_queue before loading their arguments. However,
1929 we may need to have library calls call emit_queue in the future
1930 since failing to do so could cause problems for targets which
1931 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1933 dst = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1934 src = copy_to_mode_reg (Pmode, XEXP (src, 0));
1936 if (TARGET_MEM_FUNCTIONS)
1937 size_mode = TYPE_MODE (sizetype);
1938 else
1939 size_mode = TYPE_MODE (unsigned_type_node);
1940 size = convert_to_mode (size_mode, size, 1);
1941 size = copy_to_mode_reg (size_mode, size);
1943 /* It is incorrect to use the libcall calling conventions to call
1944 memcpy in this context. This could be a user call to memcpy and
1945 the user may wish to examine the return value from memcpy. For
1946 targets where libcalls and normal calls have different conventions
1947 for returning pointers, we could end up generating incorrect code.
1949 For convenience, we generate the call to bcopy this way as well. */
1951 dst_tree = make_tree (ptr_type_node, dst);
1952 src_tree = make_tree (ptr_type_node, src);
1953 if (TARGET_MEM_FUNCTIONS)
1954 size_tree = make_tree (sizetype, size);
1955 else
1956 size_tree = make_tree (unsigned_type_node, size);
1958 fn = emit_block_move_libcall_fn (true);
1959 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1960 if (TARGET_MEM_FUNCTIONS)
1962 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1963 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1965 else
1967 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1968 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1971 /* Now we have to build up the CALL_EXPR itself. */
1972 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1973 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1974 call_expr, arg_list, NULL_TREE);
1975 TREE_SIDE_EFFECTS (call_expr) = 1;
1977 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1979 /* If we are initializing a readonly value, show the above call
1980 clobbered it. Otherwise, a load from it may erroneously be
1981 hoisted from a loop. */
1982 if (RTX_UNCHANGING_P (dst))
1983 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
1985 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
1988 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1989 for the function we use for block copies. The first time FOR_CALL
1990 is true, we call assemble_external. */
1992 static GTY(()) tree block_move_fn;
1994 static tree
1995 emit_block_move_libcall_fn (for_call)
1996 int for_call;
1998 static bool emitted_extern;
1999 tree fn = block_move_fn, args;
2001 if (!fn)
2003 if (TARGET_MEM_FUNCTIONS)
2005 fn = get_identifier ("memcpy");
2006 args = build_function_type_list (ptr_type_node, ptr_type_node,
2007 const_ptr_type_node, sizetype,
2008 NULL_TREE);
2010 else
2012 fn = get_identifier ("bcopy");
2013 args = build_function_type_list (void_type_node, const_ptr_type_node,
2014 ptr_type_node, unsigned_type_node,
2015 NULL_TREE);
2018 fn = build_decl (FUNCTION_DECL, fn, args);
2019 DECL_EXTERNAL (fn) = 1;
2020 TREE_PUBLIC (fn) = 1;
2021 DECL_ARTIFICIAL (fn) = 1;
2022 TREE_NOTHROW (fn) = 1;
2024 block_move_fn = fn;
2027 if (for_call && !emitted_extern)
2029 emitted_extern = true;
2030 make_decl_rtl (fn, NULL);
2031 assemble_external (fn);
2034 return fn;
2037 /* A subroutine of emit_block_move. Copy the data via an explicit
2038 loop. This is used only when libcalls are forbidden. */
2039 /* ??? It'd be nice to copy in hunks larger than QImode. */
2041 static void
2042 emit_block_move_via_loop (x, y, size, align)
2043 rtx x, y, size;
2044 unsigned int align ATTRIBUTE_UNUSED;
2046 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
2047 enum machine_mode iter_mode;
2049 iter_mode = GET_MODE (size);
2050 if (iter_mode == VOIDmode)
2051 iter_mode = word_mode;
2053 top_label = gen_label_rtx ();
2054 cmp_label = gen_label_rtx ();
2055 iter = gen_reg_rtx (iter_mode);
2057 emit_move_insn (iter, const0_rtx);
2059 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
2060 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
2061 do_pending_stack_adjust ();
2063 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2065 emit_jump (cmp_label);
2066 emit_label (top_label);
2068 tmp = convert_modes (Pmode, iter_mode, iter, true);
2069 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
2070 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
2071 x = change_address (x, QImode, x_addr);
2072 y = change_address (y, QImode, y_addr);
2074 emit_move_insn (x, y);
2076 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
2077 true, OPTAB_LIB_WIDEN);
2078 if (tmp != iter)
2079 emit_move_insn (iter, tmp);
2081 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2082 emit_label (cmp_label);
2084 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
2085 true, top_label);
2087 emit_note (NULL, NOTE_INSN_LOOP_END);
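/* For illustration, the insns emitted above amount to the byte-copy loop

       iter = 0;
       goto cmp;
     top:
       *(x_addr + iter) = *(y_addr + iter);
       iter = iter + 1;
     cmp:
       if (iter < size) goto top;     /* unsigned comparison */

   where iter, x_addr and y_addr are the pseudos created at the start of
   the function.  */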
2090 /* Copy all or part of a value X into registers starting at REGNO.
2091 The number of registers to be filled is NREGS. */
2093 void
2094 move_block_to_reg (regno, x, nregs, mode)
2095 int regno;
2096 rtx x;
2097 int nregs;
2098 enum machine_mode mode;
2100 int i;
2101 #ifdef HAVE_load_multiple
2102 rtx pat;
2103 rtx last;
2104 #endif
2106 if (nregs == 0)
2107 return;
2109 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2110 x = validize_mem (force_const_mem (mode, x));
2112 /* See if the machine can do this with a load multiple insn. */
2113 #ifdef HAVE_load_multiple
2114 if (HAVE_load_multiple)
2116 last = get_last_insn ();
2117 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
2118 GEN_INT (nregs));
2119 if (pat)
2121 emit_insn (pat);
2122 return;
2124 else
2125 delete_insns_since (last);
2127 #endif
2129 for (i = 0; i < nregs; i++)
2130 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
2131 operand_subword_force (x, i, mode));
2134 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2135 The number of registers to be filled is NREGS. SIZE indicates the number
2136 of bytes in the object X. */
2138 void
2139 move_block_from_reg (regno, x, nregs, size)
2140 int regno;
2141 rtx x;
2142 int nregs;
2143 int size;
2145 int i;
2146 #ifdef HAVE_store_multiple
2147 rtx pat;
2148 rtx last;
2149 #endif
2150 enum machine_mode mode;
2152 if (nregs == 0)
2153 return;
2155 /* If SIZE is that of a mode no bigger than a word, just use that
2156 mode's store operation. */
2157 if (size <= UNITS_PER_WORD
2158 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
2160 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
2161 return;
2164 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
2165 to the left before storing to memory. Note that the previous test
2166 doesn't handle all cases (e.g. SIZE == 3). */
2167 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
2169 rtx tem = operand_subword (x, 0, 1, BLKmode);
2170 rtx shift;
2172 if (tem == 0)
2173 abort ();
2175 shift = expand_shift (LSHIFT_EXPR, word_mode,
2176 gen_rtx_REG (word_mode, regno),
2177 build_int_2 ((UNITS_PER_WORD - size)
2178 * BITS_PER_UNIT, 0), NULL_RTX, 0);
2179 emit_move_insn (tem, shift);
2180 return;
2183 /* See if the machine can do this with a store multiple insn. */
2184 #ifdef HAVE_store_multiple
2185 if (HAVE_store_multiple)
2187 last = get_last_insn ();
2188 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
2189 GEN_INT (nregs));
2190 if (pat)
2192 emit_insn (pat);
2193 return;
2195 else
2196 delete_insns_since (last);
2198 #endif
2200 for (i = 0; i < nregs; i++)
2202 rtx tem = operand_subword (x, i, 1, BLKmode);
2204 if (tem == 0)
2205 abort ();
2207 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
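/* Worked example of the big-endian shift above: storing a 3-byte value
   on a hypothetical 32-bit big-endian target shifts the register left by
   (UNITS_PER_WORD - size) * BITS_PER_UNIT = (4 - 3) * 8 = 8 bits, so the
   three significant bytes land at the low-order memory addresses of the
   word that is written out.  */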
2211 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2212 ORIG, where ORIG is a non-consecutive group of registers represented by
2213 a PARALLEL. The clone is identical to the original except in that the
2214 original set of registers is replaced by a new set of pseudo registers.
2215 The new set has the same modes as the original set. */
2218 gen_group_rtx (orig)
2219 rtx orig;
2221 int i, length;
2222 rtx *tmps;
2224 if (GET_CODE (orig) != PARALLEL)
2225 abort ();
2227 length = XVECLEN (orig, 0);
2228 tmps = (rtx *) alloca (sizeof (rtx) * length);
2230 /* Skip a NULL entry in first slot. */
2231 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
2233 if (i)
2234 tmps[0] = 0;
2236 for (; i < length; i++)
2238 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
2239 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
2241 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
2244 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
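/* For illustration, given an ORIG such as

       (parallel [(expr_list (reg:DI 3) (const_int 0))
                  (expr_list (reg:DI 4) (const_int 8))])

   (register numbers chosen arbitrarily), the value returned above is the
   same PARALLEL with each hard register replaced by a fresh DImode pseudo
   at the same byte offset.  */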
2247 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
2248 registers represented by a PARALLEL. SSIZE represents the total size of
2249 block SRC in bytes, or -1 if not known. */
2250 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
2251 the balance will be in what would be the low-order memory addresses, i.e.
2252 left justified for big endian, right justified for little endian. This
2253 happens to be true for the targets currently using this support. If this
2254 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
2255 would be needed. */
2257 void
2258 emit_group_load (dst, orig_src, ssize)
2259 rtx dst, orig_src;
2260 int ssize;
2262 rtx *tmps, src;
2263 int start, i;
2265 if (GET_CODE (dst) != PARALLEL)
2266 abort ();
2268 /* Check for a NULL entry, used to indicate that the parameter goes
2269 both on the stack and in registers. */
2270 if (XEXP (XVECEXP (dst, 0, 0), 0))
2271 start = 0;
2272 else
2273 start = 1;
2275 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
2277 /* Process the pieces. */
2278 for (i = start; i < XVECLEN (dst, 0); i++)
2280 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2281 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2282 unsigned int bytelen = GET_MODE_SIZE (mode);
2283 int shift = 0;
2285 /* Handle trailing fragments that run over the size of the struct. */
2286 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2288 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2289 bytelen = ssize - bytepos;
2290 if (bytelen <= 0)
2291 abort ();
2294 /* If we won't be loading directly from memory, protect the real source
2295 from strange tricks we might play; but make sure that the source can
2296 be loaded directly into the destination. */
2297 src = orig_src;
2298 if (GET_CODE (orig_src) != MEM
2299 && (!CONSTANT_P (orig_src)
2300 || (GET_MODE (orig_src) != mode
2301 && GET_MODE (orig_src) != VOIDmode)))
2303 if (GET_MODE (orig_src) == VOIDmode)
2304 src = gen_reg_rtx (mode);
2305 else
2306 src = gen_reg_rtx (GET_MODE (orig_src));
2308 emit_move_insn (src, orig_src);
2311 /* Optimize the access just a bit. */
2312 if (GET_CODE (src) == MEM
2313 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2314 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2315 && bytelen == GET_MODE_SIZE (mode))
2317 tmps[i] = gen_reg_rtx (mode);
2318 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2320 else if (GET_CODE (src) == CONCAT)
2322 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2323 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2325 if ((bytepos == 0 && bytelen == slen0)
2326 || (bytepos != 0 && bytepos + bytelen <= slen))
2328 /* The following assumes that the concatenated objects all
2329 have the same size. In this case, a simple calculation
2330 can be used to determine the object and the bit field
2331 to be extracted. */
2332 tmps[i] = XEXP (src, bytepos / slen0);
2333 if (! CONSTANT_P (tmps[i])
2334 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2335 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2336 (bytepos % slen0) * BITS_PER_UNIT,
2337 1, NULL_RTX, mode, mode, ssize);
2339 else if (bytepos == 0)
2341 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
2342 emit_move_insn (mem, src);
2343 tmps[i] = adjust_address (mem, mode, 0);
2345 else
2346 abort ();
2348 else if (CONSTANT_P (src)
2349 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2350 tmps[i] = src;
2351 else
2352 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2353 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2354 mode, mode, ssize);
2356 if (BYTES_BIG_ENDIAN && shift)
2357 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2358 tmps[i], 0, OPTAB_WIDEN);
2361 emit_queue ();
2363 /* Copy the extracted pieces into the proper (probable) hard regs. */
2364 for (i = start; i < XVECLEN (dst, 0); i++)
2365 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2368 /* Emit code to move a block SRC to block DST, where SRC and DST are
2369 non-consecutive groups of registers, each represented by a PARALLEL. */
2371 void
2372 emit_group_move (dst, src)
2373 rtx dst, src;
2375 int i;
2377 if (GET_CODE (src) != PARALLEL
2378 || GET_CODE (dst) != PARALLEL
2379 || XVECLEN (src, 0) != XVECLEN (dst, 0))
2380 abort ();
2382 /* Skip first entry if NULL. */
2383 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2384 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2385 XEXP (XVECEXP (src, 0, i), 0));
2388 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2389 registers represented by a PARALLEL. SSIZE represents the total size of
2390 block DST, or -1 if not known. */
2392 void
2393 emit_group_store (orig_dst, src, ssize)
2394 rtx orig_dst, src;
2395 int ssize;
2397 rtx *tmps, dst;
2398 int start, i;
2400 if (GET_CODE (src) != PARALLEL)
2401 abort ();
2403 /* Check for a NULL entry, used to indicate that the parameter goes
2404 both on the stack and in registers. */
2405 if (XEXP (XVECEXP (src, 0, 0), 0))
2406 start = 0;
2407 else
2408 start = 1;
2410 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2412 /* Copy the (probable) hard regs into pseudos. */
2413 for (i = start; i < XVECLEN (src, 0); i++)
2415 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2416 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2417 emit_move_insn (tmps[i], reg);
2419 emit_queue ();
2421 /* If we won't be storing directly into memory, protect the real destination
2422 from strange tricks we might play. */
2423 dst = orig_dst;
2424 if (GET_CODE (dst) == PARALLEL)
2426 rtx temp;
2428 /* We can get a PARALLEL dst if there is a conditional expression in
2429 a return statement. In that case, the dst and src are the same,
2430 so no action is necessary. */
2431 if (rtx_equal_p (dst, src))
2432 return;
2434 /* It is unclear if we can ever reach here, but we may as well handle
2435 it. Allocate a temporary, and split this into a store/load to/from
2436 the temporary. */
2438 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2439 emit_group_store (temp, src, ssize);
2440 emit_group_load (dst, temp, ssize);
2441 return;
2443 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2445 dst = gen_reg_rtx (GET_MODE (orig_dst));
2446 /* Make life a bit easier for combine. */
2447 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2450 /* Process the pieces. */
2451 for (i = start; i < XVECLEN (src, 0); i++)
2453 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2454 enum machine_mode mode = GET_MODE (tmps[i]);
2455 unsigned int bytelen = GET_MODE_SIZE (mode);
2456 rtx dest = dst;
2458 /* Handle trailing fragments that run over the size of the struct. */
2459 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2461 if (BYTES_BIG_ENDIAN)
2463 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2464 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2465 tmps[i], 0, OPTAB_WIDEN);
2467 bytelen = ssize - bytepos;
2470 if (GET_CODE (dst) == CONCAT)
2472 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2473 dest = XEXP (dst, 0);
2474 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2476 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2477 dest = XEXP (dst, 1);
2479 else if (bytepos == 0 && XVECLEN (src, 0))
2481 dest = assign_stack_temp (GET_MODE (dest),
2482 GET_MODE_SIZE (GET_MODE (dest)), 0);
2483 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2484 tmps[i]);
2485 dst = dest;
2486 break;
2488 else
2489 abort ();
2492 /* Optimize the access just a bit. */
2493 if (GET_CODE (dest) == MEM
2494 && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2495 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2496 && bytelen == GET_MODE_SIZE (mode))
2497 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2498 else
2499 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2500 mode, tmps[i], ssize);
2503 emit_queue ();
2505 /* Copy from the pseudo into the (probable) hard reg. */
2506 if (orig_dst != dst)
2507 emit_move_insn (orig_dst, dst);
2510 /* Generate code to copy a BLKmode object of TYPE out of a
2511 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2512 is null, a stack temporary is created. TGTBLK is returned.
2514 The primary purpose of this routine is to handle functions
2515 that return BLKmode structures in registers. Some machines
2516 (the PA for example) want to return all small structures
2517 in registers regardless of the structure's alignment. */
2520 copy_blkmode_from_reg (tgtblk, srcreg, type)
2521 rtx tgtblk;
2522 rtx srcreg;
2523 tree type;
2525 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2526 rtx src = NULL, dst = NULL;
2527 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2528 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2530 if (tgtblk == 0)
2532 tgtblk = assign_temp (build_qualified_type (type,
2533 (TYPE_QUALS (type)
2534 | TYPE_QUAL_CONST)),
2535 0, 1, 1);
2536 preserve_temp_slots (tgtblk);
2539 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2540 into a new pseudo which is a full word. */
2542 if (GET_MODE (srcreg) != BLKmode
2543 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2544 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2546 /* Structures whose size is not a multiple of a word are aligned
2547 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2548 machine, this means we must skip the empty high order bytes when
2549 calculating the bit offset. */
2550 if (BYTES_BIG_ENDIAN
2551 && bytes % UNITS_PER_WORD)
2552 big_endian_correction
2553 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2555 /* Copy the structure BITSIZE bits at a time.
2557 We could probably emit more efficient code for machines which do not use
2558 strict alignment, but it doesn't seem worth the effort at the current
2559 time. */
2560 for (bitpos = 0, xbitpos = big_endian_correction;
2561 bitpos < bytes * BITS_PER_UNIT;
2562 bitpos += bitsize, xbitpos += bitsize)
2564 /* We need a new source operand each time xbitpos is on a
2565 word boundary and when xbitpos == big_endian_correction
2566 (the first time through). */
2567 if (xbitpos % BITS_PER_WORD == 0
2568 || xbitpos == big_endian_correction)
2569 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2570 GET_MODE (srcreg));
2572 /* We need a new destination operand each time bitpos is on
2573 a word boundary. */
2574 if (bitpos % BITS_PER_WORD == 0)
2575 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2577 /* Use xbitpos for the source extraction (right justified) and
2578 bitpos for the destination store (left justified). */
2579 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2580 extract_bit_field (src, bitsize,
2581 xbitpos % BITS_PER_WORD, 1,
2582 NULL_RTX, word_mode, word_mode,
2583 BITS_PER_WORD),
2584 BITS_PER_WORD);
2587 return tgtblk;
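/* Worked example of the big-endian correction above: copying a 6-byte
   structure on a hypothetical 32-bit big-endian target gives
   big_endian_correction = 32 - (6 % 4) * 8 = 16, so extraction from the
   first source word starts 16 bits in while the store into TGTBLK starts
   at bit 0, skipping the unused high-order bytes of the register.  */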
2590 /* Add a USE expression for REG to the (possibly empty) list pointed
2591 to by CALL_FUSAGE. REG must denote a hard register. */
2593 void
2594 use_reg (call_fusage, reg)
2595 rtx *call_fusage, reg;
2597 if (GET_CODE (reg) != REG
2598 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2599 abort ();
2601 *call_fusage
2602 = gen_rtx_EXPR_LIST (VOIDmode,
2603 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2606 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2607 starting at REGNO. All of these registers must be hard registers. */
2609 void
2610 use_regs (call_fusage, regno, nregs)
2611 rtx *call_fusage;
2612 int regno;
2613 int nregs;
2615 int i;
2617 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2618 abort ();
2620 for (i = 0; i < nregs; i++)
2621 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2624 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2625 PARALLEL REGS. This is for calls that pass values in multiple
2626 non-contiguous locations. The Irix 6 ABI has examples of this. */
2628 void
2629 use_group_regs (call_fusage, regs)
2630 rtx *call_fusage;
2631 rtx regs;
2633 int i;
2635 for (i = 0; i < XVECLEN (regs, 0); i++)
2637 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2639 /* A NULL entry means the parameter goes both on the stack and in
2640 registers. This can also be a MEM for targets that pass values
2641 partially on the stack and partially in registers. */
2642 if (reg != 0 && GET_CODE (reg) == REG)
2643 use_reg (call_fusage, reg);
2648 /* Determine whether the LEN bytes generated by CONSTFUN can be
2649 stored to memory using several move instructions. CONSTFUNDATA is
2650 a pointer which will be passed as argument in every CONSTFUN call.
2651 ALIGN is maximum alignment we can assume. Return nonzero if a
2652 call to store_by_pieces should succeed. */
2655 can_store_by_pieces (len, constfun, constfundata, align)
2656 unsigned HOST_WIDE_INT len;
2657 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2658 PTR constfundata;
2659 unsigned int align;
2661 unsigned HOST_WIDE_INT max_size, l;
2662 HOST_WIDE_INT offset = 0;
2663 enum machine_mode mode, tmode;
2664 enum insn_code icode;
2665 int reverse;
2666 rtx cst;
2668 if (! STORE_BY_PIECES_P (len, align))
2669 return 0;
2671 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2672 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2673 align = MOVE_MAX * BITS_PER_UNIT;
2675 /* We would first store what we can in the largest integer mode, then go to
2676 successively smaller modes. */
2678 for (reverse = 0;
2679 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2680 reverse++)
2682 l = len;
2683 mode = VOIDmode;
2684 max_size = STORE_MAX_PIECES + 1;
2685 while (max_size > 1)
2687 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2688 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2689 if (GET_MODE_SIZE (tmode) < max_size)
2690 mode = tmode;
2692 if (mode == VOIDmode)
2693 break;
2695 icode = mov_optab->handlers[(int) mode].insn_code;
2696 if (icode != CODE_FOR_nothing
2697 && align >= GET_MODE_ALIGNMENT (mode))
2699 unsigned int size = GET_MODE_SIZE (mode);
2701 while (l >= size)
2703 if (reverse)
2704 offset -= size;
2706 cst = (*constfun) (constfundata, offset, mode);
2707 if (!LEGITIMATE_CONSTANT_P (cst))
2708 return 0;
2710 if (!reverse)
2711 offset += size;
2713 l -= size;
2717 max_size = GET_MODE_SIZE (mode);
2720 /* The code above should have handled everything. */
2721 if (l != 0)
2722 abort ();
2725 return 1;
2728 /* Generate several move instructions to store LEN bytes generated by
2729 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2730 pointer which will be passed as argument in every CONSTFUN call.
2731 ALIGN is maximum alignment we can assume. */
2733 void
2734 store_by_pieces (to, len, constfun, constfundata, align)
2735 rtx to;
2736 unsigned HOST_WIDE_INT len;
2737 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2738 PTR constfundata;
2739 unsigned int align;
2741 struct store_by_pieces data;
2743 if (! STORE_BY_PIECES_P (len, align))
2744 abort ();
2745 to = protect_from_queue (to, 1);
2746 data.constfun = constfun;
2747 data.constfundata = constfundata;
2748 data.len = len;
2749 data.to = to;
2750 store_by_pieces_1 (&data, align);
2753 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2754 rtx with BLKmode). The caller must pass TO through protect_from_queue
2755 before calling. ALIGN is maximum alignment we can assume. */
2757 static void
2758 clear_by_pieces (to, len, align)
2759 rtx to;
2760 unsigned HOST_WIDE_INT len;
2761 unsigned int align;
2763 struct store_by_pieces data;
2765 data.constfun = clear_by_pieces_1;
2766 data.constfundata = NULL;
2767 data.len = len;
2768 data.to = to;
2769 store_by_pieces_1 (&data, align);
2772 /* Callback routine for clear_by_pieces.
2773 Return const0_rtx unconditionally. */
2775 static rtx
2776 clear_by_pieces_1 (data, offset, mode)
2777 PTR data ATTRIBUTE_UNUSED;
2778 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2779 enum machine_mode mode ATTRIBUTE_UNUSED;
2781 return const0_rtx;
2784 /* Subroutine of clear_by_pieces and store_by_pieces.
2785 Generate several move instructions to store LEN bytes of block TO. (A MEM
2786 rtx with BLKmode). The caller must pass TO through protect_from_queue
2787 before calling. ALIGN is maximum alignment we can assume. */
2789 static void
2790 store_by_pieces_1 (data, align)
2791 struct store_by_pieces *data;
2792 unsigned int align;
2794 rtx to_addr = XEXP (data->to, 0);
2795 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2796 enum machine_mode mode = VOIDmode, tmode;
2797 enum insn_code icode;
2799 data->offset = 0;
2800 data->to_addr = to_addr;
2801 data->autinc_to
2802 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2803 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2805 data->explicit_inc_to = 0;
2806 data->reverse
2807 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2808 if (data->reverse)
2809 data->offset = data->len;
2811 /* If storing requires more than two move insns,
2812 copy addresses to registers (to make displacements shorter)
2813 and use post-increment if available. */
2814 if (!data->autinc_to
2815 && move_by_pieces_ninsns (data->len, align) > 2)
2817 /* Determine the main mode we'll be using. */
2818 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2819 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2820 if (GET_MODE_SIZE (tmode) < max_size)
2821 mode = tmode;
2823 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2825 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2826 data->autinc_to = 1;
2827 data->explicit_inc_to = -1;
2830 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2831 && ! data->autinc_to)
2833 data->to_addr = copy_addr_to_reg (to_addr);
2834 data->autinc_to = 1;
2835 data->explicit_inc_to = 1;
2838 if ( !data->autinc_to && CONSTANT_P (to_addr))
2839 data->to_addr = copy_addr_to_reg (to_addr);
2842 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2843 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2844 align = MOVE_MAX * BITS_PER_UNIT;
2846 /* First store what we can in the largest integer mode, then go to
2847 successively smaller modes. */
2849 while (max_size > 1)
2851 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2852 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2853 if (GET_MODE_SIZE (tmode) < max_size)
2854 mode = tmode;
2856 if (mode == VOIDmode)
2857 break;
2859 icode = mov_optab->handlers[(int) mode].insn_code;
2860 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2861 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2863 max_size = GET_MODE_SIZE (mode);
2866 /* The code above should have handled everything. */
2867 if (data->len != 0)
2868 abort ();
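/* For illustration, given sufficient alignment store_by_pieces_1 might
   clear 7 bytes on a 32-bit target as one SImode store, one HImode store
   and one QImode store (7 = 4 + 2 + 1), picking the widest available
   move insn at each step.  */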
2871 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2872 with move instructions for mode MODE. GENFUN is the gen_... function
2873 to make a move insn for that mode. DATA has all the other info. */
2875 static void
2876 store_by_pieces_2 (genfun, mode, data)
2877 rtx (*genfun) PARAMS ((rtx, ...));
2878 enum machine_mode mode;
2879 struct store_by_pieces *data;
2881 unsigned int size = GET_MODE_SIZE (mode);
2882 rtx to1, cst;
2884 while (data->len >= size)
2886 if (data->reverse)
2887 data->offset -= size;
2889 if (data->autinc_to)
2890 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2891 data->offset);
2892 else
2893 to1 = adjust_address (data->to, mode, data->offset);
2895 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2896 emit_insn (gen_add2_insn (data->to_addr,
2897 GEN_INT (-(HOST_WIDE_INT) size)));
2899 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2900 emit_insn ((*genfun) (to1, cst));
2902 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2903 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2905 if (! data->reverse)
2906 data->offset += size;
2908 data->len -= size;
2912 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2913 its length in bytes. */
2916 clear_storage (object, size)
2917 rtx object;
2918 rtx size;
2920 rtx retval = 0;
2921 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2922 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2924 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2925 just move a zero. Otherwise, do this a piece at a time. */
2926 if (GET_MODE (object) != BLKmode
2927 && GET_CODE (size) == CONST_INT
2928 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2929 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2930 else
2932 object = protect_from_queue (object, 1);
2933 size = protect_from_queue (size, 0);
2935 if (GET_CODE (size) == CONST_INT
2936 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2937 clear_by_pieces (object, INTVAL (size), align);
2938 else if (clear_storage_via_clrstr (object, size, align))
2940 else
2941 retval = clear_storage_via_libcall (object, size);
2944 return retval;
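/* In summary, clear_storage tries, in order: a plain move of zero for a
   non-BLKmode object whose size matches its mode, clear_by_pieces for
   small constant sizes, a clrstr machine pattern, and finally a library
   call to memset or bzero.  */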
2947 /* A subroutine of clear_storage. Expand a clrstr pattern;
2948 return true if successful. */
2950 static bool
2951 clear_storage_via_clrstr (object, size, align)
2952 rtx object, size;
2953 unsigned int align;
2955 /* Try the most limited insn first, because there's no point
2956 including more than one in the machine description unless
2957 the more limited one has some advantage. */
2959 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2960 enum machine_mode mode;
2962 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2963 mode = GET_MODE_WIDER_MODE (mode))
2965 enum insn_code code = clrstr_optab[(int) mode];
2966 insn_operand_predicate_fn pred;
2968 if (code != CODE_FOR_nothing
2969 /* We don't need MODE to be narrower than
2970 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2971 the mode mask, as it is returned by the macro, it will
2972 definitely be less than the actual mode mask. */
2973 && ((GET_CODE (size) == CONST_INT
2974 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2975 <= (GET_MODE_MASK (mode) >> 1)))
2976 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2977 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2978 || (*pred) (object, BLKmode))
2979 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2980 || (*pred) (opalign, VOIDmode)))
2982 rtx op1;
2983 rtx last = get_last_insn ();
2984 rtx pat;
2986 op1 = convert_to_mode (mode, size, 1);
2987 pred = insn_data[(int) code].operand[1].predicate;
2988 if (pred != 0 && ! (*pred) (op1, mode))
2989 op1 = copy_to_mode_reg (mode, op1);
2991 pat = GEN_FCN ((int) code) (object, op1, opalign);
2992 if (pat)
2994 emit_insn (pat);
2995 return true;
2997 else
2998 delete_insns_since (last);
3002 return false;
3005 /* A subroutine of clear_storage. Expand a call to memset or bzero.
3006 Return the return value of memset, 0 otherwise. */
3008 static rtx
3009 clear_storage_via_libcall (object, size)
3010 rtx object, size;
3012 tree call_expr, arg_list, fn, object_tree, size_tree;
3013 enum machine_mode size_mode;
3014 rtx retval;
3016 /* OBJECT or SIZE may have been passed through protect_from_queue.
3018 It is unsafe to save the value generated by protect_from_queue
3019 and reuse it later. Consider what happens if emit_queue is
3020 called before the return value from protect_from_queue is used.
3022 Expansion of the CALL_EXPR below will call emit_queue before
3023 we are finished emitting RTL for argument setup. So if we are
3024 not careful we could get the wrong value for an argument.
3026 To avoid this problem we go ahead and emit code to copy OBJECT
3027 and SIZE into new pseudos. We can then place those new pseudos
3028 into an RTL_EXPR and use them later, even after a call to
3029 emit_queue.
3031 Note this is not strictly needed for library calls since they
3032 do not call emit_queue before loading their arguments. However,
3033 we may need to have library calls call emit_queue in the future
3034 since failing to do so could cause problems for targets which
3035 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
3037 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
3039 if (TARGET_MEM_FUNCTIONS)
3040 size_mode = TYPE_MODE (sizetype);
3041 else
3042 size_mode = TYPE_MODE (unsigned_type_node);
3043 size = convert_to_mode (size_mode, size, 1);
3044 size = copy_to_mode_reg (size_mode, size);
3046 /* It is incorrect to use the libcall calling conventions to call
3047 memset in this context. This could be a user call to memset and
3048 the user may wish to examine the return value from memset. For
3049 targets where libcalls and normal calls have different conventions
3050 for returning pointers, we could end up generating incorrect code.
3052 For convenience, we generate the call to bzero this way as well. */
3054 object_tree = make_tree (ptr_type_node, object);
3055 if (TARGET_MEM_FUNCTIONS)
3056 size_tree = make_tree (sizetype, size);
3057 else
3058 size_tree = make_tree (unsigned_type_node, size);
3060 fn = clear_storage_libcall_fn (true);
3061 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
3062 if (TARGET_MEM_FUNCTIONS)
3063 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3064 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
3066 /* Now we have to build up the CALL_EXPR itself. */
3067 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3068 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3069 call_expr, arg_list, NULL_TREE);
3070 TREE_SIDE_EFFECTS (call_expr) = 1;
3072 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3074 /* If we are initializing a readonly value, show the above call
3075 clobbered it. Otherwise, a load from it may erroneously be
3076 hoisted from a loop. */
3077 if (RTX_UNCHANGING_P (object))
3078 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3080 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
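/* For illustration, the CALL_EXPR built above corresponds to the C call
   memset (object, 0, size) when TARGET_MEM_FUNCTIONS is nonzero, and to
   bzero (object, size) otherwise.  */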
3083 /* A subroutine of clear_storage_via_libcall. Create the tree node
3084 for the function we use for block clears. The first time FOR_CALL
3085 is true, we call assemble_external. */
3087 static GTY(()) tree block_clear_fn;
3089 static tree
3090 clear_storage_libcall_fn (for_call)
3091 int for_call;
3093 static bool emitted_extern;
3094 tree fn = block_clear_fn, args;
3096 if (!fn)
3098 if (TARGET_MEM_FUNCTIONS)
3100 fn = get_identifier ("memset");
3101 args = build_function_type_list (ptr_type_node, ptr_type_node,
3102 integer_type_node, sizetype,
3103 NULL_TREE);
3105 else
3107 fn = get_identifier ("bzero");
3108 args = build_function_type_list (void_type_node, ptr_type_node,
3109 unsigned_type_node, NULL_TREE);
3112 fn = build_decl (FUNCTION_DECL, fn, args);
3113 DECL_EXTERNAL (fn) = 1;
3114 TREE_PUBLIC (fn) = 1;
3115 DECL_ARTIFICIAL (fn) = 1;
3116 TREE_NOTHROW (fn) = 1;
3118 block_clear_fn = fn;
3121 if (for_call && !emitted_extern)
3123 emitted_extern = true;
3124 make_decl_rtl (fn, NULL);
3125 assemble_external (fn);
3128 return fn;
3131 /* Generate code to copy Y into X.
3132 Both Y and X must have the same mode, except that
3133 Y can be a constant with VOIDmode.
3134 This mode cannot be BLKmode; use emit_block_move for that.
3136 Return the last instruction emitted. */
3139 emit_move_insn (x, y)
3140 rtx x, y;
3142 enum machine_mode mode = GET_MODE (x);
3143 rtx y_cst = NULL_RTX;
3144 rtx last_insn;
3146 x = protect_from_queue (x, 1);
3147 y = protect_from_queue (y, 0);
3149 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3150 abort ();
3152 /* Never force constant_p_rtx to memory. */
3153 if (GET_CODE (y) == CONSTANT_P_RTX)
3155 else if (CONSTANT_P (y))
3157 if (optimize
3158 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3159 && (last_insn = compress_float_constant (x, y)))
3160 return last_insn;
3162 if (!LEGITIMATE_CONSTANT_P (y))
3164 y_cst = y;
3165 y = force_const_mem (mode, y);
3167 /* If the target's cannot_force_const_mem prevented the spill,
3168 assume that the target's move expanders will also take care
3169 of the non-legitimate constant. */
3170 if (!y)
3171 y = y_cst;
3175 /* If X or Y are memory references, verify that their addresses are valid
3176 for the machine. */
3177 if (GET_CODE (x) == MEM
3178 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3179 && ! push_operand (x, GET_MODE (x)))
3180 || (flag_force_addr
3181 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3182 x = validize_mem (x);
3184 if (GET_CODE (y) == MEM
3185 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3186 || (flag_force_addr
3187 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3188 y = validize_mem (y);
3190 if (mode == BLKmode)
3191 abort ();
3193 last_insn = emit_move_insn_1 (x, y);
3195 if (y_cst && GET_CODE (x) == REG)
3196 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3198 return last_insn;
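/* Typical use, for illustration: loading a constant into a fresh pseudo,

       rtx reg = gen_reg_rtx (SImode);
       emit_move_insn (reg, GEN_INT (42));

   If the constant is not LEGITIMATE_CONSTANT_P, the code above spills it
   to the constant pool first (unless the target's move expanders handle
   it) and records a REG_EQUAL note on the final insn.  */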
3201 /* Low level part of emit_move_insn.
3202 Called just like emit_move_insn, but assumes X and Y
3203 are basically valid. */
3206 emit_move_insn_1 (x, y)
3207 rtx x, y;
3209 enum machine_mode mode = GET_MODE (x);
3210 enum machine_mode submode;
3211 enum mode_class class = GET_MODE_CLASS (mode);
3213 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3214 abort ();
3216 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3217 return
3218 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3220 /* Expand complex moves by moving real part and imag part, if possible. */
3221 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3222 && BLKmode != (submode = GET_MODE_INNER (mode))
3223 && (mov_optab->handlers[(int) submode].insn_code
3224 != CODE_FOR_nothing))
3226 /* Don't split destination if it is a stack push. */
3227 int stack = push_operand (x, GET_MODE (x));
3229 #ifdef PUSH_ROUNDING
3230 /* In case we output to the stack, but the size is smaller than what the
3231 machine can push exactly, we need to use move instructions. */
3232 if (stack
3233 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3234 != GET_MODE_SIZE (submode)))
3236 rtx temp;
3237 HOST_WIDE_INT offset1, offset2;
3239 /* Do not use anti_adjust_stack, since we don't want to update
3240 stack_pointer_delta. */
3241 temp = expand_binop (Pmode,
3242 #ifdef STACK_GROWS_DOWNWARD
3243 sub_optab,
3244 #else
3245 add_optab,
3246 #endif
3247 stack_pointer_rtx,
3248 GEN_INT
3249 (PUSH_ROUNDING
3250 (GET_MODE_SIZE (GET_MODE (x)))),
3251 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3253 if (temp != stack_pointer_rtx)
3254 emit_move_insn (stack_pointer_rtx, temp);
3256 #ifdef STACK_GROWS_DOWNWARD
3257 offset1 = 0;
3258 offset2 = GET_MODE_SIZE (submode);
3259 #else
3260 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3261 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3262 + GET_MODE_SIZE (submode));
3263 #endif
3265 emit_move_insn (change_address (x, submode,
3266 gen_rtx_PLUS (Pmode,
3267 stack_pointer_rtx,
3268 GEN_INT (offset1))),
3269 gen_realpart (submode, y));
3270 emit_move_insn (change_address (x, submode,
3271 gen_rtx_PLUS (Pmode,
3272 stack_pointer_rtx,
3273 GEN_INT (offset2))),
3274 gen_imagpart (submode, y));
3276 else
3277 #endif
3278 /* If this is a stack push, push the highpart first, so it
3279 will be in the argument order.
3281 In that case, change_address is used only to convert
3282 the mode, not to change the address. */
3283 if (stack)
3285 /* Note that the real part always precedes the imag part in memory
3286 regardless of machine's endianness. */
3287 #ifdef STACK_GROWS_DOWNWARD
3288 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3289 (gen_rtx_MEM (submode, XEXP (x, 0)),
3290 gen_imagpart (submode, y)));
3291 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3292 (gen_rtx_MEM (submode, XEXP (x, 0)),
3293 gen_realpart (submode, y)));
3294 #else
3295 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3296 (gen_rtx_MEM (submode, XEXP (x, 0)),
3297 gen_realpart (submode, y)));
3298 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3299 (gen_rtx_MEM (submode, XEXP (x, 0)),
3300 gen_imagpart (submode, y)));
3301 #endif
3303 else
3305 rtx realpart_x, realpart_y;
3306 rtx imagpart_x, imagpart_y;
3308 /* If this is a complex value with each part being smaller than a
3309 word, the usual calling sequence will likely pack the pieces into
3310 a single register. Unfortunately, SUBREG of hard registers only
3311 deals in terms of words, so we have a problem converting input
3312 arguments to the CONCAT of two registers that is used elsewhere
3313 for complex values. If this is before reload, we can copy it into
3314 memory and reload. FIXME, we should see about using extract and
3315 insert on integer registers, but complex short and complex char
3316 variables should be rarely used. */
3317 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3318 && (reload_in_progress | reload_completed) == 0)
3320 int packed_dest_p
3321 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3322 int packed_src_p
3323 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3325 if (packed_dest_p || packed_src_p)
3327 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3328 ? MODE_FLOAT : MODE_INT);
3330 enum machine_mode reg_mode
3331 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3333 if (reg_mode != BLKmode)
3335 rtx mem = assign_stack_temp (reg_mode,
3336 GET_MODE_SIZE (mode), 0);
3337 rtx cmem = adjust_address (mem, mode, 0);
3339 cfun->cannot_inline
3340 = N_("function using short complex types cannot be inline");
3342 if (packed_dest_p)
3344 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3346 emit_move_insn_1 (cmem, y);
3347 return emit_move_insn_1 (sreg, mem);
3349 else
3351 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3353 emit_move_insn_1 (mem, sreg);
3354 return emit_move_insn_1 (x, cmem);
3360 realpart_x = gen_realpart (submode, x);
3361 realpart_y = gen_realpart (submode, y);
3362 imagpart_x = gen_imagpart (submode, x);
3363 imagpart_y = gen_imagpart (submode, y);
3365 /* Show the output dies here. This is necessary for SUBREGs
3366 of pseudos since we cannot track their lifetimes correctly;
3367 hard regs shouldn't appear here except as return values.
3368 We never want to emit such a clobber after reload. */
3369 if (x != y
3370 && ! (reload_in_progress || reload_completed)
3371 && (GET_CODE (realpart_x) == SUBREG
3372 || GET_CODE (imagpart_x) == SUBREG))
3373 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3375 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3376 (realpart_x, realpart_y));
3377 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3378 (imagpart_x, imagpart_y));
3381 return get_last_insn ();
3384 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3385 find a mode to do it in. If we have a movcc, use it. Otherwise,
3386 find the MODE_INT mode of the same width. */
3387 else if (GET_MODE_CLASS (mode) == MODE_CC
3388 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3390 enum insn_code insn_code;
3391 enum machine_mode tmode = VOIDmode;
3392 rtx x1 = x, y1 = y;
3394 if (mode != CCmode
3395 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3396 tmode = CCmode;
3397 else
3398 for (tmode = QImode; tmode != VOIDmode;
3399 tmode = GET_MODE_WIDER_MODE (tmode))
3400 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3401 break;
3403 if (tmode == VOIDmode)
3404 abort ();
3406 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3407 may call change_address which is not appropriate if we were
3408 called when a reload was in progress. We don't have to worry
3409 about changing the address since the size in bytes is supposed to
3410 be the same. Copy the MEM to change the mode and move any
3411 substitutions from the old MEM to the new one. */
3413 if (reload_in_progress)
3415 x = gen_lowpart_common (tmode, x1);
3416 if (x == 0 && GET_CODE (x1) == MEM)
3418 x = adjust_address_nv (x1, tmode, 0);
3419 copy_replacements (x1, x);
3422 y = gen_lowpart_common (tmode, y1);
3423 if (y == 0 && GET_CODE (y1) == MEM)
3425 y = adjust_address_nv (y1, tmode, 0);
3426 copy_replacements (y1, y);
3429 else
3431 x = gen_lowpart (tmode, x);
3432 y = gen_lowpart (tmode, y);
3435 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3436 return emit_insn (GEN_FCN (insn_code) (x, y));
3439 /* This will handle any multi-word or full-word mode that lacks a move_insn
3440 pattern. However, you will get better code if you define such patterns,
3441 even if they must turn into multiple assembler instructions. */
3442 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3444 rtx last_insn = 0;
3445 rtx seq, inner;
3446 int need_clobber;
3447 int i;
3449 #ifdef PUSH_ROUNDING
3451 /* If X is a push on the stack, do the push now and replace
3452 X with a reference to the stack pointer. */
3453 if (push_operand (x, GET_MODE (x)))
3455 rtx temp;
3456 enum rtx_code code;
3458 /* Do not use anti_adjust_stack, since we don't want to update
3459 stack_pointer_delta. */
3460 temp = expand_binop (Pmode,
3461 #ifdef STACK_GROWS_DOWNWARD
3462 sub_optab,
3463 #else
3464 add_optab,
3465 #endif
3466 stack_pointer_rtx,
3467 GEN_INT
3468 (PUSH_ROUNDING
3469 (GET_MODE_SIZE (GET_MODE (x)))),
3470 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3472 if (temp != stack_pointer_rtx)
3473 emit_move_insn (stack_pointer_rtx, temp);
3475 code = GET_CODE (XEXP (x, 0));
3477 /* Just hope that small offsets off SP are OK. */
3478 if (code == POST_INC)
3479 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3480 GEN_INT (-((HOST_WIDE_INT)
3481 GET_MODE_SIZE (GET_MODE (x)))));
3482 else if (code == POST_DEC)
3483 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3484 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3485 else
3486 temp = stack_pointer_rtx;
3488 x = change_address (x, VOIDmode, temp);
3490 #endif
3492 /* If we are in reload, see if either operand is a MEM whose address
3493 is scheduled for replacement. */
3494 if (reload_in_progress && GET_CODE (x) == MEM
3495 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3496 x = replace_equiv_address_nv (x, inner);
3497 if (reload_in_progress && GET_CODE (y) == MEM
3498 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3499 y = replace_equiv_address_nv (y, inner);
3501 start_sequence ();
3503 need_clobber = 0;
3504 for (i = 0;
3505 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3506 i++)
3508 rtx xpart = operand_subword (x, i, 1, mode);
3509 rtx ypart = operand_subword (y, i, 1, mode);
3511 /* If we can't get a part of Y, put Y into memory if it is a
3512 constant. Otherwise, force it into a register. If we still
3513 can't get a part of Y, abort. */
3514 if (ypart == 0 && CONSTANT_P (y))
3516 y = force_const_mem (mode, y);
3517 ypart = operand_subword (y, i, 1, mode);
3519 else if (ypart == 0)
3520 ypart = operand_subword_force (y, i, mode);
3522 if (xpart == 0 || ypart == 0)
3523 abort ();
3525 need_clobber |= (GET_CODE (xpart) == SUBREG);
3527 last_insn = emit_move_insn (xpart, ypart);
3530 seq = get_insns ();
3531 end_sequence ();
3533 /* Show the output dies here. This is necessary for SUBREGs
3534 of pseudos since we cannot track their lifetimes correctly;
3535 hard regs shouldn't appear here except as return values.
3536 We never want to emit such a clobber after reload. */
3537 if (x != y
3538 && ! (reload_in_progress || reload_completed)
3539 && need_clobber != 0)
3540 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3542 emit_insn (seq);
3544 return last_insn;
3546 else
3547 abort ();
3550 /* If Y is representable exactly in a narrower mode, and the target can
3551 perform the extension directly from constant or memory, then emit the
3552 move as an extension. */
3554 static rtx
3555 compress_float_constant (x, y)
3556 rtx x, y;
3558 enum machine_mode dstmode = GET_MODE (x);
3559 enum machine_mode orig_srcmode = GET_MODE (y);
3560 enum machine_mode srcmode;
3561 REAL_VALUE_TYPE r;
3563 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3565 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3566 srcmode != orig_srcmode;
3567 srcmode = GET_MODE_WIDER_MODE (srcmode))
3569 enum insn_code ic;
3570 rtx trunc_y, last_insn;
3572 /* Skip if the target can't extend this way. */
3573 ic = can_extend_p (dstmode, srcmode, 0);
3574 if (ic == CODE_FOR_nothing)
3575 continue;
3577 /* Skip if the narrowed value isn't exact. */
3578 if (! exact_real_truncate (srcmode, &r))
3579 continue;
3581 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3583 if (LEGITIMATE_CONSTANT_P (trunc_y))
3585 /* Skip if the target needs extra instructions to perform
3586 the extension. */
3587 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3588 continue;
3590 else if (float_extend_from_mem[dstmode][srcmode])
3591 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3592 else
3593 continue;
3595 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3596 last_insn = get_last_insn ();
3598 if (GET_CODE (x) == REG)
3599 REG_NOTES (last_insn)
3600 = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
3602 return last_insn;
3605 return NULL_RTX;
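/* For illustration: a DFmode constant such as 1.0 that survives
   exact_real_truncate to SFmode may be emitted as an SFmode constant (or
   constant-pool load) followed by a float_extend, provided the target's
   extendsfdf2-style pattern accepts that operand directly.  */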
3608 /* Pushing data onto the stack. */
3610 /* Push a block of length SIZE (perhaps variable)
3611 and return an rtx to address the beginning of the block.
3612 Note that it is not possible for the value returned to be a QUEUED.
3613 The value may be virtual_outgoing_args_rtx.
3615 EXTRA is the number of bytes of padding to push in addition to SIZE.
3616 BELOW nonzero means this padding comes at low addresses;
3617 otherwise, the padding comes at high addresses. */
3620 push_block (size, extra, below)
3621 rtx size;
3622 int extra, below;
3624 rtx temp;
3626 size = convert_modes (Pmode, ptr_mode, size, 1);
3627 if (CONSTANT_P (size))
3628 anti_adjust_stack (plus_constant (size, extra));
3629 else if (GET_CODE (size) == REG && extra == 0)
3630 anti_adjust_stack (size);
3631 else
3633 temp = copy_to_mode_reg (Pmode, size);
3634 if (extra != 0)
3635 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3636 temp, 0, OPTAB_LIB_WIDEN);
3637 anti_adjust_stack (temp);
3640 #ifndef STACK_GROWS_DOWNWARD
3641 if (0)
3642 #else
3643 if (1)
3644 #endif
3646 temp = virtual_outgoing_args_rtx;
3647 if (extra != 0 && below)
3648 temp = plus_constant (temp, extra);
3650 else
3652 if (GET_CODE (size) == CONST_INT)
3653 temp = plus_constant (virtual_outgoing_args_rtx,
3654 -INTVAL (size) - (below ? 0 : extra));
3655 else if (extra != 0 && !below)
3656 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3657 negate_rtx (Pmode, plus_constant (size, extra)));
3658 else
3659 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3660 negate_rtx (Pmode, size));
3663 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3666 #ifdef PUSH_ROUNDING
3668 /* Emit single push insn. */
3670 static void
3671 emit_single_push_insn (mode, x, type)
3672 rtx x;
3673 enum machine_mode mode;
3674 tree type;
3676 rtx dest_addr;
3677 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3678 rtx dest;
3679 enum insn_code icode;
3680 insn_operand_predicate_fn pred;
3682 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3683 /* If there is a push pattern, use it. Otherwise try the old way of
3684 throwing a MEM representing the push operation to the move expander. */
3685 icode = push_optab->handlers[(int) mode].insn_code;
3686 if (icode != CODE_FOR_nothing)
3688 if (((pred = insn_data[(int) icode].operand[0].predicate)
3689 && !((*pred) (x, mode))))
3690 x = force_reg (mode, x);
3691 emit_insn (GEN_FCN (icode) (x));
3692 return;
3694 if (GET_MODE_SIZE (mode) == rounded_size)
3695 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3696 else
3698 #ifdef STACK_GROWS_DOWNWARD
3699 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3700 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3701 #else
3702 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3703 GEN_INT (rounded_size));
3704 #endif
3705 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3708 dest = gen_rtx_MEM (mode, dest_addr);
3710 if (type != 0)
3712 set_mem_attributes (dest, type, 1);
3714 if (flag_optimize_sibling_calls)
3715 /* Function incoming arguments may overlap with sibling call
3716 outgoing arguments and we cannot allow reordering of reads
3717 from function arguments with stores to outgoing arguments
3718 of sibling calls. */
3719 set_mem_alias_set (dest, 0);
3721 emit_move_insn (dest, x);
3723 #endif
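/* For illustration, when the mode's size equals PUSH_ROUNDING of itself,
   the address built above is simply (STACK_PUSH_CODE (reg sp)), e.g.
   (pre_dec (reg sp)) on a downward-growing stack; otherwise a
   (pre_modify sp (plus sp (const_int +/-rounded_size))) address is used
   so the full rounded amount is allocated in one step.  */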
3725 /* Generate code to push X onto the stack, assuming it has mode MODE and
3726 type TYPE.
3727 MODE is redundant except when X is a CONST_INT (since they don't
3728 carry mode info).
3729 SIZE is an rtx for the size of data to be copied (in bytes),
3730 needed only if X is BLKmode.
3732 ALIGN (in bits) is maximum alignment we can assume.
3734 If PARTIAL and REG are both nonzero, then copy that many of the first
3735 words of X into registers starting with REG, and push the rest of X.
3736 The amount of space pushed is decreased by PARTIAL words,
3737 rounded *down* to a multiple of PARM_BOUNDARY.
3738 REG must be a hard register in this case.
3739 If REG is zero but PARTIAL is not, take all other actions for an
3740 argument partially in registers, but do not actually load any
3741 registers.
3743 EXTRA is the amount in bytes of extra space to leave next to this arg.
3744 This is ignored if an argument block has already been allocated.
3746 On a machine that lacks real push insns, ARGS_ADDR is the address of
3747 the bottom of the argument block for this call. We use indexing off there
3748 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3749 argument block has not been preallocated.
3751 ARGS_SO_FAR is the size of args previously pushed for this call.
3753 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3754 for arguments passed in registers. If nonzero, it will be the number
3755 of bytes required. */
3757 void
3758 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3759 args_addr, args_so_far, reg_parm_stack_space,
3760 alignment_pad)
3761 rtx x;
3762 enum machine_mode mode;
3763 tree type;
3764 rtx size;
3765 unsigned int align;
3766 int partial;
3767 rtx reg;
3768 int extra;
3769 rtx args_addr;
3770 rtx args_so_far;
3771 int reg_parm_stack_space;
3772 rtx alignment_pad;
3774 rtx xinner;
3775 enum direction stack_direction
3776 #ifdef STACK_GROWS_DOWNWARD
3777 = downward;
3778 #else
3779 = upward;
3780 #endif
3782 /* Decide where to pad the argument: `downward' for below,
3783 `upward' for above, or `none' for don't pad it.
3784 Default is below for small data on big-endian machines; else above. */
3785 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3787 /* Invert direction if stack is post-decrement.
3788 FIXME: why? */
3789 if (STACK_PUSH_CODE == POST_DEC)
3790 if (where_pad != none)
3791 where_pad = (where_pad == downward ? upward : downward);
3793 xinner = x = protect_from_queue (x, 0);
3795 if (mode == BLKmode)
3797 /* Copy a block into the stack, entirely or partially. */
3799 rtx temp;
3800 int used = partial * UNITS_PER_WORD;
3801 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3802 int skip;
3804 if (size == 0)
3805 abort ();
3807 used -= offset;
3809 /* USED is now the # of bytes we need not copy to the stack
3810 because registers will take care of them. */
3812 if (partial != 0)
3813 xinner = adjust_address (xinner, BLKmode, used);
3815 /* If the partial register-part of the arg counts in its stack size,
3816 skip the part of stack space corresponding to the registers.
3817 Otherwise, start copying to the beginning of the stack space,
3818 by setting SKIP to 0. */
3819 skip = (reg_parm_stack_space == 0) ? 0 : used;
3821 #ifdef PUSH_ROUNDING
3822 /* Do it with several push insns if that doesn't take lots of insns
3823 and if there is no difficulty with push insns that skip bytes
3824 on the stack for alignment purposes. */
3825 if (args_addr == 0
3826 && PUSH_ARGS
3827 && GET_CODE (size) == CONST_INT
3828 && skip == 0
3829 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3830 /* Here we avoid the case of a structure whose weak alignment
3831 forces many pushes of a small amount of data,
3832 and such small pushes do rounding that causes trouble. */
3833 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3834 || align >= BIGGEST_ALIGNMENT
3835 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3836 == (align / BITS_PER_UNIT)))
3837 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3839 /* Push padding now if padding above and stack grows down,
3840 or if padding below and stack grows up.
3841 But if space already allocated, this has already been done. */
3842 if (extra && args_addr == 0
3843 && where_pad != none && where_pad != stack_direction)
3844 anti_adjust_stack (GEN_INT (extra));
3846 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3848 else
3849 #endif /* PUSH_ROUNDING */
3851 rtx target;
3853 /* Otherwise make space on the stack and copy the data
3854 to the address of that space. */
3856 /* Deduct words put into registers from the size we must copy. */
3857 if (partial != 0)
3859 if (GET_CODE (size) == CONST_INT)
3860 size = GEN_INT (INTVAL (size) - used);
3861 else
3862 size = expand_binop (GET_MODE (size), sub_optab, size,
3863 GEN_INT (used), NULL_RTX, 0,
3864 OPTAB_LIB_WIDEN);
3867 /* Get the address of the stack space.
3868 In this case, we do not deal with EXTRA separately.
3869 A single stack adjust will do. */
3870 if (! args_addr)
3872 temp = push_block (size, extra, where_pad == downward);
3873 extra = 0;
3875 else if (GET_CODE (args_so_far) == CONST_INT)
3876 temp = memory_address (BLKmode,
3877 plus_constant (args_addr,
3878 skip + INTVAL (args_so_far)));
3879 else
3880 temp = memory_address (BLKmode,
3881 plus_constant (gen_rtx_PLUS (Pmode,
3882 args_addr,
3883 args_so_far),
3884 skip));
3886 if (!ACCUMULATE_OUTGOING_ARGS)
3888 /* If the source is referenced relative to the stack pointer,
3889 copy it to another register to stabilize it. We do not need
3890 to do this if we know that we won't be changing sp. */
3892 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3893 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3894 temp = copy_to_reg (temp);
3897 target = gen_rtx_MEM (BLKmode, temp);
3899 if (type != 0)
3901 set_mem_attributes (target, type, 1);
3902 /* Function incoming arguments may overlap with sibling call
3903 outgoing arguments and we cannot allow reordering of reads
3904 from function arguments with stores to outgoing arguments
3905 of sibling calls. */
3906 set_mem_alias_set (target, 0);
3909 /* ALIGN may well be better aligned than TYPE, e.g. due to
3910 PARM_BOUNDARY. Assume the caller isn't lying. */
3911 set_mem_align (target, align);
3913 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3916 else if (partial > 0)
3918 /* Scalar partly in registers. */
3920 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3921 int i;
3922 int not_stack;
3923 /* # words of start of argument
3924 that we must make space for but need not store. */
3925 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3926 int args_offset = INTVAL (args_so_far);
3927 int skip;
3929 /* Push padding now if padding above and stack grows down,
3930 or if padding below and stack grows up.
3931 But if space already allocated, this has already been done. */
3932 if (extra && args_addr == 0
3933 && where_pad != none && where_pad != stack_direction)
3934 anti_adjust_stack (GEN_INT (extra));
3936 /* If we make space by pushing it, we might as well push
3937 the real data. Otherwise, we can leave OFFSET nonzero
3938 and leave the space uninitialized. */
3939 if (args_addr == 0)
3940 offset = 0;
3942 /* Now NOT_STACK gets the number of words that we don't need to
3943 allocate on the stack. */
3944 not_stack = partial - offset;
3946 /* If the partial register-part of the arg counts in its stack size,
3947 skip the part of stack space corresponding to the registers.
3948 Otherwise, start copying to the beginning of the stack space,
3949 by setting SKIP to 0. */
3950 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3952 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3953 x = validize_mem (force_const_mem (mode, x));
3955 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3956 SUBREGs of such registers are not allowed. */
3957 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3958 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3959 x = copy_to_reg (x);
3961 /* Loop over all the words allocated on the stack for this arg. */
3962 /* We can do it by words, because any scalar bigger than a word
3963 has a size that is a multiple of a word. */
3964 #ifndef PUSH_ARGS_REVERSED
3965 for (i = not_stack; i < size; i++)
3966 #else
3967 for (i = size - 1; i >= not_stack; i--)
3968 #endif
3969 if (i >= not_stack + offset)
3970 emit_push_insn (operand_subword_force (x, i, mode),
3971 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3972 0, args_addr,
3973 GEN_INT (args_offset + ((i - not_stack + skip)
3974 * UNITS_PER_WORD)),
3975 reg_parm_stack_space, alignment_pad);
3977 else
3979 rtx addr;
3980 rtx dest;
3982 /* Push padding now if padding above and stack grows down,
3983 or if padding below and stack grows up.
3984 But if space already allocated, this has already been done. */
3985 if (extra && args_addr == 0
3986 && where_pad != none && where_pad != stack_direction)
3987 anti_adjust_stack (GEN_INT (extra));
3989 #ifdef PUSH_ROUNDING
3990 if (args_addr == 0 && PUSH_ARGS)
3991 emit_single_push_insn (mode, x, type);
3992 else
3993 #endif
3995 if (GET_CODE (args_so_far) == CONST_INT)
3996 addr
3997 = memory_address (mode,
3998 plus_constant (args_addr,
3999 INTVAL (args_so_far)));
4000 else
4001 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4002 args_so_far));
4003 dest = gen_rtx_MEM (mode, addr);
4004 if (type != 0)
4006 set_mem_attributes (dest, type, 1);
4007 /* Function incoming arguments may overlap with sibling call
4008 outgoing arguments and we cannot allow reordering of reads
4009 from function arguments with stores to outgoing arguments
4010 of sibling calls. */
4011 set_mem_alias_set (dest, 0);
4014 emit_move_insn (dest, x);
4018 /* If part should go in registers, copy that part
4019 into the appropriate registers. Do this now, at the end,
4020 since mem-to-mem copies above may do function calls. */
4021 if (partial > 0 && reg != 0)
4023 /* Handle calls that pass values in multiple non-contiguous locations.
4024 The Irix 6 ABI has examples of this. */
4025 if (GET_CODE (reg) == PARALLEL)
4026 emit_group_load (reg, x, -1); /* ??? size? */
4027 else
4028 move_block_to_reg (REGNO (reg), x, partial, mode);
4031 if (extra && args_addr == 0 && where_pad == stack_direction)
4032 anti_adjust_stack (GEN_INT (extra));
4034 if (alignment_pad && args_addr == 0)
4035 anti_adjust_stack (alignment_pad);
4038 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4039 operations. */
4041 static rtx
4042 get_subtarget (x)
4043 rtx x;
4045 return ((x == 0
4046 /* Only registers can be subtargets. */
4047 || GET_CODE (x) != REG
4048 /* If the register is readonly, it can't be set more than once. */
4049 || RTX_UNCHANGING_P (x)
4050 /* Don't use hard regs to avoid extending their life. */
4051 || REGNO (x) < FIRST_PSEUDO_REGISTER
4052 /* Avoid subtargets inside loops,
4053 since they hide some invariant expressions. */
4054 || preserve_subexpressions_p ())
4055 ? 0 : x);
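/* A typical use (illustrative only): a caller expanding a binary
operation can pass get_subtarget (target) as the preferred destination
for an intermediate expand_binop result, falling back to a fresh
pseudo when this function returns 0. */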
4058 /* Expand an assignment that stores the value of FROM into TO.
4059 If WANT_VALUE is nonzero, return an rtx for the value of TO.
4060 (This may contain a QUEUED rtx;
4061 if the value is constant, this rtx is a constant.)
4062 Otherwise, the returned value is NULL_RTX.
4064 SUGGEST_REG is no longer actually used.
4065 It used to mean: copy the value through a register
4066 and return that register, if possible.
4067 We now use WANT_VALUE to decide whether to do this. */
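/* For example (illustrative), for a C assignment such as s.f = x + 1,
TO is the COMPONENT_REF for s.f and FROM is the PLUS_EXPR; the
COMPONENT_REF path below finds the field's position with
get_inner_reference and stores the value with store_field. */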
4070 expand_assignment (to, from, want_value, suggest_reg)
4071 tree to, from;
4072 int want_value;
4073 int suggest_reg ATTRIBUTE_UNUSED;
4075 rtx to_rtx = 0;
4076 rtx result;
4078 /* Don't crash if the lhs of the assignment was erroneous. */
4080 if (TREE_CODE (to) == ERROR_MARK)
4082 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
4083 return want_value ? result : NULL_RTX;
4086 /* Assignment of a structure component needs special treatment
4087 if the structure component's rtx is not simply a MEM.
4088 Assignment of an array element at a constant index, and assignment of
4089 an array element in an unaligned packed structure field, have the same
4090 problem. */
4092 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
4093 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
4094 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4096 enum machine_mode mode1;
4097 HOST_WIDE_INT bitsize, bitpos;
4098 rtx orig_to_rtx;
4099 tree offset;
4100 int unsignedp;
4101 int volatilep = 0;
4102 tree tem;
4104 push_temp_slots ();
4105 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4106 &unsignedp, &volatilep);
4108 /* If we are going to use store_bit_field and extract_bit_field,
4109 make sure to_rtx will be safe for multiple use. */
4111 if (mode1 == VOIDmode && want_value)
4112 tem = stabilize_reference (tem);
4114 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
4116 if (offset != 0)
4118 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4120 if (GET_CODE (to_rtx) != MEM)
4121 abort ();
4123 #ifdef POINTERS_EXTEND_UNSIGNED
4124 if (GET_MODE (offset_rtx) != Pmode)
4125 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4126 #else
4127 if (GET_MODE (offset_rtx) != ptr_mode)
4128 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4129 #endif
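/* OFFSET was expanded with EXPAND_SUM, so OFFSET_RTX may arrive in the
mode of sizetype rather than in the pointer mode; the conversion above
brings it to the mode expected when forming the address below. */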
4131 /* A constant address in TO_RTX can have VOIDmode; we must not try
4132 to call force_reg for that case, so avoid it. */
4133 if (GET_CODE (to_rtx) == MEM
4134 && GET_MODE (to_rtx) == BLKmode
4135 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4136 && bitsize > 0
4137 && (bitpos % bitsize) == 0
4138 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4139 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4141 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4142 bitpos = 0;
4145 to_rtx = offset_address (to_rtx, offset_rtx,
4146 highest_pow2_factor_for_type (TREE_TYPE (to),
4147 offset));
4150 if (GET_CODE (to_rtx) == MEM)
4152 /* If the field is at offset zero, we could have been given the
4153 DECL_RTX of the parent struct. Don't munge it. */
4154 to_rtx = shallow_copy_rtx (to_rtx);
4156 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4159 /* Deal with volatile and readonly fields. The former is only done
4160 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4161 if (volatilep && GET_CODE (to_rtx) == MEM)
4163 if (to_rtx == orig_to_rtx)
4164 to_rtx = copy_rtx (to_rtx);
4165 MEM_VOLATILE_P (to_rtx) = 1;
4168 if (TREE_CODE (to) == COMPONENT_REF
4169 && TREE_READONLY (TREE_OPERAND (to, 1)))
4171 if (to_rtx == orig_to_rtx)
4172 to_rtx = copy_rtx (to_rtx);
4173 RTX_UNCHANGING_P (to_rtx) = 1;
4176 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4178 if (to_rtx == orig_to_rtx)
4179 to_rtx = copy_rtx (to_rtx);
4180 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4183 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4184 (want_value
4185 /* Spurious cast for HPUX compiler. */
4186 ? ((enum machine_mode)
4187 TYPE_MODE (TREE_TYPE (to)))
4188 : VOIDmode),
4189 unsignedp, TREE_TYPE (tem), get_alias_set (to));
4191 preserve_temp_slots (result);
4192 free_temp_slots ();
4193 pop_temp_slots ();
4195 /* If the value is meaningful, convert RESULT to the proper mode.
4196 Otherwise, return nothing. */
4197 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4198 TYPE_MODE (TREE_TYPE (from)),
4199 result,
4200 TREE_UNSIGNED (TREE_TYPE (to)))
4201 : NULL_RTX);
4204 /* If the rhs is a function call and its value is not an aggregate,
4205 call the function before we start to compute the lhs.
4206 This is needed for correct code for cases such as
4207 val = setjmp (buf) on machines where reference to val
4208 requires loading up part of an address in a separate insn.
4210 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is a REG,
4211 since it might be a promoted variable where the zero- or sign-extension
4212 needs to be done. Handling this in the normal way is safe because no
4213 computation is done before the call. */
4214 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
4215 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4216 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4217 && GET_CODE (DECL_RTL (to)) == REG))
4219 rtx value;
4221 push_temp_slots ();
4222 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4223 if (to_rtx == 0)
4224 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4226 /* Handle calls that return values in multiple non-contiguous locations.
4227 The Irix 6 ABI has examples of this. */
4228 if (GET_CODE (to_rtx) == PARALLEL)
4229 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
4230 else if (GET_MODE (to_rtx) == BLKmode)
4231 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4232 else
4234 #ifdef POINTERS_EXTEND_UNSIGNED
4235 if (POINTER_TYPE_P (TREE_TYPE (to))
4236 && GET_MODE (to_rtx) != GET_MODE (value))
4237 value = convert_memory_address (GET_MODE (to_rtx), value);
4238 #endif
4239 emit_move_insn (to_rtx, value);
4241 preserve_temp_slots (to_rtx);
4242 free_temp_slots ();
4243 pop_temp_slots ();
4244 return want_value ? to_rtx : NULL_RTX;
4247 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4248 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4250 if (to_rtx == 0)
4251 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4253 /* Don't move directly into a return register. */
4254 if (TREE_CODE (to) == RESULT_DECL
4255 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
4257 rtx temp;
4259 push_temp_slots ();
4260 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4262 if (GET_CODE (to_rtx) == PARALLEL)
4263 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
4264 else
4265 emit_move_insn (to_rtx, temp);
4267 preserve_temp_slots (to_rtx);
4268 free_temp_slots ();
4269 pop_temp_slots ();
4270 return want_value ? to_rtx : NULL_RTX;
4273 /* In case we are returning the contents of an object which overlaps
4274 the place the value is being stored, use a safe function when copying
4275 a value through a pointer into a structure value return block. */
4276 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4277 && current_function_returns_struct
4278 && !current_function_returns_pcc_struct)
4280 rtx from_rtx, size;
4282 push_temp_slots ();
4283 size = expr_size (from);
4284 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4286 if (TARGET_MEM_FUNCTIONS)
4287 emit_library_call (memmove_libfunc, LCT_NORMAL,
4288 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4289 XEXP (from_rtx, 0), Pmode,
4290 convert_to_mode (TYPE_MODE (sizetype),
4291 size, TREE_UNSIGNED (sizetype)),
4292 TYPE_MODE (sizetype));
4293 else
4294 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4295 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4296 XEXP (to_rtx, 0), Pmode,
4297 convert_to_mode (TYPE_MODE (integer_type_node),
4298 size,
4299 TREE_UNSIGNED (integer_type_node)),
4300 TYPE_MODE (integer_type_node));
4302 preserve_temp_slots (to_rtx);
4303 free_temp_slots ();
4304 pop_temp_slots ();
4305 return want_value ? to_rtx : NULL_RTX;
4308 /* Compute FROM and store the value in the rtx we got. */
4310 push_temp_slots ();
4311 result = store_expr (from, to_rtx, want_value);
4312 preserve_temp_slots (result);
4313 free_temp_slots ();
4314 pop_temp_slots ();
4315 return want_value ? result : NULL_RTX;
4318 /* Generate code for computing expression EXP,
4319 and storing the value into TARGET.
4320 TARGET may contain a QUEUED rtx.
4322 If WANT_VALUE & 1 is nonzero, return a copy of the value
4323 not in TARGET, so that we can be sure to use the proper
4324 value in a containing expression even if TARGET has something
4325 else stored in it. If possible, we copy the value through a pseudo
4326 and return that pseudo. Or, if the value is constant, we try to
4327 return the constant. In some cases, we return a pseudo
4328 copied *from* TARGET.
4330 If the mode is BLKmode then we may return TARGET itself.
4331 It turns out that in BLKmode it doesn't cause a problem,
4332 because C has no operators that could combine two different
4333 assignments into the same BLKmode object with different values
4334 with no sequence point. Will other languages need this to
4335 be more thorough?
4337 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4338 to catch quickly any cases where the caller uses the value
4339 and fails to set WANT_VALUE.
4341 If WANT_VALUE & 2 is set, this is a store into a call param on the
4342 stack, and block moves may need to be treated specially. */
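/* As an illustration, expand_assignment above finishes with
store_expr (from, to_rtx, want_value), leaving it to this routine to
decide whether the stored value must also be returned, and in what
form. */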
4345 store_expr (exp, target, want_value)
4346 tree exp;
4347 rtx target;
4348 int want_value;
4350 rtx temp;
4351 int dont_return_target = 0;
4352 int dont_store_target = 0;
4354 if (VOID_TYPE_P (TREE_TYPE (exp)))
4356 /* C++ can generate ?: expressions with a throw expression in one
4357 branch and an rvalue in the other. Here, we resolve attempts to
4358 store the throw expression's nonexistent result. */
4359 if (want_value)
4360 abort ();
4361 expand_expr (exp, const0_rtx, VOIDmode, 0);
4362 return NULL_RTX;
4364 if (TREE_CODE (exp) == COMPOUND_EXPR)
4366 /* Perform first part of compound expression, then assign from second
4367 part. */
4368 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4369 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4370 emit_queue ();
4371 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4373 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4375 /* For a conditional expression, get a safe form of the target. Then
4376 test the condition, doing the appropriate assignment on either
4377 side. This avoids the creation of unnecessary temporaries.
4378 For non-BLKmode, it is more efficient not to do this. */
4380 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4382 emit_queue ();
4383 target = protect_from_queue (target, 1);
4385 do_pending_stack_adjust ();
4386 NO_DEFER_POP;
4387 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4388 start_cleanup_deferral ();
4389 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4390 end_cleanup_deferral ();
4391 emit_queue ();
4392 emit_jump_insn (gen_jump (lab2));
4393 emit_barrier ();
4394 emit_label (lab1);
4395 start_cleanup_deferral ();
4396 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4397 end_cleanup_deferral ();
4398 emit_queue ();
4399 emit_label (lab2);
4400 OK_DEFER_POP;
4402 return want_value & 1 ? target : NULL_RTX;
4404 else if (queued_subexp_p (target))
4405 /* If target contains a postincrement, let's not risk
4406 using it as the place to generate the rhs. */
4408 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4410 /* Expand EXP into a new pseudo. */
4411 temp = gen_reg_rtx (GET_MODE (target));
4412 temp = expand_expr (exp, temp, GET_MODE (target),
4413 (want_value & 2
4414 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4416 else
4417 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4418 (want_value & 2
4419 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4421 /* If target is volatile, ANSI requires accessing the value
4422 *from* the target, if it is accessed. So make that happen.
4423 In no case return the target itself. */
4424 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4425 dont_return_target = 1;
4427 else if ((want_value & 1) != 0
4428 && GET_CODE (target) == MEM
4429 && ! MEM_VOLATILE_P (target)
4430 && GET_MODE (target) != BLKmode)
4431 /* If target is in memory and caller wants value in a register instead,
4432 arrange that. Pass TARGET as target for expand_expr so that,
4433 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4434 We know expand_expr will not use the target in that case.
4435 Don't do this if TARGET is volatile because we are supposed
4436 to write it and then read it. */
4438 temp = expand_expr (exp, target, GET_MODE (target),
4439 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4440 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4442 /* If TEMP is already in the desired TARGET, only copy it from
4443 memory and don't store it there again. */
4444 if (temp == target
4445 || (rtx_equal_p (temp, target)
4446 && ! side_effects_p (temp) && ! side_effects_p (target)))
4447 dont_store_target = 1;
4448 temp = copy_to_reg (temp);
4450 dont_return_target = 1;
4452 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4453 /* If this is a scalar in a register that is stored in a wider mode
4454 than the declared mode, compute the result into its declared mode
4455 and then convert to the wider mode. Our value is the computed
4456 expression. */
4458 rtx inner_target = 0;
4460 /* If we don't want a value, we can do the conversion inside EXP,
4461 which will often result in some optimizations. Do the conversion
4462 in two steps: first change the signedness, if needed, then do
4463 the extension. But don't do this if the type of EXP is a subtype
4464 of something else since then the conversion might involve
4465 more than just converting modes. */
4466 if ((want_value & 1) == 0
4467 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4468 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4470 if (TREE_UNSIGNED (TREE_TYPE (exp))
4471 != SUBREG_PROMOTED_UNSIGNED_P (target))
4472 exp = convert
4473 ((*lang_hooks.types.signed_or_unsigned_type)
4474 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4476 exp = convert ((*lang_hooks.types.type_for_mode)
4477 (GET_MODE (SUBREG_REG (target)),
4478 SUBREG_PROMOTED_UNSIGNED_P (target)),
4479 exp);
4481 inner_target = SUBREG_REG (target);
4484 temp = expand_expr (exp, inner_target, VOIDmode,
4485 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4487 /* If TEMP is a MEM and we want a result value, make the access
4488 now so it gets done only once. Strictly speaking, this is
4489 only necessary if the MEM is volatile, or if the address
4490 overlaps TARGET. But not performing the load twice also
4491 reduces the amount of rtl we generate and then have to CSE. */
4492 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4493 temp = copy_to_reg (temp);
4495 /* If TEMP is a VOIDmode constant, use convert_modes to make
4496 sure that we properly convert it. */
4497 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4499 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4500 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4501 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4502 GET_MODE (target), temp,
4503 SUBREG_PROMOTED_UNSIGNED_P (target));
4506 convert_move (SUBREG_REG (target), temp,
4507 SUBREG_PROMOTED_UNSIGNED_P (target));
4509 /* If we promoted a constant, change the mode back down to match
4510 target. Otherwise, the caller might get confused by a result whose
4511 mode is larger than expected. */
4513 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4515 if (GET_MODE (temp) != VOIDmode)
4517 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4518 SUBREG_PROMOTED_VAR_P (temp) = 1;
4519 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4520 SUBREG_PROMOTED_UNSIGNED_P (target));
4522 else
4523 temp = convert_modes (GET_MODE (target),
4524 GET_MODE (SUBREG_REG (target)),
4525 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4528 return want_value & 1 ? temp : NULL_RTX;
4530 else
4532 temp = expand_expr (exp, target, GET_MODE (target),
4533 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4534 /* Return TARGET if it's a specified hardware register.
4535 If TARGET is a volatile mem ref, either return TARGET
4536 or return a reg copied *from* TARGET; ANSI requires this.
4538 Otherwise, if TEMP is not TARGET, return TEMP
4539 if it is constant (for efficiency),
4540 or if we really want the correct value. */
4541 if (!(target && GET_CODE (target) == REG
4542 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4543 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4544 && ! rtx_equal_p (temp, target)
4545 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4546 dont_return_target = 1;
4549 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4550 the same as that of TARGET, adjust the constant. This is needed, for
4551 example, in case it is a CONST_DOUBLE and we want only a word-sized
4552 value. */
4553 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4554 && TREE_CODE (exp) != ERROR_MARK
4555 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4556 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4557 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4559 /* If value was not generated in the target, store it there.
4560 Convert the value to TARGET's type first if necessary.
4561 If TEMP and TARGET compare equal according to rtx_equal_p, but
4562 one or both of them are volatile memory refs, we have to distinguish
4563 two cases:
4564 - expand_expr has used TARGET. In this case, we must not generate
4565 another copy. This can be detected by TARGET being equal according
4566 to == .
4567 - expand_expr has not used TARGET - that means that the source just
4568 happens to have the same RTX form. Since temp will have been created
4569 by expand_expr, it will compare unequal according to == .
4570 We must generate a copy in this case, to reach the correct number
4571 of volatile memory references. */
4573 if ((! rtx_equal_p (temp, target)
4574 || (temp != target && (side_effects_p (temp)
4575 || side_effects_p (target))))
4576 && TREE_CODE (exp) != ERROR_MARK
4577 && ! dont_store_target
4578 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4579 but TARGET is not a valid memory reference, TEMP will differ
4580 from TARGET although it is really the same location. */
4581 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4582 || target != DECL_RTL_IF_SET (exp))
4583 /* If there's nothing to copy, don't bother. Don't call expr_size
4584 unless necessary, because the expr_size hook of some front ends (C++)
4585 aborts on objects that are not supposed to be bit-copied or
4586 bit-initialized. */
4587 && expr_size (exp) != const0_rtx)
4589 target = protect_from_queue (target, 1);
4590 if (GET_MODE (temp) != GET_MODE (target)
4591 && GET_MODE (temp) != VOIDmode)
4593 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4594 if (dont_return_target)
4596 /* In this case, we will return TEMP,
4597 so make sure it has the proper mode.
4598 But don't forget to store the value into TARGET. */
4599 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4600 emit_move_insn (target, temp);
4602 else
4603 convert_move (target, temp, unsignedp);
4606 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4608 /* Handle copying a string constant into an array. The string
4609 constant may be shorter than the array. So copy just the string's
4610 actual length, and clear the rest. First get the size of the data
4611 type of the string, which is actually the size of the target. */
4612 rtx size = expr_size (exp);
4614 if (GET_CODE (size) == CONST_INT
4615 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4616 emit_block_move (target, temp, size,
4617 (want_value & 2
4618 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4619 else
4621 /* Compute the size of the data to copy from the string. */
4622 tree copy_size
4623 = size_binop (MIN_EXPR,
4624 make_tree (sizetype, size),
4625 size_int (TREE_STRING_LENGTH (exp)));
4626 rtx copy_size_rtx
4627 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4628 (want_value & 2
4629 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4630 rtx label = 0;
4632 /* Copy that much. */
4633 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4634 TREE_UNSIGNED (sizetype));
4635 emit_block_move (target, temp, copy_size_rtx,
4636 (want_value & 2
4637 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4639 /* Figure out how much is left in TARGET that we have to clear.
4640 Do all calculations in ptr_mode. */
4641 if (GET_CODE (copy_size_rtx) == CONST_INT)
4643 size = plus_constant (size, -INTVAL (copy_size_rtx));
4644 target = adjust_address (target, BLKmode,
4645 INTVAL (copy_size_rtx));
4647 else
4649 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4650 copy_size_rtx, NULL_RTX, 0,
4651 OPTAB_LIB_WIDEN);
4653 #ifdef POINTERS_EXTEND_UNSIGNED
4654 if (GET_MODE (copy_size_rtx) != Pmode)
4655 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4656 TREE_UNSIGNED (sizetype));
4657 #endif
4659 target = offset_address (target, copy_size_rtx,
4660 highest_pow2_factor (copy_size));
4661 label = gen_label_rtx ();
4662 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4663 GET_MODE (size), 0, label);
4666 if (size != const0_rtx)
4667 clear_storage (target, size);
4669 if (label)
4670 emit_label (label);
4673 /* Handle calls that return values in multiple non-contiguous locations.
4674 The Irix 6 ABI has examples of this. */
4675 else if (GET_CODE (target) == PARALLEL)
4676 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4677 else if (GET_MODE (temp) == BLKmode)
4678 emit_block_move (target, temp, expr_size (exp),
4679 (want_value & 2
4680 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4681 else
4682 emit_move_insn (target, temp);
4685 /* If we don't want a value, return NULL_RTX. */
4686 if ((want_value & 1) == 0)
4687 return NULL_RTX;
4689 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4690 ??? The latter test doesn't seem to make sense. */
4691 else if (dont_return_target && GET_CODE (temp) != MEM)
4692 return temp;
4694 /* Return TARGET itself if it is a hard register. */
4695 else if ((want_value & 1) != 0
4696 && GET_MODE (target) != BLKmode
4697 && ! (GET_CODE (target) == REG
4698 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4699 return copy_to_reg (target);
4701 else
4702 return target;
4705 /* Return 1 if EXP just contains zeros. */
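/* For instance, the initializers 0, 0.0 and { 0, { 0, 0.0 } } all
contain just zeros, while { 0, 1 } does not; conversions such as
NOP_EXPR are looked through. */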
4707 static int
4708 is_zeros_p (exp)
4709 tree exp;
4711 tree elt;
4713 switch (TREE_CODE (exp))
4715 case CONVERT_EXPR:
4716 case NOP_EXPR:
4717 case NON_LVALUE_EXPR:
4718 case VIEW_CONVERT_EXPR:
4719 return is_zeros_p (TREE_OPERAND (exp, 0));
4721 case INTEGER_CST:
4722 return integer_zerop (exp);
4724 case COMPLEX_CST:
4725 return
4726 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4728 case REAL_CST:
4729 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4731 case VECTOR_CST:
4732 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4733 elt = TREE_CHAIN (elt))
4734 if (!is_zeros_p (TREE_VALUE (elt)))
4735 return 0;
4737 return 1;
4739 case CONSTRUCTOR:
4740 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4741 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4742 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4743 if (! is_zeros_p (TREE_VALUE (elt)))
4744 return 0;
4746 return 1;
4748 default:
4749 return 0;
4753 /* Return 1 if EXP contains mostly (3/4) zeros. */
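/* For instance, a CONSTRUCTOR with three zero elements out of four
qualifies, since 4 * zeros >= 3 * elts holds (4 * 3 >= 3 * 4). */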
4755 static int
4756 mostly_zeros_p (exp)
4757 tree exp;
4759 if (TREE_CODE (exp) == CONSTRUCTOR)
4761 int elts = 0, zeros = 0;
4762 tree elt = CONSTRUCTOR_ELTS (exp);
4763 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4765 /* If there are no ranges of true bits, it is all zero. */
4766 return elt == NULL_TREE;
4768 for (; elt; elt = TREE_CHAIN (elt))
4770 /* We do not handle the case where the index is a RANGE_EXPR,
4771 so the statistic will be somewhat inaccurate.
4772 We do make a more accurate count in store_constructor itself,
4773 and since this function is used only for nested array elements,
4774 this should be close enough. */
4775 if (mostly_zeros_p (TREE_VALUE (elt)))
4776 zeros++;
4777 elts++;
4780 return 4 * zeros >= 3 * elts;
4783 return is_zeros_p (exp);
4786 /* Helper function for store_constructor.
4787 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4788 TYPE is the type of the CONSTRUCTOR, not the element type.
4789 CLEARED is as for store_constructor.
4790 ALIAS_SET is the alias set to use for any stores.
4792 This provides a recursive shortcut back to store_constructor when it isn't
4793 necessary to go through store_field. This is so that we can pass through
4794 the cleared field to let store_constructor know that we may not have to
4795 clear a substructure if the outer structure has already been cleared. */
4797 static void
4798 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4799 alias_set)
4800 rtx target;
4801 unsigned HOST_WIDE_INT bitsize;
4802 HOST_WIDE_INT bitpos;
4803 enum machine_mode mode;
4804 tree exp, type;
4805 int cleared;
4806 int alias_set;
4808 if (TREE_CODE (exp) == CONSTRUCTOR
4809 && bitpos % BITS_PER_UNIT == 0
4810 /* If we have a nonzero bitpos for a register target, then we just
4811 let store_field do the bitfield handling. This is unlikely to
4812 generate unnecessary clear instructions anyway. */
4813 && (bitpos == 0 || GET_CODE (target) == MEM))
4815 if (GET_CODE (target) == MEM)
4816 target
4817 = adjust_address (target,
4818 GET_MODE (target) == BLKmode
4819 || 0 != (bitpos
4820 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4821 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4824 /* Update the alias set, if required. */
4825 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4826 && MEM_ALIAS_SET (target) != 0)
4828 target = copy_rtx (target);
4829 set_mem_alias_set (target, alias_set);
4832 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4834 else
4835 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4836 alias_set);
4839 /* Store the value of constructor EXP into the rtx TARGET.
4840 TARGET is either a REG or a MEM; we know it cannot conflict, since
4841 safe_from_p has been called.
4842 CLEARED is true if TARGET is known to have been zeroed.
4843 SIZE is the number of bytes of TARGET we are allowed to modify: this
4844 may not be the same as the size of EXP if we are assigning to a field
4845 which has been packed to exclude padding bits. */
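/* For example (illustrative), given

struct { int a, b; } x = { 1 };

the missing initializer for B makes us clear the whole object first
and then store only the explicit value for A. */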
4847 static void
4848 store_constructor (exp, target, cleared, size)
4849 tree exp;
4850 rtx target;
4851 int cleared;
4852 HOST_WIDE_INT size;
4854 tree type = TREE_TYPE (exp);
4855 #ifdef WORD_REGISTER_OPERATIONS
4856 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4857 #endif
4859 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4860 || TREE_CODE (type) == QUAL_UNION_TYPE)
4862 tree elt;
4864 /* We either clear the aggregate or indicate the value is dead. */
4865 if ((TREE_CODE (type) == UNION_TYPE
4866 || TREE_CODE (type) == QUAL_UNION_TYPE)
4867 && ! cleared
4868 && ! CONSTRUCTOR_ELTS (exp))
4869 /* If the constructor is empty, clear the union. */
4871 clear_storage (target, expr_size (exp));
4872 cleared = 1;
4875 /* If we are building a static constructor into a register,
4876 set the initial value as zero so we can fold the value into
4877 a constant. But if more than one register is involved,
4878 this probably loses. */
4879 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4880 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4882 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4883 cleared = 1;
4886 /* If the constructor has fewer fields than the structure
4887 or if we are initializing the structure to mostly zeros,
4888 clear the whole structure first. Don't do this if TARGET is a
4889 register whose mode size isn't equal to SIZE since clear_storage
4890 can't handle this case. */
4891 else if (! cleared && size > 0
4892 && ((list_length (CONSTRUCTOR_ELTS (exp))
4893 != fields_length (type))
4894 || mostly_zeros_p (exp))
4895 && (GET_CODE (target) != REG
4896 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4897 == size)))
4899 clear_storage (target, GEN_INT (size));
4900 cleared = 1;
4903 if (! cleared)
4904 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4906 /* Store each element of the constructor into
4907 the corresponding field of TARGET. */
4909 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4911 tree field = TREE_PURPOSE (elt);
4912 tree value = TREE_VALUE (elt);
4913 enum machine_mode mode;
4914 HOST_WIDE_INT bitsize;
4915 HOST_WIDE_INT bitpos = 0;
4916 tree offset;
4917 rtx to_rtx = target;
4919 /* Just ignore missing fields.
4920 We cleared the whole structure, above,
4921 if any fields are missing. */
4922 if (field == 0)
4923 continue;
4925 if (cleared && is_zeros_p (value))
4926 continue;
4928 if (host_integerp (DECL_SIZE (field), 1))
4929 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4930 else
4931 bitsize = -1;
4933 mode = DECL_MODE (field);
4934 if (DECL_BIT_FIELD (field))
4935 mode = VOIDmode;
4937 offset = DECL_FIELD_OFFSET (field);
4938 if (host_integerp (offset, 0)
4939 && host_integerp (bit_position (field), 0))
4941 bitpos = int_bit_position (field);
4942 offset = 0;
4944 else
4945 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4947 if (offset)
4949 rtx offset_rtx;
4951 if (contains_placeholder_p (offset))
4952 offset = build (WITH_RECORD_EXPR, sizetype,
4953 offset, make_tree (TREE_TYPE (exp), target));
4955 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4956 if (GET_CODE (to_rtx) != MEM)
4957 abort ();
4959 #ifdef POINTERS_EXTEND_UNSIGNED
4960 if (GET_MODE (offset_rtx) != Pmode)
4961 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4962 #else
4963 if (GET_MODE (offset_rtx) != ptr_mode)
4964 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4965 #endif
4967 to_rtx = offset_address (to_rtx, offset_rtx,
4968 highest_pow2_factor (offset));
4971 if (TREE_READONLY (field))
4973 if (GET_CODE (to_rtx) == MEM)
4974 to_rtx = copy_rtx (to_rtx);
4976 RTX_UNCHANGING_P (to_rtx) = 1;
4979 #ifdef WORD_REGISTER_OPERATIONS
4980 /* If this initializes a field that is smaller than a word, at the
4981 start of a word, try to widen it to a full word.
4982 This special case allows us to output C++ member function
4983 initializations in a form that the optimizers can understand. */
4984 if (GET_CODE (target) == REG
4985 && bitsize < BITS_PER_WORD
4986 && bitpos % BITS_PER_WORD == 0
4987 && GET_MODE_CLASS (mode) == MODE_INT
4988 && TREE_CODE (value) == INTEGER_CST
4989 && exp_size >= 0
4990 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4992 tree type = TREE_TYPE (value);
4994 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4996 type = (*lang_hooks.types.type_for_size)
4997 (BITS_PER_WORD, TREE_UNSIGNED (type));
4998 value = convert (type, value);
5001 if (BYTES_BIG_ENDIAN)
5002 value
5003 = fold (build (LSHIFT_EXPR, type, value,
5004 build_int_2 (BITS_PER_WORD - bitsize, 0)));
5005 bitsize = BITS_PER_WORD;
5006 mode = word_mode;
5008 #endif
5010 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5011 && DECL_NONADDRESSABLE_P (field))
5013 to_rtx = copy_rtx (to_rtx);
5014 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5017 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5018 value, type, cleared,
5019 get_alias_set (TREE_TYPE (field)));
5022 else if (TREE_CODE (type) == ARRAY_TYPE
5023 || TREE_CODE (type) == VECTOR_TYPE)
5025 tree elt;
5026 int i;
5027 int need_to_clear;
5028 tree domain = TYPE_DOMAIN (type);
5029 tree elttype = TREE_TYPE (type);
5030 int const_bounds_p;
5031 HOST_WIDE_INT minelt = 0;
5032 HOST_WIDE_INT maxelt = 0;
5034 /* Vectors are like arrays, but the domain is stored indirectly,
5035 via an array type. */
5036 if (TREE_CODE (type) == VECTOR_TYPE)
5038 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
5039 the same field as TYPE_DOMAIN, we are not guaranteed that
5040 it always will. */
5041 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
5042 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
5045 const_bounds_p = (TYPE_MIN_VALUE (domain)
5046 && TYPE_MAX_VALUE (domain)
5047 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5048 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5050 /* If we have constant bounds for the range of the type, get them. */
5051 if (const_bounds_p)
5053 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5054 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5057 /* If the constructor has fewer elements than the array,
5058 clear the whole array first. Similarly if this is
5059 a static constructor of a non-BLKmode object. */
5060 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
5061 need_to_clear = 1;
5062 else
5064 HOST_WIDE_INT count = 0, zero_count = 0;
5065 need_to_clear = ! const_bounds_p;
5067 /* This loop is a more accurate version of the loop in
5068 mostly_zeros_p (it handles RANGE_EXPR in an index).
5069 It is also needed to check for missing elements. */
5070 for (elt = CONSTRUCTOR_ELTS (exp);
5071 elt != NULL_TREE && ! need_to_clear;
5072 elt = TREE_CHAIN (elt))
5074 tree index = TREE_PURPOSE (elt);
5075 HOST_WIDE_INT this_node_count;
5077 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5079 tree lo_index = TREE_OPERAND (index, 0);
5080 tree hi_index = TREE_OPERAND (index, 1);
5082 if (! host_integerp (lo_index, 1)
5083 || ! host_integerp (hi_index, 1))
5085 need_to_clear = 1;
5086 break;
5089 this_node_count = (tree_low_cst (hi_index, 1)
5090 - tree_low_cst (lo_index, 1) + 1);
5092 else
5093 this_node_count = 1;
5095 count += this_node_count;
5096 if (mostly_zeros_p (TREE_VALUE (elt)))
5097 zero_count += this_node_count;
5100 /* Clear the entire array first if there are any missing elements,
5101 or if the incidence of zero elements is >= 75%. */
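/* E.g. with count == 10 and zero_count == 8 we have 4 * 8 >= 3 * 10,
so the array is cleared first and only the elements that are not all
zeros are stored individually. */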
5102 if (! need_to_clear
5103 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
5104 need_to_clear = 1;
5107 if (need_to_clear && size > 0)
5109 if (! cleared)
5111 if (REG_P (target))
5112 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5113 else
5114 clear_storage (target, GEN_INT (size));
5116 cleared = 1;
5118 else if (REG_P (target))
5119 /* Inform later passes that the old value is dead. */
5120 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5122 /* Store each element of the constructor into
5123 the corresponding element of TARGET, determined
5124 by counting the elements. */
5125 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5126 elt;
5127 elt = TREE_CHAIN (elt), i++)
5129 enum machine_mode mode;
5130 HOST_WIDE_INT bitsize;
5131 HOST_WIDE_INT bitpos;
5132 int unsignedp;
5133 tree value = TREE_VALUE (elt);
5134 tree index = TREE_PURPOSE (elt);
5135 rtx xtarget = target;
5137 if (cleared && is_zeros_p (value))
5138 continue;
5140 unsignedp = TREE_UNSIGNED (elttype);
5141 mode = TYPE_MODE (elttype);
5142 if (mode == BLKmode)
5143 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5144 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5145 : -1);
5146 else
5147 bitsize = GET_MODE_BITSIZE (mode);
5149 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5151 tree lo_index = TREE_OPERAND (index, 0);
5152 tree hi_index = TREE_OPERAND (index, 1);
5153 rtx index_r, pos_rtx, loop_end;
5154 struct nesting *loop;
5155 HOST_WIDE_INT lo, hi, count;
5156 tree position;
5158 /* If the range is constant and "small", unroll the loop. */
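/* A RANGE_EXPR index typically comes from a GNU C designated range
initializer such as int a[16] = { [2 ... 5] = 7 } (illustrative).
When the bounds are constant and the data is small, each element of
the range is stored directly; otherwise a runtime loop is generated
below. */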
5159 if (const_bounds_p
5160 && host_integerp (lo_index, 0)
5161 && host_integerp (hi_index, 0)
5162 && (lo = tree_low_cst (lo_index, 0),
5163 hi = tree_low_cst (hi_index, 0),
5164 count = hi - lo + 1,
5165 (GET_CODE (target) != MEM
5166 || count <= 2
5167 || (host_integerp (TYPE_SIZE (elttype), 1)
5168 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5169 <= 40 * 8)))))
5171 lo -= minelt; hi -= minelt;
5172 for (; lo <= hi; lo++)
5174 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5176 if (GET_CODE (target) == MEM
5177 && !MEM_KEEP_ALIAS_SET_P (target)
5178 && TREE_CODE (type) == ARRAY_TYPE
5179 && TYPE_NONALIASED_COMPONENT (type))
5181 target = copy_rtx (target);
5182 MEM_KEEP_ALIAS_SET_P (target) = 1;
5185 store_constructor_field
5186 (target, bitsize, bitpos, mode, value, type, cleared,
5187 get_alias_set (elttype));
5190 else
5192 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5193 loop_end = gen_label_rtx ();
5195 unsignedp = TREE_UNSIGNED (domain);
5197 index = build_decl (VAR_DECL, NULL_TREE, domain);
5199 index_r
5200 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5201 &unsignedp, 0));
5202 SET_DECL_RTL (index, index_r);
5203 if (TREE_CODE (value) == SAVE_EXPR
5204 && SAVE_EXPR_RTL (value) == 0)
5206 /* Make sure value gets expanded once before the
5207 loop. */
5208 expand_expr (value, const0_rtx, VOIDmode, 0);
5209 emit_queue ();
5211 store_expr (lo_index, index_r, 0);
5212 loop = expand_start_loop (0);
5214 /* Assign value to element index. */
5215 position
5216 = convert (ssizetype,
5217 fold (build (MINUS_EXPR, TREE_TYPE (index),
5218 index, TYPE_MIN_VALUE (domain))));
5219 position = size_binop (MULT_EXPR, position,
5220 convert (ssizetype,
5221 TYPE_SIZE_UNIT (elttype)));
5223 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5224 xtarget = offset_address (target, pos_rtx,
5225 highest_pow2_factor (position));
5226 xtarget = adjust_address (xtarget, mode, 0);
5227 if (TREE_CODE (value) == CONSTRUCTOR)
5228 store_constructor (value, xtarget, cleared,
5229 bitsize / BITS_PER_UNIT);
5230 else
5231 store_expr (value, xtarget, 0);
5233 expand_exit_loop_if_false (loop,
5234 build (LT_EXPR, integer_type_node,
5235 index, hi_index));
5237 expand_increment (build (PREINCREMENT_EXPR,
5238 TREE_TYPE (index),
5239 index, integer_one_node), 0, 0);
5240 expand_end_loop ();
5241 emit_label (loop_end);
5244 else if ((index != 0 && ! host_integerp (index, 0))
5245 || ! host_integerp (TYPE_SIZE (elttype), 1))
5247 tree position;
5249 if (index == 0)
5250 index = ssize_int (1);
5252 if (minelt)
5253 index = convert (ssizetype,
5254 fold (build (MINUS_EXPR, index,
5255 TYPE_MIN_VALUE (domain))));
5257 position = size_binop (MULT_EXPR, index,
5258 convert (ssizetype,
5259 TYPE_SIZE_UNIT (elttype)));
5260 xtarget = offset_address (target,
5261 expand_expr (position, 0, VOIDmode, 0),
5262 highest_pow2_factor (position));
5263 xtarget = adjust_address (xtarget, mode, 0);
5264 store_expr (value, xtarget, 0);
5266 else
5268 if (index != 0)
5269 bitpos = ((tree_low_cst (index, 0) - minelt)
5270 * tree_low_cst (TYPE_SIZE (elttype), 1));
5271 else
5272 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5274 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5275 && TREE_CODE (type) == ARRAY_TYPE
5276 && TYPE_NONALIASED_COMPONENT (type))
5278 target = copy_rtx (target);
5279 MEM_KEEP_ALIAS_SET_P (target) = 1;
5282 store_constructor_field (target, bitsize, bitpos, mode, value,
5283 type, cleared, get_alias_set (elttype));
5289 /* Set constructor assignments. */
5290 else if (TREE_CODE (type) == SET_TYPE)
5292 tree elt = CONSTRUCTOR_ELTS (exp);
5293 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5294 tree domain = TYPE_DOMAIN (type);
5295 tree domain_min, domain_max, bitlength;
5297 /* The default implementation strategy is to extract the constant
5298 parts of the constructor, use that to initialize the target,
5299 and then "or" in whatever non-constant ranges we need in addition.
5301 If a large set is all zero or all ones, it is
5302 probably better to set it using memset (if available) or bzero.
5303 Also, if a large set has just a single range, it may also be
5304 better to first clear the whole set (using bzero/memset)
5305 and then set the bits we want. */
5307 /* Check for all zeros. */
5308 if (elt == NULL_TREE && size > 0)
5310 if (!cleared)
5311 clear_storage (target, GEN_INT (size));
5312 return;
5315 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5316 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5317 bitlength = size_binop (PLUS_EXPR,
5318 size_diffop (domain_max, domain_min),
5319 ssize_int (1));
5321 nbits = tree_low_cst (bitlength, 1);
5323 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5324 are "complicated" (more than one range), initialize (the
5325 constant parts) by copying from a constant. */
5326 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5327 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5329 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5330 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5331 char *bit_buffer = (char *) alloca (nbits);
5332 HOST_WIDE_INT word = 0;
5333 unsigned int bit_pos = 0;
5334 unsigned int ibit = 0;
5335 unsigned int offset = 0; /* In bytes from beginning of set. */
5337 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5338 for (;;)
5340 if (bit_buffer[ibit])
5342 if (BYTES_BIG_ENDIAN)
5343 word |= (1 << (set_word_size - 1 - bit_pos));
5344 else
5345 word |= 1 << bit_pos;
5348 bit_pos++; ibit++;
5349 if (bit_pos >= set_word_size || ibit == nbits)
5351 if (word != 0 || ! cleared)
5353 rtx datum = GEN_INT (word);
5354 rtx to_rtx;
5356 /* The assumption here is that it is safe to use
5357 XEXP if the set is multi-word, but not if
5358 it's single-word. */
5359 if (GET_CODE (target) == MEM)
5360 to_rtx = adjust_address (target, mode, offset);
5361 else if (offset == 0)
5362 to_rtx = target;
5363 else
5364 abort ();
5365 emit_move_insn (to_rtx, datum);
5368 if (ibit == nbits)
5369 break;
5370 word = 0;
5371 bit_pos = 0;
5372 offset += set_word_size / BITS_PER_UNIT;
5376 else if (!cleared)
5377 /* Don't bother clearing storage if the set is all ones. */
5378 if (TREE_CHAIN (elt) != NULL_TREE
5379 || (TREE_PURPOSE (elt) == NULL_TREE
5380 ? nbits != 1
5381 : ( ! host_integerp (TREE_VALUE (elt), 0)
5382 || ! host_integerp (TREE_PURPOSE (elt), 0)
5383 || (tree_low_cst (TREE_VALUE (elt), 0)
5384 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5385 != (HOST_WIDE_INT) nbits))))
5386 clear_storage (target, expr_size (exp));
5388 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5390 /* Start of range of element or NULL. */
5391 tree startbit = TREE_PURPOSE (elt);
5392 /* End of range of element, or element value. */
5393 tree endbit = TREE_VALUE (elt);
5394 HOST_WIDE_INT startb, endb;
5395 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5397 bitlength_rtx = expand_expr (bitlength,
5398 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5400 /* Handle non-range tuple element like [ expr ]. */
5401 if (startbit == NULL_TREE)
5403 startbit = save_expr (endbit);
5404 endbit = startbit;
5407 startbit = convert (sizetype, startbit);
5408 endbit = convert (sizetype, endbit);
5409 if (! integer_zerop (domain_min))
5411 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5412 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5414 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5415 EXPAND_CONST_ADDRESS);
5416 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5417 EXPAND_CONST_ADDRESS);
5419 if (REG_P (target))
5421 targetx
5422 = assign_temp
5423 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5424 (GET_MODE (target), 0),
5425 TYPE_QUAL_CONST)),
5426 0, 1, 1);
5427 emit_move_insn (targetx, target);
5430 else if (GET_CODE (target) == MEM)
5431 targetx = target;
5432 else
5433 abort ();
5435 /* Optimization: If startbit and endbit are constants divisible
5436 by BITS_PER_UNIT, call memset instead. */
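/* For instance, a constant range covering bits 8 through 31 spans
exactly three whole bytes, so those bytes can be set to all ones with
a single memset call instead of going through __setbits. */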
5437 if (TARGET_MEM_FUNCTIONS
5438 && TREE_CODE (startbit) == INTEGER_CST
5439 && TREE_CODE (endbit) == INTEGER_CST
5440 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5441 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5443 emit_library_call (memset_libfunc, LCT_NORMAL,
5444 VOIDmode, 3,
5445 plus_constant (XEXP (targetx, 0),
5446 startb / BITS_PER_UNIT),
5447 Pmode,
5448 constm1_rtx, TYPE_MODE (integer_type_node),
5449 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5450 TYPE_MODE (sizetype));
5452 else
5453 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5454 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5455 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5456 startbit_rtx, TYPE_MODE (sizetype),
5457 endbit_rtx, TYPE_MODE (sizetype));
5459 if (REG_P (target))
5460 emit_move_insn (target, targetx);
5464 else
5465 abort ();
5468 /* Store the value of EXP (an expression tree)
5469 into a subfield of TARGET which has mode MODE and occupies
5470 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5471 If MODE is VOIDmode, it means that we are storing into a bit-field.
5473 If VALUE_MODE is VOIDmode, return nothing in particular.
5474 UNSIGNEDP is not used in this case.
5476 Otherwise, return an rtx for the value stored. This rtx
5477 has mode VALUE_MODE if that is convenient to do.
5478 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5480 TYPE is the type of the underlying object,
5482 ALIAS_SET is the alias set for the destination. This value will
5483 (in general) be different from that for TARGET, since TARGET is a
5484 reference to the containing structure. */
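/* As an example (illustrative), storing into the bit-field F of
struct { int f : 3; } s; reaches this routine with MODE == VOIDmode,
so the value is written with store_bit_field rather than through an
ordinary memory reference. */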
5486 static rtx
5487 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5488 alias_set)
5489 rtx target;
5490 HOST_WIDE_INT bitsize;
5491 HOST_WIDE_INT bitpos;
5492 enum machine_mode mode;
5493 tree exp;
5494 enum machine_mode value_mode;
5495 int unsignedp;
5496 tree type;
5497 int alias_set;
5499 HOST_WIDE_INT width_mask = 0;
5501 if (TREE_CODE (exp) == ERROR_MARK)
5502 return const0_rtx;
5504 /* If we have nothing to store, do nothing unless the expression has
5505 side-effects. */
5506 if (bitsize == 0)
5507 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5508 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5509 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
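/* E.g. a 3-bit field gives width_mask == 7; when the caller wants the
stored value back, this mask (or a pair of shifts, for signed fields)
removes the bits above the field. */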
5511 /* If we are storing into an unaligned field of an aligned union that is
5512 in a register, we may have the mode of TARGET being an integer mode but
5513 MODE == BLKmode. In that case, get an aligned object whose size and
5514 alignment are the same as TARGET and store TARGET into it (we can avoid
5515 the store if the field being stored is the entire width of TARGET). Then
5516 call ourselves recursively to store the field into a BLKmode version of
5517 that object. Finally, load from the object into TARGET. This is not
5518 very efficient in general, but should only be slightly more expensive
5519 than the otherwise-required unaligned accesses. Perhaps this can be
5520 cleaned up later. */
5522 if (mode == BLKmode
5523 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5525 rtx object
5526 = assign_temp
5527 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5528 0, 1, 1);
5529 rtx blk_object = adjust_address (object, BLKmode, 0);
5531 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5532 emit_move_insn (object, target);
5534 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5535 alias_set);
5537 emit_move_insn (target, object);
5539 /* We want to return the BLKmode version of the data. */
5540 return blk_object;
5543 if (GET_CODE (target) == CONCAT)
5545 /* We're storing into a struct containing a single __complex. */
5547 if (bitpos != 0)
5548 abort ();
5549 return store_expr (exp, target, 0);
5552 /* If the structure is in a register or if the component
5553 is a bit field, we cannot use addressing to access it.
5554 Use bit-field techniques or SUBREG to store in it. */
5556 if (mode == VOIDmode
5557 || (mode != BLKmode && ! direct_store[(int) mode]
5558 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5559 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5560 || GET_CODE (target) == REG
5561 || GET_CODE (target) == SUBREG
5562 /* If the field isn't aligned enough to store as an ordinary memref,
5563 store it as a bit field. */
5564 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5565 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5566 || bitpos % GET_MODE_ALIGNMENT (mode)))
5567 /* If the RHS and field have constant sizes and the size of the
5568 RHS isn't the same as that of the bitfield, we must use bitfield
5569 operations. */
5570 || (bitsize >= 0
5571 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5572 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5574 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5576 /* If BITSIZE is narrower than the size of the type of EXP
5577 we will be narrowing TEMP. Normally, what's wanted are the
5578 low-order bits. However, if EXP's type is a record and this is
5579 a big-endian machine, we want the upper BITSIZE bits. */
5580 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5581 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5582 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5583 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5584 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5585 - bitsize),
5586 temp, 1);
5588 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5589 MODE. */
5590 if (mode != VOIDmode && mode != BLKmode
5591 && mode != TYPE_MODE (TREE_TYPE (exp)))
5592 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5594 /* If the modes of TARGET and TEMP are both BLKmode, both
5595 must be in memory and BITPOS must be aligned on a byte
5596 boundary. If so, we simply do a block copy. */
5597 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5599 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5600 || bitpos % BITS_PER_UNIT != 0)
5601 abort ();
5603 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5604 emit_block_move (target, temp,
5605 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5606 / BITS_PER_UNIT),
5607 BLOCK_OP_NORMAL);
5609 return value_mode == VOIDmode ? const0_rtx : target;
5612 /* Store the value in the bitfield. */
5613 store_bit_field (target, bitsize, bitpos, mode, temp,
5614 int_size_in_bytes (type));
5616 if (value_mode != VOIDmode)
5618 /* The caller wants an rtx for the value.
5619 If possible, avoid refetching from the bitfield itself. */
5620 if (width_mask != 0
5621 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5623 tree count;
5624 enum machine_mode tmode;
5626 tmode = GET_MODE (temp);
5627 if (tmode == VOIDmode)
5628 tmode = value_mode;
5630 if (unsignedp)
5631 return expand_and (tmode, temp,
5632 gen_int_mode (width_mask, tmode),
5633 NULL_RTX);
5635 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5636 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5637 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5640 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5641 NULL_RTX, value_mode, VOIDmode,
5642 int_size_in_bytes (type));
5644 return const0_rtx;
5646 else
5648 rtx addr = XEXP (target, 0);
5649 rtx to_rtx = target;
5651 /* If a value is wanted, it must be the lhs;
5652 so make the address stable for multiple use. */
5654 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5655 && ! CONSTANT_ADDRESS_P (addr)
5656 /* A frame-pointer reference is already stable. */
5657 && ! (GET_CODE (addr) == PLUS
5658 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5659 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5660 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5661 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5663 /* Now build a reference to just the desired component. */
5665 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5667 if (to_rtx == target)
5668 to_rtx = copy_rtx (to_rtx);
5670 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5671 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5672 set_mem_alias_set (to_rtx, alias_set);
5674 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5678 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5679 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5680 codes and find the ultimate containing object, which we return.
5682 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5683 bit position, and *PUNSIGNEDP to the signedness of the field.
5684 If the position of the field is variable, we store a tree
5685 giving the variable offset (in units) in *POFFSET.
5686 This offset is in addition to the bit position.
5687 If the position is not variable, we store 0 in *POFFSET.
5689 If any of the extraction expressions is volatile,
5690 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5692 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5693 is a mode that can be used to access the field. In that case, *PBITSIZE
5694 is redundant.
5696 If the field describes a variable-sized object, *PMODE is set to
5697 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5698 this case, but the address of the object can be found. */
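/* For instance (illustrative only): for a COMPONENT_REF such as `obj.f'
   where F is a 3-bit bit-field starting at bit 5 of OBJ, this returns the
   tree for OBJ and sets *PBITSIZE = 3, *PBITPOS = 5, *POFFSET = 0 and
   *PMODE = VOIDmode, since a bit-field has no directly usable mode.  */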
5700 tree
5701 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5702 punsignedp, pvolatilep)
5703 tree exp;
5704 HOST_WIDE_INT *pbitsize;
5705 HOST_WIDE_INT *pbitpos;
5706 tree *poffset;
5707 enum machine_mode *pmode;
5708 int *punsignedp;
5709 int *pvolatilep;
5711 tree size_tree = 0;
5712 enum machine_mode mode = VOIDmode;
5713 tree offset = size_zero_node;
5714 tree bit_offset = bitsize_zero_node;
5715 tree placeholder_ptr = 0;
5716 tree tem;
5718 /* First get the mode, signedness, and size. We do this from just the
5719 outermost expression. */
5720 if (TREE_CODE (exp) == COMPONENT_REF)
5722 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5723 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5724 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5726 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5728 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5730 size_tree = TREE_OPERAND (exp, 1);
5731 *punsignedp = TREE_UNSIGNED (exp);
5733 else
5735 mode = TYPE_MODE (TREE_TYPE (exp));
5736 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5738 if (mode == BLKmode)
5739 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5740 else
5741 *pbitsize = GET_MODE_BITSIZE (mode);
5744 if (size_tree != 0)
5746 if (! host_integerp (size_tree, 1))
5747 mode = BLKmode, *pbitsize = -1;
5748 else
5749 *pbitsize = tree_low_cst (size_tree, 1);
5752 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5753 and find the ultimate containing object. */
5754 while (1)
5756 if (TREE_CODE (exp) == BIT_FIELD_REF)
5757 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5758 else if (TREE_CODE (exp) == COMPONENT_REF)
5760 tree field = TREE_OPERAND (exp, 1);
5761 tree this_offset = DECL_FIELD_OFFSET (field);
5763 /* If this field hasn't been filled in yet, don't go
5764 past it. This should only happen when folding expressions
5765 made during type construction. */
5766 if (this_offset == 0)
5767 break;
5768 else if (! TREE_CONSTANT (this_offset)
5769 && contains_placeholder_p (this_offset))
5770 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5772 offset = size_binop (PLUS_EXPR, offset, this_offset);
5773 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5774 DECL_FIELD_BIT_OFFSET (field));
5776 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5779 else if (TREE_CODE (exp) == ARRAY_REF
5780 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5782 tree index = TREE_OPERAND (exp, 1);
5783 tree array = TREE_OPERAND (exp, 0);
5784 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5785 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5786 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5788 /* We assume all arrays have sizes that are a multiple of a byte.
5789 First subtract the lower bound, if any, in the type of the
5790 index, then convert to sizetype and multiply by the size of the
5791 array element. */
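/* For instance (illustrative only): for `a[i]' with 4-byte elements and a
   domain starting at 1, INDEX becomes i - 1 and OFFSET grows by
   (sizetype) (i - 1) * 4.  */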
5792 if (low_bound != 0 && ! integer_zerop (low_bound))
5793 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5794 index, low_bound));
5796 /* If the index has a self-referential type, pass it to a
5797 WITH_RECORD_EXPR; if the component size is self-referential,
5798 pass our component to one. */
5799 if (! TREE_CONSTANT (index)
5800 && contains_placeholder_p (index))
5801 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5802 if (! TREE_CONSTANT (unit_size)
5803 && contains_placeholder_p (unit_size))
5804 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5806 offset = size_binop (PLUS_EXPR, offset,
5807 size_binop (MULT_EXPR,
5808 convert (sizetype, index),
5809 unit_size));
5812 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5814 tree new = find_placeholder (exp, &placeholder_ptr);
5816 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5817 We might have been called from tree optimization where we
5818 haven't set up an object yet. */
5819 if (new == 0)
5820 break;
5821 else
5822 exp = new;
5824 continue;
5826 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5827 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5828 && ! ((TREE_CODE (exp) == NOP_EXPR
5829 || TREE_CODE (exp) == CONVERT_EXPR)
5830 && (TYPE_MODE (TREE_TYPE (exp))
5831 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5832 break;
5834 /* If any reference in the chain is volatile, the effect is volatile. */
5835 if (TREE_THIS_VOLATILE (exp))
5836 *pvolatilep = 1;
5838 exp = TREE_OPERAND (exp, 0);
5841 /* If OFFSET is constant, see if we can return the whole thing as a
5842 constant bit position. Otherwise, split it up. */
5843 if (host_integerp (offset, 0)
5844 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5845 bitsize_unit_node))
5846 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5847 && host_integerp (tem, 0))
5848 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5849 else
5850 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5852 *pmode = mode;
5853 return exp;
5856 /* Return 1 if T is an expression that get_inner_reference handles. */
5858 int
5859 handled_component_p (t)
5860 tree t;
5862 switch (TREE_CODE (t))
5864 case BIT_FIELD_REF:
5865 case COMPONENT_REF:
5866 case ARRAY_REF:
5867 case ARRAY_RANGE_REF:
5868 case NON_LVALUE_EXPR:
5869 case VIEW_CONVERT_EXPR:
5870 return 1;
5872 case NOP_EXPR:
5873 case CONVERT_EXPR:
5874 return (TYPE_MODE (TREE_TYPE (t))
5875 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5877 default:
5878 return 0;
5882 /* Given an rtx VALUE that may contain additions and multiplications, return
5883 an equivalent value that just refers to a register, memory, or constant.
5884 This is done by generating instructions to perform the arithmetic and
5885 returning a pseudo-register containing the value.
5887 The returned value may be a REG, SUBREG, MEM or constant. */
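/* A hypothetical usage sketch (illustrative only; SOME_REG stands for an
   assumed SImode pseudo register):

     rtx sum = gen_rtx_PLUS (SImode, some_reg, GEN_INT (4));
     rtx op = force_operand (sum, NULL_RTX);

   Here an add insn is emitted and OP ends up as a pseudo register holding
   SOME_REG + 4, suitable for use as an ordinary operand.  */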
5889 rtx
5890 force_operand (value, target)
5891 rtx value, target;
5893 rtx op1, op2;
5894 /* Use subtarget as the target for operand 0 of a binary operation. */
5895 rtx subtarget = get_subtarget (target);
5896 enum rtx_code code = GET_CODE (value);
5898 /* Check for a PIC address load. */
5899 if ((code == PLUS || code == MINUS)
5900 && XEXP (value, 0) == pic_offset_table_rtx
5901 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5902 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5903 || GET_CODE (XEXP (value, 1)) == CONST))
5905 if (!subtarget)
5906 subtarget = gen_reg_rtx (GET_MODE (value));
5907 emit_move_insn (subtarget, value);
5908 return subtarget;
5911 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5913 if (!target)
5914 target = gen_reg_rtx (GET_MODE (value));
5915 convert_move (target, force_operand (XEXP (value, 0), NULL),
5916 code == ZERO_EXTEND);
5917 return target;
5920 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5922 op2 = XEXP (value, 1);
5923 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5924 subtarget = 0;
5925 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5927 code = PLUS;
5928 op2 = negate_rtx (GET_MODE (value), op2);
5931 /* Check for an addition with OP2 a constant integer and our first
5932 operand a PLUS of a virtual register and something else. In that
5933 case, we want to emit the sum of the virtual register and the
5934 constant first and then add the other value. This allows virtual
5935 register instantiation to simply modify the constant rather than
5936 creating another one around this addition. */
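/* For instance (illustrative only): given
     (plus (plus virtual-stack-vars (reg 66)) (const_int 8))
   we first form virtual-stack-vars + 8, which instantiation can later fold
   into a single frame-pointer offset, and only then add (reg 66).  */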
5937 if (code == PLUS && GET_CODE (op2) == CONST_INT
5938 && GET_CODE (XEXP (value, 0)) == PLUS
5939 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5940 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5941 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5943 rtx temp = expand_simple_binop (GET_MODE (value), code,
5944 XEXP (XEXP (value, 0), 0), op2,
5945 subtarget, 0, OPTAB_LIB_WIDEN);
5946 return expand_simple_binop (GET_MODE (value), code, temp,
5947 force_operand (XEXP (XEXP (value,
5948 0), 1), 0),
5949 target, 0, OPTAB_LIB_WIDEN);
5952 op1 = force_operand (XEXP (value, 0), subtarget);
5953 op2 = force_operand (op2, NULL_RTX);
5954 switch (code)
5956 case MULT:
5957 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5958 case DIV:
5959 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5960 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5961 target, 1, OPTAB_LIB_WIDEN);
5962 else
5963 return expand_divmod (0,
5964 FLOAT_MODE_P (GET_MODE (value))
5965 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5966 GET_MODE (value), op1, op2, target, 0);
5967 break;
5968 case MOD:
5969 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5970 target, 0);
5971 break;
5972 case UDIV:
5973 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5974 target, 1);
5975 break;
5976 case UMOD:
5977 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5978 target, 1);
5979 break;
5980 case ASHIFTRT:
5981 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5982 target, 0, OPTAB_LIB_WIDEN);
5983 break;
5984 default:
5985 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5986 target, 1, OPTAB_LIB_WIDEN);
5989 if (GET_RTX_CLASS (code) == '1')
5991 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5992 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5995 #ifdef INSN_SCHEDULING
5996 /* On machines that have insn scheduling, we want all memory references to be
5997 explicit, so we need to deal with such paradoxical SUBREGs. */
5998 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5999 && (GET_MODE_SIZE (GET_MODE (value))
6000 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6001 value
6002 = simplify_gen_subreg (GET_MODE (value),
6003 force_reg (GET_MODE (SUBREG_REG (value)),
6004 force_operand (SUBREG_REG (value),
6005 NULL_RTX)),
6006 GET_MODE (SUBREG_REG (value)),
6007 SUBREG_BYTE (value));
6008 #endif
6010 return value;
6013 /* Subroutine of expand_expr: return nonzero iff there is no way that
6014 EXP can reference X, which is being modified. TOP_P is nonzero if this
6015 call is going to be used to determine whether we need a temporary
6016 for EXP, as opposed to a recursive call to this function.
6018 It is always safe for this routine to return zero since it merely
6019 searches for optimization opportunities. */
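/* For instance (illustrative only): if X is a pseudo register and EXP is a
   VAR_DECL whose DECL_RTL is a different pseudo, the two cannot overlap and
   the result is 1; if EXP is a CALL_EXPR and X is a MEM or a hard register,
   we conservatively return 0.  */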
6021 static int
6022 safe_from_p (x, exp, top_p)
6023 rtx x;
6024 tree exp;
6025 int top_p;
6027 rtx exp_rtl = 0;
6028 int i, nops;
6029 static tree save_expr_list;
6031 if (x == 0
6032 /* If EXP has varying size, we MUST use a target since we currently
6033 have no way of allocating temporaries of variable size
6034 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6035 So we assume here that something at a higher level has prevented a
6036 clash. This is somewhat bogus, but the best we can do. Only
6037 do this when X is BLKmode and when we are at the top level. */
6038 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6039 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6040 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6041 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6042 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6043 != INTEGER_CST)
6044 && GET_MODE (x) == BLKmode)
6045 /* If X is in the outgoing argument area, it is always safe. */
6046 || (GET_CODE (x) == MEM
6047 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6048 || (GET_CODE (XEXP (x, 0)) == PLUS
6049 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6050 return 1;
6052 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
6053 find the underlying pseudo. */
6054 if (GET_CODE (x) == SUBREG)
6056 x = SUBREG_REG (x);
6057 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6058 return 0;
6061 /* A SAVE_EXPR might appear many times in the expression passed to the
6062 top-level safe_from_p call, and if it has a complex subexpression,
6063 examining it multiple times could result in a combinatorial explosion.
6064 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
6065 with optimization took about 28 minutes to compile -- even though it was
6066 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
6067 and turn that off when we are done. We keep a list of the SAVE_EXPRs
6068 we have processed. Note that the only test of top_p was above. */
6070 if (top_p)
6072 int rtn;
6073 tree t;
6075 save_expr_list = 0;
6077 rtn = safe_from_p (x, exp, 0);
6079 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
6080 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
6082 return rtn;
6085 /* Now look at our tree code and possibly recurse. */
6086 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6088 case 'd':
6089 exp_rtl = DECL_RTL_IF_SET (exp);
6090 break;
6092 case 'c':
6093 return 1;
6095 case 'x':
6096 if (TREE_CODE (exp) == TREE_LIST)
6098 while (1)
6100 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6101 return 0;
6102 exp = TREE_CHAIN (exp);
6103 if (!exp)
6104 return 1;
6105 if (TREE_CODE (exp) != TREE_LIST)
6106 return safe_from_p (x, exp, 0);
6109 else if (TREE_CODE (exp) == ERROR_MARK)
6110 return 1; /* An already-visited SAVE_EXPR? */
6111 else
6112 return 0;
6114 case '2':
6115 case '<':
6116 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6117 return 0;
6118 /* FALLTHRU */
6120 case '1':
6121 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6123 case 'e':
6124 case 'r':
6125 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6126 the expression. If it is set, we conflict iff we are that rtx or
6127 both are in memory. Otherwise, we check all operands of the
6128 expression recursively. */
6130 switch (TREE_CODE (exp))
6132 case ADDR_EXPR:
6133 /* If the operand is static or we are static, we can't conflict.
6134 Likewise if we don't conflict with the operand at all. */
6135 if (staticp (TREE_OPERAND (exp, 0))
6136 || TREE_STATIC (exp)
6137 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6138 return 1;
6140 /* Otherwise, the only way this can conflict is if we are taking
6141 the address of a DECL whose address is part of X, which is
6142 very rare. */
6143 exp = TREE_OPERAND (exp, 0);
6144 if (DECL_P (exp))
6146 if (!DECL_RTL_SET_P (exp)
6147 || GET_CODE (DECL_RTL (exp)) != MEM)
6148 return 0;
6149 else
6150 exp_rtl = XEXP (DECL_RTL (exp), 0);
6152 break;
6154 case INDIRECT_REF:
6155 if (GET_CODE (x) == MEM
6156 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6157 get_alias_set (exp)))
6158 return 0;
6159 break;
6161 case CALL_EXPR:
6162 /* Assume that the call will clobber all hard registers and
6163 all of memory. */
6164 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6165 || GET_CODE (x) == MEM)
6166 return 0;
6167 break;
6169 case RTL_EXPR:
6170 /* If a sequence exists, we would have to scan every instruction
6171 in the sequence to see if it was safe. This is probably not
6172 worthwhile. */
6173 if (RTL_EXPR_SEQUENCE (exp))
6174 return 0;
6176 exp_rtl = RTL_EXPR_RTL (exp);
6177 break;
6179 case WITH_CLEANUP_EXPR:
6180 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6181 break;
6183 case CLEANUP_POINT_EXPR:
6184 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6186 case SAVE_EXPR:
6187 exp_rtl = SAVE_EXPR_RTL (exp);
6188 if (exp_rtl)
6189 break;
6191 /* If we've already scanned this, don't do it again. Otherwise,
6192 show we've scanned it and record for clearing the flag if we're
6193 going on. */
6194 if (TREE_PRIVATE (exp))
6195 return 1;
6197 TREE_PRIVATE (exp) = 1;
6198 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6200 TREE_PRIVATE (exp) = 0;
6201 return 0;
6204 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6205 return 1;
6207 case BIND_EXPR:
6208 /* The only operand we look at is operand 1. The rest aren't
6209 part of the expression. */
6210 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6212 case METHOD_CALL_EXPR:
6213 /* This takes an rtx argument, but shouldn't appear here. */
6214 abort ();
6216 default:
6217 break;
6220 /* If we have an rtx, we do not need to scan our operands. */
6221 if (exp_rtl)
6222 break;
6224 nops = first_rtl_op (TREE_CODE (exp));
6225 for (i = 0; i < nops; i++)
6226 if (TREE_OPERAND (exp, i) != 0
6227 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6228 return 0;
6230 /* If this is a language-specific tree code, it may require
6231 special handling. */
6232 if ((unsigned int) TREE_CODE (exp)
6233 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6234 && !(*lang_hooks.safe_from_p) (x, exp))
6235 return 0;
6238 /* If we have an rtl, find any enclosed object. Then see if we conflict
6239 with it. */
6240 if (exp_rtl)
6242 if (GET_CODE (exp_rtl) == SUBREG)
6244 exp_rtl = SUBREG_REG (exp_rtl);
6245 if (GET_CODE (exp_rtl) == REG
6246 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6247 return 0;
6250 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6251 are memory and they conflict. */
6252 return ! (rtx_equal_p (x, exp_rtl)
6253 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6254 && true_dependence (exp_rtl, VOIDmode, x,
6255 rtx_addr_varies_p)));
6258 /* If we reach here, it is safe. */
6259 return 1;
6262 /* Subroutine of expand_expr: return rtx if EXP is a
6263 variable or parameter; else return 0. */
6265 static rtx
6266 var_rtx (exp)
6267 tree exp;
6269 STRIP_NOPS (exp);
6270 switch (TREE_CODE (exp))
6272 case PARM_DECL:
6273 case VAR_DECL:
6274 return DECL_RTL (exp);
6275 default:
6276 return 0;
6280 #ifdef MAX_INTEGER_COMPUTATION_MODE
6282 void
6283 check_max_integer_computation_mode (exp)
6284 tree exp;
6286 enum tree_code code;
6287 enum machine_mode mode;
6289 /* Strip any NOPs that don't change the mode. */
6290 STRIP_NOPS (exp);
6291 code = TREE_CODE (exp);
6293 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6294 if (code == NOP_EXPR
6295 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6296 return;
6298 /* First check the type of the overall operation. We need only look at
6299 unary, binary and relational operations. */
6300 if (TREE_CODE_CLASS (code) == '1'
6301 || TREE_CODE_CLASS (code) == '2'
6302 || TREE_CODE_CLASS (code) == '<')
6304 mode = TYPE_MODE (TREE_TYPE (exp));
6305 if (GET_MODE_CLASS (mode) == MODE_INT
6306 && mode > MAX_INTEGER_COMPUTATION_MODE)
6307 internal_error ("unsupported wide integer operation");
6310 /* Check operand of a unary op. */
6311 if (TREE_CODE_CLASS (code) == '1')
6313 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6314 if (GET_MODE_CLASS (mode) == MODE_INT
6315 && mode > MAX_INTEGER_COMPUTATION_MODE)
6316 internal_error ("unsupported wide integer operation");
6319 /* Check operands of a binary/comparison op. */
6320 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6322 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6323 if (GET_MODE_CLASS (mode) == MODE_INT
6324 && mode > MAX_INTEGER_COMPUTATION_MODE)
6325 internal_error ("unsupported wide integer operation");
6327 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6328 if (GET_MODE_CLASS (mode) == MODE_INT
6329 && mode > MAX_INTEGER_COMPUTATION_MODE)
6330 internal_error ("unsupported wide integer operation");
6333 #endif
6335 /* Return the highest power of two that EXP is known to be a multiple of.
6336 This is used in updating alignment of MEMs in array references. */
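/* For instance (illustrative only): for the tree `i * 12 + 8' this returns
   4, the MIN of 4 (1 * 4 from the MULT_EXPR) and 8 (from the constant), so
   a MEM addressed by that expression may be assumed 4-byte aligned.  */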
6338 static HOST_WIDE_INT
6339 highest_pow2_factor (exp)
6340 tree exp;
6342 HOST_WIDE_INT c0, c1;
6344 switch (TREE_CODE (exp))
6346 case INTEGER_CST:
6347 /* We can find the lowest bit that's a one. If the low
6348 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6349 We need to handle this case since we can find it in a COND_EXPR,
6350 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6351 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6352 later ICE. */
6353 if (TREE_CONSTANT_OVERFLOW (exp))
6354 return BIGGEST_ALIGNMENT;
6355 else
6357 /* Note: tree_low_cst is intentionally not used here,
6358 we don't care about the upper bits. */
6359 c0 = TREE_INT_CST_LOW (exp);
6360 c0 &= -c0;
6361 return c0 ? c0 : BIGGEST_ALIGNMENT;
6363 break;
6365 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6366 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6367 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6368 return MIN (c0, c1);
6370 case MULT_EXPR:
6371 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6372 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6373 return c0 * c1;
6375 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6376 case CEIL_DIV_EXPR:
6377 if (integer_pow2p (TREE_OPERAND (exp, 1))
6378 && host_integerp (TREE_OPERAND (exp, 1), 1))
6380 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6381 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6382 return MAX (1, c0 / c1);
6384 break;
6386 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6387 case SAVE_EXPR: case WITH_RECORD_EXPR:
6388 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6390 case COMPOUND_EXPR:
6391 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6393 case COND_EXPR:
6394 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6395 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6396 return MIN (c0, c1);
6398 default:
6399 break;
6402 return 1;
6405 /* Similar, except that it is known that the expression must be a multiple
6406 of the alignment of TYPE. */
6408 static HOST_WIDE_INT
6409 highest_pow2_factor_for_type (type, exp)
6410 tree type;
6411 tree exp;
6413 HOST_WIDE_INT type_align, factor;
6415 factor = highest_pow2_factor (exp);
6416 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6417 return MAX (factor, type_align);
6420 /* Return an object on the placeholder list that matches EXP, a
6421 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6422 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6423 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6424 is a location which initially points to a starting location in the
6425 placeholder list (zero means start of the list) and where a pointer into
6426 the placeholder list at which the object is found is placed. */
6428 tree
6429 find_placeholder (exp, plist)
6430 tree exp;
6431 tree *plist;
6433 tree type = TREE_TYPE (exp);
6434 tree placeholder_expr;
6436 for (placeholder_expr
6437 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6438 placeholder_expr != 0;
6439 placeholder_expr = TREE_CHAIN (placeholder_expr))
6441 tree need_type = TYPE_MAIN_VARIANT (type);
6442 tree elt;
6444 /* Find the outermost reference that is of the type we want. If none,
6445 see if any object has a type that is a pointer to the type we
6446 want. */
6447 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6448 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6449 || TREE_CODE (elt) == COND_EXPR)
6450 ? TREE_OPERAND (elt, 1)
6451 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6452 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6453 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6454 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6455 ? TREE_OPERAND (elt, 0) : 0))
6456 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6458 if (plist)
6459 *plist = placeholder_expr;
6460 return elt;
6463 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6464 elt
6465 = ((TREE_CODE (elt) == COMPOUND_EXPR
6466 || TREE_CODE (elt) == COND_EXPR)
6467 ? TREE_OPERAND (elt, 1)
6468 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6469 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6470 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6471 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6472 ? TREE_OPERAND (elt, 0) : 0))
6473 if (POINTER_TYPE_P (TREE_TYPE (elt))
6474 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6475 == need_type))
6477 if (plist)
6478 *plist = placeholder_expr;
6479 return build1 (INDIRECT_REF, need_type, elt);
6483 return 0;
6486 /* expand_expr: generate code for computing expression EXP.
6487 An rtx for the computed value is returned. The value is never null.
6488 In the case of a void EXP, const0_rtx is returned.
6490 The value may be stored in TARGET if TARGET is nonzero.
6491 TARGET is just a suggestion; callers must assume that
6492 the rtx returned may not be the same as TARGET.
6494 If TARGET is CONST0_RTX, it means that the value will be ignored.
6496 If TMODE is not VOIDmode, it suggests generating the
6497 result in mode TMODE. But this is done only when convenient.
6498 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6499 TMODE is just a suggestion; callers must assume that
6500 the rtx returned may not have mode TMODE.
6502 Note that TARGET may have neither TMODE nor MODE. In that case, it
6503 probably will not be used.
6505 If MODIFIER is EXPAND_SUM then when EXP is an addition
6506 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6507 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6508 products as above, or REG or MEM, or constant.
6509 Ordinarily in such cases we would output mul or add instructions
6510 and then return a pseudo reg containing the sum.
6512 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6513 it also marks a label as absolutely required (it can't be dead).
6514 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6515 This is used for outputting expressions used in initializers.
6517 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6518 with a constant address even if that address is not normally legitimate.
6519 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6521 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6522 a call parameter. Such targets require special care as we haven't yet
6523 marked TARGET so that it's safe from being trashed by libcalls. We
6524 don't want to use TARGET for anything but the final result;
6525 intermediate values must go elsewhere. Additionally, calls to
6526 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
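/* A hypothetical call (illustrative only):

     rtx val = expand_expr (exp, NULL_RTX, VOIDmode, 0);

   asks for the value of EXP in its natural mode with no suggested target;
   the result may be a REG, a MEM, or a constant, and need not be new.  */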
6528 rtx
6529 expand_expr (exp, target, tmode, modifier)
6530 tree exp;
6531 rtx target;
6532 enum machine_mode tmode;
6533 enum expand_modifier modifier;
6535 rtx op0, op1, temp;
6536 tree type = TREE_TYPE (exp);
6537 int unsignedp = TREE_UNSIGNED (type);
6538 enum machine_mode mode;
6539 enum tree_code code = TREE_CODE (exp);
6540 optab this_optab;
6541 rtx subtarget, original_target;
6542 int ignore;
6543 tree context;
6545 /* Handle ERROR_MARK before anybody tries to access its type. */
6546 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6548 op0 = CONST0_RTX (tmode);
6549 if (op0 != 0)
6550 return op0;
6551 return const0_rtx;
6554 mode = TYPE_MODE (type);
6555 /* Use subtarget as the target for operand 0 of a binary operation. */
6556 subtarget = get_subtarget (target);
6557 original_target = target;
6558 ignore = (target == const0_rtx
6559 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6560 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6561 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6562 && TREE_CODE (type) == VOID_TYPE));
6564 /* If we are going to ignore this result, we need only do something
6565 if there is a side-effect somewhere in the expression. If there
6566 is, short-circuit the most common cases here. Note that we must
6567 not call expand_expr with anything but const0_rtx in case this
6568 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6570 if (ignore)
6572 if (! TREE_SIDE_EFFECTS (exp))
6573 return const0_rtx;
6575 /* Ensure we reference a volatile object even if value is ignored, but
6576 don't do this if all we are doing is taking its address. */
6577 if (TREE_THIS_VOLATILE (exp)
6578 && TREE_CODE (exp) != FUNCTION_DECL
6579 && mode != VOIDmode && mode != BLKmode
6580 && modifier != EXPAND_CONST_ADDRESS)
6582 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6583 if (GET_CODE (temp) == MEM)
6584 temp = copy_to_reg (temp);
6585 return const0_rtx;
6588 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6589 || code == INDIRECT_REF || code == BUFFER_REF)
6590 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6591 modifier);
6593 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6594 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6596 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6597 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6598 return const0_rtx;
6600 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6601 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6602 /* If the second operand has no side effects, just evaluate
6603 the first. */
6604 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6605 modifier);
6606 else if (code == BIT_FIELD_REF)
6608 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6609 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6610 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6611 return const0_rtx;
6614 target = 0;
6617 #ifdef MAX_INTEGER_COMPUTATION_MODE
6618 /* Only check stuff here if the mode we want is different from the mode
6619 of the expression; if it's the same, check_max_integer_computation_mode
6620 will handle it. Do we really need to check this stuff at all? */
6622 if (target
6623 && GET_MODE (target) != mode
6624 && TREE_CODE (exp) != INTEGER_CST
6625 && TREE_CODE (exp) != PARM_DECL
6626 && TREE_CODE (exp) != ARRAY_REF
6627 && TREE_CODE (exp) != ARRAY_RANGE_REF
6628 && TREE_CODE (exp) != COMPONENT_REF
6629 && TREE_CODE (exp) != BIT_FIELD_REF
6630 && TREE_CODE (exp) != INDIRECT_REF
6631 && TREE_CODE (exp) != CALL_EXPR
6632 && TREE_CODE (exp) != VAR_DECL
6633 && TREE_CODE (exp) != RTL_EXPR)
6635 enum machine_mode mode = GET_MODE (target);
6637 if (GET_MODE_CLASS (mode) == MODE_INT
6638 && mode > MAX_INTEGER_COMPUTATION_MODE)
6639 internal_error ("unsupported wide integer operation");
6642 if (tmode != mode
6643 && TREE_CODE (exp) != INTEGER_CST
6644 && TREE_CODE (exp) != PARM_DECL
6645 && TREE_CODE (exp) != ARRAY_REF
6646 && TREE_CODE (exp) != ARRAY_RANGE_REF
6647 && TREE_CODE (exp) != COMPONENT_REF
6648 && TREE_CODE (exp) != BIT_FIELD_REF
6649 && TREE_CODE (exp) != INDIRECT_REF
6650 && TREE_CODE (exp) != VAR_DECL
6651 && TREE_CODE (exp) != CALL_EXPR
6652 && TREE_CODE (exp) != RTL_EXPR
6653 && GET_MODE_CLASS (tmode) == MODE_INT
6654 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6655 internal_error ("unsupported wide integer operation");
6657 check_max_integer_computation_mode (exp);
6658 #endif
6660 /* If we will do cse, generate all results into pseudo registers
6661 since 1) that allows cse to find more things
6662 and 2) otherwise cse could produce an insn the machine
6663 cannot support. An exception is a CONSTRUCTOR into a multi-word
6664 MEM: that's much more likely to be most efficient into the MEM.
6665 Another is a CALL_EXPR which must return in memory. */
6667 if (! cse_not_expected && mode != BLKmode && target
6668 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6669 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6670 && ! (code == CALL_EXPR && aggregate_value_p (exp)))
6671 target = 0;
6673 switch (code)
6675 case LABEL_DECL:
6677 tree function = decl_function_context (exp);
6678 /* Handle using a label in a containing function. */
6679 if (function != current_function_decl
6680 && function != inline_function_decl && function != 0)
6682 struct function *p = find_function_data (function);
6683 p->expr->x_forced_labels
6684 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6685 p->expr->x_forced_labels);
6687 else
6689 if (modifier == EXPAND_INITIALIZER)
6690 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6691 label_rtx (exp),
6692 forced_labels);
6695 temp = gen_rtx_MEM (FUNCTION_MODE,
6696 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6697 if (function != current_function_decl
6698 && function != inline_function_decl && function != 0)
6699 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6700 return temp;
6703 case PARM_DECL:
6704 if (!DECL_RTL_SET_P (exp))
6706 error_with_decl (exp, "prior parameter's size depends on `%s'");
6707 return CONST0_RTX (mode);
6710 /* ... fall through ... */
6712 case VAR_DECL:
6713 /* If a static var's type was incomplete when the decl was written,
6714 but the type is complete now, lay out the decl now. */
6715 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6716 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6718 rtx value = DECL_RTL_IF_SET (exp);
6720 layout_decl (exp, 0);
6722 /* If the RTL was already set, update its mode and memory
6723 attributes. */
6724 if (value != 0)
6726 PUT_MODE (value, DECL_MODE (exp));
6727 SET_DECL_RTL (exp, 0);
6728 set_mem_attributes (value, exp, 1);
6729 SET_DECL_RTL (exp, value);
6733 /* ... fall through ... */
6735 case FUNCTION_DECL:
6736 case RESULT_DECL:
6737 if (DECL_RTL (exp) == 0)
6738 abort ();
6740 /* Ensure the variable is marked as used even if it doesn't go through
6741 a parser. If it hasn't been used yet, write out an external
6742 definition. */
6743 if (! TREE_USED (exp))
6745 assemble_external (exp);
6746 TREE_USED (exp) = 1;
6749 /* Show we haven't gotten RTL for this yet. */
6750 temp = 0;
6752 /* Handle variables inherited from containing functions. */
6753 context = decl_function_context (exp);
6755 /* We treat inline_function_decl as an alias for the current function
6756 because that is the inline function whose vars, types, etc.
6757 are being merged into the current function.
6758 See expand_inline_function. */
6760 if (context != 0 && context != current_function_decl
6761 && context != inline_function_decl
6762 /* If var is static, we don't need a static chain to access it. */
6763 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6764 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6766 rtx addr;
6768 /* Mark as non-local and addressable. */
6769 DECL_NONLOCAL (exp) = 1;
6770 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6771 abort ();
6772 (*lang_hooks.mark_addressable) (exp);
6773 if (GET_CODE (DECL_RTL (exp)) != MEM)
6774 abort ();
6775 addr = XEXP (DECL_RTL (exp), 0);
6776 if (GET_CODE (addr) == MEM)
6777 addr
6778 = replace_equiv_address (addr,
6779 fix_lexical_addr (XEXP (addr, 0), exp));
6780 else
6781 addr = fix_lexical_addr (addr, exp);
6783 temp = replace_equiv_address (DECL_RTL (exp), addr);
6786 /* This is the case of an array whose size is to be determined
6787 from its initializer, while the initializer is still being parsed.
6788 See expand_decl. */
6790 else if (GET_CODE (DECL_RTL (exp)) == MEM
6791 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6792 temp = validize_mem (DECL_RTL (exp));
6794 /* If DECL_RTL is memory, we are in the normal case and either
6795 the address is not valid or it is not a register and -fforce-addr
6796 is specified, get the address into a register. */
6798 else if (GET_CODE (DECL_RTL (exp)) == MEM
6799 && modifier != EXPAND_CONST_ADDRESS
6800 && modifier != EXPAND_SUM
6801 && modifier != EXPAND_INITIALIZER
6802 && (! memory_address_p (DECL_MODE (exp),
6803 XEXP (DECL_RTL (exp), 0))
6804 || (flag_force_addr
6805 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6806 temp = replace_equiv_address (DECL_RTL (exp),
6807 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6809 /* If we got something, return it. But first, set the alignment
6810 if the address is a register. */
6811 if (temp != 0)
6813 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6814 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6816 return temp;
6819 /* If the mode of DECL_RTL does not match that of the decl, it
6820 must be a promoted value. We return a SUBREG of the wanted mode,
6821 but mark it so that we know that it was already extended. */
6823 if (GET_CODE (DECL_RTL (exp)) == REG
6824 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6826 /* Get the signedness used for this variable. Ensure we get the
6827 same mode we got when the variable was declared. */
6828 if (GET_MODE (DECL_RTL (exp))
6829 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6830 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6831 abort ();
6833 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6834 SUBREG_PROMOTED_VAR_P (temp) = 1;
6835 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6836 return temp;
6839 return DECL_RTL (exp);
6841 case INTEGER_CST:
6842 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6843 TREE_INT_CST_HIGH (exp), mode);
6845 /* ??? If overflow is set, fold will have done an incomplete job,
6846 which can result in (plus xx (const_int 0)), which can get
6847 simplified by validate_replace_rtx during virtual register
6848 instantiation, which can result in unrecognizable insns.
6849 Avoid this by forcing all overflows into registers. */
6850 if (TREE_CONSTANT_OVERFLOW (exp)
6851 && modifier != EXPAND_INITIALIZER)
6852 temp = force_reg (mode, temp);
6854 return temp;
6856 case VECTOR_CST:
6857 return const_vector_from_tree (exp);
6859 case CONST_DECL:
6860 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6862 case REAL_CST:
6863 /* If optimized, generate immediate CONST_DOUBLE
6864 which will be turned into memory by reload if necessary.
6866 We used to force a register so that loop.c could see it. But
6867 this does not allow gen_* patterns to perform optimizations with
6868 the constants. It also produces two insns in cases like "x = 1.0;".
6869 On most machines, floating-point constants are not permitted in
6870 many insns, so we'd end up copying it to a register in any case.
6872 Now, we do the copying in expand_binop, if appropriate. */
6873 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6874 TYPE_MODE (TREE_TYPE (exp)));
6876 case COMPLEX_CST:
6877 case STRING_CST:
6878 if (! TREE_CST_RTL (exp))
6879 output_constant_def (exp, 1);
6881 /* TREE_CST_RTL probably contains a constant address.
6882 On RISC machines where a constant address isn't valid,
6883 make some insns to get that address into a register. */
6884 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6885 && modifier != EXPAND_CONST_ADDRESS
6886 && modifier != EXPAND_INITIALIZER
6887 && modifier != EXPAND_SUM
6888 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6889 || (flag_force_addr
6890 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6891 return replace_equiv_address (TREE_CST_RTL (exp),
6892 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6893 return TREE_CST_RTL (exp);
6895 case EXPR_WITH_FILE_LOCATION:
6897 rtx to_return;
6898 const char *saved_input_filename = input_filename;
6899 int saved_lineno = lineno;
6900 input_filename = EXPR_WFL_FILENAME (exp);
6901 lineno = EXPR_WFL_LINENO (exp);
6902 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6903 emit_line_note (input_filename, lineno);
6904 /* Possibly avoid switching back and forth here. */
6905 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6906 input_filename = saved_input_filename;
6907 lineno = saved_lineno;
6908 return to_return;
6911 case SAVE_EXPR:
6912 context = decl_function_context (exp);
6914 /* If this SAVE_EXPR was at global context, assume we are an
6915 initialization function and move it into our context. */
6916 if (context == 0)
6917 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6919 /* We treat inline_function_decl as an alias for the current function
6920 because that is the inline function whose vars, types, etc.
6921 are being merged into the current function.
6922 See expand_inline_function. */
6923 if (context == current_function_decl || context == inline_function_decl)
6924 context = 0;
6926 /* If this is non-local, handle it. */
6927 if (context)
6929 /* The following call just exists to abort if the context is
6930 not of a containing function. */
6931 find_function_data (context);
6933 temp = SAVE_EXPR_RTL (exp);
6934 if (temp && GET_CODE (temp) == REG)
6936 put_var_into_stack (exp, /*rescan=*/true);
6937 temp = SAVE_EXPR_RTL (exp);
6939 if (temp == 0 || GET_CODE (temp) != MEM)
6940 abort ();
6941 return
6942 replace_equiv_address (temp,
6943 fix_lexical_addr (XEXP (temp, 0), exp));
6945 if (SAVE_EXPR_RTL (exp) == 0)
6947 if (mode == VOIDmode)
6948 temp = const0_rtx;
6949 else
6950 temp = assign_temp (build_qualified_type (type,
6951 (TYPE_QUALS (type)
6952 | TYPE_QUAL_CONST)),
6953 3, 0, 0);
6955 SAVE_EXPR_RTL (exp) = temp;
6956 if (!optimize && GET_CODE (temp) == REG)
6957 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6958 save_expr_regs);
6960 /* If the mode of TEMP does not match that of the expression, it
6961 must be a promoted value. We pass store_expr a SUBREG of the
6962 wanted mode but mark it so that we know that it was already
6963 extended. */
6965 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6967 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6968 promote_mode (type, mode, &unsignedp, 0);
6969 SUBREG_PROMOTED_VAR_P (temp) = 1;
6970 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6973 if (temp == const0_rtx)
6974 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6975 else
6976 store_expr (TREE_OPERAND (exp, 0), temp,
6977 modifier == EXPAND_STACK_PARM ? 2 : 0);
6979 TREE_USED (exp) = 1;
6982 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6983 must be a promoted value. We return a SUBREG of the wanted mode,
6984 but mark it so that we know that it was already extended. */
6986 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6987 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6989 /* Compute the signedness and make the proper SUBREG. */
6990 promote_mode (type, mode, &unsignedp, 0);
6991 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6992 SUBREG_PROMOTED_VAR_P (temp) = 1;
6993 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6994 return temp;
6997 return SAVE_EXPR_RTL (exp);
6999 case UNSAVE_EXPR:
7001 rtx temp;
7002 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7003 TREE_OPERAND (exp, 0)
7004 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
7005 return temp;
7008 case PLACEHOLDER_EXPR:
7010 tree old_list = placeholder_list;
7011 tree placeholder_expr = 0;
7013 exp = find_placeholder (exp, &placeholder_expr);
7014 if (exp == 0)
7015 abort ();
7017 placeholder_list = TREE_CHAIN (placeholder_expr);
7018 temp = expand_expr (exp, original_target, tmode, modifier);
7019 placeholder_list = old_list;
7020 return temp;
7023 case WITH_RECORD_EXPR:
7024 /* Put the object on the placeholder list, expand our first operand,
7025 and pop the list. */
7026 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
7027 placeholder_list);
7028 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
7029 modifier);
7030 placeholder_list = TREE_CHAIN (placeholder_list);
7031 return target;
7033 case GOTO_EXPR:
7034 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7035 expand_goto (TREE_OPERAND (exp, 0));
7036 else
7037 expand_computed_goto (TREE_OPERAND (exp, 0));
7038 return const0_rtx;
7040 case EXIT_EXPR:
7041 expand_exit_loop_if_false (NULL,
7042 invert_truthvalue (TREE_OPERAND (exp, 0)));
7043 return const0_rtx;
7045 case LABELED_BLOCK_EXPR:
7046 if (LABELED_BLOCK_BODY (exp))
7047 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
7048 /* Should perhaps use expand_label, but this is simpler and safer. */
7049 do_pending_stack_adjust ();
7050 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
7051 return const0_rtx;
7053 case EXIT_BLOCK_EXPR:
7054 if (EXIT_BLOCK_RETURN (exp))
7055 sorry ("returned value in block_exit_expr");
7056 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
7057 return const0_rtx;
7059 case LOOP_EXPR:
7060 push_temp_slots ();
7061 expand_start_loop (1);
7062 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
7063 expand_end_loop ();
7064 pop_temp_slots ();
7066 return const0_rtx;
7068 case BIND_EXPR:
7070 tree vars = TREE_OPERAND (exp, 0);
7072 /* Need to open a binding contour here because
7073 if there are any cleanups they must be contained here. */
7074 expand_start_bindings (2);
7076 /* Mark the corresponding BLOCK for output in its proper place. */
7077 if (TREE_OPERAND (exp, 2) != 0
7078 && ! TREE_USED (TREE_OPERAND (exp, 2)))
7079 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
7081 /* If VARS have not yet been expanded, expand them now. */
7082 while (vars)
7084 if (!DECL_RTL_SET_P (vars))
7085 expand_decl (vars);
7086 expand_decl_init (vars);
7087 vars = TREE_CHAIN (vars);
7090 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
7092 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7094 return temp;
7097 case RTL_EXPR:
7098 if (RTL_EXPR_SEQUENCE (exp))
7100 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
7101 abort ();
7102 emit_insn (RTL_EXPR_SEQUENCE (exp));
7103 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
7105 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
7106 free_temps_for_rtl_expr (exp);
7107 return RTL_EXPR_RTL (exp);
7109 case CONSTRUCTOR:
7110 /* If we don't need the result, just ensure we evaluate any
7111 subexpressions. */
7112 if (ignore)
7114 tree elt;
7116 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
7117 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
7119 return const0_rtx;
7122 /* All elts simple constants => refer to a constant in memory. But
7123 if this is a non-BLKmode mode, let it store a field at a time
7124 since that should make a CONST_INT or CONST_DOUBLE when we
7125 fold. Likewise, if we have a target we can use, it is best to
7126 store directly into the target unless the type is large enough
7127 that memcpy will be used. If we are making an initializer and
7128 all operands are constant, put it in memory as well.
7130 FIXME: Avoid trying to fill vector constructors piece-meal.
7131 Output them with output_constant_def below unless we're sure
7132 they're zeros. This should go away when vector initializers
7133 are treated like VECTOR_CST instead of arrays.
7134 */
7135 else if ((TREE_STATIC (exp)
7136 && ((mode == BLKmode
7137 && ! (target != 0 && safe_from_p (target, exp, 1)))
7138 || TREE_ADDRESSABLE (exp)
7139 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7140 && (! MOVE_BY_PIECES_P
7141 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7142 TYPE_ALIGN (type)))
7143 && ((TREE_CODE (type) == VECTOR_TYPE
7144 && !is_zeros_p (exp))
7145 || ! mostly_zeros_p (exp)))))
7146 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
7148 rtx constructor = output_constant_def (exp, 1);
7150 if (modifier != EXPAND_CONST_ADDRESS
7151 && modifier != EXPAND_INITIALIZER
7152 && modifier != EXPAND_SUM)
7153 constructor = validize_mem (constructor);
7155 return constructor;
7157 else
7159 /* Handle calls that pass values in multiple non-contiguous
7160 locations. The Irix 6 ABI has examples of this. */
7161 if (target == 0 || ! safe_from_p (target, exp, 1)
7162 || GET_CODE (target) == PARALLEL
7163 || modifier == EXPAND_STACK_PARM)
7164 target
7165 = assign_temp (build_qualified_type (type,
7166 (TYPE_QUALS (type)
7167 | (TREE_READONLY (exp)
7168 * TYPE_QUAL_CONST))),
7169 0, TREE_ADDRESSABLE (exp), 1);
7171 store_constructor (exp, target, 0, int_expr_size (exp));
7172 return target;
7175 case INDIRECT_REF:
7177 tree exp1 = TREE_OPERAND (exp, 0);
7178 tree index;
7179 tree string = string_constant (exp1, &index);
7181 /* Try to optimize reads from const strings. */
7182 if (string
7183 && TREE_CODE (string) == STRING_CST
7184 && TREE_CODE (index) == INTEGER_CST
7185 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7186 && GET_MODE_CLASS (mode) == MODE_INT
7187 && GET_MODE_SIZE (mode) == 1
7188 && modifier != EXPAND_WRITE)
7189 return gen_int_mode (TREE_STRING_POINTER (string)
7190 [TREE_INT_CST_LOW (index)], mode);
7192 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7193 op0 = memory_address (mode, op0);
7194 temp = gen_rtx_MEM (mode, op0);
7195 set_mem_attributes (temp, exp, 0);
7197 /* If we are writing to this object and its type is a record with
7198 readonly fields, we must mark it as readonly so it will
7199 conflict with readonly references to those fields. */
7200 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7201 RTX_UNCHANGING_P (temp) = 1;
7203 return temp;
7206 case ARRAY_REF:
7207 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7208 abort ();
7211 tree array = TREE_OPERAND (exp, 0);
7212 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7213 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7214 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7215 HOST_WIDE_INT i;
7217 /* Optimize the special-case of a zero lower bound.
7219 We convert the low_bound to sizetype to avoid some problems
7220 with constant folding. (E.g. suppose the lower bound is 1,
7221 and its mode is QI. Without the conversion, (ARRAY
7222 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7223 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7225 if (! integer_zerop (low_bound))
7226 index = size_diffop (index, convert (sizetype, low_bound));
7228 /* Fold an expression like: "foo"[2].
7229 This is not done in fold so it won't happen inside &.
7230 Don't fold if this is for wide characters since it's too
7231 difficult to do correctly and this is a very rare case. */
7233 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7234 && TREE_CODE (array) == STRING_CST
7235 && TREE_CODE (index) == INTEGER_CST
7236 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7237 && GET_MODE_CLASS (mode) == MODE_INT
7238 && GET_MODE_SIZE (mode) == 1)
7239 return gen_int_mode (TREE_STRING_POINTER (array)
7240 [TREE_INT_CST_LOW (index)], mode);
7242 /* If this is a constant index into a constant array,
7243 just get the value from the array. Handle both the cases when
7244 we have an explicit constructor and when our operand is a variable
7245 that was declared const. */
7247 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7248 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
7249 && TREE_CODE (index) == INTEGER_CST
7250 && 0 > compare_tree_int (index,
7251 list_length (CONSTRUCTOR_ELTS
7252 (TREE_OPERAND (exp, 0)))))
7254 tree elem;
7256 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7257 i = TREE_INT_CST_LOW (index);
7258 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7261 if (elem)
7262 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7263 modifier);
7266 else if (optimize >= 1
7267 && modifier != EXPAND_CONST_ADDRESS
7268 && modifier != EXPAND_INITIALIZER
7269 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7270 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7271 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7273 if (TREE_CODE (index) == INTEGER_CST)
7275 tree init = DECL_INITIAL (array);
7277 if (TREE_CODE (init) == CONSTRUCTOR)
7279 tree elem;
7281 for (elem = CONSTRUCTOR_ELTS (init);
7282 (elem
7283 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7284 elem = TREE_CHAIN (elem))
7287 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7288 return expand_expr (fold (TREE_VALUE (elem)), target,
7289 tmode, modifier);
7291 else if (TREE_CODE (init) == STRING_CST
7292 && 0 > compare_tree_int (index,
7293 TREE_STRING_LENGTH (init)))
7295 tree type = TREE_TYPE (TREE_TYPE (init));
7296 enum machine_mode mode = TYPE_MODE (type);
7298 if (GET_MODE_CLASS (mode) == MODE_INT
7299 && GET_MODE_SIZE (mode) == 1)
7300 return gen_int_mode (TREE_STRING_POINTER (init)
7301 [TREE_INT_CST_LOW (index)], mode);
7306 /* Fall through. */
7308 case COMPONENT_REF:
7309 case BIT_FIELD_REF:
7310 case ARRAY_RANGE_REF:
7311 /* If the operand is a CONSTRUCTOR, we can just extract the
7312 appropriate field if it is present. Don't do this if we have
7313 already written the data since we want to refer to that copy
7314 and varasm.c assumes that's what we'll do. */
7315 if (code == COMPONENT_REF
7316 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
7317 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
7319 tree elt;
7321 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7322 elt = TREE_CHAIN (elt))
7323 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7324 /* We can normally use the value of the field in the
7325 CONSTRUCTOR. However, if this is a bitfield in
7326 an integral mode that we can fit in a HOST_WIDE_INT,
7327 we must mask only the number of bits in the bitfield,
7328 since this is done implicitly by the constructor. If
7329 the bitfield does not meet either of those conditions,
7330 we can't do this optimization. */
7331 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7332 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7333 == MODE_INT)
7334 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7335 <= HOST_BITS_PER_WIDE_INT))))
7337 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7338 && modifier == EXPAND_STACK_PARM)
7339 target = 0;
7340 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7341 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7343 HOST_WIDE_INT bitsize
7344 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7345 enum machine_mode imode
7346 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7348 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7350 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7351 op0 = expand_and (imode, op0, op1, target);
7353 else
7355 tree count
7356 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7359 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7360 target, 0);
7361 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7362 target, 0);
7366 return op0;
7371 enum machine_mode mode1;
7372 HOST_WIDE_INT bitsize, bitpos;
7373 tree offset;
7374 int volatilep = 0;
7375 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7376 &mode1, &unsignedp, &volatilep);
7377 rtx orig_op0;
7379 /* If we got back the original object, something is wrong. Perhaps
7380 we are evaluating an expression too early. In any event, don't
7381 infinitely recurse. */
7382 if (tem == exp)
7383 abort ();
7385 /* If TEM's type is a union of variable size, pass TARGET to the inner
7386 computation, since it will need a temporary and TARGET is known
7387 to suffice. This occurs in unchecked conversion in Ada. */
7389 orig_op0 = op0
7390 = expand_expr (tem,
7391 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7392 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7393 != INTEGER_CST)
7394 && modifier != EXPAND_STACK_PARM
7395 ? target : NULL_RTX),
7396 VOIDmode,
7397 (modifier == EXPAND_INITIALIZER
7398 || modifier == EXPAND_CONST_ADDRESS
7399 || modifier == EXPAND_STACK_PARM)
7400 ? modifier : EXPAND_NORMAL);
7402 /* If this is a constant, put it into a register if it is a
7403 legitimate constant and OFFSET is 0, and into memory if it isn't. */
7404 if (CONSTANT_P (op0))
7406 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7407 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7408 && offset == 0)
7409 op0 = force_reg (mode, op0);
7410 else
7411 op0 = validize_mem (force_const_mem (mode, op0));
7414 if (offset != 0)
7416 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7417 EXPAND_SUM);
7419 /* If this object is in a register, put it into memory.
7420 This case can't occur in C, but can in Ada if we have
7421 unchecked conversion of an expression from a scalar type to
7422 an array or record type. */
7423 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7424 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7426 /* If the operand is a SAVE_EXPR, we can deal with this by
7427 forcing the SAVE_EXPR into memory. */
7428 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7430 put_var_into_stack (TREE_OPERAND (exp, 0),
7431 /*rescan=*/true);
7432 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7434 else
7436 tree nt
7437 = build_qualified_type (TREE_TYPE (tem),
7438 (TYPE_QUALS (TREE_TYPE (tem))
7439 | TYPE_QUAL_CONST));
7440 rtx memloc = assign_temp (nt, 1, 1, 1);
7442 emit_move_insn (memloc, op0);
7443 op0 = memloc;
7447 if (GET_CODE (op0) != MEM)
7448 abort ();
7450 #ifdef POINTERS_EXTEND_UNSIGNED
7451 if (GET_MODE (offset_rtx) != Pmode)
7452 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7453 #else
7454 if (GET_MODE (offset_rtx) != ptr_mode)
7455 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7456 #endif
7458 /* A constant address in OP0 can have VOIDmode; we must not try
7459 to call force_reg in that case, so avoid it. */
7460 if (GET_CODE (op0) == MEM
7461 && GET_MODE (op0) == BLKmode
7462 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7463 && bitsize != 0
7464 && (bitpos % bitsize) == 0
7465 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7466 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7468 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7469 bitpos = 0;
7472 op0 = offset_address (op0, offset_rtx,
7473 highest_pow2_factor (offset));
7476 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7477 record its alignment as BIGGEST_ALIGNMENT. */
7478 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7479 && is_aligning_offset (offset, tem))
7480 set_mem_align (op0, BIGGEST_ALIGNMENT);
7482 /* Don't forget about volatility even if this is a bitfield. */
7483 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7485 if (op0 == orig_op0)
7486 op0 = copy_rtx (op0);
7488 MEM_VOLATILE_P (op0) = 1;
7491 /* The following code doesn't handle CONCAT.
7492 Assume only bitpos == 0 can be used for CONCAT, due to
7493 one element arrays having the same mode as its element. */
7494 if (GET_CODE (op0) == CONCAT)
7496 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7497 abort ();
7498 return op0;
7501 /* In cases where an aligned union has an unaligned object
7502 as a field, we might be extracting a BLKmode value from
7503 an integer-mode (e.g., SImode) object. Handle this case
7504 by doing the extract into an object as wide as the field
7505 (which we know to be the width of a basic mode), then
7506 storing into memory, and changing the mode to BLKmode. */
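      /* One way this can arise (editor's sketch, hypothetical types):
             union u { int i; struct { char c[3]; } f; };
         the union itself may be SImode while U.F is a 3-byte BLKmode
         field; reading U.F then follows the path above -- extract the
         bits into an SImode temporary, spill it to a stack slot, and
         hand that slot back re-labelled as BLKmode.  */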
7507 if (mode1 == VOIDmode
7508 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7509 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7510 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7511 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7512 && modifier != EXPAND_CONST_ADDRESS
7513 && modifier != EXPAND_INITIALIZER)
7514 /* If the field isn't aligned enough to fetch as a memref,
7515 fetch it as a bit field. */
7516 || (mode1 != BLKmode
7517 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7518 && ((TYPE_ALIGN (TREE_TYPE (tem))
7519 < GET_MODE_ALIGNMENT (mode))
7520 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7521 /* If the type and the field are a constant size and the
7522 size of the type isn't the same size as the bitfield,
7523 we must use bitfield operations. */
7524 || (bitsize >= 0
7525 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7526 == INTEGER_CST)
7527 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7528 bitsize)))
7530 enum machine_mode ext_mode = mode;
7532 if (ext_mode == BLKmode
7533 && ! (target != 0 && GET_CODE (op0) == MEM
7534 && GET_CODE (target) == MEM
7535 && bitpos % BITS_PER_UNIT == 0))
7536 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7538 if (ext_mode == BLKmode)
7540 /* In this case, BITPOS must start at a byte boundary and
7541 TARGET, if specified, must be a MEM. */
7542 if (GET_CODE (op0) != MEM
7543 || (target != 0 && GET_CODE (target) != MEM)
7544 || bitpos % BITS_PER_UNIT != 0)
7545 abort ();
7547 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7548 if (target == 0)
7549 target = assign_temp (type, 0, 1, 1);
7551 emit_block_move (target, op0,
7552 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7553 / BITS_PER_UNIT),
7554 (modifier == EXPAND_STACK_PARM
7555 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7557 return target;
7560 op0 = validize_mem (op0);
7562 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7563 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7565 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7566 (modifier == EXPAND_STACK_PARM
7567 ? NULL_RTX : target),
7568 ext_mode, ext_mode,
7569 int_size_in_bytes (TREE_TYPE (tem)));
7571 /* If the result is a record type and BITSIZE is narrower than
7572 the mode of OP0, an integral mode, and this is a big endian
7573 machine, we must put the field into the high-order bits. */
7574 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7575 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7576 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7577 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7578 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7579 - bitsize),
7580 op0, 1);
7582 if (mode == BLKmode)
7584 rtx new = assign_temp (build_qualified_type
7585 ((*lang_hooks.types.type_for_mode)
7586 (ext_mode, 0),
7587 TYPE_QUAL_CONST), 0, 1, 1);
7589 emit_move_insn (new, op0);
7590 op0 = copy_rtx (new);
7591 PUT_MODE (op0, BLKmode);
7592 set_mem_attributes (op0, exp, 1);
7595 return op0;
7598 /* If the result is BLKmode, use that to access the object
7599 now as well. */
7600 if (mode == BLKmode)
7601 mode1 = BLKmode;
7603 /* Get a reference to just this component. */
7604 if (modifier == EXPAND_CONST_ADDRESS
7605 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7606 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7607 else
7608 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7610 if (op0 == orig_op0)
7611 op0 = copy_rtx (op0);
7613 set_mem_attributes (op0, exp, 0);
7614 if (GET_CODE (XEXP (op0, 0)) == REG)
7615 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7617 MEM_VOLATILE_P (op0) |= volatilep;
7618 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7619 || modifier == EXPAND_CONST_ADDRESS
7620 || modifier == EXPAND_INITIALIZER)
7621 return op0;
7622 else if (target == 0)
7623 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7625 convert_move (target, op0, unsignedp);
7626 return target;
7629 case VTABLE_REF:
7631 rtx insn, before = get_last_insn (), vtbl_ref;
7633 /* Evaluate the interior expression. */
7634 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7635 tmode, modifier);
7637 /* Get or create an instruction off which to hang a note. */
7638 if (REG_P (subtarget))
7640 target = subtarget;
7641 insn = get_last_insn ();
7642 if (insn == before)
7643 abort ();
7644 if (! INSN_P (insn))
7645 insn = prev_nonnote_insn (insn);
7647 else
7649 target = gen_reg_rtx (GET_MODE (subtarget));
7650 insn = emit_move_insn (target, subtarget);
7653 /* Collect the data for the note. */
7654 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7655 vtbl_ref = plus_constant (vtbl_ref,
7656 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7657 /* Discard the initial CONST that was added. */
7658 vtbl_ref = XEXP (vtbl_ref, 0);
7660 REG_NOTES (insn)
7661 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7663 return target;
7666 /* Intended for a reference to a buffer of a file-object in Pascal.
7667 But it's not certain that a special tree code will really be
7668 necessary for these. INDIRECT_REF might work for them. */
7669 case BUFFER_REF:
7670 abort ();
7672 case IN_EXPR:
7674 /* Pascal set IN expression.
7676 Algorithm:
7677 rlo = set_low - (set_low%bits_per_word);
7678 the_word = set [ (index - rlo)/bits_per_word ];
7679 bit_index = index % bits_per_word;
7680 bitmask = 1 << bit_index;
7681 return !!(the_word & bitmask); */
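      /* Worked example (editor's illustration, assuming bits_per_word == 8):
         for set_low = 3 and index = 13,
             rlo       = 3 - (3 % 8)       = 0;
             the_word  = set[(13 - 0) / 8] = set[1];
             bit_index = 13 % 8            = 5;
             bitmask   = 1 << 5            = 32;
         so the result is nonzero iff bit 5 of set[1] is set.  */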
7683 tree set = TREE_OPERAND (exp, 0);
7684 tree index = TREE_OPERAND (exp, 1);
7685 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7686 tree set_type = TREE_TYPE (set);
7687 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7688 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7689 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7690 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7691 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7692 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7693 rtx setaddr = XEXP (setval, 0);
7694 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7695 rtx rlow;
7696 rtx diff, quo, rem, addr, bit, result;
7698 /* If domain is empty, answer is no. Likewise if index is constant
7699 and out of bounds. */
7700 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7701 && TREE_CODE (set_low_bound) == INTEGER_CST
7702 && tree_int_cst_lt (set_high_bound, set_low_bound))
7703 || (TREE_CODE (index) == INTEGER_CST
7704 && TREE_CODE (set_low_bound) == INTEGER_CST
7705 && tree_int_cst_lt (index, set_low_bound))
7706 || (TREE_CODE (set_high_bound) == INTEGER_CST
7707 && TREE_CODE (index) == INTEGER_CST
7708 && tree_int_cst_lt (set_high_bound, index))))
7709 return const0_rtx;
7711 if (target == 0)
7712 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7714 /* If we get here, we have to generate the code for both cases
7715 (in range and out of range). */
7717 op0 = gen_label_rtx ();
7718 op1 = gen_label_rtx ();
7720 if (! (GET_CODE (index_val) == CONST_INT
7721 && GET_CODE (lo_r) == CONST_INT))
7722 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7723 GET_MODE (index_val), iunsignedp, op1);
7725 if (! (GET_CODE (index_val) == CONST_INT
7726 && GET_CODE (hi_r) == CONST_INT))
7727 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7728 GET_MODE (index_val), iunsignedp, op1);
7730 /* Calculate the element number of bit zero in the first word
7731 of the set. */
7732 if (GET_CODE (lo_r) == CONST_INT)
7733 rlow = GEN_INT (INTVAL (lo_r)
7734 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7735 else
7736 rlow = expand_binop (index_mode, and_optab, lo_r,
7737 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7738 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7740 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7741 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7743 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7744 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7745 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7746 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7748 addr = memory_address (byte_mode,
7749 expand_binop (index_mode, add_optab, diff,
7750 setaddr, NULL_RTX, iunsignedp,
7751 OPTAB_LIB_WIDEN));
7753 /* Extract the bit we want to examine. */
7754 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7755 gen_rtx_MEM (byte_mode, addr),
7756 make_tree (TREE_TYPE (index), rem),
7757 NULL_RTX, 1);
7758 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7759 GET_MODE (target) == byte_mode ? target : 0,
7760 1, OPTAB_LIB_WIDEN);
7762 if (result != target)
7763 convert_move (target, result, 1);
7765 /* Output the code to handle the out-of-range case. */
7766 emit_jump (op0);
7767 emit_label (op1);
7768 emit_move_insn (target, const0_rtx);
7769 emit_label (op0);
7770 return target;
7773 case WITH_CLEANUP_EXPR:
7774 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7776 WITH_CLEANUP_EXPR_RTL (exp)
7777 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7778 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7779 CLEANUP_EH_ONLY (exp));
7781 /* That's it for this cleanup. */
7782 TREE_OPERAND (exp, 1) = 0;
7784 return WITH_CLEANUP_EXPR_RTL (exp);
7786 case CLEANUP_POINT_EXPR:
7788 /* Start a new binding layer that will keep track of all cleanup
7789 actions to be performed. */
7790 expand_start_bindings (2);
7792 target_temp_slot_level = temp_slot_level;
7794 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7795 /* If we're going to use this value, load it up now. */
7796 if (! ignore)
7797 op0 = force_not_mem (op0);
7798 preserve_temp_slots (op0);
7799 expand_end_bindings (NULL_TREE, 0, 0);
7801 return op0;
7803 case CALL_EXPR:
7804 /* Check for a built-in function. */
7805 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7806 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7807 == FUNCTION_DECL)
7808 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7810 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7811 == BUILT_IN_FRONTEND)
7812 return (*lang_hooks.expand_expr) (exp, original_target,
7813 tmode, modifier);
7814 else
7815 return expand_builtin (exp, target, subtarget, tmode, ignore);
7818 return expand_call (exp, target, ignore);
7820 case NON_LVALUE_EXPR:
7821 case NOP_EXPR:
7822 case CONVERT_EXPR:
7823 case REFERENCE_EXPR:
7824 if (TREE_OPERAND (exp, 0) == error_mark_node)
7825 return const0_rtx;
7827 if (TREE_CODE (type) == UNION_TYPE)
7829 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7831 /* If both input and output are BLKmode, this conversion isn't doing
7832 anything except possibly changing the memory attributes. */
7833 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7835 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7836 modifier);
7838 result = copy_rtx (result);
7839 set_mem_attributes (result, exp, 0);
7840 return result;
7843 if (target == 0)
7844 target = assign_temp (type, 0, 1, 1);
7846 if (GET_CODE (target) == MEM)
7847 /* Store data into beginning of memory target. */
7848 store_expr (TREE_OPERAND (exp, 0),
7849 adjust_address (target, TYPE_MODE (valtype), 0),
7850 modifier == EXPAND_STACK_PARM ? 2 : 0);
7852 else if (GET_CODE (target) == REG)
7853 /* Store this field into a union of the proper type. */
7854 store_field (target,
7855 MIN ((int_size_in_bytes (TREE_TYPE
7856 (TREE_OPERAND (exp, 0)))
7857 * BITS_PER_UNIT),
7858 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7859 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7860 VOIDmode, 0, type, 0);
7861 else
7862 abort ();
7864 /* Return the entire union. */
7865 return target;
7868 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7870 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7871 modifier);
7873 /* If the signedness of the conversion differs and OP0 is
7874 a promoted SUBREG, clear that indication since we now
7875 have to do the proper extension. */
7876 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7877 && GET_CODE (op0) == SUBREG)
7878 SUBREG_PROMOTED_VAR_P (op0) = 0;
7880 return op0;
7883 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7884 if (GET_MODE (op0) == mode)
7885 return op0;
7887 /* If OP0 is a constant, just convert it into the proper mode. */
7888 if (CONSTANT_P (op0))
7890 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7891 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7893 if (modifier == EXPAND_INITIALIZER)
7894 return simplify_gen_subreg (mode, op0, inner_mode,
7895 subreg_lowpart_offset (mode,
7896 inner_mode));
7897 else
7898 return convert_modes (mode, inner_mode, op0,
7899 TREE_UNSIGNED (inner_type));
7902 if (modifier == EXPAND_INITIALIZER)
7903 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7905 if (target == 0)
7906 return
7907 convert_to_mode (mode, op0,
7908 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7909 else
7910 convert_move (target, op0,
7911 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7912 return target;
7914 case VIEW_CONVERT_EXPR:
7915 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7917 /* If the input and output modes are both the same, we are done.
7918 Otherwise, if neither mode is BLKmode and both are within a word, we
7919 can use gen_lowpart. If neither is true, make sure the operand is
7920 in memory and convert the MEM to the new mode. */
7921 if (TYPE_MODE (type) == GET_MODE (op0))
7923 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7924 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7925 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7926 op0 = gen_lowpart (TYPE_MODE (type), op0);
7927 else if (GET_CODE (op0) != MEM)
7929 /* If the operand is not a MEM, force it into memory. Since we
7930 are going to be changing the mode of the MEM, don't call
7931 force_const_mem for constants because we don't allow pool
7932 constants to change mode. */
7933 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7935 if (TREE_ADDRESSABLE (exp))
7936 abort ();
7938 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7939 target
7940 = assign_stack_temp_for_type
7941 (TYPE_MODE (inner_type),
7942 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7944 emit_move_insn (target, op0);
7945 op0 = target;
7948 /* At this point, OP0 is in the correct mode. If the output type is such
7949 that the operand is known to be aligned, indicate that it is.
7950 Otherwise, we need only be concerned about alignment for non-BLKmode
7951 results. */
7952 if (GET_CODE (op0) == MEM)
7954 op0 = copy_rtx (op0);
7956 if (TYPE_ALIGN_OK (type))
7957 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7958 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7959 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7961 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7962 HOST_WIDE_INT temp_size
7963 = MAX (int_size_in_bytes (inner_type),
7964 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7965 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7966 temp_size, 0, type);
7967 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7969 if (TREE_ADDRESSABLE (exp))
7970 abort ();
7972 if (GET_MODE (op0) == BLKmode)
7973 emit_block_move (new_with_op0_mode, op0,
7974 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7975 (modifier == EXPAND_STACK_PARM
7976 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7977 else
7978 emit_move_insn (new_with_op0_mode, op0);
7980 op0 = new;
7983 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7986 return op0;
7988 case PLUS_EXPR:
7989 this_optab = ! unsignedp && flag_trapv
7990 && (GET_MODE_CLASS (mode) == MODE_INT)
7991 ? addv_optab : add_optab;
7993 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7994 something else, make sure we add the register to the constant and
7995 then to the other thing. This case can occur during strength
7996 reduction and doing it this way will produce better code if the
7997 frame pointer or argument pointer is eliminated.
7999 fold-const.c will ensure that the constant is always in the inner
8000 PLUS_EXPR, so the only case we need to do anything about is if
8001 sp, ap, or fp is our second argument, in which case we must swap
8002 the innermost first argument and our second argument. */
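    /* For instance (editor's sketch): the tree (i + 4) + fp is rearranged
       into (fp + 4) + i, so that once the frame pointer is eliminated to a
       stack-pointer offset, fp + 4 folds down to a single constant
       address term.  */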
8004 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8005 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8006 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
8007 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8008 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8009 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8011 tree t = TREE_OPERAND (exp, 1);
8013 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8014 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8017 /* If the result is to be ptr_mode and we are adding an integer to
8018 something, we might be forming a constant. So try to use
8019 plus_constant. If it produces a sum and we can't accept it,
8020 use force_operand. This allows P = &ARR[const] to generate
8021 efficient code on machines where a SYMBOL_REF is not a valid
8022 address.
8024 If this is an EXPAND_SUM call, always return the sum. */
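    /* Illustrative case (hypothetical declarations, assuming 4-byte int):
           static int arr[16];
           int *p = &arr[3];
       The address is the SYMBOL_REF for arr plus 12; plus_constant keeps
       it as one (symbol + constant) sum, and force_operand legitimizes it
       only if the target cannot use that form as an address directly.  */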
8025 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8026 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8028 if (modifier == EXPAND_STACK_PARM)
8029 target = 0;
8030 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8031 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8032 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8034 rtx constant_part;
8036 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8037 EXPAND_SUM);
8038 /* Use immed_double_const to ensure that the constant is
8039 truncated according to the mode of OP1, then sign extended
8040 to a HOST_WIDE_INT. Using the constant directly can result
8041 in non-canonical RTL in a 64x32 cross compile. */
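        /* Example (editor's illustration, 64-bit host, 32-bit target): if
           the low word of the constant is 0xffffffff and OP1 has SImode,
           using it directly would produce CONST_INT 4294967295;
           immed_double_const truncates to 32 bits and sign-extends,
           giving the canonical CONST_INT -1.  */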
8042 constant_part
8043 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8044 (HOST_WIDE_INT) 0,
8045 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8046 op1 = plus_constant (op1, INTVAL (constant_part));
8047 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8048 op1 = force_operand (op1, target);
8049 return op1;
8052 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8053 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
8054 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8056 rtx constant_part;
8058 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8059 (modifier == EXPAND_INITIALIZER
8060 ? EXPAND_INITIALIZER : EXPAND_SUM));
8061 if (! CONSTANT_P (op0))
8063 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8064 VOIDmode, modifier);
8065 /* Don't go to both_summands if modifier
8066 says it's not right to return a PLUS. */
8067 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8068 goto binop2;
8069 goto both_summands;
8071 /* Use immed_double_const to ensure that the constant is
8072 truncated according to the mode of OP1, then sign extended
8073 to a HOST_WIDE_INT. Using the constant directly can result
8074 in non-canonical RTL in a 64x32 cross compile. */
8075 constant_part
8076 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8077 (HOST_WIDE_INT) 0,
8078 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8079 op0 = plus_constant (op0, INTVAL (constant_part));
8080 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8081 op0 = force_operand (op0, target);
8082 return op0;
8086 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8087 subtarget = 0;
8089 /* No sense saving up arithmetic to be done
8090 if it's all in the wrong mode to form part of an address.
8091 And force_operand won't know whether to sign-extend or
8092 zero-extend. */
8093 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8094 || mode != ptr_mode)
8096 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8097 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8098 if (op0 == const0_rtx)
8099 return op1;
8100 if (op1 == const0_rtx)
8101 return op0;
8102 goto binop2;
8105 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8106 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8108 /* We come here from MINUS_EXPR when the second operand is a
8109 constant. */
8110 both_summands:
8111 /* Make sure any term that's a sum with a constant comes last. */
8112 if (GET_CODE (op0) == PLUS
8113 && CONSTANT_P (XEXP (op0, 1)))
8115 temp = op0;
8116 op0 = op1;
8117 op1 = temp;
8119 /* If adding to a sum including a constant,
8120 associate it to put the constant outside. */
8121 if (GET_CODE (op1) == PLUS
8122 && CONSTANT_P (XEXP (op1, 1)))
8124 rtx constant_term = const0_rtx;
8126 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
8127 if (temp != 0)
8128 op0 = temp;
8129 /* Ensure that MULT comes first if there is one. */
8130 else if (GET_CODE (op0) == MULT)
8131 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
8132 else
8133 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
8135 /* Let's also eliminate constants from op0 if possible. */
8136 op0 = eliminate_constant_term (op0, &constant_term);
8138 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
8139 their sum should be a constant. Form it into OP1, since the
8140 result we want will then be OP0 + OP1. */
8142 temp = simplify_binary_operation (PLUS, mode, constant_term,
8143 XEXP (op1, 1));
8144 if (temp != 0)
8145 op1 = temp;
8146 else
8147 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
8150 /* Put a constant term last and put a multiplication first. */
8151 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
8152 temp = op1, op1 = op0, op0 = temp;
8154 temp = simplify_binary_operation (PLUS, mode, op0, op1);
8155 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
8157 case MINUS_EXPR:
8158 /* For initializers, we are allowed to return a MINUS of two
8159 symbolic constants. Here we handle all cases when both operands
8160 are constant. */
8161 /* Handle difference of two symbolic constants,
8162 for the sake of an initializer. */
8163 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8164 && really_constant_p (TREE_OPERAND (exp, 0))
8165 && really_constant_p (TREE_OPERAND (exp, 1)))
8167 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
8168 modifier);
8169 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8170 modifier);
8172 /* If the last operand is a CONST_INT, use plus_constant of
8173 the negated constant. Else make the MINUS. */
8174 if (GET_CODE (op1) == CONST_INT)
8175 return plus_constant (op0, - INTVAL (op1));
8176 else
8177 return gen_rtx_MINUS (mode, op0, op1);
8180 this_optab = ! unsignedp && flag_trapv
8181 && (GET_MODE_CLASS(mode) == MODE_INT)
8182 ? subv_optab : sub_optab;
8184 /* No sense saving up arithmetic to be done
8185 if it's all in the wrong mode to form part of an address.
8186 And force_operand won't know whether to sign-extend or
8187 zero-extend. */
8188 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8189 || mode != ptr_mode)
8190 goto binop;
8192 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8193 subtarget = 0;
8195 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8196 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8198 /* Convert A - const to A + (-const). */
8199 if (GET_CODE (op1) == CONST_INT)
8201 op1 = negate_rtx (mode, op1);
8202 goto both_summands;
8205 goto binop2;
8207 case MULT_EXPR:
8208 /* If first operand is constant, swap them.
8209 Thus the following special case checks need only
8210 check the second operand. */
8211 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8213 tree t1 = TREE_OPERAND (exp, 0);
8214 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8215 TREE_OPERAND (exp, 1) = t1;
8218 /* Attempt to return something suitable for generating an
8219 indexed address, for machines that support that. */
8221 if (modifier == EXPAND_SUM && mode == ptr_mode
8222 && host_integerp (TREE_OPERAND (exp, 1), 0))
8224 tree exp1 = TREE_OPERAND (exp, 1);
8226 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8227 EXPAND_SUM);
8229 /* If we knew for certain that this is arithmetic for an array
8230 reference, and we knew the bounds of the array, then we could
8231 apply the distributive law across (PLUS X C) for constant C.
8232 Without such knowledge, we risk overflowing the computation
8233 when both X and C are large, but X+C isn't. */
8234 /* ??? Could perhaps special-case EXP being unsigned and C being
8235 positive. In that case we are certain that X+C is no smaller
8236 than X and so the transformed expression will overflow iff the
8237 original would have. */
8239 if (GET_CODE (op0) != REG)
8240 op0 = force_operand (op0, NULL_RTX);
8241 if (GET_CODE (op0) != REG)
8242 op0 = copy_to_mode_reg (mode, op0);
8244 return gen_rtx_MULT (mode, op0,
8245 gen_int_mode (tree_low_cst (exp1, 0),
8246 TYPE_MODE (TREE_TYPE (exp1))));
8249 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8250 subtarget = 0;
8252 if (modifier == EXPAND_STACK_PARM)
8253 target = 0;
8255 /* Check for multiplying things that have been extended
8256 from a narrower type. If this machine supports multiplying
8257 in that narrower type with a result in the desired type,
8258 do it that way, and avoid the explicit type-conversion. */
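    /* For example (hypothetical variables, assuming 32-bit int and a
       target providing a mulsidi3 pattern):
           int a, b;
           long long p = (long long) a * (long long) b;
       Both operands are NOP_EXPR extensions from SImode, so we can
       multiply in SImode and produce the DImode product directly,
       skipping the explicit sign extensions.  */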
8259 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8260 && TREE_CODE (type) == INTEGER_TYPE
8261 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8262 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8263 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8264 && int_fits_type_p (TREE_OPERAND (exp, 1),
8265 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8266 /* Don't use a widening multiply if a shift will do. */
8267 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8268 > HOST_BITS_PER_WIDE_INT)
8269 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8271 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8272 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8274 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8275 /* If both operands are extended, they must either both
8276 be zero-extended or both be sign-extended. */
8277 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8279 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
8281 enum machine_mode innermode
8282 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
8283 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8284 ? smul_widen_optab : umul_widen_optab);
8285 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8286 ? umul_widen_optab : smul_widen_optab);
8287 if (mode == GET_MODE_WIDER_MODE (innermode))
8289 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8291 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8292 NULL_RTX, VOIDmode, 0);
8293 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8294 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8295 VOIDmode, 0);
8296 else
8297 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8298 NULL_RTX, VOIDmode, 0);
8299 goto binop2;
8301 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8302 && innermode == word_mode)
8304 rtx htem;
8305 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8306 NULL_RTX, VOIDmode, 0);
8307 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8308 op1 = convert_modes (innermode, mode,
8309 expand_expr (TREE_OPERAND (exp, 1),
8310 NULL_RTX, VOIDmode, 0),
8311 unsignedp);
8312 else
8313 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8314 NULL_RTX, VOIDmode, 0);
8315 temp = expand_binop (mode, other_optab, op0, op1, target,
8316 unsignedp, OPTAB_LIB_WIDEN);
8317 htem = expand_mult_highpart_adjust (innermode,
8318 gen_highpart (innermode, temp),
8319 op0, op1,
8320 gen_highpart (innermode, temp),
8321 unsignedp);
8322 emit_move_insn (gen_highpart (innermode, temp), htem);
8323 return temp;
8327 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8328 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8329 return expand_mult (mode, op0, op1, target, unsignedp);
8331 case TRUNC_DIV_EXPR:
8332 case FLOOR_DIV_EXPR:
8333 case CEIL_DIV_EXPR:
8334 case ROUND_DIV_EXPR:
8335 case EXACT_DIV_EXPR:
8336 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8337 subtarget = 0;
8338 if (modifier == EXPAND_STACK_PARM)
8339 target = 0;
8340 /* Possible optimization: compute the dividend with EXPAND_SUM
8341 then, if the divisor is constant, we can optimize the case
8342 where some terms of the dividend have coefficients divisible by it. */
8343 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8344 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8345 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8347 case RDIV_EXPR:
8348 /* Emit a/b as a*(1/b). Later we may manage to CSE the reciprocal,
8349 saving an expensive divide. If not, combine will rebuild the original
8350 computation. */
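    /* E.g. (editor's sketch, hypothetical loop):
           for (i = 0; i < n; i++)
             out[i] = in[i] / scale;
       each division becomes in[i] * (1.0 / scale); CSE or loop-invariant
       motion can then keep a single reciprocal, trading n divides for one
       divide plus n multiplies.  Only done under
       -funsafe-math-optimizations, as checked below.  */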
8351 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8352 && TREE_CODE (type) == REAL_TYPE
8353 && !real_onep (TREE_OPERAND (exp, 0)))
8354 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8355 build (RDIV_EXPR, type,
8356 build_real (type, dconst1),
8357 TREE_OPERAND (exp, 1))),
8358 target, tmode, modifier);
8359 this_optab = sdiv_optab;
8360 goto binop;
8362 case TRUNC_MOD_EXPR:
8363 case FLOOR_MOD_EXPR:
8364 case CEIL_MOD_EXPR:
8365 case ROUND_MOD_EXPR:
8366 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8367 subtarget = 0;
8368 if (modifier == EXPAND_STACK_PARM)
8369 target = 0;
8370 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8371 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8372 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8374 case FIX_ROUND_EXPR:
8375 case FIX_FLOOR_EXPR:
8376 case FIX_CEIL_EXPR:
8377 abort (); /* Not used for C. */
8379 case FIX_TRUNC_EXPR:
8380 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8381 if (target == 0 || modifier == EXPAND_STACK_PARM)
8382 target = gen_reg_rtx (mode);
8383 expand_fix (target, op0, unsignedp);
8384 return target;
8386 case FLOAT_EXPR:
8387 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8388 if (target == 0 || modifier == EXPAND_STACK_PARM)
8389 target = gen_reg_rtx (mode);
8390 /* expand_float can't figure out what to do if FROM has VOIDmode.
8391 So give it the correct mode. With -O, cse will optimize this. */
8392 if (GET_MODE (op0) == VOIDmode)
8393 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8394 op0);
8395 expand_float (target, op0,
8396 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8397 return target;
8399 case NEGATE_EXPR:
8400 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8401 if (modifier == EXPAND_STACK_PARM)
8402 target = 0;
8403 temp = expand_unop (mode,
8404 ! unsignedp && flag_trapv
8405 && (GET_MODE_CLASS(mode) == MODE_INT)
8406 ? negv_optab : neg_optab, op0, target, 0);
8407 if (temp == 0)
8408 abort ();
8409 return temp;
8411 case ABS_EXPR:
8412 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8413 if (modifier == EXPAND_STACK_PARM)
8414 target = 0;
8416 /* Handle complex values specially. */
8417 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8418 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8419 return expand_complex_abs (mode, op0, target, unsignedp);
8421 /* Unsigned abs is simply the operand. Testing here means we don't
8422 risk generating incorrect code below. */
8423 if (TREE_UNSIGNED (type))
8424 return op0;
8426 return expand_abs (mode, op0, target, unsignedp,
8427 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8429 case MAX_EXPR:
8430 case MIN_EXPR:
8431 target = original_target;
8432 if (target == 0
8433 || modifier == EXPAND_STACK_PARM
8434 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8435 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8436 || GET_MODE (target) != mode
8437 || (GET_CODE (target) == REG
8438 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8439 target = gen_reg_rtx (mode);
8440 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8441 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8443 /* First try to do it with a special MIN or MAX instruction.
8444 If that does not win, use a conditional jump to select the proper
8445 value. */
8446 this_optab = (TREE_UNSIGNED (type)
8447 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8448 : (code == MIN_EXPR ? smin_optab : smax_optab));
8450 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8451 OPTAB_WIDEN);
8452 if (temp != 0)
8453 return temp;
8455 /* At this point, a MEM target is no longer useful; we will get better
8456 code without it. */
8458 if (GET_CODE (target) == MEM)
8459 target = gen_reg_rtx (mode);
8461 if (target != op0)
8462 emit_move_insn (target, op0);
8464 op0 = gen_label_rtx ();
8466 /* If this mode is an integer too wide to compare properly,
8467 compare word by word. Rely on cse to optimize constant cases. */
8468 if (GET_MODE_CLASS (mode) == MODE_INT
8469 && ! can_compare_p (GE, mode, ccp_jump))
8471 if (code == MAX_EXPR)
8472 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8473 target, op1, NULL_RTX, op0);
8474 else
8475 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8476 op1, target, NULL_RTX, op0);
8478 else
8480 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8481 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8482 unsignedp, mode, NULL_RTX, NULL_RTX,
8483 op0);
8485 emit_move_insn (target, op1);
8486 emit_label (op0);
8487 return target;
8489 case BIT_NOT_EXPR:
8490 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8491 if (modifier == EXPAND_STACK_PARM)
8492 target = 0;
8493 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8494 if (temp == 0)
8495 abort ();
8496 return temp;
8498 case FFS_EXPR:
8499 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8500 if (modifier == EXPAND_STACK_PARM)
8501 target = 0;
8502 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8503 if (temp == 0)
8504 abort ();
8505 return temp;
8507 case CLZ_EXPR:
8508 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8509 temp = expand_unop (mode, clz_optab, op0, target, 1);
8510 if (temp == 0)
8511 abort ();
8512 return temp;
8514 case CTZ_EXPR:
8515 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8516 temp = expand_unop (mode, ctz_optab, op0, target, 1);
8517 if (temp == 0)
8518 abort ();
8519 return temp;
8521 case POPCOUNT_EXPR:
8522 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8523 temp = expand_unop (mode, popcount_optab, op0, target, 1);
8524 if (temp == 0)
8525 abort ();
8526 return temp;
8528 case PARITY_EXPR:
8529 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8530 temp = expand_unop (mode, parity_optab, op0, target, 1);
8531 if (temp == 0)
8532 abort ();
8533 return temp;
8535 /* ??? Can optimize bitwise operations with one arg constant.
8536 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8537 and (a bitwise1 b) bitwise2 b (etc)
8538 but that is probably not worthwhile. */
8540 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8541 boolean values when we want in all cases to compute both of them. In
8542 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8543 as actual zero-or-1 values and then bitwise anding. In cases where
8544 there cannot be any side effects, better code would be made by
8545 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8546 how to recognize those cases. */
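    /* Illustrative contrast (hypothetical operands with no side effects):
           flag = (a > 0) & (b > 0);    BIT_AND_EXPR on the 0/1 values
           flag = (a > 0) && (b > 0);   normally TRUTH_ANDIF_EXPR, but a
                                        front end may use TRUTH_AND_EXPR
                                        when both halves are safe to
                                        evaluate unconditionally
       Either way we compute each comparison as a 0-or-1 value and AND
       them, avoiding a branch.  */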
8548 case TRUTH_AND_EXPR:
8549 case BIT_AND_EXPR:
8550 this_optab = and_optab;
8551 goto binop;
8553 case TRUTH_OR_EXPR:
8554 case BIT_IOR_EXPR:
8555 this_optab = ior_optab;
8556 goto binop;
8558 case TRUTH_XOR_EXPR:
8559 case BIT_XOR_EXPR:
8560 this_optab = xor_optab;
8561 goto binop;
8563 case LSHIFT_EXPR:
8564 case RSHIFT_EXPR:
8565 case LROTATE_EXPR:
8566 case RROTATE_EXPR:
8567 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8568 subtarget = 0;
8569 if (modifier == EXPAND_STACK_PARM)
8570 target = 0;
8571 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8572 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8573 unsignedp);
8575 /* Could determine the answer when only additive constants differ. Also,
8576 the addition of one can be handled by changing the condition. */
8577 case LT_EXPR:
8578 case LE_EXPR:
8579 case GT_EXPR:
8580 case GE_EXPR:
8581 case EQ_EXPR:
8582 case NE_EXPR:
8583 case UNORDERED_EXPR:
8584 case ORDERED_EXPR:
8585 case UNLT_EXPR:
8586 case UNLE_EXPR:
8587 case UNGT_EXPR:
8588 case UNGE_EXPR:
8589 case UNEQ_EXPR:
8590 temp = do_store_flag (exp,
8591 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8592 tmode != VOIDmode ? tmode : mode, 0);
8593 if (temp != 0)
8594 return temp;
8596 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8597 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8598 && original_target
8599 && GET_CODE (original_target) == REG
8600 && (GET_MODE (original_target)
8601 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8603 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8604 VOIDmode, 0);
8606 /* If temp is constant, we can just compute the result. */
8607 if (GET_CODE (temp) == CONST_INT)
8609 if (INTVAL (temp) != 0)
8610 emit_move_insn (target, const1_rtx);
8611 else
8612 emit_move_insn (target, const0_rtx);
8614 return target;
8617 if (temp != original_target)
8619 enum machine_mode mode1 = GET_MODE (temp);
8620 if (mode1 == VOIDmode)
8621 mode1 = tmode != VOIDmode ? tmode : mode;
8623 temp = copy_to_mode_reg (mode1, temp);
8626 op1 = gen_label_rtx ();
8627 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8628 GET_MODE (temp), unsignedp, op1);
8629 emit_move_insn (temp, const1_rtx);
8630 emit_label (op1);
8631 return temp;
8634 /* If no set-flag instruction, must generate a conditional
8635 store into a temporary variable. Drop through
8636 and handle this like && and ||. */
8638 case TRUTH_ANDIF_EXPR:
8639 case TRUTH_ORIF_EXPR:
8640 if (! ignore
8641 && (target == 0
8642 || modifier == EXPAND_STACK_PARM
8643 || ! safe_from_p (target, exp, 1)
8644 /* Make sure we don't have a hard reg (such as the function's return
8645 value) live across basic blocks, if not optimizing. */
8646 || (!optimize && GET_CODE (target) == REG
8647 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8648 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8650 if (target)
8651 emit_clr_insn (target);
8653 op1 = gen_label_rtx ();
8654 jumpifnot (exp, op1);
8656 if (target)
8657 emit_0_to_1_insn (target);
8659 emit_label (op1);
8660 return ignore ? const0_rtx : target;
8662 case TRUTH_NOT_EXPR:
8663 if (modifier == EXPAND_STACK_PARM)
8664 target = 0;
8665 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8666 /* The parser is careful to generate TRUTH_NOT_EXPR
8667 only with operands that are always zero or one. */
8668 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8669 target, 1, OPTAB_LIB_WIDEN);
8670 if (temp == 0)
8671 abort ();
8672 return temp;
8674 case COMPOUND_EXPR:
8675 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8676 emit_queue ();
8677 return expand_expr (TREE_OPERAND (exp, 1),
8678 (ignore ? const0_rtx : target),
8679 VOIDmode, modifier);
8681 case COND_EXPR:
8682 /* If we would have a "singleton" (see below) were it not for a
8683 conversion in each arm, bring that conversion back out. */
8684 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8685 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8686 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8687 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8689 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8690 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8692 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8693 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8694 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8695 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8696 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8697 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8698 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8699 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8700 return expand_expr (build1 (NOP_EXPR, type,
8701 build (COND_EXPR, TREE_TYPE (iftrue),
8702 TREE_OPERAND (exp, 0),
8703 iftrue, iffalse)),
8704 target, tmode, modifier);
8708 /* Note that COND_EXPRs whose type is a structure or union
8709 are required to be constructed to contain assignments of
8710 a temporary variable, so that we can evaluate them here
8711 for side effect only. If type is void, we must do likewise. */
8713 /* If an arm of the branch requires a cleanup,
8714 only that cleanup is performed. */
8716 tree singleton = 0;
8717 tree binary_op = 0, unary_op = 0;
8719 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8720 convert it to our mode, if necessary. */
8721 if (integer_onep (TREE_OPERAND (exp, 1))
8722 && integer_zerop (TREE_OPERAND (exp, 2))
8723 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8725 if (ignore)
8727 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8728 modifier);
8729 return const0_rtx;
8732 if (modifier == EXPAND_STACK_PARM)
8733 target = 0;
8734 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8735 if (GET_MODE (op0) == mode)
8736 return op0;
8738 if (target == 0)
8739 target = gen_reg_rtx (mode);
8740 convert_move (target, op0, unsignedp);
8741 return target;
8744 /* Check for X ? A + B : A. If we have this, we can copy A to the
8745 output and conditionally add B. Similarly for unary operations.
8746 Don't do this if X has side-effects because those side effects
8747 might affect A or B and the "?" operation is a sequence point in
8748 ANSI. (operand_equal_p tests for side effects.) */
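    /* Example of the shape being matched (hypothetical variables):
           r = x ? a + b : a;
       operand 2 (A) equals operand 0 of the binary op in operand 1
       (A + B), so we can store A into the result unconditionally and
       branch only around the add of B.  */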
8750 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8751 && operand_equal_p (TREE_OPERAND (exp, 2),
8752 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8753 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8754 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8755 && operand_equal_p (TREE_OPERAND (exp, 1),
8756 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8757 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8758 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8759 && operand_equal_p (TREE_OPERAND (exp, 2),
8760 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8761 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8762 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8763 && operand_equal_p (TREE_OPERAND (exp, 1),
8764 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8765 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8767 /* If we are not to produce a result, we have no target. Otherwise,
8768 if a target was specified use it; it will not be used as an
8769 intermediate target unless it is safe. If no target, use a
8770 temporary. */
8772 if (ignore)
8773 temp = 0;
8774 else if (modifier == EXPAND_STACK_PARM)
8775 temp = assign_temp (type, 0, 0, 1);
8776 else if (original_target
8777 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8778 || (singleton && GET_CODE (original_target) == REG
8779 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8780 && original_target == var_rtx (singleton)))
8781 && GET_MODE (original_target) == mode
8782 #ifdef HAVE_conditional_move
8783 && (! can_conditionally_move_p (mode)
8784 || GET_CODE (original_target) == REG
8785 || TREE_ADDRESSABLE (type))
8786 #endif
8787 && (GET_CODE (original_target) != MEM
8788 || TREE_ADDRESSABLE (type)))
8789 temp = original_target;
8790 else if (TREE_ADDRESSABLE (type))
8791 abort ();
8792 else
8793 temp = assign_temp (type, 0, 0, 1);
8795 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8796 do the test of X as a store-flag operation, do this as
8797 A + ((X != 0) << log C). Similarly for other simple binary
8798 operators. Only do for C == 1 if BRANCH_COST is low. */
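    /* Worked example (editor's illustration, assuming BRANCH_COST >= 3 so
       that powers of two other than 1 qualify):
           r = x ? a + 8 : a;
       becomes, with a store-flag instruction available,
           r = a + ((x != 0) << 3);
       with no branch at all.  When BRANCH_COST is low, only C == 1 is
       transformed this way.  */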
8799 if (temp && singleton && binary_op
8800 && (TREE_CODE (binary_op) == PLUS_EXPR
8801 || TREE_CODE (binary_op) == MINUS_EXPR
8802 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8803 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8804 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8805 : integer_onep (TREE_OPERAND (binary_op, 1)))
8806 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8808 rtx result;
8809 tree cond;
8810 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8811 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8812 ? addv_optab : add_optab)
8813 : TREE_CODE (binary_op) == MINUS_EXPR
8814 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8815 ? subv_optab : sub_optab)
8816 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8817 : xor_optab);
8819 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8820 if (singleton == TREE_OPERAND (exp, 1))
8821 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8822 else
8823 cond = TREE_OPERAND (exp, 0);
8825 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8826 ? temp : NULL_RTX),
8827 mode, BRANCH_COST <= 1);
8829 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8830 result = expand_shift (LSHIFT_EXPR, mode, result,
8831 build_int_2 (tree_log2
8832 (TREE_OPERAND
8833 (binary_op, 1)),
8835 (safe_from_p (temp, singleton, 1)
8836 ? temp : NULL_RTX), 0);
8838 if (result)
8840 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8841 return expand_binop (mode, boptab, op1, result, temp,
8842 unsignedp, OPTAB_LIB_WIDEN);
8846 do_pending_stack_adjust ();
8847 NO_DEFER_POP;
8848 op0 = gen_label_rtx ();
8850 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8852 if (temp != 0)
8854 /* If the target conflicts with the other operand of the
8855 binary op, we can't use it. Also, we can't use the target
8856 if it is a hard register, because evaluating the condition
8857 might clobber it. */
8858 if ((binary_op
8859 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8860 || (GET_CODE (temp) == REG
8861 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8862 temp = gen_reg_rtx (mode);
8863 store_expr (singleton, temp,
8864 modifier == EXPAND_STACK_PARM ? 2 : 0);
8866 else
8867 expand_expr (singleton,
8868 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8869 if (singleton == TREE_OPERAND (exp, 1))
8870 jumpif (TREE_OPERAND (exp, 0), op0);
8871 else
8872 jumpifnot (TREE_OPERAND (exp, 0), op0);
8874 start_cleanup_deferral ();
8875 if (binary_op && temp == 0)
8876 /* Just touch the other operand. */
8877 expand_expr (TREE_OPERAND (binary_op, 1),
8878 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8879 else if (binary_op)
8880 store_expr (build (TREE_CODE (binary_op), type,
8881 make_tree (type, temp),
8882 TREE_OPERAND (binary_op, 1)),
8883 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8884 else
8885 store_expr (build1 (TREE_CODE (unary_op), type,
8886 make_tree (type, temp)),
8887 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8888 op1 = op0;
8890 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8891 comparison operator. If we have one of these cases, set the
8892 output to A, branch on A (cse will merge these two references),
8893 then set the output to FOO. */
8894 else if (temp
8895 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8896 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8897 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8898 TREE_OPERAND (exp, 1), 0)
8899 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8900 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8901 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8903 if (GET_CODE (temp) == REG
8904 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8905 temp = gen_reg_rtx (mode);
8906 store_expr (TREE_OPERAND (exp, 1), temp,
8907 modifier == EXPAND_STACK_PARM ? 2 : 0);
8908 jumpif (TREE_OPERAND (exp, 0), op0);
8910 start_cleanup_deferral ();
8911 store_expr (TREE_OPERAND (exp, 2), temp,
8912 modifier == EXPAND_STACK_PARM ? 2 : 0);
8913 op1 = op0;
8915 else if (temp
8916 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8917 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8918 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8919 TREE_OPERAND (exp, 2), 0)
8920 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8921 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8922 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8924 if (GET_CODE (temp) == REG
8925 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8926 temp = gen_reg_rtx (mode);
8927 store_expr (TREE_OPERAND (exp, 2), temp,
8928 modifier == EXPAND_STACK_PARM ? 2 : 0);
8929 jumpifnot (TREE_OPERAND (exp, 0), op0);
8931 start_cleanup_deferral ();
8932 store_expr (TREE_OPERAND (exp, 1), temp,
8933 modifier == EXPAND_STACK_PARM ? 2 : 0);
8934 op1 = op0;
8936 else
8938 op1 = gen_label_rtx ();
8939 jumpifnot (TREE_OPERAND (exp, 0), op0);
8941 start_cleanup_deferral ();
8943 /* One branch of the cond can be void, if it never returns. For
8944 example, A ? throw : E. */
8945 if (temp != 0
8946 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8947 store_expr (TREE_OPERAND (exp, 1), temp,
8948 modifier == EXPAND_STACK_PARM ? 2 : 0);
8949 else
8950 expand_expr (TREE_OPERAND (exp, 1),
8951 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8952 end_cleanup_deferral ();
8953 emit_queue ();
8954 emit_jump_insn (gen_jump (op1));
8955 emit_barrier ();
8956 emit_label (op0);
8957 start_cleanup_deferral ();
8958 if (temp != 0
8959 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8960 store_expr (TREE_OPERAND (exp, 2), temp,
8961 modifier == EXPAND_STACK_PARM ? 2 : 0);
8962 else
8963 expand_expr (TREE_OPERAND (exp, 2),
8964 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8967 end_cleanup_deferral ();
8969 emit_queue ();
8970 emit_label (op1);
8971 OK_DEFER_POP;
8973 return temp;
8976 case TARGET_EXPR:
8978 /* Something needs to be initialized, but we didn't know
8979 where that thing was when building the tree. For example,
8980 it could be the return value of a function, or a parameter
8981 to a function which is laid down on the stack, or a temporary
8982 variable which must be passed by reference.
8984 We guarantee that the expression will either be constructed
8985 or copied into our original target. */
8987 tree slot = TREE_OPERAND (exp, 0);
8988 tree cleanups = NULL_TREE;
8989 tree exp1;
8991 if (TREE_CODE (slot) != VAR_DECL)
8992 abort ();
8994 if (! ignore)
8995 target = original_target;
8997 /* Set this here so that if we get a target that refers to a
8998 register variable that's already been used, put_reg_into_stack
8999 knows that it should fix up those uses. */
9000 TREE_USED (slot) = 1;
9002 if (target == 0)
9004 if (DECL_RTL_SET_P (slot))
9006 target = DECL_RTL (slot);
9007 /* If we have already expanded the slot, don't do
9008 it again. (mrs) */
9009 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9010 return target;
9012 else
9014 target = assign_temp (type, 2, 0, 1);
9015 /* All temp slots at this level must not conflict. */
9016 preserve_temp_slots (target);
9017 SET_DECL_RTL (slot, target);
9018 if (TREE_ADDRESSABLE (slot))
9019 put_var_into_stack (slot, /*rescan=*/false);
9021 /* Since SLOT is not known to the called function
9022 to belong to its stack frame, we must build an explicit
9023 cleanup. This case occurs when we must build up a reference
9024 to pass as an argument. In this case,
9025 it is very likely that such a reference need not be
9026 built here. */
9028 if (TREE_OPERAND (exp, 2) == 0)
9029 TREE_OPERAND (exp, 2)
9030 = (*lang_hooks.maybe_build_cleanup) (slot);
9031 cleanups = TREE_OPERAND (exp, 2);
9034 else
9036 /* This case does occur when expanding a parameter which
9037 needs to be constructed on the stack. The target
9038 is the actual stack address that we want to initialize.
9039 The function we call will perform the cleanup in this case. */
9041 /* If we have already assigned it space, use that space,
9042 not the target that we were passed, as our target
9043 parameter is only a hint. */
9044 if (DECL_RTL_SET_P (slot))
9046 target = DECL_RTL (slot);
9047 /* If we have already expanded the slot, don't do
9048 it again. (mrs) */
9049 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9050 return target;
9052 else
9054 SET_DECL_RTL (slot, target);
9055 /* If we must have an addressable slot, then make sure that
9056 the RTL that we just stored in slot is OK. */
9057 if (TREE_ADDRESSABLE (slot))
9058 put_var_into_stack (slot, /*rescan=*/true);
9062 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
9063 /* Mark it as expanded. */
9064 TREE_OPERAND (exp, 1) = NULL_TREE;
9066 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
9068 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
9070 return target;
9073 case INIT_EXPR:
9075 tree lhs = TREE_OPERAND (exp, 0);
9076 tree rhs = TREE_OPERAND (exp, 1);
9078 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9079 return temp;
9082 case MODIFY_EXPR:
9084 /* If lhs is complex, expand calls in rhs before computing it.
9085 That's so we don't compute a pointer and save it over a
9086 call. If lhs is simple, compute it first so we can give it
9087 as a target if the rhs is just a call. This avoids an
9088 extra temp and copy, and prevents a partial subsumption
9089 that makes bad code. Actually we could treat
9090 component_ref's of vars like vars. */
9092 tree lhs = TREE_OPERAND (exp, 0);
9093 tree rhs = TREE_OPERAND (exp, 1);
9095 temp = 0;
9097 /* Check for |= or &= of a bitfield of size one into another bitfield
9098 of size one. In this case, (unless we need the result of the
9099 assignment) we can do this more efficiently with a
9100 test followed by an assignment, if necessary.
9102 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9103 things change so we do, this code should be enhanced to
9104 support it. */
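    /* Illustrative example (hypothetical struct, result ignored):
           struct { unsigned a : 1, b : 1; } s;
           s.a |= s.b;     emitted as "if (s.b) s.a = 1;"
           s.a &= s.b;     emitted as "if (!s.b) s.a = 0;"
       i.e. a test of one bitfield followed by a conditional store into
       the other, instead of a full read-modify-write of S.A.  */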
9105 if (ignore
9106 && TREE_CODE (lhs) == COMPONENT_REF
9107 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9108 || TREE_CODE (rhs) == BIT_AND_EXPR)
9109 && TREE_OPERAND (rhs, 0) == lhs
9110 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9111 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9112 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9114 rtx label = gen_label_rtx ();
9116 do_jump (TREE_OPERAND (rhs, 1),
9117 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
9118 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
9119 expand_assignment (lhs, convert (TREE_TYPE (rhs),
9120 (TREE_CODE (rhs) == BIT_IOR_EXPR
9121 ? integer_one_node
9122 : integer_zero_node)),
9123 0, 0);
9124 do_pending_stack_adjust ();
9125 emit_label (label);
9126 return const0_rtx;
9129 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9131 return temp;
9134 case RETURN_EXPR:
9135 if (!TREE_OPERAND (exp, 0))
9136 expand_null_return ();
9137 else
9138 expand_return (TREE_OPERAND (exp, 0));
9139 return const0_rtx;
9141 case PREINCREMENT_EXPR:
9142 case PREDECREMENT_EXPR:
9143 return expand_increment (exp, 0, ignore);
9145 case POSTINCREMENT_EXPR:
9146 case POSTDECREMENT_EXPR:
9147 /* Faster to treat as pre-increment if result is not used. */
9148 return expand_increment (exp, ! ignore, ignore);
9150 case ADDR_EXPR:
9151 if (modifier == EXPAND_STACK_PARM)
9152 target = 0;
9153 /* Are we taking the address of a nested function? */
9154 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
9155 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
9156 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
9157 && ! TREE_STATIC (exp))
9159 op0 = trampoline_address (TREE_OPERAND (exp, 0));
9160 op0 = force_operand (op0, target);
9162 /* If we are taking the address of something erroneous, just
9163 return a zero. */
9164 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9165 return const0_rtx;
9166 /* If we are taking the address of a constant and are at the
9167 top level, we have to use output_constant_def since we can't
9168 call force_const_mem at top level. */
9169 else if (cfun == 0
9170 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9171 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
9172 == 'c')))
9173 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
9174 else
9176 /* We make sure to pass const0_rtx down if we came in with
9177 ignore set, to avoid doing the cleanups twice for something. */
9178 op0 = expand_expr (TREE_OPERAND (exp, 0),
9179 ignore ? const0_rtx : NULL_RTX, VOIDmode,
9180 (modifier == EXPAND_INITIALIZER
9181 ? modifier : EXPAND_CONST_ADDRESS));
9183 /* If we are going to ignore the result, OP0 will have been set
9184 to const0_rtx, so just return it. Don't get confused and
9185 think we are taking the address of the constant. */
9186 if (ignore)
9187 return op0;
9189 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
9190 clever and returns a REG when given a MEM. */
9191 op0 = protect_from_queue (op0, 1);
9193 /* We would like the object in memory. If it is a constant, we can
9194 have it be statically allocated into memory. For a non-constant,
9195 we need to allocate some memory and store the value into it. */
9197 if (CONSTANT_P (op0))
9198 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
9199 op0);
9200 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9201 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
9202 || GET_CODE (op0) == PARALLEL)
9204 /* If the operand is a SAVE_EXPR, we can deal with this by
9205 forcing the SAVE_EXPR into memory. */
9206 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
9208 put_var_into_stack (TREE_OPERAND (exp, 0),
9209 /*rescan=*/true);
9210 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
9212 else
9214 /* If this object is in a register, it can't be BLKmode. */
9215 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9216 rtx memloc = assign_temp (inner_type, 1, 1, 1);
9218 if (GET_CODE (op0) == PARALLEL)
9219 /* Handle calls that pass values in multiple
9220 non-contiguous locations. The Irix 6 ABI has examples
9221 of this. */
9222 emit_group_store (memloc, op0,
9223 int_size_in_bytes (inner_type));
9224 else
9225 emit_move_insn (memloc, op0);
9227 op0 = memloc;
9231 if (GET_CODE (op0) != MEM)
9232 abort ();
9234 mark_temp_addr_taken (op0);
9235 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9237 op0 = XEXP (op0, 0);
9238 #ifdef POINTERS_EXTEND_UNSIGNED
9239 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9240 && mode == ptr_mode)
9241 op0 = convert_memory_address (ptr_mode, op0);
9242 #endif
9243 return op0;
9246 /* If OP0 is not aligned at least as much as the type requires, we
9247 need to make a temporary, copy OP0 to it, and take the address of
9248 the temporary. We want to use the alignment of the type, not of
9249 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9250 the test for BLKmode means that can't happen. The test for
9251 BLKmode is because we never make mis-aligned MEMs with
9252 non-BLKmode.
9254 We don't need to do this at all if the machine doesn't have
9255 strict alignment. */
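/* For illustration (hypothetical example): on a STRICT_ALIGNMENT target,
   taking the address of a BLKmode member of a packed aggregate, where the
   member's type wants more alignment than the member actually has, reaches
   the copy below: the bytes are moved into a suitably aligned stack
   temporary and that temporary's address is used instead, unless the type
   is TREE_ADDRESSABLE, in which case a bitwise copy is not permitted and
   we issue the "cannot take the address of an unaligned member" error.  */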
9256 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9257 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9258 > MEM_ALIGN (op0))
9259 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9261 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9262 rtx new;
9264 if (TYPE_ALIGN_OK (inner_type))
9265 abort ();
9267 if (TREE_ADDRESSABLE (inner_type))
9269 /* We can't make a bitwise copy of this object, so fail. */
9270 error ("cannot take the address of an unaligned member");
9271 return const0_rtx;
9274 new = assign_stack_temp_for_type
9275 (TYPE_MODE (inner_type),
9276 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9277 : int_size_in_bytes (inner_type),
9278 1, build_qualified_type (inner_type,
9279 (TYPE_QUALS (inner_type)
9280 | TYPE_QUAL_CONST)));
9282 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9283 (modifier == EXPAND_STACK_PARM
9284 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9286 op0 = new;
9289 op0 = force_operand (XEXP (op0, 0), target);
9292 if (flag_force_addr
9293 && GET_CODE (op0) != REG
9294 && modifier != EXPAND_CONST_ADDRESS
9295 && modifier != EXPAND_INITIALIZER
9296 && modifier != EXPAND_SUM)
9297 op0 = force_reg (Pmode, op0);
9299 if (GET_CODE (op0) == REG
9300 && ! REG_USERVAR_P (op0))
9301 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9303 #ifdef POINTERS_EXTEND_UNSIGNED
9304 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9305 && mode == ptr_mode)
9306 op0 = convert_memory_address (ptr_mode, op0);
9307 #endif
9309 return op0;
9311 case ENTRY_VALUE_EXPR:
9312 abort ();
9314 /* COMPLEX type for Extended Pascal & Fortran */
9315 case COMPLEX_EXPR:
9317 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9318 rtx insns;
9320 /* Get the rtx code of the operands. */
9321 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9322 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9324 if (! target)
9325 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9327 start_sequence ();
9329 /* Move the real (op0) and imaginary (op1) parts to their location. */
9330 emit_move_insn (gen_realpart (mode, target), op0);
9331 emit_move_insn (gen_imagpart (mode, target), op1);
9333 insns = get_insns ();
9334 end_sequence ();
9336 /* Complex construction should appear as a single unit. */
9337 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9338 each with a separate pseudo as destination.
9339 It's not correct for flow to treat them as a unit. */
9340 if (GET_CODE (target) != CONCAT)
9341 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9342 else
9343 emit_insn (insns);
9345 return target;
9348 case REALPART_EXPR:
9349 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9350 return gen_realpart (mode, op0);
9352 case IMAGPART_EXPR:
9353 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9354 return gen_imagpart (mode, op0);
9356 case CONJ_EXPR:
9358 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9359 rtx imag_t;
9360 rtx insns;
9362 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9364 if (! target)
9365 target = gen_reg_rtx (mode);
9367 start_sequence ();
9369 /* Store the realpart and the negated imagpart to target. */
9370 emit_move_insn (gen_realpart (partmode, target),
9371 gen_realpart (partmode, op0));
9373 imag_t = gen_imagpart (partmode, target);
9374 temp = expand_unop (partmode,
9375 ! unsignedp && flag_trapv
9376 && (GET_MODE_CLASS(partmode) == MODE_INT)
9377 ? negv_optab : neg_optab,
9378 gen_imagpart (partmode, op0), imag_t, 0);
9379 if (temp != imag_t)
9380 emit_move_insn (imag_t, temp);
9382 insns = get_insns ();
9383 end_sequence ();
9385 /* Conjugate should appear as a single unit
9386 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9387 each with a separate pseudo as destination.
9388 It's not correct for flow to treat them as a unit. */
9389 if (GET_CODE (target) != CONCAT)
9390 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9391 else
9392 emit_insn (insns);
9394 return target;
9397 case TRY_CATCH_EXPR:
9399 tree handler = TREE_OPERAND (exp, 1);
9401 expand_eh_region_start ();
9403 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9405 expand_eh_region_end_cleanup (handler);
9407 return op0;
9410 case TRY_FINALLY_EXPR:
9412 tree try_block = TREE_OPERAND (exp, 0);
9413 tree finally_block = TREE_OPERAND (exp, 1);
9415 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9417 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9418 is not sufficient, so we cannot expand the block twice.
9419 So we play games with GOTO_SUBROUTINE_EXPR to let us
9420 expand the thing only once. */
9421 /* When not optimizing, we go ahead with this form since
9422 (1) user breakpoints operate more predictably without
9423 code duplication, and
9424 (2) we're not running any of the global optimizers
9425 that would explode in time/space with the highly
9426 connected CFG created by the indirect branching. */
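/* A rough sketch of what this branch emits (illustrative only):

       try-block body
       RETURN_LINK = &&resume; goto finally;    <- the registered cleanup
     resume:
       goto done;
     finally:
       finally-block body
       goto *RETURN_LINK;
     done:

   so the finally code is emitted once and every exit path reaches it as a
   GOTO_SUBROUTINE_EXPR "call" through RETURN_LINK.  */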
9428 rtx finally_label = gen_label_rtx ();
9429 rtx done_label = gen_label_rtx ();
9430 rtx return_link = gen_reg_rtx (Pmode);
9431 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9432 (tree) finally_label, (tree) return_link);
9433 TREE_SIDE_EFFECTS (cleanup) = 1;
9435 /* Start a new binding layer that will keep track of all cleanup
9436 actions to be performed. */
9437 expand_start_bindings (2);
9438 target_temp_slot_level = temp_slot_level;
9440 expand_decl_cleanup (NULL_TREE, cleanup);
9441 op0 = expand_expr (try_block, target, tmode, modifier);
9443 preserve_temp_slots (op0);
9444 expand_end_bindings (NULL_TREE, 0, 0);
9445 emit_jump (done_label);
9446 emit_label (finally_label);
9447 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9448 emit_indirect_jump (return_link);
9449 emit_label (done_label);
9451 else
9453 expand_start_bindings (2);
9454 target_temp_slot_level = temp_slot_level;
9456 expand_decl_cleanup (NULL_TREE, finally_block);
9457 op0 = expand_expr (try_block, target, tmode, modifier);
9459 preserve_temp_slots (op0);
9460 expand_end_bindings (NULL_TREE, 0, 0);
9463 return op0;
9466 case GOTO_SUBROUTINE_EXPR:
9468 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9469 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9470 rtx return_address = gen_label_rtx ();
9471 emit_move_insn (return_link,
9472 gen_rtx_LABEL_REF (Pmode, return_address));
9473 emit_jump (subr);
9474 emit_label (return_address);
9475 return const0_rtx;
9478 case VA_ARG_EXPR:
9479 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9481 case EXC_PTR_EXPR:
9482 return get_exception_pointer (cfun);
9484 case FDESC_EXPR:
9485 /* Function descriptors are not valid except for as
9486 initialization constants, and should not be expanded. */
9487 abort ();
9489 default:
9490 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9493 /* Here to do an ordinary binary operator, generating an instruction
9494 from the optab already placed in `this_optab'. */
9495 binop:
9496 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9497 subtarget = 0;
9498 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9499 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9500 binop2:
9501 if (modifier == EXPAND_STACK_PARM)
9502 target = 0;
9503 temp = expand_binop (mode, this_optab, op0, op1, target,
9504 unsignedp, OPTAB_LIB_WIDEN);
9505 if (temp == 0)
9506 abort ();
9507 return temp;
9510 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9511 when applied to the address of EXP produces an address known to be
9512 aligned more than BIGGEST_ALIGNMENT. */
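/* Illustratively, OFFSET has the shape produced by round-up-to-alignment
   address arithmetic, e.g. (hypothetical C)  (- (size_t) &EXP) & (N - 1),
   where N is a power of 2 and N - 1 exceeds BIGGEST_ALIGNMENT; adding such
   an offset to the address of EXP aligns it to an N-byte boundary.  */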
9514 static int
9515 is_aligning_offset (offset, exp)
9516 tree offset;
9517 tree exp;
9519 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9520 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9521 || TREE_CODE (offset) == NOP_EXPR
9522 || TREE_CODE (offset) == CONVERT_EXPR
9523 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9524 offset = TREE_OPERAND (offset, 0);
9526 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9527 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9528 if (TREE_CODE (offset) != BIT_AND_EXPR
9529 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9530 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9531 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9532 return 0;
9534 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9535 It must be NEGATE_EXPR. Then strip any more conversions. */
9536 offset = TREE_OPERAND (offset, 0);
9537 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9538 || TREE_CODE (offset) == NOP_EXPR
9539 || TREE_CODE (offset) == CONVERT_EXPR)
9540 offset = TREE_OPERAND (offset, 0);
9542 if (TREE_CODE (offset) != NEGATE_EXPR)
9543 return 0;
9545 offset = TREE_OPERAND (offset, 0);
9546 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9547 || TREE_CODE (offset) == NOP_EXPR
9548 || TREE_CODE (offset) == CONVERT_EXPR)
9549 offset = TREE_OPERAND (offset, 0);
9551 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9552 whose type is the same as EXP. */
9553 return (TREE_CODE (offset) == ADDR_EXPR
9554 && (TREE_OPERAND (offset, 0) == exp
9555 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9556 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9557 == TREE_TYPE (exp)))));
9560 /* Return the tree node if an ARG corresponds to a string constant or zero
9561 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9562 in bytes within the string that ARG is accessing. The type of the
9563 offset will be `sizetype'. */
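/* For illustration: for ARG of the form `&"abc"[0]' we return the
   STRING_CST with *PTR_OFFSET set to zero, and for `"abc" + i' (a
   PLUS_EXPR of the string's address and an index, in either operand
   order) we return the STRING_CST and set *PTR_OFFSET to `i' converted
   to sizetype.  */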
9565 tree
9566 string_constant (arg, ptr_offset)
9567 tree arg;
9568 tree *ptr_offset;
9570 STRIP_NOPS (arg);
9572 if (TREE_CODE (arg) == ADDR_EXPR
9573 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9575 *ptr_offset = size_zero_node;
9576 return TREE_OPERAND (arg, 0);
9578 else if (TREE_CODE (arg) == PLUS_EXPR)
9580 tree arg0 = TREE_OPERAND (arg, 0);
9581 tree arg1 = TREE_OPERAND (arg, 1);
9583 STRIP_NOPS (arg0);
9584 STRIP_NOPS (arg1);
9586 if (TREE_CODE (arg0) == ADDR_EXPR
9587 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9589 *ptr_offset = convert (sizetype, arg1);
9590 return TREE_OPERAND (arg0, 0);
9592 else if (TREE_CODE (arg1) == ADDR_EXPR
9593 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9595 *ptr_offset = convert (sizetype, arg0);
9596 return TREE_OPERAND (arg1, 0);
9600 return 0;
9603 /* Expand code for a post- or pre- increment or decrement
9604 and return the RTX for the result.
9605 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9607 static rtx
9608 expand_increment (exp, post, ignore)
9609 tree exp;
9610 int post, ignore;
9612 rtx op0, op1;
9613 rtx temp, value;
9614 tree incremented = TREE_OPERAND (exp, 0);
9615 optab this_optab = add_optab;
9616 int icode;
9617 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9618 int op0_is_copy = 0;
9619 int single_insn = 0;
9620 /* 1 means we can't store into OP0 directly,
9621 because it is a subreg narrower than a word,
9622 and we don't dare clobber the rest of the word. */
9623 int bad_subreg = 0;
9625 /* Stabilize any component ref that might need to be
9626 evaluated more than once below. */
9627 if (!post
9628 || TREE_CODE (incremented) == BIT_FIELD_REF
9629 || (TREE_CODE (incremented) == COMPONENT_REF
9630 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9631 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9632 incremented = stabilize_reference (incremented);
9633 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9634 ones into save exprs so that they don't accidentally get evaluated
9635 more than once by the code below. */
9636 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9637 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9638 incremented = save_expr (incremented);
9640 /* Compute the operands as RTX.
9641 Note whether OP0 is the actual lvalue or a copy of it:
9642 I believe it is a copy iff it is a register or subreg
9643 and insns were generated in computing it. */
9645 temp = get_last_insn ();
9646 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9648 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9649 in place but instead must do sign- or zero-extension during assignment,
9650 so we copy it into a new register and let the code below use it as
9651 a copy.
9653 Note that we can safely modify this SUBREG since it is known not to be
9654 shared (it was made by the expand_expr call above). */
9656 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9658 if (post)
9659 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9660 else
9661 bad_subreg = 1;
9663 else if (GET_CODE (op0) == SUBREG
9664 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9666 /* We cannot increment this SUBREG in place. If we are
9667 post-incrementing, get a copy of the old value. Otherwise,
9668 just mark that we cannot increment in place. */
9669 if (post)
9670 op0 = copy_to_reg (op0);
9671 else
9672 bad_subreg = 1;
9675 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9676 && temp != get_last_insn ());
9677 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9679 /* Decide whether incrementing or decrementing. */
9680 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9681 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9682 this_optab = sub_optab;
9684 /* Convert decrement by a constant into a negative increment. */
9685 if (this_optab == sub_optab
9686 && GET_CODE (op1) == CONST_INT)
9688 op1 = GEN_INT (-INTVAL (op1));
9689 this_optab = add_optab;
9692 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9693 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9695 /* For a preincrement, see if we can do this with a single instruction. */
9696 if (!post)
9698 icode = (int) this_optab->handlers[(int) mode].insn_code;
9699 if (icode != (int) CODE_FOR_nothing
9700 /* Make sure that OP0 is valid for operands 0 and 1
9701 of the insn we want to queue. */
9702 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9703 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9704 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9705 single_insn = 1;
9708 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9709 then we cannot just increment OP0. We must therefore contrive to
9710 increment the original value. Then, for postincrement, we can return
9711 OP0 since it is a copy of the old value. For preincrement, expand here
9712 unless we can do it with a single insn.
9714 Likewise if storing directly into OP0 would clobber high bits
9715 we need to preserve (bad_subreg). */
9716 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9718 /* This is the easiest way to increment the value wherever it is.
9719 Problems with multiple evaluation of INCREMENTED are prevented
9720 because either (1) it is a component_ref or preincrement,
9721 in which case it was stabilized above, or (2) it is an array_ref
9722 with constant index in an array in a register, which is
9723 safe to reevaluate. */
9724 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9725 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9726 ? MINUS_EXPR : PLUS_EXPR),
9727 TREE_TYPE (exp),
9728 incremented,
9729 TREE_OPERAND (exp, 1));
9731 while (TREE_CODE (incremented) == NOP_EXPR
9732 || TREE_CODE (incremented) == CONVERT_EXPR)
9734 newexp = convert (TREE_TYPE (incremented), newexp);
9735 incremented = TREE_OPERAND (incremented, 0);
9738 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9739 return post ? op0 : temp;
9742 if (post)
9744 /* We have a true reference to the value in OP0.
9745 If there is an insn to add or subtract in this mode, queue it.
9746 Queueing the increment insn avoids the register shuffling
9747 that often results if we must increment now and first save
9748 the old value for subsequent use. */
9750 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9751 op0 = stabilize (op0);
9752 #endif
9754 icode = (int) this_optab->handlers[(int) mode].insn_code;
9755 if (icode != (int) CODE_FOR_nothing
9756 /* Make sure that OP0 is valid for operands 0 and 1
9757 of the insn we want to queue. */
9758 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9759 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9761 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9762 op1 = force_reg (mode, op1);
9764 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9766 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9768 rtx addr = (general_operand (XEXP (op0, 0), mode)
9769 ? force_reg (Pmode, XEXP (op0, 0))
9770 : copy_to_reg (XEXP (op0, 0)));
9771 rtx temp, result;
9773 op0 = replace_equiv_address (op0, addr);
9774 temp = force_reg (GET_MODE (op0), op0);
9775 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9776 op1 = force_reg (mode, op1);
9778 /* The increment queue is LIFO, thus we have to `queue'
9779 the instructions in reverse order. */
9780 enqueue_insn (op0, gen_move_insn (op0, temp));
9781 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9782 return result;
9786 /* Preincrement, or we can't increment with one simple insn. */
9787 if (post)
9788 /* Save a copy of the value before inc or dec, to return it later. */
9789 temp = value = copy_to_reg (op0);
9790 else
9791 /* Arrange to return the incremented value. */
9792 /* Copy the rtx because expand_binop will protect from the queue,
9793 and the results of that would be invalid for us to return
9794 if our caller does emit_queue before using our result. */
9795 temp = copy_rtx (value = op0);
9797 /* Increment however we can. */
9798 op1 = expand_binop (mode, this_optab, value, op1, op0,
9799 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9801 /* Make sure the value is stored into OP0. */
9802 if (op1 != op0)
9803 emit_move_insn (op0, op1);
9805 return temp;
9808 /* Generate code to calculate EXP using a store-flag instruction
9809 and return an rtx for the result. EXP is either a comparison
9810 or a TRUTH_NOT_EXPR whose operand is a comparison.
9812 If TARGET is nonzero, store the result there if convenient.
9814 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9815 cheap.
9817 Return zero if there is no suitable set-flag instruction
9818 available on this machine.
9820 Once expand_expr has been called on the arguments of the comparison,
9821 we are committed to doing the store flag, since it is not safe to
9822 re-evaluate the expression. We emit the store-flag insn by calling
9823 emit_store_flag, but only expand the arguments if we have a reason
9824 to believe that emit_store_flag will be successful. If we think that
9825 it will, but it isn't, we have to simulate the store-flag with a
9826 set/jump/set sequence. */
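/* For illustration: on a machine with a store-flag (scc) instruction,
   an expression such as `flag = (a < b);' can be expanded here into a
   single store-flag insn rather than a compare, a conditional branch,
   and two constant stores.  */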
9828 static rtx
9829 do_store_flag (exp, target, mode, only_cheap)
9830 tree exp;
9831 rtx target;
9832 enum machine_mode mode;
9833 int only_cheap;
9835 enum rtx_code code;
9836 tree arg0, arg1, type;
9837 tree tem;
9838 enum machine_mode operand_mode;
9839 int invert = 0;
9840 int unsignedp;
9841 rtx op0, op1;
9842 enum insn_code icode;
9843 rtx subtarget = target;
9844 rtx result, label;
9846 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9847 result at the end. We can't simply invert the test since it would
9848 have already been inverted if it were valid. This case occurs for
9849 some floating-point comparisons. */
9851 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9852 invert = 1, exp = TREE_OPERAND (exp, 0);
9854 arg0 = TREE_OPERAND (exp, 0);
9855 arg1 = TREE_OPERAND (exp, 1);
9857 /* Don't crash if the comparison was erroneous. */
9858 if (arg0 == error_mark_node || arg1 == error_mark_node)
9859 return const0_rtx;
9861 type = TREE_TYPE (arg0);
9862 operand_mode = TYPE_MODE (type);
9863 unsignedp = TREE_UNSIGNED (type);
9865 /* We won't bother with BLKmode store-flag operations because it would mean
9866 passing a lot of information to emit_store_flag. */
9867 if (operand_mode == BLKmode)
9868 return 0;
9870 /* We won't bother with store-flag operations involving function pointers
9871 when function pointers must be canonicalized before comparisons. */
9872 #ifdef HAVE_canonicalize_funcptr_for_compare
9873 if (HAVE_canonicalize_funcptr_for_compare
9874 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9875 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9876 == FUNCTION_TYPE))
9877 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9878 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9879 == FUNCTION_TYPE))))
9880 return 0;
9881 #endif
9883 STRIP_NOPS (arg0);
9884 STRIP_NOPS (arg1);
9886 /* Get the rtx comparison code to use. We know that EXP is a comparison
9887 operation of some type. Some comparisons against 1 and -1 can be
9888 converted to comparisons with zero. Do so here so that the tests
9889 below will be aware that we have a comparison with zero. These
9890 tests will not catch constants in the first operand, but constants
9891 are rarely passed as the first operand. */
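/* For example, `x < 1' becomes `x <= 0' and `x >= 1' becomes `x > 0';
   for signed operands `x > -1' becomes `x >= 0' and `x <= -1' becomes
   `x < 0'.  */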
9893 switch (TREE_CODE (exp))
9895 case EQ_EXPR:
9896 code = EQ;
9897 break;
9898 case NE_EXPR:
9899 code = NE;
9900 break;
9901 case LT_EXPR:
9902 if (integer_onep (arg1))
9903 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9904 else
9905 code = unsignedp ? LTU : LT;
9906 break;
9907 case LE_EXPR:
9908 if (! unsignedp && integer_all_onesp (arg1))
9909 arg1 = integer_zero_node, code = LT;
9910 else
9911 code = unsignedp ? LEU : LE;
9912 break;
9913 case GT_EXPR:
9914 if (! unsignedp && integer_all_onesp (arg1))
9915 arg1 = integer_zero_node, code = GE;
9916 else
9917 code = unsignedp ? GTU : GT;
9918 break;
9919 case GE_EXPR:
9920 if (integer_onep (arg1))
9921 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9922 else
9923 code = unsignedp ? GEU : GE;
9924 break;
9926 case UNORDERED_EXPR:
9927 code = UNORDERED;
9928 break;
9929 case ORDERED_EXPR:
9930 code = ORDERED;
9931 break;
9932 case UNLT_EXPR:
9933 code = UNLT;
9934 break;
9935 case UNLE_EXPR:
9936 code = UNLE;
9937 break;
9938 case UNGT_EXPR:
9939 code = UNGT;
9940 break;
9941 case UNGE_EXPR:
9942 code = UNGE;
9943 break;
9944 case UNEQ_EXPR:
9945 code = UNEQ;
9946 break;
9948 default:
9949 abort ();
9952 /* Put a constant second. */
9953 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9955 tem = arg0; arg0 = arg1; arg1 = tem;
9956 code = swap_condition (code);
9959 /* If this is an equality or inequality test of a single bit, we can
9960 do this by shifting the bit being tested to the low-order bit and
9961 masking the result with the constant 1. If the condition was EQ,
9962 we xor it with 1. This does not require an scc insn and is faster
9963 than an scc insn even if we have it. */
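/* For example, `(x & 8) != 0' is computed as `(x >> 3) & 1'; for the EQ
   form the shifted bit is also XORed with 1 before the masking.  No scc
   instruction is required.  */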
9965 if ((code == NE || code == EQ)
9966 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9967 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9969 tree inner = TREE_OPERAND (arg0, 0);
9970 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
9971 int ops_unsignedp;
9973 /* If INNER is a right shift of a constant and it plus BITNUM does
9974 not overflow, adjust BITNUM and INNER. */
9976 if (TREE_CODE (inner) == RSHIFT_EXPR
9977 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
9978 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
9979 && bitnum < TYPE_PRECISION (type)
9980 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
9981 bitnum - TYPE_PRECISION (type)))
9983 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
9984 inner = TREE_OPERAND (inner, 0);
9987 /* If we are going to be able to omit the AND below, we must do our
9988 operations as unsigned. If we must use the AND, we have a choice.
9989 Normally unsigned is faster, but for some machines signed is. */
9990 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
9991 #ifdef LOAD_EXTEND_OP
9992 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
9993 #else
9994 : 1
9995 #endif
9996 );
9998 if (! get_subtarget (subtarget)
9999 || GET_MODE (subtarget) != operand_mode
10000 || ! safe_from_p (subtarget, inner, 1))
10001 subtarget = 0;
10003 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10005 if (bitnum != 0)
10006 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10007 size_int (bitnum), subtarget, ops_unsignedp);
10009 if (GET_MODE (op0) != mode)
10010 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10012 if ((code == EQ && ! invert) || (code == NE && invert))
10013 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10014 ops_unsignedp, OPTAB_LIB_WIDEN);
10016 /* Put the AND last so it can combine with more things. */
10017 if (bitnum != TYPE_PRECISION (type) - 1)
10018 op0 = expand_and (mode, op0, const1_rtx, subtarget);
10020 return op0;
10023 /* Now see if we are likely to be able to do this. Return if not. */
10024 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10025 return 0;
10027 icode = setcc_gen_code[(int) code];
10028 if (icode == CODE_FOR_nothing
10029 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10031 /* We can only do this if it is one of the special cases that
10032 can be handled without an scc insn. */
10033 if ((code == LT && integer_zerop (arg1))
10034 || (! only_cheap && code == GE && integer_zerop (arg1)))
10036 else if (BRANCH_COST >= 0
10037 && ! only_cheap && (code == NE || code == EQ)
10038 && TREE_CODE (type) != REAL_TYPE
10039 && ((abs_optab->handlers[(int) operand_mode].insn_code
10040 != CODE_FOR_nothing)
10041 || (ffs_optab->handlers[(int) operand_mode].insn_code
10042 != CODE_FOR_nothing)))
10044 else
10045 return 0;
10048 if (! get_subtarget (target)
10049 || GET_MODE (subtarget) != operand_mode
10050 || ! safe_from_p (subtarget, arg1, 1))
10051 subtarget = 0;
10053 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10054 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10056 if (target == 0)
10057 target = gen_reg_rtx (mode);
10059 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10060 because, if emit_store_flag does anything, it will succeed and
10061 OP0 and OP1 will not be used subsequently. */
10063 result = emit_store_flag (target, code,
10064 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10065 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10066 operand_mode, unsignedp, 1);
10068 if (result)
10070 if (invert)
10071 result = expand_binop (mode, xor_optab, result, const1_rtx,
10072 result, 0, OPTAB_LIB_WIDEN);
10073 return result;
10076 /* If this failed, we have to do this with set/compare/jump/set code. */
10077 if (GET_CODE (target) != REG
10078 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10079 target = gen_reg_rtx (GET_MODE (target));
10081 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10082 result = compare_from_rtx (op0, op1, code, unsignedp,
10083 operand_mode, NULL_RTX);
10084 if (GET_CODE (result) == CONST_INT)
10085 return (((result == const0_rtx && ! invert)
10086 || (result != const0_rtx && invert))
10087 ? const0_rtx : const1_rtx);
10089 /* The code of RESULT may not match CODE if compare_from_rtx
10090 decided to swap its operands and reverse the original code.
10092 We know that compare_from_rtx returns either a CONST_INT or
10093 a new comparison code, so it is safe to just extract the
10094 code from RESULT. */
10095 code = GET_CODE (result);
10097 label = gen_label_rtx ();
10098 if (bcc_gen_fctn[(int) code] == 0)
10099 abort ();
10101 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10102 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10103 emit_label (label);
10105 return target;
10109 /* Stubs in case we haven't got a casesi insn. */
10110 #ifndef HAVE_casesi
10111 # define HAVE_casesi 0
10112 # define gen_casesi(a, b, c, d, e) (0)
10113 # define CODE_FOR_casesi CODE_FOR_nothing
10114 #endif
10116 /* If the machine does not have a case insn that compares the bounds,
10117 this means extra overhead for dispatch tables, which raises the
10118 threshold for using them. */
10119 #ifndef CASE_VALUES_THRESHOLD
10120 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10121 #endif /* CASE_VALUES_THRESHOLD */
10123 unsigned int
10124 case_values_threshold ()
10126 return CASE_VALUES_THRESHOLD;
10129 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10130 0 otherwise (i.e. if there is no casesi instruction). */
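/* The casesi pattern, when present, takes five operands: the index, the
   minimum case value, the range (maximum minus minimum), the label of the
   dispatch table, and the default label.  The code below converts each
   operand to the mode that the pattern's predicates require.  */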
10131 int
10132 try_casesi (index_type, index_expr, minval, range,
10133 table_label, default_label)
10134 tree index_type, index_expr, minval, range;
10135 rtx table_label ATTRIBUTE_UNUSED;
10136 rtx default_label;
10138 enum machine_mode index_mode = SImode;
10139 int index_bits = GET_MODE_BITSIZE (index_mode);
10140 rtx op1, op2, index;
10141 enum machine_mode op_mode;
10143 if (! HAVE_casesi)
10144 return 0;
10146 /* Convert the index to SImode. */
10147 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10149 enum machine_mode omode = TYPE_MODE (index_type);
10150 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10152 /* We must handle the endpoints in the original mode. */
10153 index_expr = build (MINUS_EXPR, index_type,
10154 index_expr, minval);
10155 minval = integer_zero_node;
10156 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10157 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10158 omode, 1, default_label);
10159 /* Now we can safely truncate. */
10160 index = convert_to_mode (index_mode, index, 0);
10162 else
10164 if (TYPE_MODE (index_type) != index_mode)
10166 index_expr = convert ((*lang_hooks.types.type_for_size)
10167 (index_bits, 0), index_expr);
10168 index_type = TREE_TYPE (index_expr);
10171 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10173 emit_queue ();
10174 index = protect_from_queue (index, 0);
10175 do_pending_stack_adjust ();
10177 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10178 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10179 (index, op_mode))
10180 index = copy_to_mode_reg (op_mode, index);
10182 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10184 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10185 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10186 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10187 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10188 (op1, op_mode))
10189 op1 = copy_to_mode_reg (op_mode, op1);
10191 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10193 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10194 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10195 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10196 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10197 (op2, op_mode))
10198 op2 = copy_to_mode_reg (op_mode, op2);
10200 emit_jump_insn (gen_casesi (index, op1, op2,
10201 table_label, default_label));
10202 return 1;
10205 /* Attempt to generate a tablejump instruction; same concept. */
10206 #ifndef HAVE_tablejump
10207 #define HAVE_tablejump 0
10208 #define gen_tablejump(x, y) (0)
10209 #endif
10211 /* Subroutine of the next function.
10213 INDEX is the value being switched on, with the lowest value
10214 in the table already subtracted.
10215 MODE is its expected mode (needed if INDEX is constant).
10216 RANGE is the length of the jump table.
10217 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10219 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10220 index value is out of range. */
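/* Illustratively, for an in-range INDEX the code emitted below behaves as

       if ((unsigned) INDEX > RANGE) goto DEFAULT_LABEL;
       goto *TABLE_LABEL[INDEX];

   with the table entry size taken from CASE_VECTOR_MODE.  */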
10222 static void
10223 do_tablejump (index, mode, range, table_label, default_label)
10224 rtx index, range, table_label, default_label;
10225 enum machine_mode mode;
10227 rtx temp, vector;
10229 if (INTVAL (range) > cfun->max_jumptable_ents)
10230 cfun->max_jumptable_ents = INTVAL (range);
10232 /* Do an unsigned comparison (in the proper mode) between the index
10233 expression and the value which represents the length of the range.
10234 Since we just finished subtracting the lower bound of the range
10235 from the index expression, this comparison allows us to simultaneously
10236 check that the original index expression value is both greater than
10237 or equal to the minimum value of the range and less than or equal to
10238 the maximum value of the range. */
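/* For example, with case values 5 .. 10 the caller has computed
   INDEX = value - 5 and RANGE = 5; an out-of-range original value of 3
   gives INDEX = (unsigned) -2, which compares greater than 5 and so
   branches to DEFAULT_LABEL.  */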
10240 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10241 default_label);
10243 /* If index is in range, it must fit in Pmode.
10244 Convert to Pmode so we can index with it. */
10245 if (mode != Pmode)
10246 index = convert_to_mode (Pmode, index, 1);
10248 /* Don't let a MEM slip thru, because then INDEX that comes
10249 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10250 and break_out_memory_refs will go to work on it and mess it up. */
10251 #ifdef PIC_CASE_VECTOR_ADDRESS
10252 if (flag_pic && GET_CODE (index) != REG)
10253 index = copy_to_mode_reg (Pmode, index);
10254 #endif
10256 /* If flag_force_addr were to affect this address
10257 it could interfere with the tricky assumptions made
10258 about addresses that contain label-refs,
10259 which may be valid only very near the tablejump itself. */
10260 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10261 GET_MODE_SIZE, because this indicates how large insns are. The other
10262 uses should all be Pmode, because they are addresses. This code
10263 could fail if addresses and insns are not the same size. */
10264 index = gen_rtx_PLUS (Pmode,
10265 gen_rtx_MULT (Pmode, index,
10266 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10267 gen_rtx_LABEL_REF (Pmode, table_label));
10268 #ifdef PIC_CASE_VECTOR_ADDRESS
10269 if (flag_pic)
10270 index = PIC_CASE_VECTOR_ADDRESS (index);
10271 else
10272 #endif
10273 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10274 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10275 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10276 RTX_UNCHANGING_P (vector) = 1;
10277 convert_move (temp, vector, 0);
10279 emit_jump_insn (gen_tablejump (temp, table_label));
10281 /* If we are generating PIC code or if the table is PC-relative, the
10282 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10283 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10284 emit_barrier ();
10287 int
10288 try_tablejump (index_type, index_expr, minval, range,
10289 table_label, default_label)
10290 tree index_type, index_expr, minval, range;
10291 rtx table_label, default_label;
10293 rtx index;
10295 if (! HAVE_tablejump)
10296 return 0;
10298 index_expr = fold (build (MINUS_EXPR, index_type,
10299 convert (index_type, index_expr),
10300 convert (index_type, minval)));
10301 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10302 emit_queue ();
10303 index = protect_from_queue (index, 0);
10304 do_pending_stack_adjust ();
10306 do_tablejump (index, TYPE_MODE (index_type),
10307 convert_modes (TYPE_MODE (index_type),
10308 TYPE_MODE (TREE_TYPE (range)),
10309 expand_expr (range, NULL_RTX,
10310 VOIDmode, 0),
10311 TREE_UNSIGNED (TREE_TYPE (range))),
10312 table_label, default_label);
10313 return 1;
10316 /* Nonzero if the mode is a valid vector mode for this architecture.
10317 This returns nonzero even if there is no hardware support for the
10318 vector mode, but we can emulate with narrower modes. */
10320 int
10321 vector_mode_valid_p (mode)
10322 enum machine_mode mode;
10324 enum mode_class class = GET_MODE_CLASS (mode);
10325 enum machine_mode innermode;
10327 /* Doh! What's going on? */
10328 if (class != MODE_VECTOR_INT
10329 && class != MODE_VECTOR_FLOAT)
10330 return 0;
10332 /* Hardware support. Woo hoo! */
10333 if (VECTOR_MODE_SUPPORTED_P (mode))
10334 return 1;
10336 innermode = GET_MODE_INNER (mode);
10338 /* We should probably return 1 when requesting V4DI if we have no DI
10339 but do have V2DI, but this is probably very unlikely. */
10341 /* If we have support for the inner mode, we can safely emulate it.
10342 We may not have V2DI, but we can emulate with a pair of DIs. */
10343 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
10346 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10347 static rtx
10348 const_vector_from_tree (exp)
10349 tree exp;
10351 rtvec v;
10352 int units, i;
10353 tree link, elt;
10354 enum machine_mode inner, mode;
10356 mode = TYPE_MODE (TREE_TYPE (exp));
10358 if (is_zeros_p (exp))
10359 return CONST0_RTX (mode);
10361 units = GET_MODE_NUNITS (mode);
10362 inner = GET_MODE_INNER (mode);
10364 v = rtvec_alloc (units);
10366 link = TREE_VECTOR_CST_ELTS (exp);
10367 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10369 elt = TREE_VALUE (link);
10371 if (TREE_CODE (elt) == REAL_CST)
10372 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10373 inner);
10374 else
10375 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
10376 TREE_INT_CST_HIGH (elt),
10377 inner);
10380 return gen_rtx_raw_CONST_VECTOR (mode, v);
10383 #include "gt-expr.h"