gcc/expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
51 /* Decide whether a function's arguments should be processed
52 from first to last or from last to first.
54 They should if the stack and args grow in opposite directions, but
55 only if we have push insns. */
57 #ifdef PUSH_ROUNDING
59 #ifndef PUSH_ARGS_REVERSED
60 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
61 #define PUSH_ARGS_REVERSED /* If it's last to first. */
62 #endif
63 #endif
65 #endif
67 #ifndef STACK_PUSH_CODE
68 #ifdef STACK_GROWS_DOWNWARD
69 #define STACK_PUSH_CODE PRE_DEC
70 #else
71 #define STACK_PUSH_CODE PRE_INC
72 #endif
73 #endif
75 /* Assume that case vectors are not pc-relative. */
76 #ifndef CASE_VECTOR_PC_RELATIVE
77 #define CASE_VECTOR_PC_RELATIVE 0
78 #endif
80 /* Convert defined/undefined to boolean. */
81 #ifdef TARGET_MEM_FUNCTIONS
82 #undef TARGET_MEM_FUNCTIONS
83 #define TARGET_MEM_FUNCTIONS 1
84 #else
85 #define TARGET_MEM_FUNCTIONS 0
86 #endif
89 /* If this is nonzero, we do not bother generating VOLATILE
90 around volatile memory references, and we are willing to
91 output indirect addresses. If cse is to follow, we reject
92 indirect addresses so a useful potential cse is generated;
93 if it is used only once, instruction combination will produce
94 the same indirect address eventually. */
95 int cse_not_expected;
97 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
98 static tree placeholder_list = 0;
100 /* This structure is used by move_by_pieces to describe the move to
101 be performed. */
102 struct move_by_pieces
104 rtx to;
105 rtx to_addr;
106 int autinc_to;
107 int explicit_inc_to;
108 rtx from;
109 rtx from_addr;
110 int autinc_from;
111 int explicit_inc_from;
112 unsigned HOST_WIDE_INT len;
113 HOST_WIDE_INT offset;
114 int reverse;
117 /* This structure is used by store_by_pieces to describe the clear to
118 be performed. */
120 struct store_by_pieces
122 rtx to;
123 rtx to_addr;
124 int autinc_to;
125 int explicit_inc_to;
126 unsigned HOST_WIDE_INT len;
127 HOST_WIDE_INT offset;
128 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
129 PTR constfundata;
130 int reverse;
133 static rtx enqueue_insn PARAMS ((rtx, rtx));
134 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
135 PARAMS ((unsigned HOST_WIDE_INT,
136 unsigned int));
137 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
138 struct move_by_pieces *));
139 static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
140 static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
141 static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
142 static tree emit_block_move_libcall_fn PARAMS ((int));
143 static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
144 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
145 enum machine_mode));
146 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
147 unsigned int));
148 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
149 unsigned int));
150 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
151 enum machine_mode,
152 struct store_by_pieces *));
153 static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
154 static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
155 static tree clear_storage_libcall_fn PARAMS ((int));
156 static rtx compress_float_constant PARAMS ((rtx, rtx));
157 static rtx get_subtarget PARAMS ((rtx));
158 static int is_zeros_p PARAMS ((tree));
159 static int mostly_zeros_p PARAMS ((tree));
160 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
161 HOST_WIDE_INT, enum machine_mode,
162 tree, tree, int, int));
163 static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
164 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
165 HOST_WIDE_INT, enum machine_mode,
166 tree, enum machine_mode, int, tree,
167 int));
168 static rtx var_rtx PARAMS ((tree));
169 static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
170 static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree));
171 static int is_aligning_offset PARAMS ((tree, tree));
172 static rtx expand_increment PARAMS ((tree, int, int));
173 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
174 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
175 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
176 rtx, rtx));
177 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
178 #ifdef PUSH_ROUNDING
179 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
180 #endif
181 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
183 /* Record for each mode whether we can move a register directly to or
184 from an object of that mode in memory. If we can't, we won't try
185 to use that mode directly when accessing a field of that mode. */
187 static char direct_load[NUM_MACHINE_MODES];
188 static char direct_store[NUM_MACHINE_MODES];
190 /* Record for each mode whether we can float-extend from memory. */
192 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
194 /* If a memory-to-memory move would take MOVE_RATIO or more simple
195 move-instruction sequences, we will do a movstr or libcall instead. */
197 #ifndef MOVE_RATIO
198 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
199 #define MOVE_RATIO 2
200 #else
201 /* If we are optimizing for space (-Os), cut down the default move ratio. */
202 #define MOVE_RATIO (optimize_size ? 3 : 15)
203 #endif
204 #endif
206 /* This macro is used to determine whether move_by_pieces should be called
207 to perform a structure copy. */
208 #ifndef MOVE_BY_PIECES_P
209 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
210 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
211 #endif
213 /* If a clear memory operation would take CLEAR_RATIO or more simple
214 move-instruction sequences, we will do a clrstr or libcall instead. */
216 #ifndef CLEAR_RATIO
217 #if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
218 #define CLEAR_RATIO 2
219 #else
220 /* If we are optimizing for space, cut down the default clear ratio. */
221 #define CLEAR_RATIO (optimize_size ? 3 : 15)
222 #endif
223 #endif
225 /* This macro is used to determine whether clear_by_pieces should be
226 called to clear storage. */
227 #ifndef CLEAR_BY_PIECES_P
228 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
229 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
230 #endif
232 /* This macro is used to determine whether store_by_pieces should be
233 called to "memset" storage with byte values other than zero, or
234 to "memcpy" storage when the source is a constant string. */
235 #ifndef STORE_BY_PIECES_P
236 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
237 #endif
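/* [Editorial addition, not part of the original expr.c] A minimal sketch,
   guarded out with #if 0, of how the ratio macros above are typically
   consulted when expanding a block copy.  The 16-byte size and 4-byte
   alignment used here are hypothetical values chosen only for
   illustration.  */
#if 0
static void
example_pick_block_copy_strategy ()
{
  unsigned HOST_WIDE_INT size = 16;            /* bytes to copy */
  unsigned int align = 4 * BITS_PER_UNIT;      /* alignment, in bits */

  if (MOVE_BY_PIECES_P (size, align))
    {
      /* Few enough simple moves: expand them inline, as
         emit_block_move does via move_by_pieces.  */
    }
  else
    {
      /* Otherwise a movstr pattern or a memcpy libcall is expected
         to be cheaper (see MOVE_RATIO above).  */
    }
}
#endif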
239 /* This array records the insn_code of insns to perform block moves. */
240 enum insn_code movstr_optab[NUM_MACHINE_MODES];
242 /* This array records the insn_code of insns to perform block clears. */
243 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
245 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
247 #ifndef SLOW_UNALIGNED_ACCESS
248 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
249 #endif
251 /* This is run once per compilation to set up which modes can be used
252 directly in memory and to initialize the block move optab. */
254 void
255 init_expr_once ()
257 rtx insn, pat;
258 enum machine_mode mode;
259 int num_clobbers;
260 rtx mem, mem1;
261 rtx reg;
263 /* Try indexing by frame ptr and try by stack ptr.
264 It is known that on the Convex the stack ptr isn't a valid index.
265 With luck, one or the other is valid on any machine. */
266 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
267 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
269 /* A scratch register we can modify in-place below to avoid
270 useless RTL allocations. */
271 reg = gen_rtx_REG (VOIDmode, -1);
273 insn = rtx_alloc (INSN);
274 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
275 PATTERN (insn) = pat;
277 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
278 mode = (enum machine_mode) ((int) mode + 1))
280 int regno;
282 direct_load[(int) mode] = direct_store[(int) mode] = 0;
283 PUT_MODE (mem, mode);
284 PUT_MODE (mem1, mode);
285 PUT_MODE (reg, mode);
287 /* See if there is some register that can be used in this mode and
288 directly loaded or stored from memory. */
290 if (mode != VOIDmode && mode != BLKmode)
291 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
292 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
293 regno++)
295 if (! HARD_REGNO_MODE_OK (regno, mode))
296 continue;
298 REGNO (reg) = regno;
300 SET_SRC (pat) = mem;
301 SET_DEST (pat) = reg;
302 if (recog (pat, insn, &num_clobbers) >= 0)
303 direct_load[(int) mode] = 1;
305 SET_SRC (pat) = mem1;
306 SET_DEST (pat) = reg;
307 if (recog (pat, insn, &num_clobbers) >= 0)
308 direct_load[(int) mode] = 1;
310 SET_SRC (pat) = reg;
311 SET_DEST (pat) = mem;
312 if (recog (pat, insn, &num_clobbers) >= 0)
313 direct_store[(int) mode] = 1;
315 SET_SRC (pat) = reg;
316 SET_DEST (pat) = mem1;
317 if (recog (pat, insn, &num_clobbers) >= 0)
318 direct_store[(int) mode] = 1;
322 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
324 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
325 mode = GET_MODE_WIDER_MODE (mode))
327 enum machine_mode srcmode;
328 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
329 srcmode = GET_MODE_WIDER_MODE (srcmode))
331 enum insn_code ic;
333 ic = can_extend_p (mode, srcmode, 0);
334 if (ic == CODE_FOR_nothing)
335 continue;
337 PUT_MODE (mem, srcmode);
339 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
340 float_extend_from_mem[mode][srcmode] = true;
345 /* This is run at the start of compiling a function. */
347 void
348 init_expr ()
350 cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));
352 pending_chain = 0;
353 pending_stack_adjust = 0;
354 stack_pointer_delta = 0;
355 inhibit_defer_pop = 0;
356 saveregs_value = 0;
357 apply_args_value = 0;
358 forced_labels = 0;
361 /* Small sanity check that the queue is empty at the end of a function. */
363 void
364 finish_expr_for_function ()
366 if (pending_chain)
367 abort ();
370 /* Manage the queue of increment instructions to be output
371 for POSTINCREMENT_EXPR expressions, etc. */
373 /* Queue up to increment (or change) VAR later. BODY says how:
374 BODY should be the same thing you would pass to emit_insn
375 to increment right away. It will go to emit_insn later on.
377 The value is a QUEUED expression to be used in place of VAR
378 where you want to guarantee the pre-incrementation value of VAR. */
380 static rtx
381 enqueue_insn (var, body)
382 rtx var, body;
384 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
385 body, pending_chain);
386 return pending_chain;
389 /* Use protect_from_queue to convert a QUEUED expression
390 into something that you can put immediately into an instruction.
391 If the queued incrementation has not happened yet,
392 protect_from_queue returns the variable itself.
393 If the incrementation has happened, protect_from_queue returns a temp
394 that contains a copy of the old value of the variable.
396 Any time an rtx which might possibly be a QUEUED is to be put
397 into an instruction, it must be passed through protect_from_queue first.
398 QUEUED expressions are not meaningful in instructions.
400 Do not pass a value through protect_from_queue and then hold
401 on to it for a while before putting it in an instruction!
402 If the queue is flushed in between, incorrect code will result. */
405 protect_from_queue (x, modify)
406 rtx x;
407 int modify;
409 RTX_CODE code = GET_CODE (x);
411 #if 0 /* A QUEUED can hang around after the queue is forced out. */
412 /* Shortcut for most common case. */
413 if (pending_chain == 0)
414 return x;
415 #endif
417 if (code != QUEUED)
419 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
420 use of autoincrement. Make a copy of the contents of the memory
421 location rather than a copy of the address, but not if the value is
422 of mode BLKmode. Don't modify X in place since it might be
423 shared. */
424 if (code == MEM && GET_MODE (x) != BLKmode
425 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
427 rtx y = XEXP (x, 0);
428 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
430 if (QUEUED_INSN (y))
432 rtx temp = gen_reg_rtx (GET_MODE (x));
434 emit_insn_before (gen_move_insn (temp, new),
435 QUEUED_INSN (y));
436 return temp;
439 /* Copy the address into a pseudo, so that the returned value
440 remains correct across calls to emit_queue. */
441 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
444 /* Otherwise, recursively protect the subexpressions of all
445 the kinds of rtx's that can contain a QUEUED. */
446 if (code == MEM)
448 rtx tem = protect_from_queue (XEXP (x, 0), 0);
449 if (tem != XEXP (x, 0))
451 x = copy_rtx (x);
452 XEXP (x, 0) = tem;
455 else if (code == PLUS || code == MULT)
457 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
458 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
459 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
461 x = copy_rtx (x);
462 XEXP (x, 0) = new0;
463 XEXP (x, 1) = new1;
466 return x;
468 /* If the increment has not happened, use the variable itself. Copy it
469 into a new pseudo so that the value remains correct across calls to
470 emit_queue. */
471 if (QUEUED_INSN (x) == 0)
472 return copy_to_reg (QUEUED_VAR (x));
473 /* If the increment has happened and a pre-increment copy exists,
474 use that copy. */
475 if (QUEUED_COPY (x) != 0)
476 return QUEUED_COPY (x);
477 /* The increment has happened but we haven't set up a pre-increment copy.
478 Set one up now, and use it. */
479 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
480 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
481 QUEUED_INSN (x));
482 return QUEUED_COPY (x);
485 /* Return nonzero if X contains a QUEUED expression:
486 if it contains anything that will be altered by a queued increment.
487 We handle only combinations of MEM, PLUS, MINUS and MULT operators
488 since memory addresses generally contain only those. */
491 queued_subexp_p (x)
492 rtx x;
494 enum rtx_code code = GET_CODE (x);
495 switch (code)
497 case QUEUED:
498 return 1;
499 case MEM:
500 return queued_subexp_p (XEXP (x, 0));
501 case MULT:
502 case PLUS:
503 case MINUS:
504 return (queued_subexp_p (XEXP (x, 0))
505 || queued_subexp_p (XEXP (x, 1)));
506 default:
507 return 0;
511 /* Perform all the pending incrementations. */
513 void
514 emit_queue ()
516 rtx p;
517 while ((p = pending_chain))
519 rtx body = QUEUED_BODY (p);
521 switch (GET_CODE (body))
523 case INSN:
524 case JUMP_INSN:
525 case CALL_INSN:
526 case CODE_LABEL:
527 case BARRIER:
528 case NOTE:
529 QUEUED_INSN (p) = body;
530 emit_insn (body);
531 break;
533 #ifdef ENABLE_CHECKING
534 case SEQUENCE:
535 abort ();
536 break;
537 #endif
539 default:
540 QUEUED_INSN (p) = emit_insn (body);
541 break;
544 pending_chain = QUEUED_NEXT (p);
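/* [Editorial addition, not part of the original expr.c] A minimal sketch,
   guarded out with #if 0, of the usual discipline around the increment
   queue defined above: operands are filtered through protect_from_queue
   before they are put into insns, and emit_queue flushes the pending
   post-increments afterwards.  TARGET and SOURCE are hypothetical
   operands.  */
#if 0
static void
example_use_of_queue (target, source)
     rtx target, source;
{
  /* Read-only uses pass MODIFY == 0; a store target passes MODIFY == 1.  */
  source = protect_from_queue (source, 0);
  target = protect_from_queue (target, 1);

  emit_move_insn (target, source);

  /* Perform whatever post-increments the operands had queued.  */
  emit_queue ();
}
#endif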
548 /* Copy data from FROM to TO, where the machine modes are not the same.
549 Both modes may be integer, or both may be floating.
550 UNSIGNEDP should be nonzero if FROM is an unsigned type.
551 This causes zero-extension instead of sign-extension. */
553 void
554 convert_move (to, from, unsignedp)
555 rtx to, from;
556 int unsignedp;
558 enum machine_mode to_mode = GET_MODE (to);
559 enum machine_mode from_mode = GET_MODE (from);
560 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
561 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
562 enum insn_code code;
563 rtx libcall;
565 /* rtx code for making an equivalent value. */
566 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
567 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
569 to = protect_from_queue (to, 1);
570 from = protect_from_queue (from, 0);
572 if (to_real != from_real)
573 abort ();
575 /* If FROM is a SUBREG that indicates that we have already done at least
576 the required extension, strip it. We don't handle such SUBREGs as
577 TO here. */
579 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
580 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
581 >= GET_MODE_SIZE (to_mode))
582 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
583 from = gen_lowpart (to_mode, from), from_mode = to_mode;
585 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
586 abort ();
588 if (to_mode == from_mode
589 || (from_mode == VOIDmode && CONSTANT_P (from)))
591 emit_move_insn (to, from);
592 return;
595 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
597 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
598 abort ();
600 if (VECTOR_MODE_P (to_mode))
601 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
602 else
603 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
605 emit_move_insn (to, from);
606 return;
609 if (to_real != from_real)
610 abort ();
612 if (to_real)
614 rtx value, insns;
616 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
618 /* Try converting directly if the insn is supported. */
619 if ((code = can_extend_p (to_mode, from_mode, 0))
620 != CODE_FOR_nothing)
622 emit_unop_insn (code, to, from, UNKNOWN);
623 return;
627 #ifdef HAVE_trunchfqf2
628 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
630 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
631 return;
633 #endif
634 #ifdef HAVE_trunctqfqf2
635 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
637 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
638 return;
640 #endif
641 #ifdef HAVE_truncsfqf2
642 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
644 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
645 return;
647 #endif
648 #ifdef HAVE_truncdfqf2
649 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
651 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
652 return;
654 #endif
655 #ifdef HAVE_truncxfqf2
656 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
658 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
659 return;
661 #endif
662 #ifdef HAVE_trunctfqf2
663 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
665 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
666 return;
668 #endif
670 #ifdef HAVE_trunctqfhf2
671 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
673 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
674 return;
676 #endif
677 #ifdef HAVE_truncsfhf2
678 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
680 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
681 return;
683 #endif
684 #ifdef HAVE_truncdfhf2
685 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
687 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
688 return;
690 #endif
691 #ifdef HAVE_truncxfhf2
692 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
694 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
695 return;
697 #endif
698 #ifdef HAVE_trunctfhf2
699 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
701 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
702 return;
704 #endif
706 #ifdef HAVE_truncsftqf2
707 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
709 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
710 return;
712 #endif
713 #ifdef HAVE_truncdftqf2
714 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
716 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
717 return;
719 #endif
720 #ifdef HAVE_truncxftqf2
721 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
723 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
724 return;
726 #endif
727 #ifdef HAVE_trunctftqf2
728 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
730 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
731 return;
733 #endif
735 #ifdef HAVE_truncdfsf2
736 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
738 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
739 return;
741 #endif
742 #ifdef HAVE_truncxfsf2
743 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
745 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
746 return;
748 #endif
749 #ifdef HAVE_trunctfsf2
750 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
752 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
753 return;
755 #endif
756 #ifdef HAVE_truncxfdf2
757 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
759 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
760 return;
762 #endif
763 #ifdef HAVE_trunctfdf2
764 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
766 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
767 return;
769 #endif
771 libcall = (rtx) 0;
772 switch (from_mode)
774 case SFmode:
775 switch (to_mode)
777 case DFmode:
778 libcall = extendsfdf2_libfunc;
779 break;
781 case XFmode:
782 libcall = extendsfxf2_libfunc;
783 break;
785 case TFmode:
786 libcall = extendsftf2_libfunc;
787 break;
789 default:
790 break;
792 break;
794 case DFmode:
795 switch (to_mode)
797 case SFmode:
798 libcall = truncdfsf2_libfunc;
799 break;
801 case XFmode:
802 libcall = extenddfxf2_libfunc;
803 break;
805 case TFmode:
806 libcall = extenddftf2_libfunc;
807 break;
809 default:
810 break;
812 break;
814 case XFmode:
815 switch (to_mode)
817 case SFmode:
818 libcall = truncxfsf2_libfunc;
819 break;
821 case DFmode:
822 libcall = truncxfdf2_libfunc;
823 break;
825 default:
826 break;
828 break;
830 case TFmode:
831 switch (to_mode)
833 case SFmode:
834 libcall = trunctfsf2_libfunc;
835 break;
837 case DFmode:
838 libcall = trunctfdf2_libfunc;
839 break;
841 default:
842 break;
844 break;
846 default:
847 break;
850 if (libcall == (rtx) 0)
851 /* This conversion is not implemented yet. */
852 abort ();
854 start_sequence ();
855 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
856 1, from, from_mode);
857 insns = get_insns ();
858 end_sequence ();
859 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
860 from));
861 return;
864 /* Now both modes are integers. */
866 /* Handle expanding beyond a word. */
867 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
868 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
870 rtx insns;
871 rtx lowpart;
872 rtx fill_value;
873 rtx lowfrom;
874 int i;
875 enum machine_mode lowpart_mode;
876 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
878 /* Try converting directly if the insn is supported. */
879 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
880 != CODE_FOR_nothing)
882 /* If FROM is a SUBREG, put it into a register. Do this
883 so that we always generate the same set of insns for
884 better cse'ing; if an intermediate assignment occurred,
885 we won't be doing the operation directly on the SUBREG. */
886 if (optimize > 0 && GET_CODE (from) == SUBREG)
887 from = force_reg (from_mode, from);
888 emit_unop_insn (code, to, from, equiv_code);
889 return;
891 /* Next, try converting via full word. */
892 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
893 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
894 != CODE_FOR_nothing))
896 if (GET_CODE (to) == REG)
897 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
898 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
899 emit_unop_insn (code, to,
900 gen_lowpart (word_mode, to), equiv_code);
901 return;
904 /* No special multiword conversion insn; do it by hand. */
905 start_sequence ();
907 /* Since we will turn this into a no conflict block, we must ensure
908 that the source does not overlap the target. */
910 if (reg_overlap_mentioned_p (to, from))
911 from = force_reg (from_mode, from);
913 /* Get a copy of FROM widened to a word, if necessary. */
914 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
915 lowpart_mode = word_mode;
916 else
917 lowpart_mode = from_mode;
919 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
921 lowpart = gen_lowpart (lowpart_mode, to);
922 emit_move_insn (lowpart, lowfrom);
924 /* Compute the value to put in each remaining word. */
925 if (unsignedp)
926 fill_value = const0_rtx;
927 else
929 #ifdef HAVE_slt
930 if (HAVE_slt
931 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
932 && STORE_FLAG_VALUE == -1)
934 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
935 lowpart_mode, 0);
936 fill_value = gen_reg_rtx (word_mode);
937 emit_insn (gen_slt (fill_value));
939 else
940 #endif
942 fill_value
943 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
944 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
945 NULL_RTX, 0);
946 fill_value = convert_to_mode (word_mode, fill_value, 1);
950 /* Fill the remaining words. */
951 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
953 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
954 rtx subword = operand_subword (to, index, 1, to_mode);
956 if (subword == 0)
957 abort ();
959 if (fill_value != subword)
960 emit_move_insn (subword, fill_value);
963 insns = get_insns ();
964 end_sequence ();
966 emit_no_conflict_block (insns, to, from, NULL_RTX,
967 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
968 return;
971 /* Truncating multi-word to a word or less. */
972 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
973 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
975 if (!((GET_CODE (from) == MEM
976 && ! MEM_VOLATILE_P (from)
977 && direct_load[(int) to_mode]
978 && ! mode_dependent_address_p (XEXP (from, 0)))
979 || GET_CODE (from) == REG
980 || GET_CODE (from) == SUBREG))
981 from = force_reg (from_mode, from);
982 convert_move (to, gen_lowpart (word_mode, from), 0);
983 return;
986 /* Handle pointer conversion. */ /* SPEE 900220. */
987 if (to_mode == PQImode)
989 if (from_mode != QImode)
990 from = convert_to_mode (QImode, from, unsignedp);
992 #ifdef HAVE_truncqipqi2
993 if (HAVE_truncqipqi2)
995 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
996 return;
998 #endif /* HAVE_truncqipqi2 */
999 abort ();
1002 if (from_mode == PQImode)
1004 if (to_mode != QImode)
1006 from = convert_to_mode (QImode, from, unsignedp);
1007 from_mode = QImode;
1009 else
1011 #ifdef HAVE_extendpqiqi2
1012 if (HAVE_extendpqiqi2)
1014 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
1015 return;
1017 #endif /* HAVE_extendpqiqi2 */
1018 abort ();
1022 if (to_mode == PSImode)
1024 if (from_mode != SImode)
1025 from = convert_to_mode (SImode, from, unsignedp);
1027 #ifdef HAVE_truncsipsi2
1028 if (HAVE_truncsipsi2)
1030 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1031 return;
1033 #endif /* HAVE_truncsipsi2 */
1034 abort ();
1037 if (from_mode == PSImode)
1039 if (to_mode != SImode)
1041 from = convert_to_mode (SImode, from, unsignedp);
1042 from_mode = SImode;
1044 else
1046 #ifdef HAVE_extendpsisi2
1047 if (! unsignedp && HAVE_extendpsisi2)
1049 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1050 return;
1052 #endif /* HAVE_extendpsisi2 */
1053 #ifdef HAVE_zero_extendpsisi2
1054 if (unsignedp && HAVE_zero_extendpsisi2)
1056 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1057 return;
1059 #endif /* HAVE_zero_extendpsisi2 */
1060 abort ();
1064 if (to_mode == PDImode)
1066 if (from_mode != DImode)
1067 from = convert_to_mode (DImode, from, unsignedp);
1069 #ifdef HAVE_truncdipdi2
1070 if (HAVE_truncdipdi2)
1072 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1073 return;
1075 #endif /* HAVE_truncdipdi2 */
1076 abort ();
1079 if (from_mode == PDImode)
1081 if (to_mode != DImode)
1083 from = convert_to_mode (DImode, from, unsignedp);
1084 from_mode = DImode;
1086 else
1088 #ifdef HAVE_extendpdidi2
1089 if (HAVE_extendpdidi2)
1091 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1092 return;
1094 #endif /* HAVE_extendpdidi2 */
1095 abort ();
1099 /* Now follow all the conversions between integers
1100 no more than a word long. */
1102 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1103 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1104 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1105 GET_MODE_BITSIZE (from_mode)))
1107 if (!((GET_CODE (from) == MEM
1108 && ! MEM_VOLATILE_P (from)
1109 && direct_load[(int) to_mode]
1110 && ! mode_dependent_address_p (XEXP (from, 0)))
1111 || GET_CODE (from) == REG
1112 || GET_CODE (from) == SUBREG))
1113 from = force_reg (from_mode, from);
1114 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1115 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1116 from = copy_to_reg (from);
1117 emit_move_insn (to, gen_lowpart (to_mode, from));
1118 return;
1121 /* Handle extension. */
1122 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1124 /* Convert directly if that works. */
1125 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1126 != CODE_FOR_nothing)
1128 if (flag_force_mem)
1129 from = force_not_mem (from);
1131 emit_unop_insn (code, to, from, equiv_code);
1132 return;
1134 else
1136 enum machine_mode intermediate;
1137 rtx tmp;
1138 tree shift_amount;
1140 /* Search for a mode to convert via. */
1141 for (intermediate = from_mode; intermediate != VOIDmode;
1142 intermediate = GET_MODE_WIDER_MODE (intermediate))
1143 if (((can_extend_p (to_mode, intermediate, unsignedp)
1144 != CODE_FOR_nothing)
1145 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1146 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1147 GET_MODE_BITSIZE (intermediate))))
1148 && (can_extend_p (intermediate, from_mode, unsignedp)
1149 != CODE_FOR_nothing))
1151 convert_move (to, convert_to_mode (intermediate, from,
1152 unsignedp), unsignedp);
1153 return;
1156 /* No suitable intermediate mode.
1157 Generate what we need with shifts. */
1158 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1159 - GET_MODE_BITSIZE (from_mode), 0);
1160 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1161 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1162 to, unsignedp);
1163 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1164 to, unsignedp);
1165 if (tmp != to)
1166 emit_move_insn (to, tmp);
1167 return;
1171 /* Support special truncate insns for certain modes. */
1173 if (from_mode == DImode && to_mode == SImode)
1175 #ifdef HAVE_truncdisi2
1176 if (HAVE_truncdisi2)
1178 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1179 return;
1181 #endif
1182 convert_move (to, force_reg (from_mode, from), unsignedp);
1183 return;
1186 if (from_mode == DImode && to_mode == HImode)
1188 #ifdef HAVE_truncdihi2
1189 if (HAVE_truncdihi2)
1191 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1192 return;
1194 #endif
1195 convert_move (to, force_reg (from_mode, from), unsignedp);
1196 return;
1199 if (from_mode == DImode && to_mode == QImode)
1201 #ifdef HAVE_truncdiqi2
1202 if (HAVE_truncdiqi2)
1204 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1205 return;
1207 #endif
1208 convert_move (to, force_reg (from_mode, from), unsignedp);
1209 return;
1212 if (from_mode == SImode && to_mode == HImode)
1214 #ifdef HAVE_truncsihi2
1215 if (HAVE_truncsihi2)
1217 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1218 return;
1220 #endif
1221 convert_move (to, force_reg (from_mode, from), unsignedp);
1222 return;
1225 if (from_mode == SImode && to_mode == QImode)
1227 #ifdef HAVE_truncsiqi2
1228 if (HAVE_truncsiqi2)
1230 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1231 return;
1233 #endif
1234 convert_move (to, force_reg (from_mode, from), unsignedp);
1235 return;
1238 if (from_mode == HImode && to_mode == QImode)
1240 #ifdef HAVE_trunchiqi2
1241 if (HAVE_trunchiqi2)
1243 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1244 return;
1246 #endif
1247 convert_move (to, force_reg (from_mode, from), unsignedp);
1248 return;
1251 if (from_mode == TImode && to_mode == DImode)
1253 #ifdef HAVE_trunctidi2
1254 if (HAVE_trunctidi2)
1256 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1257 return;
1259 #endif
1260 convert_move (to, force_reg (from_mode, from), unsignedp);
1261 return;
1264 if (from_mode == TImode && to_mode == SImode)
1266 #ifdef HAVE_trunctisi2
1267 if (HAVE_trunctisi2)
1269 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1270 return;
1272 #endif
1273 convert_move (to, force_reg (from_mode, from), unsignedp);
1274 return;
1277 if (from_mode == TImode && to_mode == HImode)
1279 #ifdef HAVE_trunctihi2
1280 if (HAVE_trunctihi2)
1282 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1283 return;
1285 #endif
1286 convert_move (to, force_reg (from_mode, from), unsignedp);
1287 return;
1290 if (from_mode == TImode && to_mode == QImode)
1292 #ifdef HAVE_trunctiqi2
1293 if (HAVE_trunctiqi2)
1295 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1296 return;
1298 #endif
1299 convert_move (to, force_reg (from_mode, from), unsignedp);
1300 return;
1303 /* Handle truncation of volatile memrefs, and so on;
1304 the things that couldn't be truncated directly,
1305 and for which there was no special instruction. */
1306 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1308 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1309 emit_move_insn (to, temp);
1310 return;
1313 /* Mode combination is not recognized. */
1314 abort ();
1317 /* Return an rtx for a value that would result
1318 from converting X to mode MODE.
1319 Both X and MODE may be floating, or both integer.
1320 UNSIGNEDP is nonzero if X is an unsigned value.
1321 This can be done by referring to a part of X in place
1322 or by copying to a new temporary with conversion.
1324 This function *must not* call protect_from_queue
1325 except when putting X into an insn (in which case convert_move does it). */
1328 convert_to_mode (mode, x, unsignedp)
1329 enum machine_mode mode;
1330 rtx x;
1331 int unsignedp;
1333 return convert_modes (mode, VOIDmode, x, unsignedp);
1336 /* Return an rtx for a value that would result
1337 from converting X from mode OLDMODE to mode MODE.
1338 Both modes may be floating, or both integer.
1339 UNSIGNEDP is nonzero if X is an unsigned value.
1341 This can be done by referring to a part of X in place
1342 or by copying to a new temporary with conversion.
1344 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1346 This function *must not* call protect_from_queue
1347 except when putting X into an insn (in which case convert_move does it). */
1350 convert_modes (mode, oldmode, x, unsignedp)
1351 enum machine_mode mode, oldmode;
1352 rtx x;
1353 int unsignedp;
1355 rtx temp;
1357 /* If FROM is a SUBREG that indicates that we have already done at least
1358 the required extension, strip it. */
1360 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1361 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1362 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1363 x = gen_lowpart (mode, x);
1365 if (GET_MODE (x) != VOIDmode)
1366 oldmode = GET_MODE (x);
1368 if (mode == oldmode)
1369 return x;
1371 /* There is one case that we must handle specially: If we are converting
1372 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1373 we are to interpret the constant as unsigned, gen_lowpart will do
 1374          the wrong thing if the constant appears negative.  What we want to do is
1375 make the high-order word of the constant zero, not all ones. */
1377 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1378 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1379 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1381 HOST_WIDE_INT val = INTVAL (x);
1383 if (oldmode != VOIDmode
1384 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1386 int width = GET_MODE_BITSIZE (oldmode);
1388 /* We need to zero extend VAL. */
1389 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1392 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1395 /* We can do this with a gen_lowpart if both desired and current modes
1396 are integer, and this is either a constant integer, a register, or a
1397 non-volatile MEM. Except for the constant case where MODE is no
1398 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1400 if ((GET_CODE (x) == CONST_INT
1401 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1402 || (GET_MODE_CLASS (mode) == MODE_INT
1403 && GET_MODE_CLASS (oldmode) == MODE_INT
1404 && (GET_CODE (x) == CONST_DOUBLE
1405 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1406 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1407 && direct_load[(int) mode])
1408 || (GET_CODE (x) == REG
1409 && (! HARD_REGISTER_P (x)
1410 || HARD_REGNO_MODE_OK (REGNO (x), mode))
1411 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1412 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1414 /* ?? If we don't know OLDMODE, we have to assume here that
1415 X does not need sign- or zero-extension. This may not be
1416 the case, but it's the best we can do. */
1417 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1418 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1420 HOST_WIDE_INT val = INTVAL (x);
1421 int width = GET_MODE_BITSIZE (oldmode);
1423 /* We must sign or zero-extend in this case. Start by
1424 zero-extending, then sign extend if we need to. */
1425 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1426 if (! unsignedp
1427 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1428 val |= (HOST_WIDE_INT) (-1) << width;
1430 return gen_int_mode (val, mode);
1433 return gen_lowpart (mode, x);
1436 temp = gen_reg_rtx (mode);
1437 convert_move (temp, x, unsignedp);
1438 return temp;
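/* [Editorial addition, not part of the original expr.c] A minimal sketch,
   guarded out with #if 0, of the two conversion entry points above.
   convert_move stores into an existing target; convert_to_mode returns an
   rtx in the requested mode (possibly X itself).  VAL is a hypothetical
   QImode value.  */
#if 0
static rtx
example_widen_to_si (val)
     rtx val;                        /* assumed to have QImode */
{
  rtx wide = gen_reg_rtx (SImode);

  /* Zero-extend (unsignedp == 1) VAL into WIDE.  */
  convert_move (wide, val, 1);

  /* The one-step form; it may return VAL unchanged if no conversion
     is needed, or a converted temporary otherwise.  */
  return convert_to_mode (SImode, val, 1);
}
#endif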
 1441 /* This macro determines the largest unit size that
 1442    move_by_pieces can use.  */
1444 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1445 move efficiently, as opposed to MOVE_MAX which is the maximum
1446 number of bytes we can move with a single instruction. */
1448 #ifndef MOVE_MAX_PIECES
1449 #define MOVE_MAX_PIECES MOVE_MAX
1450 #endif
1452 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1453 store efficiently. Due to internal GCC limitations, this is
1454 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1455 for an immediate constant. */
1457 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1459 /* Generate several move instructions to copy LEN bytes from block FROM to
1460 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1461 and TO through protect_from_queue before calling.
1463 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1464 used to push FROM to the stack.
1466 ALIGN is maximum alignment we can assume. */
1468 void
1469 move_by_pieces (to, from, len, align)
1470 rtx to, from;
1471 unsigned HOST_WIDE_INT len;
1472 unsigned int align;
1474 struct move_by_pieces data;
1475 rtx to_addr, from_addr = XEXP (from, 0);
1476 unsigned int max_size = MOVE_MAX_PIECES + 1;
1477 enum machine_mode mode = VOIDmode, tmode;
1478 enum insn_code icode;
1480 data.offset = 0;
1481 data.from_addr = from_addr;
1482 if (to)
1484 to_addr = XEXP (to, 0);
1485 data.to = to;
1486 data.autinc_to
1487 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1488 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1489 data.reverse
1490 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1492 else
1494 to_addr = NULL_RTX;
1495 data.to = NULL_RTX;
1496 data.autinc_to = 1;
1497 #ifdef STACK_GROWS_DOWNWARD
1498 data.reverse = 1;
1499 #else
1500 data.reverse = 0;
1501 #endif
1503 data.to_addr = to_addr;
1504 data.from = from;
1505 data.autinc_from
1506 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1507 || GET_CODE (from_addr) == POST_INC
1508 || GET_CODE (from_addr) == POST_DEC);
1510 data.explicit_inc_from = 0;
1511 data.explicit_inc_to = 0;
1512 if (data.reverse) data.offset = len;
1513 data.len = len;
1515 /* If copying requires more than two move insns,
1516 copy addresses to registers (to make displacements shorter)
1517 and use post-increment if available. */
1518 if (!(data.autinc_from && data.autinc_to)
1519 && move_by_pieces_ninsns (len, align) > 2)
1521 /* Find the mode of the largest move... */
1522 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1523 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1524 if (GET_MODE_SIZE (tmode) < max_size)
1525 mode = tmode;
1527 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1529 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1530 data.autinc_from = 1;
1531 data.explicit_inc_from = -1;
1533 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1535 data.from_addr = copy_addr_to_reg (from_addr);
1536 data.autinc_from = 1;
1537 data.explicit_inc_from = 1;
1539 if (!data.autinc_from && CONSTANT_P (from_addr))
1540 data.from_addr = copy_addr_to_reg (from_addr);
1541 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1543 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1544 data.autinc_to = 1;
1545 data.explicit_inc_to = -1;
1547 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1549 data.to_addr = copy_addr_to_reg (to_addr);
1550 data.autinc_to = 1;
1551 data.explicit_inc_to = 1;
1553 if (!data.autinc_to && CONSTANT_P (to_addr))
1554 data.to_addr = copy_addr_to_reg (to_addr);
1557 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1558 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1559 align = MOVE_MAX * BITS_PER_UNIT;
1561 /* First move what we can in the largest integer mode, then go to
1562 successively smaller modes. */
1564 while (max_size > 1)
1566 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1567 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1568 if (GET_MODE_SIZE (tmode) < max_size)
1569 mode = tmode;
1571 if (mode == VOIDmode)
1572 break;
1574 icode = mov_optab->handlers[(int) mode].insn_code;
1575 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1576 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1578 max_size = GET_MODE_SIZE (mode);
1581 /* The code above should have handled everything. */
1582 if (data.len > 0)
1583 abort ();
1586 /* Return number of insns required to move L bytes by pieces.
1587 ALIGN (in bits) is maximum alignment we can assume. */
1589 static unsigned HOST_WIDE_INT
1590 move_by_pieces_ninsns (l, align)
1591 unsigned HOST_WIDE_INT l;
1592 unsigned int align;
1594 unsigned HOST_WIDE_INT n_insns = 0;
1595 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1597 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1598 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1599 align = MOVE_MAX * BITS_PER_UNIT;
1601 while (max_size > 1)
1603 enum machine_mode mode = VOIDmode, tmode;
1604 enum insn_code icode;
1606 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1607 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1608 if (GET_MODE_SIZE (tmode) < max_size)
1609 mode = tmode;
1611 if (mode == VOIDmode)
1612 break;
1614 icode = mov_optab->handlers[(int) mode].insn_code;
1615 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1616 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1618 max_size = GET_MODE_SIZE (mode);
1621 if (l)
1622 abort ();
1623 return n_insns;
1626 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1627 with move instructions for mode MODE. GENFUN is the gen_... function
1628 to make a move insn for that mode. DATA has all the other info. */
1630 static void
1631 move_by_pieces_1 (genfun, mode, data)
1632 rtx (*genfun) PARAMS ((rtx, ...));
1633 enum machine_mode mode;
1634 struct move_by_pieces *data;
1636 unsigned int size = GET_MODE_SIZE (mode);
1637 rtx to1 = NULL_RTX, from1;
1639 while (data->len >= size)
1641 if (data->reverse)
1642 data->offset -= size;
1644 if (data->to)
1646 if (data->autinc_to)
1647 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1648 data->offset);
1649 else
1650 to1 = adjust_address (data->to, mode, data->offset);
1653 if (data->autinc_from)
1654 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1655 data->offset);
1656 else
1657 from1 = adjust_address (data->from, mode, data->offset);
1659 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1660 emit_insn (gen_add2_insn (data->to_addr,
1661 GEN_INT (-(HOST_WIDE_INT)size)));
1662 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1663 emit_insn (gen_add2_insn (data->from_addr,
1664 GEN_INT (-(HOST_WIDE_INT)size)));
1666 if (data->to)
1667 emit_insn ((*genfun) (to1, from1));
1668 else
1670 #ifdef PUSH_ROUNDING
1671 emit_single_push_insn (mode, from1, NULL);
1672 #else
1673 abort ();
1674 #endif
1677 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1678 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1679 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1680 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1682 if (! data->reverse)
1683 data->offset += size;
1685 data->len -= size;
1689 /* Emit code to move a block Y to a block X. This may be done with
1690 string-move instructions, with multiple scalar move instructions,
1691 or with a library call.
1693 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1694 SIZE is an rtx that says how long they are.
1695 ALIGN is the maximum alignment we can assume they have.
1696 METHOD describes what kind of copy this is, and what mechanisms may be used.
1698 Return the address of the new block, if memcpy is called and returns it,
1699 0 otherwise. */
1702 emit_block_move (x, y, size, method)
1703 rtx x, y, size;
1704 enum block_op_methods method;
1706 bool may_use_call;
1707 rtx retval = 0;
1708 unsigned int align;
1710 switch (method)
1712 case BLOCK_OP_NORMAL:
1713 may_use_call = true;
1714 break;
1716 case BLOCK_OP_CALL_PARM:
1717 may_use_call = block_move_libcall_safe_for_call_parm ();
1719 /* Make inhibit_defer_pop nonzero around the library call
1720 to force it to pop the arguments right away. */
1721 NO_DEFER_POP;
1722 break;
1724 case BLOCK_OP_NO_LIBCALL:
1725 may_use_call = false;
1726 break;
1728 default:
1729 abort ();
1732 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1734 if (GET_MODE (x) != BLKmode)
1735 abort ();
1736 if (GET_MODE (y) != BLKmode)
1737 abort ();
1739 x = protect_from_queue (x, 1);
1740 y = protect_from_queue (y, 0);
1741 size = protect_from_queue (size, 0);
1743 if (GET_CODE (x) != MEM)
1744 abort ();
1745 if (GET_CODE (y) != MEM)
1746 abort ();
1747 if (size == 0)
1748 abort ();
1750 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1751 can be incorrect is coming from __builtin_memcpy. */
1752 if (GET_CODE (size) == CONST_INT)
1754 x = shallow_copy_rtx (x);
1755 y = shallow_copy_rtx (y);
1756 set_mem_size (x, size);
1757 set_mem_size (y, size);
1760 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1761 move_by_pieces (x, y, INTVAL (size), align);
1762 else if (emit_block_move_via_movstr (x, y, size, align))
1764 else if (may_use_call)
1765 retval = emit_block_move_via_libcall (x, y, size);
1766 else
1767 emit_block_move_via_loop (x, y, size, align);
1769 if (method == BLOCK_OP_CALL_PARM)
1770 OK_DEFER_POP;
1772 return retval;
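/* [Editorial addition, not part of the original expr.c] A minimal sketch,
   guarded out with #if 0, of a typical call to emit_block_move above.
   DST and SRC are hypothetical BLKmode MEMs; NBYTES is a byte count known
   at compile time.  BLOCK_OP_NORMAL permits falling back to a memcpy
   libcall.  */
#if 0
static void
example_copy_block (dst, src, nbytes)
     rtx dst, src;
     HOST_WIDE_INT nbytes;
{
  emit_block_move (dst, src, GEN_INT (nbytes), BLOCK_OP_NORMAL);
}
#endif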
1775 /* A subroutine of emit_block_move. Returns true if calling the
1776 block move libcall will not clobber any parameters which may have
1777 already been placed on the stack. */
1779 static bool
1780 block_move_libcall_safe_for_call_parm ()
1782 if (PUSH_ARGS)
1783 return true;
1784 else
1786 /* Check to see whether memcpy takes all register arguments. */
1787 static enum {
1788 takes_regs_uninit, takes_regs_no, takes_regs_yes
1789 } takes_regs = takes_regs_uninit;
1791 switch (takes_regs)
1793 case takes_regs_uninit:
1795 CUMULATIVE_ARGS args_so_far;
1796 tree fn, arg;
1798 fn = emit_block_move_libcall_fn (false);
1799 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1801 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1802 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1804 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1805 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1806 if (!tmp || !REG_P (tmp))
1807 goto fail_takes_regs;
1808 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1809 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1810 NULL_TREE, 1))
1811 goto fail_takes_regs;
1812 #endif
1813 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1816 takes_regs = takes_regs_yes;
1817 /* FALLTHRU */
1819 case takes_regs_yes:
1820 return true;
1822 fail_takes_regs:
1823 takes_regs = takes_regs_no;
1824 /* FALLTHRU */
1825 case takes_regs_no:
1826 return false;
1828 default:
1829 abort ();
1834 /* A subroutine of emit_block_move. Expand a movstr pattern;
1835 return true if successful. */
1837 static bool
1838 emit_block_move_via_movstr (x, y, size, align)
1839 rtx x, y, size;
1840 unsigned int align;
1842 /* Try the most limited insn first, because there's no point
1843 including more than one in the machine description unless
1844 the more limited one has some advantage. */
1846 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1847 enum machine_mode mode;
1849 /* Since this is a move insn, we don't care about volatility. */
1850 volatile_ok = 1;
1852 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1853 mode = GET_MODE_WIDER_MODE (mode))
1855 enum insn_code code = movstr_optab[(int) mode];
1856 insn_operand_predicate_fn pred;
1858 if (code != CODE_FOR_nothing
1859 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1860 here because if SIZE is less than the mode mask, as it is
1861 returned by the macro, it will definitely be less than the
1862 actual mode mask. */
1863 && ((GET_CODE (size) == CONST_INT
1864 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1865 <= (GET_MODE_MASK (mode) >> 1)))
1866 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1867 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1868 || (*pred) (x, BLKmode))
1869 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1870 || (*pred) (y, BLKmode))
1871 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1872 || (*pred) (opalign, VOIDmode)))
1874 rtx op2;
1875 rtx last = get_last_insn ();
1876 rtx pat;
1878 op2 = convert_to_mode (mode, size, 1);
1879 pred = insn_data[(int) code].operand[2].predicate;
1880 if (pred != 0 && ! (*pred) (op2, mode))
1881 op2 = copy_to_mode_reg (mode, op2);
1883 /* ??? When called via emit_block_move_for_call, it'd be
1884 nice if there were some way to inform the backend, so
1885 that it doesn't fail the expansion because it thinks
1886 emitting the libcall would be more efficient. */
1888 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1889 if (pat)
1891 emit_insn (pat);
1892 volatile_ok = 0;
1893 return true;
1895 else
1896 delete_insns_since (last);
1900 volatile_ok = 0;
1901 return false;
1904 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1905 Return the return value from memcpy, 0 otherwise. */
1907 static rtx
1908 emit_block_move_via_libcall (dst, src, size)
1909 rtx dst, src, size;
1911 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1912 enum machine_mode size_mode;
1913 rtx retval;
1915 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1917 It is unsafe to save the value generated by protect_from_queue
1918 and reuse it later. Consider what happens if emit_queue is
1919 called before the return value from protect_from_queue is used.
1921 Expansion of the CALL_EXPR below will call emit_queue before
1922 we are finished emitting RTL for argument setup. So if we are
1923 not careful we could get the wrong value for an argument.
1925 To avoid this problem we go ahead and emit code to copy X, Y &
1926 SIZE into new pseudos. We can then place those new pseudos
1927 into an RTL_EXPR and use them later, even after a call to
1928 emit_queue.
1930 Note this is not strictly needed for library calls since they
1931 do not call emit_queue before loading their arguments. However,
1932 we may need to have library calls call emit_queue in the future
1933 since failing to do so could cause problems for targets which
1934 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1936 dst = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1937 src = copy_to_mode_reg (Pmode, XEXP (src, 0));
1939 if (TARGET_MEM_FUNCTIONS)
1940 size_mode = TYPE_MODE (sizetype);
1941 else
1942 size_mode = TYPE_MODE (unsigned_type_node);
1943 size = convert_to_mode (size_mode, size, 1);
1944 size = copy_to_mode_reg (size_mode, size);
1946 /* It is incorrect to use the libcall calling conventions to call
1947 memcpy in this context. This could be a user call to memcpy and
1948 the user may wish to examine the return value from memcpy. For
1949 targets where libcalls and normal calls have different conventions
1950 for returning pointers, we could end up generating incorrect code.
1952 For convenience, we generate the call to bcopy this way as well. */
1954 dst_tree = make_tree (ptr_type_node, dst);
1955 src_tree = make_tree (ptr_type_node, src);
1956 if (TARGET_MEM_FUNCTIONS)
1957 size_tree = make_tree (sizetype, size);
1958 else
1959 size_tree = make_tree (unsigned_type_node, size);
1961 fn = emit_block_move_libcall_fn (true);
1962 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1963 if (TARGET_MEM_FUNCTIONS)
1965 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1966 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1968 else
1970 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1971 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1974 /* Now we have to build up the CALL_EXPR itself. */
1975 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1976 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1977 call_expr, arg_list, NULL_TREE);
1978 TREE_SIDE_EFFECTS (call_expr) = 1;
1980 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1982 /* If we are initializing a readonly value, show the above call
1983 clobbered it. Otherwise, a load from it may erroneously be
1984 hoisted from a loop. */
1985 if (RTX_UNCHANGING_P (dst))
1986 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
1988 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
1991 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1992 for the function we use for block copies. The first time FOR_CALL
1993 is true, we call assemble_external. */
1995 static GTY(()) tree block_move_fn;
1997 static tree
1998 emit_block_move_libcall_fn (for_call)
1999 int for_call;
2001 static bool emitted_extern;
2002 tree fn = block_move_fn, args;
2004 if (!fn)
2006 if (TARGET_MEM_FUNCTIONS)
2008 fn = get_identifier ("memcpy");
2009 args = build_function_type_list (ptr_type_node, ptr_type_node,
2010 const_ptr_type_node, sizetype,
2011 NULL_TREE);
2013 else
2015 fn = get_identifier ("bcopy");
2016 args = build_function_type_list (void_type_node, const_ptr_type_node,
2017 ptr_type_node, unsigned_type_node,
2018 NULL_TREE);
2021 fn = build_decl (FUNCTION_DECL, fn, args);
2022 DECL_EXTERNAL (fn) = 1;
2023 TREE_PUBLIC (fn) = 1;
2024 DECL_ARTIFICIAL (fn) = 1;
2025 TREE_NOTHROW (fn) = 1;
2027 block_move_fn = fn;
2030 if (for_call && !emitted_extern)
2032 emitted_extern = true;
2033 make_decl_rtl (fn, NULL);
2034 assemble_external (fn);
2037 return fn;
2040 /* A subroutine of emit_block_move. Copy the data via an explicit
2041 loop. This is used only when libcalls are forbidden. */
2042 /* ??? It'd be nice to copy in hunks larger than QImode. */
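/* A rough sketch of the code generated below, expressed in C for clarity
   (a byte-at-a-time copy; the actual output is RTL):

       iter = 0;
       goto cmp;
     top:
       ((char *) x)[iter] = ((char *) y)[iter];
       iter += 1;
     cmp:
       if (iter < size)
         goto top;  */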
2044 static void
2045 emit_block_move_via_loop (x, y, size, align)
2046 rtx x, y, size;
2047 unsigned int align ATTRIBUTE_UNUSED;
2049 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
2050 enum machine_mode iter_mode;
2052 iter_mode = GET_MODE (size);
2053 if (iter_mode == VOIDmode)
2054 iter_mode = word_mode;
2056 top_label = gen_label_rtx ();
2057 cmp_label = gen_label_rtx ();
2058 iter = gen_reg_rtx (iter_mode);
2060 emit_move_insn (iter, const0_rtx);
2062 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
2063 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
2064 do_pending_stack_adjust ();
2066 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2068 emit_jump (cmp_label);
2069 emit_label (top_label);
2071 tmp = convert_modes (Pmode, iter_mode, iter, true);
2072 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
2073 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
2074 x = change_address (x, QImode, x_addr);
2075 y = change_address (y, QImode, y_addr);
2077 emit_move_insn (x, y);
2079 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
2080 true, OPTAB_LIB_WIDEN);
2081 if (tmp != iter)
2082 emit_move_insn (iter, tmp);
2084 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2085 emit_label (cmp_label);
2087 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
2088 true, top_label);
2090 emit_note (NULL, NOTE_INSN_LOOP_END);
2093 /* Copy all or part of a value X into registers starting at REGNO.
2094 The number of registers to be filled is NREGS. */
2096 void
2097 move_block_to_reg (regno, x, nregs, mode)
2098 int regno;
2099 rtx x;
2100 int nregs;
2101 enum machine_mode mode;
2103 int i;
2104 #ifdef HAVE_load_multiple
2105 rtx pat;
2106 rtx last;
2107 #endif
2109 if (nregs == 0)
2110 return;
2112 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2113 x = validize_mem (force_const_mem (mode, x));
2115 /* See if the machine can do this with a load multiple insn. */
2116 #ifdef HAVE_load_multiple
2117 if (HAVE_load_multiple)
2119 last = get_last_insn ();
2120 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
2121 GEN_INT (nregs));
2122 if (pat)
2124 emit_insn (pat);
2125 return;
2127 else
2128 delete_insns_since (last);
2130 #endif
2132 for (i = 0; i < nregs; i++)
2133 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
2134 operand_subword_force (x, i, mode));
2137 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2138 The number of registers to be filled is NREGS. SIZE indicates the number
2139 of bytes in the object X. */
2141 void
2142 move_block_from_reg (regno, x, nregs, size)
2143 int regno;
2144 rtx x;
2145 int nregs;
2146 int size;
2148 int i;
2149 #ifdef HAVE_store_multiple
2150 rtx pat;
2151 rtx last;
2152 #endif
2153 enum machine_mode mode;
2155 if (nregs == 0)
2156 return;
2158 /* If SIZE is that of a mode no bigger than a word, just use that
2159 mode's store operation. */
2160 if (size <= UNITS_PER_WORD
2161 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
2163 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
2164 return;
2167 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
2168 to the left before storing to memory. Note that the previous test
2169 doesn't handle all cases (e.g. SIZE == 3). */
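/* For example, with UNITS_PER_WORD == 4 and SIZE == 3 the register value is
   shifted left by (4 - 3) * BITS_PER_UNIT == 8 bits, so its three significant
   bytes end up in the low-addressed (most significant) part of the word that
   is stored to memory.  */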
2170 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
2172 rtx tem = operand_subword (x, 0, 1, BLKmode);
2173 rtx shift;
2175 if (tem == 0)
2176 abort ();
2178 shift = expand_shift (LSHIFT_EXPR, word_mode,
2179 gen_rtx_REG (word_mode, regno),
2180 build_int_2 ((UNITS_PER_WORD - size)
2181 * BITS_PER_UNIT, 0), NULL_RTX, 0);
2182 emit_move_insn (tem, shift);
2183 return;
2186 /* See if the machine can do this with a store multiple insn. */
2187 #ifdef HAVE_store_multiple
2188 if (HAVE_store_multiple)
2190 last = get_last_insn ();
2191 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
2192 GEN_INT (nregs));
2193 if (pat)
2195 emit_insn (pat);
2196 return;
2198 else
2199 delete_insns_since (last);
2201 #endif
2203 for (i = 0; i < nregs; i++)
2205 rtx tem = operand_subword (x, i, 1, BLKmode);
2207 if (tem == 0)
2208 abort ();
2210 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
2214 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2215 ORIG, where ORIG is a non-consecutive group of registers represented by
2216 a PARALLEL. The clone is identical to the original except in that the
2217 original set of registers is replaced by a new set of pseudo registers.
2218 The new set has the same modes as the original set. */
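/* Such a group might look like, e.g.,
     (parallel [(expr_list (reg:SI 3) (const_int 0))
                (expr_list (reg:SI 4) (const_int 4))])
   where each EXPR_LIST pairs a register with its byte offset into the
   block; the register numbers here are only illustrative.  */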
2220 rtx
2221 gen_group_rtx (orig)
2222 rtx orig;
2224 int i, length;
2225 rtx *tmps;
2227 if (GET_CODE (orig) != PARALLEL)
2228 abort ();
2230 length = XVECLEN (orig, 0);
2231 tmps = (rtx *) alloca (sizeof (rtx) * length);
2233 /* Skip a NULL entry in first slot. */
2234 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
2236 if (i)
2237 tmps[0] = 0;
2239 for (; i < length; i++)
2241 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
2242 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
2244 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
2247 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
2250 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
2251 registers represented by a PARALLEL. SSIZE represents the total size of
2252 block SRC in bytes, or -1 if not known. */
2253 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
2254 the balance will be in what would be the low-order memory addresses, i.e.
2255 left justified for big endian, right justified for little endian. This
2256 happens to be true for the targets currently using this support. If this
2257 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
2258 would be needed. */
2260 void
2261 emit_group_load (dst, orig_src, ssize)
2262 rtx dst, orig_src;
2263 int ssize;
2265 rtx *tmps, src;
2266 int start, i;
2268 if (GET_CODE (dst) != PARALLEL)
2269 abort ();
2271 /* Check for a NULL entry, used to indicate that the parameter goes
2272 both on the stack and in registers. */
2273 if (XEXP (XVECEXP (dst, 0, 0), 0))
2274 start = 0;
2275 else
2276 start = 1;
2278 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
2280 /* Process the pieces. */
2281 for (i = start; i < XVECLEN (dst, 0); i++)
2283 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2284 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2285 unsigned int bytelen = GET_MODE_SIZE (mode);
2286 int shift = 0;
2288 /* Handle trailing fragments that run over the size of the struct. */
2289 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2291 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2292 bytelen = ssize - bytepos;
2293 if (bytelen <= 0)
2294 abort ();
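/* For instance, a 12-byte block whose last piece is described with DImode
   at offset 8 gives bytelen == 4 and shift == 32; on a big-endian target the
   four meaningful bytes are shifted into place further below.  */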
2297 /* If we won't be loading directly from memory, protect the real source
2298 from strange tricks we might play; but make sure that the source can
2299 be loaded directly into the destination. */
2300 src = orig_src;
2301 if (GET_CODE (orig_src) != MEM
2302 && (!CONSTANT_P (orig_src)
2303 || (GET_MODE (orig_src) != mode
2304 && GET_MODE (orig_src) != VOIDmode)))
2306 if (GET_MODE (orig_src) == VOIDmode)
2307 src = gen_reg_rtx (mode);
2308 else
2309 src = gen_reg_rtx (GET_MODE (orig_src));
2311 emit_move_insn (src, orig_src);
2314 /* Optimize the access just a bit. */
2315 if (GET_CODE (src) == MEM
2316 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2317 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2318 && bytelen == GET_MODE_SIZE (mode))
2320 tmps[i] = gen_reg_rtx (mode);
2321 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2323 else if (GET_CODE (src) == CONCAT)
2325 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2326 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2328 if ((bytepos == 0 && bytelen == slen0)
2329 || (bytepos != 0 && bytepos + bytelen <= slen))
2331 /* The following assumes that the concatenated objects all
2332 have the same size. In this case, a simple calculation
2333 can be used to determine the object and the bit field
2334 to be extracted. */
2335 tmps[i] = XEXP (src, bytepos / slen0);
2336 if (! CONSTANT_P (tmps[i])
2337 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2338 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2339 (bytepos % slen0) * BITS_PER_UNIT,
2340 1, NULL_RTX, mode, mode, ssize);
2342 else if (bytepos == 0)
2344 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
2345 emit_move_insn (mem, src);
2346 tmps[i] = adjust_address (mem, mode, 0);
2348 else
2349 abort ();
2351 else if (CONSTANT_P (src)
2352 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2353 tmps[i] = src;
2354 else
2355 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2356 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2357 mode, mode, ssize);
2359 if (BYTES_BIG_ENDIAN && shift)
2360 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2361 tmps[i], 0, OPTAB_WIDEN);
2364 emit_queue ();
2366 /* Copy the extracted pieces into the proper (probable) hard regs. */
2367 for (i = start; i < XVECLEN (dst, 0); i++)
2368 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2371 /* Emit code to move a block SRC to block DST, where SRC and DST are
2372 non-consecutive groups of registers, each represented by a PARALLEL. */
2374 void
2375 emit_group_move (dst, src)
2376 rtx dst, src;
2378 int i;
2380 if (GET_CODE (src) != PARALLEL
2381 || GET_CODE (dst) != PARALLEL
2382 || XVECLEN (src, 0) != XVECLEN (dst, 0))
2383 abort ();
2385 /* Skip first entry if NULL. */
2386 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2387 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2388 XEXP (XVECEXP (src, 0, i), 0));
2391 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2392 registers represented by a PARALLEL. SSIZE represents the total size of
2393 block DST, or -1 if not known. */
2395 void
2396 emit_group_store (orig_dst, src, ssize)
2397 rtx orig_dst, src;
2398 int ssize;
2400 rtx *tmps, dst;
2401 int start, i;
2403 if (GET_CODE (src) != PARALLEL)
2404 abort ();
2406 /* Check for a NULL entry, used to indicate that the parameter goes
2407 both on the stack and in registers. */
2408 if (XEXP (XVECEXP (src, 0, 0), 0))
2409 start = 0;
2410 else
2411 start = 1;
2413 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2415 /* Copy the (probable) hard regs into pseudos. */
2416 for (i = start; i < XVECLEN (src, 0); i++)
2418 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2419 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2420 emit_move_insn (tmps[i], reg);
2422 emit_queue ();
2424 /* If we won't be storing directly into memory, protect the real destination
2425 from strange tricks we might play. */
2426 dst = orig_dst;
2427 if (GET_CODE (dst) == PARALLEL)
2429 rtx temp;
2431 /* We can get a PARALLEL dst if there is a conditional expression in
2432 a return statement. In that case, the dst and src are the same,
2433 so no action is necessary. */
2434 if (rtx_equal_p (dst, src))
2435 return;
2437 /* It is unclear if we can ever reach here, but we may as well handle
2438 it. Allocate a temporary, and split this into a store/load to/from
2439 the temporary. */
2441 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2442 emit_group_store (temp, src, ssize);
2443 emit_group_load (dst, temp, ssize);
2444 return;
2446 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2448 dst = gen_reg_rtx (GET_MODE (orig_dst));
2449 /* Make life a bit easier for combine. */
2450 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2453 /* Process the pieces. */
2454 for (i = start; i < XVECLEN (src, 0); i++)
2456 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2457 enum machine_mode mode = GET_MODE (tmps[i]);
2458 unsigned int bytelen = GET_MODE_SIZE (mode);
2459 rtx dest = dst;
2461 /* Handle trailing fragments that run over the size of the struct. */
2462 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2464 if (BYTES_BIG_ENDIAN)
2466 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2467 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2468 tmps[i], 0, OPTAB_WIDEN);
2470 bytelen = ssize - bytepos;
2473 if (GET_CODE (dst) == CONCAT)
2475 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2476 dest = XEXP (dst, 0);
2477 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2479 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2480 dest = XEXP (dst, 1);
2482 else if (bytepos == 0 && XVECLEN (src, 0))
2484 dest = assign_stack_temp (GET_MODE (dest),
2485 GET_MODE_SIZE (GET_MODE (dest)), 0);
2486 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2487 tmps[i]);
2488 dst = dest;
2489 break;
2491 else
2492 abort ();
2495 /* Optimize the access just a bit. */
2496 if (GET_CODE (dest) == MEM
2497 && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2498 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2499 && bytelen == GET_MODE_SIZE (mode))
2500 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2501 else
2502 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2503 mode, tmps[i], ssize);
2506 emit_queue ();
2508 /* Copy from the pseudo into the (probable) hard reg. */
2509 if (orig_dst != dst)
2510 emit_move_insn (orig_dst, dst);
2513 /* Generate code to copy a BLKmode object of TYPE out of a
2514 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2515 is null, a stack temporary is created. TGTBLK is returned.
2517 The primary purpose of this routine is to handle functions
2518 that return BLKmode structures in registers. Some machines
2519 (the PA for example) want to return all small structures
2520 in registers regardless of the structure's alignment. */
2522 rtx
2523 copy_blkmode_from_reg (tgtblk, srcreg, type)
2524 rtx tgtblk;
2525 rtx srcreg;
2526 tree type;
2528 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2529 rtx src = NULL, dst = NULL;
2530 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2531 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2533 if (tgtblk == 0)
2535 tgtblk = assign_temp (build_qualified_type (type,
2536 (TYPE_QUALS (type)
2537 | TYPE_QUAL_CONST)),
2538 0, 1, 1);
2539 preserve_temp_slots (tgtblk);
2542 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2543 into a new pseudo which is a full word. */
2545 if (GET_MODE (srcreg) != BLKmode
2546 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2547 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2549 /* Structures whose size is not a multiple of a word are aligned
2550 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2551 machine, this means we must skip the empty high order bytes when
2552 calculating the bit offset. */
2553 if (BYTES_BIG_ENDIAN
2554 && bytes % UNITS_PER_WORD)
2555 big_endian_correction
2556 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
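/* For instance, a 3-byte structure on a 32-bit big-endian machine occupies
   the low-order 24 bits of its register, so big_endian_correction is
   32 - 3 * 8 == 8 and the first extraction skips the unused high-order
   byte.  */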
2558 /* Copy the structure BITSIZE bits at a time.
2560 We could probably emit more efficient code for machines which do not use
2561 strict alignment, but it doesn't seem worth the effort at the current
2562 time. */
2563 for (bitpos = 0, xbitpos = big_endian_correction;
2564 bitpos < bytes * BITS_PER_UNIT;
2565 bitpos += bitsize, xbitpos += bitsize)
2567 /* We need a new source operand each time xbitpos is on a
2568 word boundary and when xbitpos == big_endian_correction
2569 (the first time through). */
2570 if (xbitpos % BITS_PER_WORD == 0
2571 || xbitpos == big_endian_correction)
2572 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2573 GET_MODE (srcreg));
2575 /* We need a new destination operand each time bitpos is on
2576 a word boundary. */
2577 if (bitpos % BITS_PER_WORD == 0)
2578 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2580 /* Use xbitpos for the source extraction (right justified) and
2581 bitpos for the destination store (left justified). */
2582 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2583 extract_bit_field (src, bitsize,
2584 xbitpos % BITS_PER_WORD, 1,
2585 NULL_RTX, word_mode, word_mode,
2586 BITS_PER_WORD),
2587 BITS_PER_WORD);
2590 return tgtblk;
2593 /* Add a USE expression for REG to the (possibly empty) list pointed
2594 to by CALL_FUSAGE. REG must denote a hard register. */
2596 void
2597 use_reg (call_fusage, reg)
2598 rtx *call_fusage, reg;
2600 if (GET_CODE (reg) != REG
2601 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2602 abort ();
2604 *call_fusage
2605 = gen_rtx_EXPR_LIST (VOIDmode,
2606 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2609 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2610 starting at REGNO. All of these registers must be hard registers. */
2612 void
2613 use_regs (call_fusage, regno, nregs)
2614 rtx *call_fusage;
2615 int regno;
2616 int nregs;
2618 int i;
2620 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2621 abort ();
2623 for (i = 0; i < nregs; i++)
2624 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2627 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2628 PARALLEL REGS. This is for calls that pass values in multiple
2629 non-contiguous locations. The Irix 6 ABI has examples of this. */
2631 void
2632 use_group_regs (call_fusage, regs)
2633 rtx *call_fusage;
2634 rtx regs;
2636 int i;
2638 for (i = 0; i < XVECLEN (regs, 0); i++)
2640 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2642 /* A NULL entry means the parameter goes both on the stack and in
2643 registers. This can also be a MEM for targets that pass values
2644 partially on the stack and partially in registers. */
2645 if (reg != 0 && GET_CODE (reg) == REG)
2646 use_reg (call_fusage, reg);
2651 /* Determine whether the LEN bytes generated by CONSTFUN can be
2652 stored to memory using several move instructions. CONSTFUNDATA is
2653 a pointer which will be passed as argument in every CONSTFUN call.
2654 ALIGN is maximum alignment we can assume. Return nonzero if a
2655 call to store_by_pieces should succeed. */
2657 int
2658 can_store_by_pieces (len, constfun, constfundata, align)
2659 unsigned HOST_WIDE_INT len;
2660 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2661 PTR constfundata;
2662 unsigned int align;
2664 unsigned HOST_WIDE_INT max_size, l;
2665 HOST_WIDE_INT offset = 0;
2666 enum machine_mode mode, tmode;
2667 enum insn_code icode;
2668 int reverse;
2669 rtx cst;
2671 if (! STORE_BY_PIECES_P (len, align))
2672 return 0;
2674 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2675 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2676 align = MOVE_MAX * BITS_PER_UNIT;
2678 /* We would first store what we can in the largest integer mode, then go to
2679 successively smaller modes. */
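/* E.g. with 32-bit words and sufficient alignment, a 7-byte length would be
   checked as one SImode piece, one HImode piece and one QImode piece.  */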
2681 for (reverse = 0;
2682 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2683 reverse++)
2685 l = len;
2686 mode = VOIDmode;
2687 max_size = STORE_MAX_PIECES + 1;
2688 while (max_size > 1)
2690 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2691 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2692 if (GET_MODE_SIZE (tmode) < max_size)
2693 mode = tmode;
2695 if (mode == VOIDmode)
2696 break;
2698 icode = mov_optab->handlers[(int) mode].insn_code;
2699 if (icode != CODE_FOR_nothing
2700 && align >= GET_MODE_ALIGNMENT (mode))
2702 unsigned int size = GET_MODE_SIZE (mode);
2704 while (l >= size)
2706 if (reverse)
2707 offset -= size;
2709 cst = (*constfun) (constfundata, offset, mode);
2710 if (!LEGITIMATE_CONSTANT_P (cst))
2711 return 0;
2713 if (!reverse)
2714 offset += size;
2716 l -= size;
2720 max_size = GET_MODE_SIZE (mode);
2723 /* The code above should have handled everything. */
2724 if (l != 0)
2725 abort ();
2728 return 1;
2731 /* Generate several move instructions to store LEN bytes generated by
2732 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2733 pointer which will be passed as argument in every CONSTFUN call.
2734 ALIGN is maximum alignment we can assume. */
2736 void
2737 store_by_pieces (to, len, constfun, constfundata, align)
2738 rtx to;
2739 unsigned HOST_WIDE_INT len;
2740 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2741 PTR constfundata;
2742 unsigned int align;
2744 struct store_by_pieces data;
2746 if (! STORE_BY_PIECES_P (len, align))
2747 abort ();
2748 to = protect_from_queue (to, 1);
2749 data.constfun = constfun;
2750 data.constfundata = constfundata;
2751 data.len = len;
2752 data.to = to;
2753 store_by_pieces_1 (&data, align);
2756 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2757 rtx with BLKmode). The caller must pass TO through protect_from_queue
2758 before calling. ALIGN is maximum alignment we can assume. */
2760 static void
2761 clear_by_pieces (to, len, align)
2762 rtx to;
2763 unsigned HOST_WIDE_INT len;
2764 unsigned int align;
2766 struct store_by_pieces data;
2768 data.constfun = clear_by_pieces_1;
2769 data.constfundata = NULL;
2770 data.len = len;
2771 data.to = to;
2772 store_by_pieces_1 (&data, align);
2775 /* Callback routine for clear_by_pieces.
2776 Return const0_rtx unconditionally. */
2778 static rtx
2779 clear_by_pieces_1 (data, offset, mode)
2780 PTR data ATTRIBUTE_UNUSED;
2781 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2782 enum machine_mode mode ATTRIBUTE_UNUSED;
2784 return const0_rtx;
2787 /* Subroutine of clear_by_pieces and store_by_pieces.
2788 Generate several move instructions to store LEN bytes of block TO. (A MEM
2789 rtx with BLKmode). The caller must pass TO through protect_from_queue
2790 before calling. ALIGN is maximum alignment we can assume. */
2792 static void
2793 store_by_pieces_1 (data, align)
2794 struct store_by_pieces *data;
2795 unsigned int align;
2797 rtx to_addr = XEXP (data->to, 0);
2798 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2799 enum machine_mode mode = VOIDmode, tmode;
2800 enum insn_code icode;
2802 data->offset = 0;
2803 data->to_addr = to_addr;
2804 data->autinc_to
2805 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2806 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2808 data->explicit_inc_to = 0;
2809 data->reverse
2810 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2811 if (data->reverse)
2812 data->offset = data->len;
2814 /* If storing requires more than two move insns,
2815 copy addresses to registers (to make displacements shorter)
2816 and use post-increment if available. */
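/* In the code below, EXPLICIT_INC_TO becomes -1 when explicit pre-decrements
   of the address register are to be emitted and +1 for explicit
   post-increments; store_by_pieces_2 emits the matching add insns around
   each store.  */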
2817 if (!data->autinc_to
2818 && move_by_pieces_ninsns (data->len, align) > 2)
2820 /* Determine the main mode we'll be using. */
2821 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2822 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2823 if (GET_MODE_SIZE (tmode) < max_size)
2824 mode = tmode;
2826 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2828 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2829 data->autinc_to = 1;
2830 data->explicit_inc_to = -1;
2833 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2834 && ! data->autinc_to)
2836 data->to_addr = copy_addr_to_reg (to_addr);
2837 data->autinc_to = 1;
2838 data->explicit_inc_to = 1;
2841 if ( !data->autinc_to && CONSTANT_P (to_addr))
2842 data->to_addr = copy_addr_to_reg (to_addr);
2845 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2846 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2847 align = MOVE_MAX * BITS_PER_UNIT;
2849 /* First store what we can in the largest integer mode, then go to
2850 successively smaller modes. */
2852 while (max_size > 1)
2854 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2855 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2856 if (GET_MODE_SIZE (tmode) < max_size)
2857 mode = tmode;
2859 if (mode == VOIDmode)
2860 break;
2862 icode = mov_optab->handlers[(int) mode].insn_code;
2863 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2864 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2866 max_size = GET_MODE_SIZE (mode);
2869 /* The code above should have handled everything. */
2870 if (data->len != 0)
2871 abort ();
2874 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2875 with move instructions for mode MODE. GENFUN is the gen_... function
2876 to make a move insn for that mode. DATA has all the other info. */
2878 static void
2879 store_by_pieces_2 (genfun, mode, data)
2880 rtx (*genfun) PARAMS ((rtx, ...));
2881 enum machine_mode mode;
2882 struct store_by_pieces *data;
2884 unsigned int size = GET_MODE_SIZE (mode);
2885 rtx to1, cst;
2887 while (data->len >= size)
2889 if (data->reverse)
2890 data->offset -= size;
2892 if (data->autinc_to)
2893 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2894 data->offset);
2895 else
2896 to1 = adjust_address (data->to, mode, data->offset);
2898 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2899 emit_insn (gen_add2_insn (data->to_addr,
2900 GEN_INT (-(HOST_WIDE_INT) size)));
2902 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2903 emit_insn ((*genfun) (to1, cst));
2905 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2906 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2908 if (! data->reverse)
2909 data->offset += size;
2911 data->len -= size;
2915 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2916 its length in bytes. */
2918 rtx
2919 clear_storage (object, size)
2920 rtx object;
2921 rtx size;
2923 rtx retval = 0;
2924 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2925 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2927 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2928 just move a zero. Otherwise, do this a piece at a time. */
2929 if (GET_MODE (object) != BLKmode
2930 && GET_CODE (size) == CONST_INT
2931 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2932 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2933 else
2935 object = protect_from_queue (object, 1);
2936 size = protect_from_queue (size, 0);
2938 if (GET_CODE (size) == CONST_INT
2939 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2940 clear_by_pieces (object, INTVAL (size), align);
2941 else if (clear_storage_via_clrstr (object, size, align))
2943 else
2944 retval = clear_storage_via_libcall (object, size);
2947 return retval;
2950 /* A subroutine of clear_storage. Expand a clrstr pattern;
2951 return true if successful. */
2953 static bool
2954 clear_storage_via_clrstr (object, size, align)
2955 rtx object, size;
2956 unsigned int align;
2958 /* Try the most limited insn first, because there's no point
2959 including more than one in the machine description unless
2960 the more limited one has some advantage. */
2962 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2963 enum machine_mode mode;
2965 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2966 mode = GET_MODE_WIDER_MODE (mode))
2968 enum insn_code code = clrstr_optab[(int) mode];
2969 insn_operand_predicate_fn pred;
2971 if (code != CODE_FOR_nothing
2972 /* We don't need MODE to be narrower than
2973 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2974 the mode mask, as it is returned by the macro, it will
2975 definitely be less than the actual mode mask. */
2976 && ((GET_CODE (size) == CONST_INT
2977 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2978 <= (GET_MODE_MASK (mode) >> 1)))
2979 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2980 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2981 || (*pred) (object, BLKmode))
2982 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2983 || (*pred) (opalign, VOIDmode)))
2985 rtx op1;
2986 rtx last = get_last_insn ();
2987 rtx pat;
2989 op1 = convert_to_mode (mode, size, 1);
2990 pred = insn_data[(int) code].operand[1].predicate;
2991 if (pred != 0 && ! (*pred) (op1, mode))
2992 op1 = copy_to_mode_reg (mode, op1);
2994 pat = GEN_FCN ((int) code) (object, op1, opalign);
2995 if (pat)
2997 emit_insn (pat);
2998 return true;
3000 else
3001 delete_insns_since (last);
3005 return false;
3008 /* A subroutine of clear_storage. Expand a call to memset or bzero.
3009 Return the return value of memset, 0 otherwise. */
3011 static rtx
3012 clear_storage_via_libcall (object, size)
3013 rtx object, size;
3015 tree call_expr, arg_list, fn, object_tree, size_tree;
3016 enum machine_mode size_mode;
3017 rtx retval;
3019 /* OBJECT or SIZE may have been passed through protect_from_queue.
3021 It is unsafe to save the value generated by protect_from_queue
3022 and reuse it later. Consider what happens if emit_queue is
3023 called before the return value from protect_from_queue is used.
3025 Expansion of the CALL_EXPR below will call emit_queue before
3026 we are finished emitting RTL for argument setup. So if we are
3027 not careful we could get the wrong value for an argument.
3029 To avoid this problem we go ahead and emit code to copy OBJECT
3030 and SIZE into new pseudos. We can then place those new pseudos
3031 into an RTL_EXPR and use them later, even after a call to
3032 emit_queue.
3034 Note this is not strictly needed for library calls since they
3035 do not call emit_queue before loading their arguments. However,
3036 we may need to have library calls call emit_queue in the future
3037 since failing to do so could cause problems for targets which
3038 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
3040 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
3042 if (TARGET_MEM_FUNCTIONS)
3043 size_mode = TYPE_MODE (sizetype);
3044 else
3045 size_mode = TYPE_MODE (unsigned_type_node);
3046 size = convert_to_mode (size_mode, size, 1);
3047 size = copy_to_mode_reg (size_mode, size);
3049 /* It is incorrect to use the libcall calling conventions to call
3050 memset in this context. This could be a user call to memset and
3051 the user may wish to examine the return value from memset. For
3052 targets where libcalls and normal calls have different conventions
3053 for returning pointers, we could end up generating incorrect code.
3055 For convenience, we generate the call to bzero this way as well. */
3057 object_tree = make_tree (ptr_type_node, object);
3058 if (TARGET_MEM_FUNCTIONS)
3059 size_tree = make_tree (sizetype, size);
3060 else
3061 size_tree = make_tree (unsigned_type_node, size);
3063 fn = clear_storage_libcall_fn (true);
3064 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
3065 if (TARGET_MEM_FUNCTIONS)
3066 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3067 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
3069 /* Now we have to build up the CALL_EXPR itself. */
3070 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3071 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3072 call_expr, arg_list, NULL_TREE);
3073 TREE_SIDE_EFFECTS (call_expr) = 1;
3075 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3077 /* If we are initializing a readonly value, show the above call
3078 clobbered it. Otherwise, a load from it may erroneously be
3079 hoisted from a loop. */
3080 if (RTX_UNCHANGING_P (object))
3081 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3083 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
3086 /* A subroutine of clear_storage_via_libcall. Create the tree node
3087 for the function we use for block clears. The first time FOR_CALL
3088 is true, we call assemble_external. */
3090 static GTY(()) tree block_clear_fn;
3092 static tree
3093 clear_storage_libcall_fn (for_call)
3094 int for_call;
3096 static bool emitted_extern;
3097 tree fn = block_clear_fn, args;
3099 if (!fn)
3101 if (TARGET_MEM_FUNCTIONS)
3103 fn = get_identifier ("memset");
3104 args = build_function_type_list (ptr_type_node, ptr_type_node,
3105 integer_type_node, sizetype,
3106 NULL_TREE);
3108 else
3110 fn = get_identifier ("bzero");
3111 args = build_function_type_list (void_type_node, ptr_type_node,
3112 unsigned_type_node, NULL_TREE);
3115 fn = build_decl (FUNCTION_DECL, fn, args);
3116 DECL_EXTERNAL (fn) = 1;
3117 TREE_PUBLIC (fn) = 1;
3118 DECL_ARTIFICIAL (fn) = 1;
3119 TREE_NOTHROW (fn) = 1;
3121 block_clear_fn = fn;
3124 if (for_call && !emitted_extern)
3126 emitted_extern = true;
3127 make_decl_rtl (fn, NULL);
3128 assemble_external (fn);
3131 return fn;
3134 /* Generate code to copy Y into X.
3135 Both Y and X must have the same mode, except that
3136 Y can be a constant with VOIDmode.
3137 This mode cannot be BLKmode; use emit_block_move for that.
3139 Return the last instruction emitted. */
3141 rtx
3142 emit_move_insn (x, y)
3143 rtx x, y;
3145 enum machine_mode mode = GET_MODE (x);
3146 rtx y_cst = NULL_RTX;
3147 rtx last_insn;
3149 x = protect_from_queue (x, 1);
3150 y = protect_from_queue (y, 0);
3152 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3153 abort ();
3155 /* Never force constant_p_rtx to memory. */
3156 if (GET_CODE (y) == CONSTANT_P_RTX)
3158 else if (CONSTANT_P (y))
3160 if (optimize
3161 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3162 && (last_insn = compress_float_constant (x, y)))
3163 return last_insn;
3165 if (!LEGITIMATE_CONSTANT_P (y))
3167 y_cst = y;
3168 y = force_const_mem (mode, y);
3170 /* If the target's cannot_force_const_mem prevented the spill,
3171 assume that the target's move expanders will also take care
3172 of the non-legitimate constant. */
3173 if (!y)
3174 y = y_cst;
3178 /* If X or Y are memory references, verify that their addresses are valid
3179 for the machine. */
3180 if (GET_CODE (x) == MEM
3181 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3182 && ! push_operand (x, GET_MODE (x)))
3183 || (flag_force_addr
3184 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3185 x = validize_mem (x);
3187 if (GET_CODE (y) == MEM
3188 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3189 || (flag_force_addr
3190 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3191 y = validize_mem (y);
3193 if (mode == BLKmode)
3194 abort ();
3196 last_insn = emit_move_insn_1 (x, y);
3198 if (y_cst && GET_CODE (x) == REG)
3199 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3201 return last_insn;
3204 /* Low level part of emit_move_insn.
3205 Called just like emit_move_insn, but assumes X and Y
3206 are basically valid. */
3208 rtx
3209 emit_move_insn_1 (x, y)
3210 rtx x, y;
3212 enum machine_mode mode = GET_MODE (x);
3213 enum machine_mode submode;
3214 enum mode_class class = GET_MODE_CLASS (mode);
3216 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3217 abort ();
3219 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3220 return
3221 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3223 /* Expand complex moves by moving real part and imag part, if possible. */
3224 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3225 && BLKmode != (submode = GET_MODE_INNER (mode))
3226 && (mov_optab->handlers[(int) submode].insn_code
3227 != CODE_FOR_nothing))
3229 /* Don't split destination if it is a stack push. */
3230 int stack = push_operand (x, GET_MODE (x));
3232 #ifdef PUSH_ROUNDING
3233 /* In case we output to the stack, but the size is smaller than what the
3234 machine can push exactly, we need to use move instructions. */
3235 if (stack
3236 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3237 != GET_MODE_SIZE (submode)))
3239 rtx temp;
3240 HOST_WIDE_INT offset1, offset2;
3242 /* Do not use anti_adjust_stack, since we don't want to update
3243 stack_pointer_delta. */
3244 temp = expand_binop (Pmode,
3245 #ifdef STACK_GROWS_DOWNWARD
3246 sub_optab,
3247 #else
3248 add_optab,
3249 #endif
3250 stack_pointer_rtx,
3251 GEN_INT
3252 (PUSH_ROUNDING
3253 (GET_MODE_SIZE (GET_MODE (x)))),
3254 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3256 if (temp != stack_pointer_rtx)
3257 emit_move_insn (stack_pointer_rtx, temp);
3259 #ifdef STACK_GROWS_DOWNWARD
3260 offset1 = 0;
3261 offset2 = GET_MODE_SIZE (submode);
3262 #else
3263 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3264 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3265 + GET_MODE_SIZE (submode));
3266 #endif
3268 emit_move_insn (change_address (x, submode,
3269 gen_rtx_PLUS (Pmode,
3270 stack_pointer_rtx,
3271 GEN_INT (offset1))),
3272 gen_realpart (submode, y));
3273 emit_move_insn (change_address (x, submode,
3274 gen_rtx_PLUS (Pmode,
3275 stack_pointer_rtx,
3276 GEN_INT (offset2))),
3277 gen_imagpart (submode, y));
3279 else
3280 #endif
3281 /* If this is a stack, push the highpart first, so it
3282 will be in the argument order.
3284 In that case, change_address is used only to convert
3285 the mode, not to change the address. */
3286 if (stack)
3288 /* Note that the real part always precedes the imag part in memory
3289 regardless of machine's endianness. */
3290 #ifdef STACK_GROWS_DOWNWARD
3291 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3292 (gen_rtx_MEM (submode, XEXP (x, 0)),
3293 gen_imagpart (submode, y)));
3294 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3295 (gen_rtx_MEM (submode, XEXP (x, 0)),
3296 gen_realpart (submode, y)));
3297 #else
3298 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3299 (gen_rtx_MEM (submode, XEXP (x, 0)),
3300 gen_realpart (submode, y)));
3301 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3302 (gen_rtx_MEM (submode, XEXP (x, 0)),
3303 gen_imagpart (submode, y)));
3304 #endif
3306 else
3308 rtx realpart_x, realpart_y;
3309 rtx imagpart_x, imagpart_y;
3311 /* If this is a complex value with each part being smaller than a
3312 word, the usual calling sequence will likely pack the pieces into
3313 a single register. Unfortunately, SUBREG of hard registers only
3314 deals in terms of words, so we have a problem converting input
3315 arguments to the CONCAT of two registers that is used elsewhere
3316 for complex values. If this is before reload, we can copy it into
3317 memory and reload. FIXME, we should see about using extract and
3318 insert on integer registers, but complex short and complex char
3319 variables should be rarely used. */
3320 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3321 && (reload_in_progress | reload_completed) == 0)
3323 int packed_dest_p
3324 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3325 int packed_src_p
3326 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3328 if (packed_dest_p || packed_src_p)
3330 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3331 ? MODE_FLOAT : MODE_INT);
3333 enum machine_mode reg_mode
3334 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3336 if (reg_mode != BLKmode)
3338 rtx mem = assign_stack_temp (reg_mode,
3339 GET_MODE_SIZE (mode), 0);
3340 rtx cmem = adjust_address (mem, mode, 0);
3342 cfun->cannot_inline
3343 = N_("function using short complex types cannot be inline");
3345 if (packed_dest_p)
3347 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3349 emit_move_insn_1 (cmem, y);
3350 return emit_move_insn_1 (sreg, mem);
3352 else
3354 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3356 emit_move_insn_1 (mem, sreg);
3357 return emit_move_insn_1 (x, cmem);
3363 realpart_x = gen_realpart (submode, x);
3364 realpart_y = gen_realpart (submode, y);
3365 imagpart_x = gen_imagpart (submode, x);
3366 imagpart_y = gen_imagpart (submode, y);
3368 /* Show the output dies here. This is necessary for SUBREGs
3369 of pseudos since we cannot track their lifetimes correctly;
3370 hard regs shouldn't appear here except as return values.
3371 We never want to emit such a clobber after reload. */
3372 if (x != y
3373 && ! (reload_in_progress || reload_completed)
3374 && (GET_CODE (realpart_x) == SUBREG
3375 || GET_CODE (imagpart_x) == SUBREG))
3376 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3378 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3379 (realpart_x, realpart_y));
3380 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3381 (imagpart_x, imagpart_y));
3384 return get_last_insn ();
3387 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3388 find a mode to do it in. If we have a movcc, use it. Otherwise,
3389 find the MODE_INT mode of the same width. */
3390 else if (GET_MODE_CLASS (mode) == MODE_CC
3391 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3393 enum insn_code insn_code;
3394 enum machine_mode tmode = VOIDmode;
3395 rtx x1 = x, y1 = y;
3397 if (mode != CCmode
3398 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3399 tmode = CCmode;
3400 else
3401 for (tmode = QImode; tmode != VOIDmode;
3402 tmode = GET_MODE_WIDER_MODE (tmode))
3403 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3404 break;
3406 if (tmode == VOIDmode)
3407 abort ();
3409 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3410 may call change_address which is not appropriate if we were
3411 called when a reload was in progress. We don't have to worry
3412 about changing the address since the size in bytes is supposed to
3413 be the same. Copy the MEM to change the mode and move any
3414 substitutions from the old MEM to the new one. */
3416 if (reload_in_progress)
3418 x = gen_lowpart_common (tmode, x1);
3419 if (x == 0 && GET_CODE (x1) == MEM)
3421 x = adjust_address_nv (x1, tmode, 0);
3422 copy_replacements (x1, x);
3425 y = gen_lowpart_common (tmode, y1);
3426 if (y == 0 && GET_CODE (y1) == MEM)
3428 y = adjust_address_nv (y1, tmode, 0);
3429 copy_replacements (y1, y);
3432 else
3434 x = gen_lowpart (tmode, x);
3435 y = gen_lowpart (tmode, y);
3438 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3439 return emit_insn (GEN_FCN (insn_code) (x, y));
3442 /* This will handle any multi-word or full-word mode that lacks a move_insn
3443 pattern. However, you will get better code if you define such patterns,
3444 even if they must turn into multiple assembler instructions. */
3445 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3447 rtx last_insn = 0;
3448 rtx seq, inner;
3449 int need_clobber;
3450 int i;
3452 #ifdef PUSH_ROUNDING
3454 /* If X is a push on the stack, do the push now and replace
3455 X with a reference to the stack pointer. */
3456 if (push_operand (x, GET_MODE (x)))
3458 rtx temp;
3459 enum rtx_code code;
3461 /* Do not use anti_adjust_stack, since we don't want to update
3462 stack_pointer_delta. */
3463 temp = expand_binop (Pmode,
3464 #ifdef STACK_GROWS_DOWNWARD
3465 sub_optab,
3466 #else
3467 add_optab,
3468 #endif
3469 stack_pointer_rtx,
3470 GEN_INT
3471 (PUSH_ROUNDING
3472 (GET_MODE_SIZE (GET_MODE (x)))),
3473 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3475 if (temp != stack_pointer_rtx)
3476 emit_move_insn (stack_pointer_rtx, temp);
3478 code = GET_CODE (XEXP (x, 0));
3480 /* Just hope that small offsets off SP are OK. */
3481 if (code == POST_INC)
3482 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3483 GEN_INT (-((HOST_WIDE_INT)
3484 GET_MODE_SIZE (GET_MODE (x)))));
3485 else if (code == POST_DEC)
3486 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3487 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3488 else
3489 temp = stack_pointer_rtx;
3491 x = change_address (x, VOIDmode, temp);
3493 #endif
3495 /* If we are in reload, see if either operand is a MEM whose address
3496 is scheduled for replacement. */
3497 if (reload_in_progress && GET_CODE (x) == MEM
3498 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3499 x = replace_equiv_address_nv (x, inner);
3500 if (reload_in_progress && GET_CODE (y) == MEM
3501 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3502 y = replace_equiv_address_nv (y, inner);
3504 start_sequence ();
3506 need_clobber = 0;
3507 for (i = 0;
3508 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3509 i++)
3511 rtx xpart = operand_subword (x, i, 1, mode);
3512 rtx ypart = operand_subword (y, i, 1, mode);
3514 /* If we can't get a part of Y, put Y into memory if it is a
3515 constant. Otherwise, force it into a register. If we still
3516 can't get a part of Y, abort. */
3517 if (ypart == 0 && CONSTANT_P (y))
3519 y = force_const_mem (mode, y);
3520 ypart = operand_subword (y, i, 1, mode);
3522 else if (ypart == 0)
3523 ypart = operand_subword_force (y, i, mode);
3525 if (xpart == 0 || ypart == 0)
3526 abort ();
3528 need_clobber |= (GET_CODE (xpart) == SUBREG);
3530 last_insn = emit_move_insn (xpart, ypart);
3533 seq = get_insns ();
3534 end_sequence ();
3536 /* Show the output dies here. This is necessary for SUBREGs
3537 of pseudos since we cannot track their lifetimes correctly;
3538 hard regs shouldn't appear here except as return values.
3539 We never want to emit such a clobber after reload. */
3540 if (x != y
3541 && ! (reload_in_progress || reload_completed)
3542 && need_clobber != 0)
3543 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3545 emit_insn (seq);
3547 return last_insn;
3549 else
3550 abort ();
3553 /* If Y is representable exactly in a narrower mode, and the target can
3554 perform the extension directly from constant or memory, then emit the
3555 move as an extension. */
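/* For example, the DFmode constant 1.5 is exactly representable in SFmode;
   on a target providing an extendsfdf2 pattern that accepts a constant or
   memory operand, the move can then be emitted as an SFmode-to-DFmode
   extension rather than a load of the full DFmode constant.  */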
3557 static rtx
3558 compress_float_constant (x, y)
3559 rtx x, y;
3561 enum machine_mode dstmode = GET_MODE (x);
3562 enum machine_mode orig_srcmode = GET_MODE (y);
3563 enum machine_mode srcmode;
3564 REAL_VALUE_TYPE r;
3566 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3568 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3569 srcmode != orig_srcmode;
3570 srcmode = GET_MODE_WIDER_MODE (srcmode))
3572 enum insn_code ic;
3573 rtx trunc_y, last_insn;
3575 /* Skip if the target can't extend this way. */
3576 ic = can_extend_p (dstmode, srcmode, 0);
3577 if (ic == CODE_FOR_nothing)
3578 continue;
3580 /* Skip if the narrowed value isn't exact. */
3581 if (! exact_real_truncate (srcmode, &r))
3582 continue;
3584 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3586 if (LEGITIMATE_CONSTANT_P (trunc_y))
3588 /* Skip if the target needs extra instructions to perform
3589 the extension. */
3590 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3591 continue;
3593 else if (float_extend_from_mem[dstmode][srcmode])
3594 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3595 else
3596 continue;
3598 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3599 last_insn = get_last_insn ();
3601 if (GET_CODE (x) == REG)
3602 REG_NOTES (last_insn)
3603 = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
3605 return last_insn;
3608 return NULL_RTX;
3611 /* Pushing data onto the stack. */
3613 /* Push a block of length SIZE (perhaps variable)
3614 and return an rtx to address the beginning of the block.
3615 Note that it is not possible for the value returned to be a QUEUED.
3616 The value may be virtual_outgoing_args_rtx.
3618 EXTRA is the number of bytes of padding to push in addition to SIZE.
3619 BELOW nonzero means this padding comes at low addresses;
3620 otherwise, the padding comes at high addresses. */
3622 rtx
3623 push_block (size, extra, below)
3624 rtx size;
3625 int extra, below;
3627 rtx temp;
3629 size = convert_modes (Pmode, ptr_mode, size, 1);
3630 if (CONSTANT_P (size))
3631 anti_adjust_stack (plus_constant (size, extra));
3632 else if (GET_CODE (size) == REG && extra == 0)
3633 anti_adjust_stack (size);
3634 else
3636 temp = copy_to_mode_reg (Pmode, size);
3637 if (extra != 0)
3638 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3639 temp, 0, OPTAB_LIB_WIDEN);
3640 anti_adjust_stack (temp);
3643 #ifndef STACK_GROWS_DOWNWARD
3644 if (0)
3645 #else
3646 if (1)
3647 #endif
3649 temp = virtual_outgoing_args_rtx;
3650 if (extra != 0 && below)
3651 temp = plus_constant (temp, extra);
3653 else
3655 if (GET_CODE (size) == CONST_INT)
3656 temp = plus_constant (virtual_outgoing_args_rtx,
3657 -INTVAL (size) - (below ? 0 : extra));
3658 else if (extra != 0 && !below)
3659 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3660 negate_rtx (Pmode, plus_constant (size, extra)));
3661 else
3662 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3663 negate_rtx (Pmode, size));
3666 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3669 #ifdef PUSH_ROUNDING
3671 /* Emit single push insn. */
3673 static void
3674 emit_single_push_insn (mode, x, type)
3675 rtx x;
3676 enum machine_mode mode;
3677 tree type;
3679 rtx dest_addr;
3680 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3681 rtx dest;
3682 enum insn_code icode;
3683 insn_operand_predicate_fn pred;
3685 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3686 /* If there is a push pattern, use it. Otherwise fall back to the old way of
3687 handing a MEM representing the push operation to the move expander. */
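/* PUSH_ROUNDING may widen the slot: e.g. on a hypothetical target that keeps
   the stack 4-byte aligned, pushing an HImode value gives rounded_size == 4,
   and the destination address is then formed with an explicit PRE_MODIFY of
   the stack pointer instead of a plain STACK_PUSH_CODE address.  */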
3688 icode = push_optab->handlers[(int) mode].insn_code;
3689 if (icode != CODE_FOR_nothing)
3691 if (((pred = insn_data[(int) icode].operand[0].predicate)
3692 && !((*pred) (x, mode))))
3693 x = force_reg (mode, x);
3694 emit_insn (GEN_FCN (icode) (x));
3695 return;
3697 if (GET_MODE_SIZE (mode) == rounded_size)
3698 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3699 else
3701 #ifdef STACK_GROWS_DOWNWARD
3702 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3703 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3704 #else
3705 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3706 GEN_INT (rounded_size));
3707 #endif
3708 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3711 dest = gen_rtx_MEM (mode, dest_addr);
3713 if (type != 0)
3715 set_mem_attributes (dest, type, 1);
3717 if (flag_optimize_sibling_calls)
3718 /* Function incoming arguments may overlap with sibling call
3719 outgoing arguments and we cannot allow reordering of reads
3720 from function arguments with stores to outgoing arguments
3721 of sibling calls. */
3722 set_mem_alias_set (dest, 0);
3724 emit_move_insn (dest, x);
3726 #endif
3728 /* Generate code to push X onto the stack, assuming it has mode MODE and
3729 type TYPE.
3730 MODE is redundant except when X is a CONST_INT (since they don't
3731 carry mode info).
3732 SIZE is an rtx for the size of data to be copied (in bytes),
3733 needed only if X is BLKmode.
3735 ALIGN (in bits) is maximum alignment we can assume.
3737 If PARTIAL and REG are both nonzero, then copy that many of the first
3738 words of X into registers starting with REG, and push the rest of X.
3739 The amount of space pushed is decreased by PARTIAL words,
3740 rounded *down* to a multiple of PARM_BOUNDARY.
3741 REG must be a hard register in this case.
3742 If REG is zero but PARTIAL is not, take all other actions for an
3743 argument partially in registers, but do not actually load any
3744 registers.
3746 EXTRA is the amount in bytes of extra space to leave next to this arg.
3747 This is ignored if an argument block has already been allocated.
3749 On a machine that lacks real push insns, ARGS_ADDR is the address of
3750 the bottom of the argument block for this call. We use indexing off there
3751 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3752 argument block has not been preallocated.
3754 ARGS_SO_FAR is the size of args previously pushed for this call.
3756 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3757 for arguments passed in registers. If nonzero, it will be the number
3758 of bytes required. */
3760 void
3761 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3762 args_addr, args_so_far, reg_parm_stack_space,
3763 alignment_pad)
3764 rtx x;
3765 enum machine_mode mode;
3766 tree type;
3767 rtx size;
3768 unsigned int align;
3769 int partial;
3770 rtx reg;
3771 int extra;
3772 rtx args_addr;
3773 rtx args_so_far;
3774 int reg_parm_stack_space;
3775 rtx alignment_pad;
3777 rtx xinner;
3778 enum direction stack_direction
3779 #ifdef STACK_GROWS_DOWNWARD
3780 = downward;
3781 #else
3782 = upward;
3783 #endif
3785 /* Decide where to pad the argument: `downward' for below,
3786 `upward' for above, or `none' for don't pad it.
3787 Default is below for small data on big-endian machines; else above. */
3788 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3790 /* Invert direction if stack is post-decrement.
3791 FIXME: why? */
3792 if (STACK_PUSH_CODE == POST_DEC)
3793 if (where_pad != none)
3794 where_pad = (where_pad == downward ? upward : downward);
3796 xinner = x = protect_from_queue (x, 0);
3798 if (mode == BLKmode)
3800 /* Copy a block into the stack, entirely or partially. */
3802 rtx temp;
3803 int used = partial * UNITS_PER_WORD;
3804 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3805 int skip;
3807 if (size == 0)
3808 abort ();
3810 used -= offset;
3812 /* USED is now the # of bytes we need not copy to the stack
3813 because registers will take care of them. */
3815 if (partial != 0)
3816 xinner = adjust_address (xinner, BLKmode, used);
3818 /* If the partial register-part of the arg counts in its stack size,
3819 skip the part of stack space corresponding to the registers.
3820 Otherwise, start copying to the beginning of the stack space,
3821 by setting SKIP to 0. */
3822 skip = (reg_parm_stack_space == 0) ? 0 : used;
3824 #ifdef PUSH_ROUNDING
3825 /* Do it with several push insns if that doesn't take lots of insns
3826 and if there is no difficulty with push insns that skip bytes
3827 on the stack for alignment purposes. */
3828 if (args_addr == 0
3829 && PUSH_ARGS
3830 && GET_CODE (size) == CONST_INT
3831 && skip == 0
3832 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3833 /* Here we avoid the case of a structure whose weak alignment
3834 forces many pushes of a small amount of data,
3835 and such small pushes do rounding that causes trouble. */
3836 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3837 || align >= BIGGEST_ALIGNMENT
3838 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3839 == (align / BITS_PER_UNIT)))
3840 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3842 /* Push padding now if padding above and stack grows down,
3843 or if padding below and stack grows up.
3844 But if space already allocated, this has already been done. */
3845 if (extra && args_addr == 0
3846 && where_pad != none && where_pad != stack_direction)
3847 anti_adjust_stack (GEN_INT (extra));
3849 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3851 else
3852 #endif /* PUSH_ROUNDING */
3854 rtx target;
3856 /* Otherwise make space on the stack and copy the data
3857 to the address of that space. */
3859 /* Deduct words put into registers from the size we must copy. */
3860 if (partial != 0)
3862 if (GET_CODE (size) == CONST_INT)
3863 size = GEN_INT (INTVAL (size) - used);
3864 else
3865 size = expand_binop (GET_MODE (size), sub_optab, size,
3866 GEN_INT (used), NULL_RTX, 0,
3867 OPTAB_LIB_WIDEN);
3870 /* Get the address of the stack space.
3871 In this case, we do not deal with EXTRA separately.
3872 A single stack adjust will do. */
3873 if (! args_addr)
3875 temp = push_block (size, extra, where_pad == downward);
3876 extra = 0;
3878 else if (GET_CODE (args_so_far) == CONST_INT)
3879 temp = memory_address (BLKmode,
3880 plus_constant (args_addr,
3881 skip + INTVAL (args_so_far)));
3882 else
3883 temp = memory_address (BLKmode,
3884 plus_constant (gen_rtx_PLUS (Pmode,
3885 args_addr,
3886 args_so_far),
3887 skip));
3889 if (!ACCUMULATE_OUTGOING_ARGS)
3891 /* If the source is referenced relative to the stack pointer,
3892 copy it to another register to stabilize it. We do not need
3893 to do this if we know that we won't be changing sp. */
3895 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3896 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3897 temp = copy_to_reg (temp);
3900 target = gen_rtx_MEM (BLKmode, temp);
3902 if (type != 0)
3904 set_mem_attributes (target, type, 1);
3905 /* Function incoming arguments may overlap with sibling call
3906 outgoing arguments and we cannot allow reordering of reads
3907 from function arguments with stores to outgoing arguments
3908 of sibling calls. */
3909 set_mem_alias_set (target, 0);
3912 /* ALIGN may well be better aligned than TYPE, e.g. due to
3913 PARM_BOUNDARY. Assume the caller isn't lying. */
3914 set_mem_align (target, align);
3916 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3919 else if (partial > 0)
3921 /* Scalar partly in registers. */
3923 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3924 int i;
3925 int not_stack;
3926 /* # words of start of argument
3927 that we must make space for but need not store. */
3928 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3929 int args_offset = INTVAL (args_so_far);
3930 int skip;
3932 /* Push padding now if padding above and stack grows down,
3933 or if padding below and stack grows up.
3934 But if space already allocated, this has already been done. */
3935 if (extra && args_addr == 0
3936 && where_pad != none && where_pad != stack_direction)
3937 anti_adjust_stack (GEN_INT (extra));
3939 /* If we make space by pushing it, we might as well push
3940 the real data. Otherwise, we can leave OFFSET nonzero
3941 and leave the space uninitialized. */
3942 if (args_addr == 0)
3943 offset = 0;
3945 /* Now NOT_STACK gets the number of words that we don't need to
3946 allocate on the stack. */
3947 not_stack = partial - offset;
3949 /* If the partial register-part of the arg counts in its stack size,
3950 skip the part of stack space corresponding to the registers.
3951 Otherwise, start copying to the beginning of the stack space,
3952 by setting SKIP to 0. */
3953 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3955 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3956 x = validize_mem (force_const_mem (mode, x));
3958 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3959 SUBREGs of such registers are not allowed. */
3960 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3961 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3962 x = copy_to_reg (x);
3964 /* Loop over all the words allocated on the stack for this arg. */
3965 /* We can do it by words, because any scalar bigger than a word
3966 has a size that is a multiple of a word.  */
3967 #ifndef PUSH_ARGS_REVERSED
3968 for (i = not_stack; i < size; i++)
3969 #else
3970 for (i = size - 1; i >= not_stack; i--)
3971 #endif
3972 if (i >= not_stack + offset)
3973 emit_push_insn (operand_subword_force (x, i, mode),
3974 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3975 0, args_addr,
3976 GEN_INT (args_offset + ((i - not_stack + skip)
3977 * UNITS_PER_WORD)),
3978 reg_parm_stack_space, alignment_pad);
3980 else
3982 rtx addr;
3983 rtx dest;
3985 /* Push padding now if padding above and stack grows down,
3986 or if padding below and stack grows up.
3987 But if space already allocated, this has already been done. */
3988 if (extra && args_addr == 0
3989 && where_pad != none && where_pad != stack_direction)
3990 anti_adjust_stack (GEN_INT (extra));
3992 #ifdef PUSH_ROUNDING
3993 if (args_addr == 0 && PUSH_ARGS)
3994 emit_single_push_insn (mode, x, type);
3995 else
3996 #endif
3998 if (GET_CODE (args_so_far) == CONST_INT)
3999 addr
4000 = memory_address (mode,
4001 plus_constant (args_addr,
4002 INTVAL (args_so_far)));
4003 else
4004 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4005 args_so_far));
4006 dest = gen_rtx_MEM (mode, addr);
4007 if (type != 0)
4009 set_mem_attributes (dest, type, 1);
4010 /* Function incoming arguments may overlap with sibling call
4011 outgoing arguments and we cannot allow reordering of reads
4012 from function arguments with stores to outgoing arguments
4013 of sibling calls. */
4014 set_mem_alias_set (dest, 0);
4017 emit_move_insn (dest, x);
4021 /* If part should go in registers, copy that part
4022 into the appropriate registers. Do this now, at the end,
4023 since mem-to-mem copies above may do function calls. */
4024 if (partial > 0 && reg != 0)
4026 /* Handle calls that pass values in multiple non-contiguous locations.
4027 The Irix 6 ABI has examples of this. */
4028 if (GET_CODE (reg) == PARALLEL)
4029 emit_group_load (reg, x, -1); /* ??? size? */
4030 else
4031 move_block_to_reg (REGNO (reg), x, partial, mode);
4034 if (extra && args_addr == 0 && where_pad == stack_direction)
4035 anti_adjust_stack (GEN_INT (extra));
4037 if (alignment_pad && args_addr == 0)
4038 anti_adjust_stack (alignment_pad);
4041 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4042 operations. */
4044 static rtx
4045 get_subtarget (x)
4046 rtx x;
4048 return ((x == 0
4049 /* Only registers can be subtargets. */
4050 || GET_CODE (x) != REG
4051 /* If the register is readonly, it can't be set more than once. */
4052 || RTX_UNCHANGING_P (x)
4053 /* Don't use hard regs to avoid extending their life. */
4054 || REGNO (x) < FIRST_PSEUDO_REGISTER
4055 /* Avoid subtargets inside loops,
4056 since they hide some invariant expressions. */
4057 || preserve_subexpressions_p ())
4058 ? 0 : x);
4061 /* Expand an assignment that stores the value of FROM into TO.
4062 If WANT_VALUE is nonzero, return an rtx for the value of TO.
4063 (This may contain a QUEUED rtx;
4064 if the value is constant, this rtx is a constant.)
4065 Otherwise, the returned value is NULL_RTX.
4067 SUGGEST_REG is no longer actually used.
4068 It used to mean, copy the value through a register
4069 and return that register, if that is possible.
4070 We now use WANT_VALUE to decide whether to do this. */
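/* A rough sketch of a typical call, using hypothetical operand trees
   LHS_TREE and RHS_TREE (names invented purely for illustration): for a
   plain statement such as

        x = y + 1;

   a front end would discard the value,

        expand_assignment (lhs_tree, rhs_tree, 0, 0);

   whereas for an assignment used as a subexpression, for example
   "z = (x = y + 1);", it would pass WANT_VALUE == 1 and use the
   returned rtx.  */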
4073 expand_assignment (to, from, want_value, suggest_reg)
4074 tree to, from;
4075 int want_value;
4076 int suggest_reg ATTRIBUTE_UNUSED;
4078 rtx to_rtx = 0;
4079 rtx result;
4081 /* Don't crash if the lhs of the assignment was erroneous. */
4083 if (TREE_CODE (to) == ERROR_MARK)
4085 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
4086 return want_value ? result : NULL_RTX;
4089 /* Assignment of a structure component needs special treatment
4090 if the structure component's rtx is not simply a MEM.
4091 Assignment of an array element at a constant index, and assignment of
4092 an array element in an unaligned packed structure field, has the same
4093 problem. */
4095 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
4096 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
4097 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4099 enum machine_mode mode1;
4100 HOST_WIDE_INT bitsize, bitpos;
4101 rtx orig_to_rtx;
4102 tree offset;
4103 int unsignedp;
4104 int volatilep = 0;
4105 tree tem;
4107 push_temp_slots ();
4108 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4109 &unsignedp, &volatilep);
4111 /* If we are going to use store_bit_field and extract_bit_field,
4112 make sure to_rtx will be safe for multiple use. */
4114 if (mode1 == VOIDmode && want_value)
4115 tem = stabilize_reference (tem);
4117 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
4119 if (offset != 0)
4121 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4123 if (GET_CODE (to_rtx) != MEM)
4124 abort ();
4126 #ifdef POINTERS_EXTEND_UNSIGNED
4127 if (GET_MODE (offset_rtx) != Pmode)
4128 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4129 #else
4130 if (GET_MODE (offset_rtx) != ptr_mode)
4131 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4132 #endif
4134 /* A constant address in TO_RTX can have VOIDmode, we must not try
4135 to call force_reg for that case. Avoid that case. */
4136 if (GET_CODE (to_rtx) == MEM
4137 && GET_MODE (to_rtx) == BLKmode
4138 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4139 && bitsize > 0
4140 && (bitpos % bitsize) == 0
4141 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4142 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4144 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4145 bitpos = 0;
4148 to_rtx = offset_address (to_rtx, offset_rtx,
4149 highest_pow2_factor_for_type (TREE_TYPE (to),
4150 offset));
4153 if (GET_CODE (to_rtx) == MEM)
4155 /* If the field is at offset zero, we could have been given the
4156 DECL_RTX of the parent struct. Don't munge it. */
4157 to_rtx = shallow_copy_rtx (to_rtx);
4159 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4162 /* Deal with volatile and readonly fields. The former is only done
4163 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4164 if (volatilep && GET_CODE (to_rtx) == MEM)
4166 if (to_rtx == orig_to_rtx)
4167 to_rtx = copy_rtx (to_rtx);
4168 MEM_VOLATILE_P (to_rtx) = 1;
4171 if (TREE_CODE (to) == COMPONENT_REF
4172 && TREE_READONLY (TREE_OPERAND (to, 1)))
4174 if (to_rtx == orig_to_rtx)
4175 to_rtx = copy_rtx (to_rtx);
4176 RTX_UNCHANGING_P (to_rtx) = 1;
4179 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4181 if (to_rtx == orig_to_rtx)
4182 to_rtx = copy_rtx (to_rtx);
4183 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4186 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4187 (want_value
4188 /* Spurious cast for HPUX compiler. */
4189 ? ((enum machine_mode)
4190 TYPE_MODE (TREE_TYPE (to)))
4191 : VOIDmode),
4192 unsignedp, TREE_TYPE (tem), get_alias_set (to));
4194 preserve_temp_slots (result);
4195 free_temp_slots ();
4196 pop_temp_slots ();
4198 /* If the value is meaningful, convert RESULT to the proper mode.
4199 Otherwise, return nothing. */
4200 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4201 TYPE_MODE (TREE_TYPE (from)),
4202 result,
4203 TREE_UNSIGNED (TREE_TYPE (to)))
4204 : NULL_RTX);
4207 /* If the rhs is a function call and its value is not an aggregate,
4208 call the function before we start to compute the lhs.
4209 This is needed for correct code for cases such as
4210 val = setjmp (buf) on machines where reference to val
4211 requires loading up part of an address in a separate insn.
4213 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4214 since it might be a promoted variable where the zero- or sign- extension
4215 needs to be done. Handling this in the normal way is safe because no
4216 computation is done before the call. */
4217 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
4218 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4219 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4220 && GET_CODE (DECL_RTL (to)) == REG))
4222 rtx value;
4224 push_temp_slots ();
4225 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4226 if (to_rtx == 0)
4227 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4229 /* Handle calls that return values in multiple non-contiguous locations.
4230 The Irix 6 ABI has examples of this. */
4231 if (GET_CODE (to_rtx) == PARALLEL)
4232 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
4233 else if (GET_MODE (to_rtx) == BLKmode)
4234 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4235 else
4237 #ifdef POINTERS_EXTEND_UNSIGNED
4238 if (POINTER_TYPE_P (TREE_TYPE (to))
4239 && GET_MODE (to_rtx) != GET_MODE (value))
4240 value = convert_memory_address (GET_MODE (to_rtx), value);
4241 #endif
4242 emit_move_insn (to_rtx, value);
4244 preserve_temp_slots (to_rtx);
4245 free_temp_slots ();
4246 pop_temp_slots ();
4247 return want_value ? to_rtx : NULL_RTX;
4250 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4251 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4253 if (to_rtx == 0)
4254 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4256 /* Don't move directly into a return register. */
4257 if (TREE_CODE (to) == RESULT_DECL
4258 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
4260 rtx temp;
4262 push_temp_slots ();
4263 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4265 if (GET_CODE (to_rtx) == PARALLEL)
4266 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
4267 else
4268 emit_move_insn (to_rtx, temp);
4270 preserve_temp_slots (to_rtx);
4271 free_temp_slots ();
4272 pop_temp_slots ();
4273 return want_value ? to_rtx : NULL_RTX;
4276 /* In case we are returning the contents of an object which overlaps
4277 the place the value is being stored, use a safe function when copying
4278 a value through a pointer into a structure value return block. */
4279 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4280 && current_function_returns_struct
4281 && !current_function_returns_pcc_struct)
4283 rtx from_rtx, size;
4285 push_temp_slots ();
4286 size = expr_size (from);
4287 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4289 if (TARGET_MEM_FUNCTIONS)
4290 emit_library_call (memmove_libfunc, LCT_NORMAL,
4291 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4292 XEXP (from_rtx, 0), Pmode,
4293 convert_to_mode (TYPE_MODE (sizetype),
4294 size, TREE_UNSIGNED (sizetype)),
4295 TYPE_MODE (sizetype));
4296 else
4297 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4298 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4299 XEXP (to_rtx, 0), Pmode,
4300 convert_to_mode (TYPE_MODE (integer_type_node),
4301 size,
4302 TREE_UNSIGNED (integer_type_node)),
4303 TYPE_MODE (integer_type_node));
4305 preserve_temp_slots (to_rtx);
4306 free_temp_slots ();
4307 pop_temp_slots ();
4308 return want_value ? to_rtx : NULL_RTX;
4311 /* Compute FROM and store the value in the rtx we got. */
4313 push_temp_slots ();
4314 result = store_expr (from, to_rtx, want_value);
4315 preserve_temp_slots (result);
4316 free_temp_slots ();
4317 pop_temp_slots ();
4318 return want_value ? result : NULL_RTX;
4321 /* Generate code for computing expression EXP,
4322 and storing the value into TARGET.
4323 TARGET may contain a QUEUED rtx.
4325 If WANT_VALUE & 1 is nonzero, return a copy of the value
4326 not in TARGET, so that we can be sure to use the proper
4327 value in a containing expression even if TARGET has something
4328 else stored in it. If possible, we copy the value through a pseudo
4329 and return that pseudo. Or, if the value is constant, we try to
4330 return the constant. In some cases, we return a pseudo
4331 copied *from* TARGET.
4333 If the mode is BLKmode then we may return TARGET itself.
4334 It turns out that in BLKmode it doesn't cause a problem,
4335 because C has no operators that could combine two different
4336 assignments into the same BLKmode object with different values
4337 with no sequence point. Will other languages need this to
4338 be more thorough?
4340 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4341 to catch quickly any cases where the caller uses the value
4342 and fails to set WANT_VALUE.
4344 If WANT_VALUE & 2 is set, this is a store into a call param on the
4345 stack, and block moves may need to be treated specially. */
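/* A rough sketch of how the WANT_VALUE bits combine, with ARG_TREE and
   ARG_SLOT as hypothetical names used only for illustration: a caller
   storing a call argument on the stack that also needs the value back
   could write

        rtx val = store_expr (arg_tree, arg_slot, 1 | 2);

   while a caller that only wants the side effect passes 0, as
   expand_assignment does above when its own WANT_VALUE is zero.  */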
4348 store_expr (exp, target, want_value)
4349 tree exp;
4350 rtx target;
4351 int want_value;
4353 rtx temp;
4354 int dont_return_target = 0;
4355 int dont_store_target = 0;
4357 if (TREE_CODE (exp) == COMPOUND_EXPR)
4359 /* Perform first part of compound expression, then assign from second
4360 part. */
4361 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4362 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4363 emit_queue ();
4364 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4366 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4368 /* For conditional expression, get safe form of the target. Then
4369 test the condition, doing the appropriate assignment on either
4370 side. This avoids the creation of unnecessary temporaries.
4371 For non-BLKmode, it is more efficient not to do this. */
4373 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4375 emit_queue ();
4376 target = protect_from_queue (target, 1);
4378 do_pending_stack_adjust ();
4379 NO_DEFER_POP;
4380 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4381 start_cleanup_deferral ();
4382 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4383 end_cleanup_deferral ();
4384 emit_queue ();
4385 emit_jump_insn (gen_jump (lab2));
4386 emit_barrier ();
4387 emit_label (lab1);
4388 start_cleanup_deferral ();
4389 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4390 end_cleanup_deferral ();
4391 emit_queue ();
4392 emit_label (lab2);
4393 OK_DEFER_POP;
4395 return want_value & 1 ? target : NULL_RTX;
4397 else if (queued_subexp_p (target))
4398 /* If target contains a postincrement, let's not risk
4399 using it as the place to generate the rhs. */
4401 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4403 /* Expand EXP into a new pseudo. */
4404 temp = gen_reg_rtx (GET_MODE (target));
4405 temp = expand_expr (exp, temp, GET_MODE (target),
4406 (want_value & 2
4407 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4409 else
4410 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4411 (want_value & 2
4412 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4414 /* If target is volatile, ANSI requires accessing the value
4415 *from* the target, if it is accessed. So make that happen.
4416 In no case return the target itself. */
4417 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4418 dont_return_target = 1;
4420 else if ((want_value & 1) != 0
4421 && GET_CODE (target) == MEM
4422 && ! MEM_VOLATILE_P (target)
4423 && GET_MODE (target) != BLKmode)
4424 /* If target is in memory and caller wants value in a register instead,
4425 arrange that. Pass TARGET as target for expand_expr so that,
4426 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4427 We know expand_expr will not use the target in that case.
4428 Don't do this if TARGET is volatile because we are supposed
4429 to write it and then read it. */
4431 temp = expand_expr (exp, target, GET_MODE (target),
4432 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4433 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4435 /* If TEMP is already in the desired TARGET, only copy it from
4436 memory and don't store it there again. */
4437 if (temp == target
4438 || (rtx_equal_p (temp, target)
4439 && ! side_effects_p (temp) && ! side_effects_p (target)))
4440 dont_store_target = 1;
4441 temp = copy_to_reg (temp);
4443 dont_return_target = 1;
4445 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4446 /* If this is a scalar in a register that is stored in a wider mode
4447 than the declared mode, compute the result into its declared mode
4448 and then convert to the wider mode. Our value is the computed
4449 expression. */
4451 rtx inner_target = 0;
4453 /* If we don't want a value, we can do the conversion inside EXP,
4454 which will often result in some optimizations. Do the conversion
4455 in two steps: first change the signedness, if needed, then
4456 the extend. But don't do this if the type of EXP is a subtype
4457 of something else since then the conversion might involve
4458 more than just converting modes. */
4459 if ((want_value & 1) == 0
4460 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4461 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4463 if (TREE_UNSIGNED (TREE_TYPE (exp))
4464 != SUBREG_PROMOTED_UNSIGNED_P (target))
4465 exp = convert
4466 ((*lang_hooks.types.signed_or_unsigned_type)
4467 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4469 exp = convert ((*lang_hooks.types.type_for_mode)
4470 (GET_MODE (SUBREG_REG (target)),
4471 SUBREG_PROMOTED_UNSIGNED_P (target)),
4472 exp);
4474 inner_target = SUBREG_REG (target);
4477 temp = expand_expr (exp, inner_target, VOIDmode,
4478 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4480 /* If TEMP is a MEM and we want a result value, make the access
4481 now so it gets done only once. Strictly speaking, this is
4482 only necessary if the MEM is volatile, or if the address
4483 overlaps TARGET. But not performing the load twice also
4484 reduces the amount of rtl we generate and then have to CSE. */
4485 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4486 temp = copy_to_reg (temp);
4488 /* If TEMP is a VOIDmode constant, use convert_modes to make
4489 sure that we properly convert it. */
4490 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4492 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4493 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4494 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4495 GET_MODE (target), temp,
4496 SUBREG_PROMOTED_UNSIGNED_P (target));
4499 convert_move (SUBREG_REG (target), temp,
4500 SUBREG_PROMOTED_UNSIGNED_P (target));
4502 /* If we promoted a constant, change the mode back down to match
4503 target. Otherwise, the caller might get confused by a result whose
4504 mode is larger than expected. */
4506 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4508 if (GET_MODE (temp) != VOIDmode)
4510 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4511 SUBREG_PROMOTED_VAR_P (temp) = 1;
4512 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4513 SUBREG_PROMOTED_UNSIGNED_P (target));
4515 else
4516 temp = convert_modes (GET_MODE (target),
4517 GET_MODE (SUBREG_REG (target)),
4518 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4521 return want_value & 1 ? temp : NULL_RTX;
4523 else
4525 temp = expand_expr (exp, target, GET_MODE (target),
4526 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4527 /* Return TARGET if it's a specified hardware register.
4528 If TARGET is a volatile mem ref, either return TARGET
4529 or return a reg copied *from* TARGET; ANSI requires this.
4531 Otherwise, if TEMP is not TARGET, return TEMP
4532 if it is constant (for efficiency),
4533 or if we really want the correct value. */
4534 if (!(target && GET_CODE (target) == REG
4535 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4536 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4537 && ! rtx_equal_p (temp, target)
4538 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4539 dont_return_target = 1;
4542 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4543 the same as that of TARGET, adjust the constant. This is needed, for
4544 example, in case it is a CONST_DOUBLE and we want only a word-sized
4545 value. */
4546 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4547 && TREE_CODE (exp) != ERROR_MARK
4548 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4549 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4550 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4552 /* If value was not generated in the target, store it there.
4553 Convert the value to TARGET's type first if necessary.
4554 If TEMP and TARGET compare equal according to rtx_equal_p, but
4555 one or both of them are volatile memory refs, we have to distinguish
4556 two cases:
4557 - expand_expr has used TARGET. In this case, we must not generate
4558 another copy. This can be detected by TARGET being equal according
4559 to == .
4560 - expand_expr has not used TARGET - that means that the source just
4561 happens to have the same RTX form. Since temp will have been created
4562 by expand_expr, it will compare unequal according to == .
4563 We must generate a copy in this case, to reach the correct number
4564 of volatile memory references. */
4566 if ((! rtx_equal_p (temp, target)
4567 || (temp != target && (side_effects_p (temp)
4568 || side_effects_p (target))))
4569 && TREE_CODE (exp) != ERROR_MARK
4570 && ! dont_store_target
4571 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4572 but TARGET is not valid memory reference, TEMP will differ
4573 from TARGET although it is really the same location. */
4574 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4575 || target != DECL_RTL_IF_SET (exp))
4576 /* If there's nothing to copy, don't bother. Don't call expr_size
4577 unless necessary, because some front ends' (C++) expr_size hook
4578 aborts on objects that are not supposed to be bit-copied or
4579 bit-initialized. */
4580 && expr_size (exp) != const0_rtx)
4582 target = protect_from_queue (target, 1);
4583 if (GET_MODE (temp) != GET_MODE (target)
4584 && GET_MODE (temp) != VOIDmode)
4586 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4587 if (dont_return_target)
4589 /* In this case, we will return TEMP,
4590 so make sure it has the proper mode.
4591 But don't forget to store the value into TARGET. */
4592 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4593 emit_move_insn (target, temp);
4595 else
4596 convert_move (target, temp, unsignedp);
4599 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4601 /* Handle copying a string constant into an array. The string
4602 constant may be shorter than the array. So copy just the string's
4603 actual length, and clear the rest. First get the size of the data
4604 type of the string, which is actually the size of the target. */
4605 rtx size = expr_size (exp);
4607 if (GET_CODE (size) == CONST_INT
4608 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4609 emit_block_move (target, temp, size,
4610 (want_value & 2
4611 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4612 else
4614 /* Compute the size of the data to copy from the string. */
4615 tree copy_size
4616 = size_binop (MIN_EXPR,
4617 make_tree (sizetype, size),
4618 size_int (TREE_STRING_LENGTH (exp)));
4619 rtx copy_size_rtx
4620 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4621 (want_value & 2
4622 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4623 rtx label = 0;
4625 /* Copy that much. */
4626 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4627 emit_block_move (target, temp, copy_size_rtx,
4628 (want_value & 2
4629 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4631 /* Figure out how much is left in TARGET that we have to clear.
4632 Do all calculations in ptr_mode. */
4633 if (GET_CODE (copy_size_rtx) == CONST_INT)
4635 size = plus_constant (size, -INTVAL (copy_size_rtx));
4636 target = adjust_address (target, BLKmode,
4637 INTVAL (copy_size_rtx));
4639 else
4641 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4642 copy_size_rtx, NULL_RTX, 0,
4643 OPTAB_LIB_WIDEN);
4645 #ifdef POINTERS_EXTEND_UNSIGNED
4646 if (GET_MODE (copy_size_rtx) != Pmode)
4647 copy_size_rtx = convert_memory_address (Pmode,
4648 copy_size_rtx);
4649 #endif
4651 target = offset_address (target, copy_size_rtx,
4652 highest_pow2_factor (copy_size));
4653 label = gen_label_rtx ();
4654 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4655 GET_MODE (size), 0, label);
4658 if (size != const0_rtx)
4659 clear_storage (target, size);
4661 if (label)
4662 emit_label (label);
4665 /* Handle calls that return values in multiple non-contiguous locations.
4666 The Irix 6 ABI has examples of this. */
4667 else if (GET_CODE (target) == PARALLEL)
4668 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4669 else if (GET_MODE (temp) == BLKmode)
4670 emit_block_move (target, temp, expr_size (exp),
4671 (want_value & 2
4672 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4673 else
4674 emit_move_insn (target, temp);
4677 /* If we don't want a value, return NULL_RTX. */
4678 if ((want_value & 1) == 0)
4679 return NULL_RTX;
4681 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4682 ??? The latter test doesn't seem to make sense. */
4683 else if (dont_return_target && GET_CODE (temp) != MEM)
4684 return temp;
4686 /* Return TARGET itself if it is a hard register. */
4687 else if ((want_value & 1) != 0
4688 && GET_MODE (target) != BLKmode
4689 && ! (GET_CODE (target) == REG
4690 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4691 return copy_to_reg (target);
4693 else
4694 return target;
4697 /* Return 1 if EXP just contains zeros. */
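/* For instance, given the cases handled below, is_zeros_p returns 1 for
   the integer constant 0, for a REAL_CST identical to 0.0, for a complex
   or vector constant whose parts are all zero, and for a CONSTRUCTOR all
   of whose elements are themselves zero; anything else returns 0.  */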
4699 static int
4700 is_zeros_p (exp)
4701 tree exp;
4703 tree elt;
4705 switch (TREE_CODE (exp))
4707 case CONVERT_EXPR:
4708 case NOP_EXPR:
4709 case NON_LVALUE_EXPR:
4710 case VIEW_CONVERT_EXPR:
4711 return is_zeros_p (TREE_OPERAND (exp, 0));
4713 case INTEGER_CST:
4714 return integer_zerop (exp);
4716 case COMPLEX_CST:
4717 return
4718 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4720 case REAL_CST:
4721 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4723 case VECTOR_CST:
4724 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4725 elt = TREE_CHAIN (elt))
4726 if (!is_zeros_p (TREE_VALUE (elt)))
4727 return 0;
4729 return 1;
4731 case CONSTRUCTOR:
4732 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4733 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4734 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4735 if (! is_zeros_p (TREE_VALUE (elt)))
4736 return 0;
4738 return 1;
4740 default:
4741 return 0;
4745 /* Return 1 if EXP contains mostly (3/4) zeros. */
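/* A worked example of the 3/4 threshold used below: a CONSTRUCTOR with
   ELTS == 8 elements of which ZEROS == 6 are zero satisfies
   4 * 6 >= 3 * 8 (24 >= 24) and so counts as mostly zeros, while one
   with only 5 zero elements (20 < 24) does not.  */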
4747 static int
4748 mostly_zeros_p (exp)
4749 tree exp;
4751 if (TREE_CODE (exp) == CONSTRUCTOR)
4753 int elts = 0, zeros = 0;
4754 tree elt = CONSTRUCTOR_ELTS (exp);
4755 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4757 /* If there are no ranges of true bits, it is all zero. */
4758 return elt == NULL_TREE;
4760 for (; elt; elt = TREE_CHAIN (elt))
4762 /* We do not handle the case where the index is a RANGE_EXPR,
4763 so the statistic will be somewhat inaccurate.
4764 We do make a more accurate count in store_constructor itself,
4765 so since this function is only used for nested array elements,
4766 this should be close enough. */
4767 if (mostly_zeros_p (TREE_VALUE (elt)))
4768 zeros++;
4769 elts++;
4772 return 4 * zeros >= 3 * elts;
4775 return is_zeros_p (exp);
4778 /* Helper function for store_constructor.
4779 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4780 TYPE is the type of the CONSTRUCTOR, not the element type.
4781 CLEARED is as for store_constructor.
4782 ALIAS_SET is the alias set to use for any stores.
4784 This provides a recursive shortcut back to store_constructor when it isn't
4785 necessary to go through store_field. This is so that we can pass through
4786 the cleared field to let store_constructor know that we may not have to
4787 clear a substructure if the outer structure has already been cleared. */
4789 static void
4790 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4791 alias_set)
4792 rtx target;
4793 unsigned HOST_WIDE_INT bitsize;
4794 HOST_WIDE_INT bitpos;
4795 enum machine_mode mode;
4796 tree exp, type;
4797 int cleared;
4798 int alias_set;
4800 if (TREE_CODE (exp) == CONSTRUCTOR
4801 && bitpos % BITS_PER_UNIT == 0
4802 /* If we have a nonzero bitpos for a register target, then we just
4803 let store_field do the bitfield handling. This is unlikely to
4804 generate unnecessary clear instructions anyway.  */
4805 && (bitpos == 0 || GET_CODE (target) == MEM))
4807 if (GET_CODE (target) == MEM)
4808 target
4809 = adjust_address (target,
4810 GET_MODE (target) == BLKmode
4811 || 0 != (bitpos
4812 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4813 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4816 /* Update the alias set, if required. */
4817 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4818 && MEM_ALIAS_SET (target) != 0)
4820 target = copy_rtx (target);
4821 set_mem_alias_set (target, alias_set);
4824 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4826 else
4827 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4828 alias_set);
4831 /* Store the value of constructor EXP into the rtx TARGET.
4832 TARGET is either a REG or a MEM; we know it cannot conflict, since
4833 safe_from_p has been called.
4834 CLEARED is true if TARGET is known to have been zero'd.
4835 SIZE is the number of bytes of TARGET we are allowed to modify: this
4836 may not be the same as the size of EXP if we are assigning to a field
4837 which has been packed to exclude padding bits. */
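/* A rough sketch of the common case handled below, using a hypothetical
   source-level initializer: for

        struct s { int a, b, c, d; };
        struct s x = { 0, 0, 0, 5 };

   the constructor is mostly zeros, so the whole target is cleared once
   with clear_storage and only the single nonzero field is then stored,
   instead of emitting four separate stores.  */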
4839 static void
4840 store_constructor (exp, target, cleared, size)
4841 tree exp;
4842 rtx target;
4843 int cleared;
4844 HOST_WIDE_INT size;
4846 tree type = TREE_TYPE (exp);
4847 #ifdef WORD_REGISTER_OPERATIONS
4848 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4849 #endif
4851 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4852 || TREE_CODE (type) == QUAL_UNION_TYPE)
4854 tree elt;
4856 /* We either clear the aggregate or indicate the value is dead. */
4857 if ((TREE_CODE (type) == UNION_TYPE
4858 || TREE_CODE (type) == QUAL_UNION_TYPE)
4859 && ! cleared
4860 && ! CONSTRUCTOR_ELTS (exp))
4861 /* If the constructor is empty, clear the union. */
4863 clear_storage (target, expr_size (exp));
4864 cleared = 1;
4867 /* If we are building a static constructor into a register,
4868 set the initial value as zero so we can fold the value into
4869 a constant. But if more than one register is involved,
4870 this probably loses. */
4871 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4872 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4874 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4875 cleared = 1;
4878 /* If the constructor has fewer fields than the structure
4879 or if we are initializing the structure to mostly zeros,
4880 clear the whole structure first. Don't do this if TARGET is a
4881 register whose mode size isn't equal to SIZE since clear_storage
4882 can't handle this case. */
4883 else if (! cleared && size > 0
4884 && ((list_length (CONSTRUCTOR_ELTS (exp))
4885 != fields_length (type))
4886 || mostly_zeros_p (exp))
4887 && (GET_CODE (target) != REG
4888 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4889 == size)))
4891 clear_storage (target, GEN_INT (size));
4892 cleared = 1;
4895 if (! cleared)
4896 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4898 /* Store each element of the constructor into
4899 the corresponding field of TARGET. */
4901 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4903 tree field = TREE_PURPOSE (elt);
4904 tree value = TREE_VALUE (elt);
4905 enum machine_mode mode;
4906 HOST_WIDE_INT bitsize;
4907 HOST_WIDE_INT bitpos = 0;
4908 tree offset;
4909 rtx to_rtx = target;
4911 /* Just ignore missing fields.
4912 We cleared the whole structure, above,
4913 if any fields are missing. */
4914 if (field == 0)
4915 continue;
4917 if (cleared && is_zeros_p (value))
4918 continue;
4920 if (host_integerp (DECL_SIZE (field), 1))
4921 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4922 else
4923 bitsize = -1;
4925 mode = DECL_MODE (field);
4926 if (DECL_BIT_FIELD (field))
4927 mode = VOIDmode;
4929 offset = DECL_FIELD_OFFSET (field);
4930 if (host_integerp (offset, 0)
4931 && host_integerp (bit_position (field), 0))
4933 bitpos = int_bit_position (field);
4934 offset = 0;
4936 else
4937 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4939 if (offset)
4941 rtx offset_rtx;
4943 if (contains_placeholder_p (offset))
4944 offset = build (WITH_RECORD_EXPR, sizetype,
4945 offset, make_tree (TREE_TYPE (exp), target));
4947 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4948 if (GET_CODE (to_rtx) != MEM)
4949 abort ();
4951 #ifdef POINTERS_EXTEND_UNSIGNED
4952 if (GET_MODE (offset_rtx) != Pmode)
4953 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4954 #else
4955 if (GET_MODE (offset_rtx) != ptr_mode)
4956 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4957 #endif
4959 to_rtx = offset_address (to_rtx, offset_rtx,
4960 highest_pow2_factor (offset));
4963 if (TREE_READONLY (field))
4965 if (GET_CODE (to_rtx) == MEM)
4966 to_rtx = copy_rtx (to_rtx);
4968 RTX_UNCHANGING_P (to_rtx) = 1;
4971 #ifdef WORD_REGISTER_OPERATIONS
4972 /* If this initializes a field that is smaller than a word, at the
4973 start of a word, try to widen it to a full word.
4974 This special case allows us to output C++ member function
4975 initializations in a form that the optimizers can understand. */
4976 if (GET_CODE (target) == REG
4977 && bitsize < BITS_PER_WORD
4978 && bitpos % BITS_PER_WORD == 0
4979 && GET_MODE_CLASS (mode) == MODE_INT
4980 && TREE_CODE (value) == INTEGER_CST
4981 && exp_size >= 0
4982 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4984 tree type = TREE_TYPE (value);
4986 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4988 type = (*lang_hooks.types.type_for_size)
4989 (BITS_PER_WORD, TREE_UNSIGNED (type));
4990 value = convert (type, value);
4993 if (BYTES_BIG_ENDIAN)
4994 value
4995 = fold (build (LSHIFT_EXPR, type, value,
4996 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4997 bitsize = BITS_PER_WORD;
4998 mode = word_mode;
5000 #endif
5002 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5003 && DECL_NONADDRESSABLE_P (field))
5005 to_rtx = copy_rtx (to_rtx);
5006 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5009 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5010 value, type, cleared,
5011 get_alias_set (TREE_TYPE (field)));
5014 else if (TREE_CODE (type) == ARRAY_TYPE
5015 || TREE_CODE (type) == VECTOR_TYPE)
5017 tree elt;
5018 int i;
5019 int need_to_clear;
5020 tree domain = TYPE_DOMAIN (type);
5021 tree elttype = TREE_TYPE (type);
5022 int const_bounds_p;
5023 HOST_WIDE_INT minelt = 0;
5024 HOST_WIDE_INT maxelt = 0;
5026 /* Vectors are like arrays, but the domain is stored via an array
5027 type indirectly. */
5028 if (TREE_CODE (type) == VECTOR_TYPE)
5030 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
5031 the same field as TYPE_DOMAIN, we are not guaranteed that
5032 it always will. */
5033 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
5034 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
5037 const_bounds_p = (TYPE_MIN_VALUE (domain)
5038 && TYPE_MAX_VALUE (domain)
5039 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5040 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5042 /* If we have constant bounds for the range of the type, get them. */
5043 if (const_bounds_p)
5045 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5046 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5049 /* If the constructor has fewer elements than the array,
5050 clear the whole array first. Similarly if this is
5051 a static constructor of a non-BLKmode object.  */
5052 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
5053 need_to_clear = 1;
5054 else
5056 HOST_WIDE_INT count = 0, zero_count = 0;
5057 need_to_clear = ! const_bounds_p;
5059 /* This loop is a more accurate version of the loop in
5060 mostly_zeros_p (it handles RANGE_EXPR in an index).
5061 It is also needed to check for missing elements. */
5062 for (elt = CONSTRUCTOR_ELTS (exp);
5063 elt != NULL_TREE && ! need_to_clear;
5064 elt = TREE_CHAIN (elt))
5066 tree index = TREE_PURPOSE (elt);
5067 HOST_WIDE_INT this_node_count;
5069 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5071 tree lo_index = TREE_OPERAND (index, 0);
5072 tree hi_index = TREE_OPERAND (index, 1);
5074 if (! host_integerp (lo_index, 1)
5075 || ! host_integerp (hi_index, 1))
5077 need_to_clear = 1;
5078 break;
5081 this_node_count = (tree_low_cst (hi_index, 1)
5082 - tree_low_cst (lo_index, 1) + 1);
5084 else
5085 this_node_count = 1;
5087 count += this_node_count;
5088 if (mostly_zeros_p (TREE_VALUE (elt)))
5089 zero_count += this_node_count;
5092 /* Clear the entire array first if there are any missing elements,
5093 or if the incidence of zero elements is >= 75%. */
5094 if (! need_to_clear
5095 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
5096 need_to_clear = 1;
5099 if (need_to_clear && size > 0)
5101 if (! cleared)
5103 if (REG_P (target))
5104 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5105 else
5106 clear_storage (target, GEN_INT (size));
5108 cleared = 1;
5110 else if (REG_P (target))
5111 /* Inform later passes that the old value is dead. */
5112 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5114 /* Store each element of the constructor into
5115 the corresponding element of TARGET, determined
5116 by counting the elements. */
5117 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5118 elt;
5119 elt = TREE_CHAIN (elt), i++)
5121 enum machine_mode mode;
5122 HOST_WIDE_INT bitsize;
5123 HOST_WIDE_INT bitpos;
5124 int unsignedp;
5125 tree value = TREE_VALUE (elt);
5126 tree index = TREE_PURPOSE (elt);
5127 rtx xtarget = target;
5129 if (cleared && is_zeros_p (value))
5130 continue;
5132 unsignedp = TREE_UNSIGNED (elttype);
5133 mode = TYPE_MODE (elttype);
5134 if (mode == BLKmode)
5135 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5136 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5137 : -1);
5138 else
5139 bitsize = GET_MODE_BITSIZE (mode);
5141 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5143 tree lo_index = TREE_OPERAND (index, 0);
5144 tree hi_index = TREE_OPERAND (index, 1);
5145 rtx index_r, pos_rtx, loop_end;
5146 struct nesting *loop;
5147 HOST_WIDE_INT lo, hi, count;
5148 tree position;
5150 /* If the range is constant and "small", unroll the loop. */
5151 if (const_bounds_p
5152 && host_integerp (lo_index, 0)
5153 && host_integerp (hi_index, 0)
5154 && (lo = tree_low_cst (lo_index, 0),
5155 hi = tree_low_cst (hi_index, 0),
5156 count = hi - lo + 1,
5157 (GET_CODE (target) != MEM
5158 || count <= 2
5159 || (host_integerp (TYPE_SIZE (elttype), 1)
5160 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5161 <= 40 * 8)))))
5163 lo -= minelt; hi -= minelt;
5164 for (; lo <= hi; lo++)
5166 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5168 if (GET_CODE (target) == MEM
5169 && !MEM_KEEP_ALIAS_SET_P (target)
5170 && TREE_CODE (type) == ARRAY_TYPE
5171 && TYPE_NONALIASED_COMPONENT (type))
5173 target = copy_rtx (target);
5174 MEM_KEEP_ALIAS_SET_P (target) = 1;
5177 store_constructor_field
5178 (target, bitsize, bitpos, mode, value, type, cleared,
5179 get_alias_set (elttype));
5182 else
5184 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5185 loop_end = gen_label_rtx ();
5187 unsignedp = TREE_UNSIGNED (domain);
5189 index = build_decl (VAR_DECL, NULL_TREE, domain);
5191 index_r
5192 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5193 &unsignedp, 0));
5194 SET_DECL_RTL (index, index_r);
5195 if (TREE_CODE (value) == SAVE_EXPR
5196 && SAVE_EXPR_RTL (value) == 0)
5198 /* Make sure value gets expanded once before the
5199 loop. */
5200 expand_expr (value, const0_rtx, VOIDmode, 0);
5201 emit_queue ();
5203 store_expr (lo_index, index_r, 0);
5204 loop = expand_start_loop (0);
5206 /* Assign value to element index. */
5207 position
5208 = convert (ssizetype,
5209 fold (build (MINUS_EXPR, TREE_TYPE (index),
5210 index, TYPE_MIN_VALUE (domain))));
5211 position = size_binop (MULT_EXPR, position,
5212 convert (ssizetype,
5213 TYPE_SIZE_UNIT (elttype)));
5215 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5216 xtarget = offset_address (target, pos_rtx,
5217 highest_pow2_factor (position));
5218 xtarget = adjust_address (xtarget, mode, 0);
5219 if (TREE_CODE (value) == CONSTRUCTOR)
5220 store_constructor (value, xtarget, cleared,
5221 bitsize / BITS_PER_UNIT);
5222 else
5223 store_expr (value, xtarget, 0);
5225 expand_exit_loop_if_false (loop,
5226 build (LT_EXPR, integer_type_node,
5227 index, hi_index));
5229 expand_increment (build (PREINCREMENT_EXPR,
5230 TREE_TYPE (index),
5231 index, integer_one_node), 0, 0);
5232 expand_end_loop ();
5233 emit_label (loop_end);
5236 else if ((index != 0 && ! host_integerp (index, 0))
5237 || ! host_integerp (TYPE_SIZE (elttype), 1))
5239 tree position;
5241 if (index == 0)
5242 index = ssize_int (1);
5244 if (minelt)
5245 index = convert (ssizetype,
5246 fold (build (MINUS_EXPR, index,
5247 TYPE_MIN_VALUE (domain))));
5249 position = size_binop (MULT_EXPR, index,
5250 convert (ssizetype,
5251 TYPE_SIZE_UNIT (elttype)));
5252 xtarget = offset_address (target,
5253 expand_expr (position, 0, VOIDmode, 0),
5254 highest_pow2_factor (position));
5255 xtarget = adjust_address (xtarget, mode, 0);
5256 store_expr (value, xtarget, 0);
5258 else
5260 if (index != 0)
5261 bitpos = ((tree_low_cst (index, 0) - minelt)
5262 * tree_low_cst (TYPE_SIZE (elttype), 1));
5263 else
5264 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5266 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5267 && TREE_CODE (type) == ARRAY_TYPE
5268 && TYPE_NONALIASED_COMPONENT (type))
5270 target = copy_rtx (target);
5271 MEM_KEEP_ALIAS_SET_P (target) = 1;
5274 store_constructor_field (target, bitsize, bitpos, mode, value,
5275 type, cleared, get_alias_set (elttype));
5281 /* Set constructor assignments. */
5282 else if (TREE_CODE (type) == SET_TYPE)
5284 tree elt = CONSTRUCTOR_ELTS (exp);
5285 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5286 tree domain = TYPE_DOMAIN (type);
5287 tree domain_min, domain_max, bitlength;
5289 /* The default implementation strategy is to extract the constant
5290 parts of the constructor, use that to initialize the target,
5291 and then "or" in whatever non-constant ranges we need in addition.
5293 If a large set is all zero or all ones, it is
5294 probably better to set it using memset (if available) or bzero.
5295 Also, if a large set has just a single range, it may also be
5296 better to first clear the whole set (using bzero/memset) and
5297 then set the bits we want.  */
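/* For instance, an empty set constructor (no elements at all) takes the
   shortcut just below: when SIZE is known, the target is simply cleared
   with clear_storage and nothing else is emitted.  */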
5299 /* Check for all zeros. */
5300 if (elt == NULL_TREE && size > 0)
5302 if (!cleared)
5303 clear_storage (target, GEN_INT (size));
5304 return;
5307 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5308 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5309 bitlength = size_binop (PLUS_EXPR,
5310 size_diffop (domain_max, domain_min),
5311 ssize_int (1));
5313 nbits = tree_low_cst (bitlength, 1);
5315 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5316 are "complicated" (more than one range), initialize (the
5317 constant parts) by copying from a constant. */
5318 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5319 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5321 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5322 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5323 char *bit_buffer = (char *) alloca (nbits);
5324 HOST_WIDE_INT word = 0;
5325 unsigned int bit_pos = 0;
5326 unsigned int ibit = 0;
5327 unsigned int offset = 0; /* In bytes from beginning of set. */
5329 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5330 for (;;)
5332 if (bit_buffer[ibit])
5334 if (BYTES_BIG_ENDIAN)
5335 word |= (1 << (set_word_size - 1 - bit_pos));
5336 else
5337 word |= 1 << bit_pos;
5340 bit_pos++; ibit++;
5341 if (bit_pos >= set_word_size || ibit == nbits)
5343 if (word != 0 || ! cleared)
5345 rtx datum = GEN_INT (word);
5346 rtx to_rtx;
5348 /* The assumption here is that it is safe to use
5349 XEXP if the set is multi-word, but not if
5350 it's single-word. */
5351 if (GET_CODE (target) == MEM)
5352 to_rtx = adjust_address (target, mode, offset);
5353 else if (offset == 0)
5354 to_rtx = target;
5355 else
5356 abort ();
5357 emit_move_insn (to_rtx, datum);
5360 if (ibit == nbits)
5361 break;
5362 word = 0;
5363 bit_pos = 0;
5364 offset += set_word_size / BITS_PER_UNIT;
5368 else if (!cleared)
5369 /* Don't bother clearing storage if the set is all ones. */
5370 if (TREE_CHAIN (elt) != NULL_TREE
5371 || (TREE_PURPOSE (elt) == NULL_TREE
5372 ? nbits != 1
5373 : ( ! host_integerp (TREE_VALUE (elt), 0)
5374 || ! host_integerp (TREE_PURPOSE (elt), 0)
5375 || (tree_low_cst (TREE_VALUE (elt), 0)
5376 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5377 != (HOST_WIDE_INT) nbits))))
5378 clear_storage (target, expr_size (exp));
5380 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5382 /* Start of range of element or NULL. */
5383 tree startbit = TREE_PURPOSE (elt);
5384 /* End of range of element, or element value. */
5385 tree endbit = TREE_VALUE (elt);
5386 HOST_WIDE_INT startb, endb;
5387 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5389 bitlength_rtx = expand_expr (bitlength,
5390 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5392 /* Handle non-range tuple element like [ expr ]. */
5393 if (startbit == NULL_TREE)
5395 startbit = save_expr (endbit);
5396 endbit = startbit;
5399 startbit = convert (sizetype, startbit);
5400 endbit = convert (sizetype, endbit);
5401 if (! integer_zerop (domain_min))
5403 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5404 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5406 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5407 EXPAND_CONST_ADDRESS);
5408 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5409 EXPAND_CONST_ADDRESS);
5411 if (REG_P (target))
5413 targetx
5414 = assign_temp
5415 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5416 (GET_MODE (target), 0),
5417 TYPE_QUAL_CONST)),
5418 0, 1, 1);
5419 emit_move_insn (targetx, target);
5422 else if (GET_CODE (target) == MEM)
5423 targetx = target;
5424 else
5425 abort ();
5427 /* Optimization: If startbit and endbit are constants divisible
5428 by BITS_PER_UNIT, call memset instead. */
5429 if (TARGET_MEM_FUNCTIONS
5430 && TREE_CODE (startbit) == INTEGER_CST
5431 && TREE_CODE (endbit) == INTEGER_CST
5432 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5433 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5435 emit_library_call (memset_libfunc, LCT_NORMAL,
5436 VOIDmode, 3,
5437 plus_constant (XEXP (targetx, 0),
5438 startb / BITS_PER_UNIT),
5439 Pmode,
5440 constm1_rtx, TYPE_MODE (integer_type_node),
5441 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5442 TYPE_MODE (sizetype));
5444 else
5445 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5446 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5447 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5448 startbit_rtx, TYPE_MODE (sizetype),
5449 endbit_rtx, TYPE_MODE (sizetype));
5451 if (REG_P (target))
5452 emit_move_insn (target, targetx);
5456 else
5457 abort ();
5460 /* Store the value of EXP (an expression tree)
5461 into a subfield of TARGET which has mode MODE and occupies
5462 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5463 If MODE is VOIDmode, it means that we are storing into a bit-field.
5465 If VALUE_MODE is VOIDmode, return nothing in particular.
5466 UNSIGNEDP is not used in this case.
5468 Otherwise, return an rtx for the value stored. This rtx
5469 has mode VALUE_MODE if that is convenient to do.
5470 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5472 TYPE is the type of the underlying object,
5474 ALIAS_SET is the alias set for the destination. This value will
5475 (in general) be different from that for TARGET, since TARGET is a
5476 reference to the containing structure. */
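/* A rough sketch of the typical caller: expand_assignment above, for a
   bit-field store with WANT_VALUE == 0, ends up doing roughly

        store_field (to_rtx, bitsize, bitpos, VOIDmode, from,
                     VOIDmode, unsignedp, TREE_TYPE (tem),
                     get_alias_set (to));

   where the first VOIDmode marks the bit-field case and the second
   (VALUE_MODE) says that no value needs to be returned.  */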
5478 static rtx
5479 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5480 alias_set)
5481 rtx target;
5482 HOST_WIDE_INT bitsize;
5483 HOST_WIDE_INT bitpos;
5484 enum machine_mode mode;
5485 tree exp;
5486 enum machine_mode value_mode;
5487 int unsignedp;
5488 tree type;
5489 int alias_set;
5491 HOST_WIDE_INT width_mask = 0;
5493 if (TREE_CODE (exp) == ERROR_MARK)
5494 return const0_rtx;
5496 /* If we have nothing to store, do nothing unless the expression has
5497 side-effects. */
5498 if (bitsize == 0)
5499 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5500 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5501 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5503 /* If we are storing into an unaligned field of an aligned union that is
5504 in a register, we may have the mode of TARGET being an integer mode but
5505 MODE == BLKmode. In that case, get an aligned object whose size and
5506 alignment are the same as TARGET and store TARGET into it (we can avoid
5507 the store if the field being stored is the entire width of TARGET). Then
5508 call ourselves recursively to store the field into a BLKmode version of
5509 that object. Finally, load from the object into TARGET. This is not
5510 very efficient in general, but should only be slightly more expensive
5511 than the otherwise-required unaligned accesses. Perhaps this can be
5512 cleaned up later. */
5514 if (mode == BLKmode
5515 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5517 rtx object
5518 = assign_temp
5519 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5520 0, 1, 1);
5521 rtx blk_object = adjust_address (object, BLKmode, 0);
5523 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5524 emit_move_insn (object, target);
5526 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5527 alias_set);
5529 emit_move_insn (target, object);
5531 /* We want to return the BLKmode version of the data. */
5532 return blk_object;
5535 if (GET_CODE (target) == CONCAT)
5537 /* We're storing into a struct containing a single __complex. */
5539 if (bitpos != 0)
5540 abort ();
5541 return store_expr (exp, target, 0);
5544 /* If the structure is in a register or if the component
5545 is a bit field, we cannot use addressing to access it.
5546 Use bit-field techniques or SUBREG to store in it. */
5548 if (mode == VOIDmode
5549 || (mode != BLKmode && ! direct_store[(int) mode]
5550 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5551 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5552 || GET_CODE (target) == REG
5553 || GET_CODE (target) == SUBREG
5554 /* If the field isn't aligned enough to store as an ordinary memref,
5555 store it as a bit field. */
5556 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5557 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5558 || bitpos % GET_MODE_ALIGNMENT (mode)))
5559 /* If the RHS and field are a constant size and the size of the
5560 RHS isn't the same size as the bitfield, we must use bitfield
5561 operations. */
5562 || (bitsize >= 0
5563 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5564 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5566 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5568 /* If BITSIZE is narrower than the size of the type of EXP
5569 we will be narrowing TEMP. Normally, what's wanted are the
5570 low-order bits. However, if EXP's type is a record and this is
5571 a big-endian machine, we want the upper BITSIZE bits.  */
5572 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5573 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5574 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5575 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5576 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5577 - bitsize),
5578 temp, 1);
5580 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5581 MODE. */
5582 if (mode != VOIDmode && mode != BLKmode
5583 && mode != TYPE_MODE (TREE_TYPE (exp)))
5584 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5586 /* If the modes of TARGET and TEMP are both BLKmode, both
5587 must be in memory and BITPOS must be aligned on a byte
5588 boundary. If so, we simply do a block copy. */
5589 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5591 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5592 || bitpos % BITS_PER_UNIT != 0)
5593 abort ();
5595 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5596 emit_block_move (target, temp,
5597 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5598 / BITS_PER_UNIT),
5599 BLOCK_OP_NORMAL);
5601 return value_mode == VOIDmode ? const0_rtx : target;
5604 /* Store the value in the bitfield. */
5605 store_bit_field (target, bitsize, bitpos, mode, temp,
5606 int_size_in_bytes (type));
5608 if (value_mode != VOIDmode)
5610 /* The caller wants an rtx for the value.
5611 If possible, avoid refetching from the bitfield itself. */
5612 if (width_mask != 0
5613 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5615 tree count;
5616 enum machine_mode tmode;
5618 tmode = GET_MODE (temp);
5619 if (tmode == VOIDmode)
5620 tmode = value_mode;
5622 if (unsignedp)
5623 return expand_and (tmode, temp,
5624 gen_int_mode (width_mask, tmode),
5625 NULL_RTX);
5627 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5628 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5629 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5632 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5633 NULL_RTX, value_mode, VOIDmode,
5634 int_size_in_bytes (type));
5636 return const0_rtx;
5638 else
5640 rtx addr = XEXP (target, 0);
5641 rtx to_rtx = target;
5643 /* If a value is wanted, it must be the lhs;
5644 so make the address stable for multiple use. */
5646 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5647 && ! CONSTANT_ADDRESS_P (addr)
5648 /* A frame-pointer reference is already stable. */
5649 && ! (GET_CODE (addr) == PLUS
5650 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5651 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5652 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5653 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5655 /* Now build a reference to just the desired component. */
5657 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5659 if (to_rtx == target)
5660 to_rtx = copy_rtx (to_rtx);
5662 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5663 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5664 set_mem_alias_set (to_rtx, alias_set);
5666 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5670 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5671 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5672 codes and find the ultimate containing object, which we return.
5674 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5675 bit position, and *PUNSIGNEDP to the signedness of the field.
5676 If the position of the field is variable, we store a tree
5677 giving the variable offset (in units) in *POFFSET.
5678 This offset is in addition to the bit position.
5679 If the position is not variable, we store 0 in *POFFSET.
5681 If any of the extraction expressions is volatile,
5682 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5684 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5685 is a mode that can be used to access the field. In that case, *PBITSIZE
5686 is redundant.
5688 If the field describes a variable-sized object, *PMODE is set to
5689 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5690 this case, but the address of the object can be found. */
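/* Illustrative sketch, not part of the original source: a typical caller
   decomposes a reference such as the COMPONENT_REF for `obj.field' like
   this (the variable names are just for the example):

     HOST_WIDE_INT bitsize, bitpos;
     tree offset;
     enum machine_mode mode1;
     int unsignedp, volatilep = 0;
     tree base = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                      &mode1, &unsignedp, &volatilep);

   BASE is then the containing object (`obj'), BITSIZE/BITPOS give the
   width and constant bit position of the field, and OFFSET is nonzero
   only if part of the position is variable.  */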
5692 tree
5693 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5694 punsignedp, pvolatilep)
5695 tree exp;
5696 HOST_WIDE_INT *pbitsize;
5697 HOST_WIDE_INT *pbitpos;
5698 tree *poffset;
5699 enum machine_mode *pmode;
5700 int *punsignedp;
5701 int *pvolatilep;
5703 tree size_tree = 0;
5704 enum machine_mode mode = VOIDmode;
5705 tree offset = size_zero_node;
5706 tree bit_offset = bitsize_zero_node;
5707 tree placeholder_ptr = 0;
5708 tree tem;
5710 /* First get the mode, signedness, and size. We do this from just the
5711 outermost expression. */
5712 if (TREE_CODE (exp) == COMPONENT_REF)
5714 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5715 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5716 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5718 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5720 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5722 size_tree = TREE_OPERAND (exp, 1);
5723 *punsignedp = TREE_UNSIGNED (exp);
5725 else
5727 mode = TYPE_MODE (TREE_TYPE (exp));
5728 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5730 if (mode == BLKmode)
5731 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5732 else
5733 *pbitsize = GET_MODE_BITSIZE (mode);
5736 if (size_tree != 0)
5738 if (! host_integerp (size_tree, 1))
5739 mode = BLKmode, *pbitsize = -1;
5740 else
5741 *pbitsize = tree_low_cst (size_tree, 1);
5744 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5745 and find the ultimate containing object. */
5746 while (1)
5748 if (TREE_CODE (exp) == BIT_FIELD_REF)
5749 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5750 else if (TREE_CODE (exp) == COMPONENT_REF)
5752 tree field = TREE_OPERAND (exp, 1);
5753 tree this_offset = DECL_FIELD_OFFSET (field);
5755 /* If this field hasn't been filled in yet, don't go
5756 past it. This should only happen when folding expressions
5757 made during type construction. */
5758 if (this_offset == 0)
5759 break;
5760 else if (! TREE_CONSTANT (this_offset)
5761 && contains_placeholder_p (this_offset))
5762 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5764 offset = size_binop (PLUS_EXPR, offset, this_offset);
5765 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5766 DECL_FIELD_BIT_OFFSET (field));
5768 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5771 else if (TREE_CODE (exp) == ARRAY_REF
5772 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5774 tree index = TREE_OPERAND (exp, 1);
5775 tree array = TREE_OPERAND (exp, 0);
5776 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5777 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5778 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5780 /* We assume all arrays have sizes that are a multiple of a byte.
5781 First subtract the lower bound, if any, in the type of the
5782 index, then convert to sizetype and multiply by the size of the
5783 array element. */
5784 if (low_bound != 0 && ! integer_zerop (low_bound))
5785 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5786 index, low_bound));
5788 /* If the index has a self-referential type, pass it to a
5789 WITH_RECORD_EXPR; if the component size does, pass our
5790 component to one. */
5791 if (! TREE_CONSTANT (index)
5792 && contains_placeholder_p (index))
5793 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5794 if (! TREE_CONSTANT (unit_size)
5795 && contains_placeholder_p (unit_size))
5796 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5798 offset = size_binop (PLUS_EXPR, offset,
5799 size_binop (MULT_EXPR,
5800 convert (sizetype, index),
5801 unit_size));
5804 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5806 tree new = find_placeholder (exp, &placeholder_ptr);
5808 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5809 We might have been called from tree optimization where we
5810 haven't set up an object yet. */
5811 if (new == 0)
5812 break;
5813 else
5814 exp = new;
5816 continue;
5818 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5819 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5820 && ! ((TREE_CODE (exp) == NOP_EXPR
5821 || TREE_CODE (exp) == CONVERT_EXPR)
5822 && (TYPE_MODE (TREE_TYPE (exp))
5823 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5824 break;
5826 /* If any reference in the chain is volatile, the effect is volatile. */
5827 if (TREE_THIS_VOLATILE (exp))
5828 *pvolatilep = 1;
5830 exp = TREE_OPERAND (exp, 0);
5833 /* If OFFSET is constant, see if we can return the whole thing as a
5834 constant bit position. Otherwise, split it up. */
5835 if (host_integerp (offset, 0)
5836 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5837 bitsize_unit_node))
5838 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5839 && host_integerp (tem, 0))
5840 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5841 else
5842 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5844 *pmode = mode;
5845 return exp;
5848 /* Return 1 if T is an expression that get_inner_reference handles. */
5851 handled_component_p (t)
5852 tree t;
5854 switch (TREE_CODE (t))
5856 case BIT_FIELD_REF:
5857 case COMPONENT_REF:
5858 case ARRAY_REF:
5859 case ARRAY_RANGE_REF:
5860 case NON_LVALUE_EXPR:
5861 case VIEW_CONVERT_EXPR:
5862 return 1;
5864 case NOP_EXPR:
5865 case CONVERT_EXPR:
5866 return (TYPE_MODE (TREE_TYPE (t))
5867 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5869 default:
5870 return 0;
5874 /* Given an rtx VALUE that may contain additions and multiplications, return
5875 an equivalent value that just refers to a register, memory, or constant.
5876 This is done by generating instructions to perform the arithmetic and
5877 returning a pseudo-register containing the value.
5879 The returned value may be a REG, SUBREG, MEM or constant. */
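/* Illustrative sketch, not part of the original source: given an address
   such as (plus (reg X) (mult (reg Y) (const_int 4))), a caller needing a
   "simple" operand can write

     rtx simple = force_operand (addr, NULL_RTX);

   and gets back a pseudo register holding the computed value, with the
   necessary arithmetic emitted as insns at the current position; ADDR and
   SIMPLE are hypothetical names used only for the example.  */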
5882 force_operand (value, target)
5883 rtx value, target;
5885 rtx op1, op2;
5886 /* Use subtarget as the target for operand 0 of a binary operation. */
5887 rtx subtarget = get_subtarget (target);
5888 enum rtx_code code = GET_CODE (value);
5890 /* Check for a PIC address load. */
5891 if ((code == PLUS || code == MINUS)
5892 && XEXP (value, 0) == pic_offset_table_rtx
5893 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5894 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5895 || GET_CODE (XEXP (value, 1)) == CONST))
5897 if (!subtarget)
5898 subtarget = gen_reg_rtx (GET_MODE (value));
5899 emit_move_insn (subtarget, value);
5900 return subtarget;
5903 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5905 if (!target)
5906 target = gen_reg_rtx (GET_MODE (value));
5907 convert_move (target, force_operand (XEXP (value, 0), NULL),
5908 code == ZERO_EXTEND);
5909 return target;
5912 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5914 op2 = XEXP (value, 1);
5915 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5916 subtarget = 0;
5917 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5919 code = PLUS;
5920 op2 = negate_rtx (GET_MODE (value), op2);
5923 /* Check for an addition with OP2 a constant integer and our first
5924 operand a PLUS of a virtual register and something else. In that
5925 case, we want to emit the sum of the virtual register and the
5926 constant first and then add the other value. This allows virtual
5927 register instantiation to simply modify the constant rather than
5928 creating another one around this addition. */
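/* Illustrative note, not part of the original source: e.g. for
   (plus (plus (reg virtual-stack-vars) (reg Y)) (const_int 8)) we first
   emit virtual-stack-vars + 8, which instantiation can fold into a single
   frame-pointer-plus-offset address, and only then add Y.  */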
5929 if (code == PLUS && GET_CODE (op2) == CONST_INT
5930 && GET_CODE (XEXP (value, 0)) == PLUS
5931 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5932 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5933 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5935 rtx temp = expand_simple_binop (GET_MODE (value), code,
5936 XEXP (XEXP (value, 0), 0), op2,
5937 subtarget, 0, OPTAB_LIB_WIDEN);
5938 return expand_simple_binop (GET_MODE (value), code, temp,
5939 force_operand (XEXP (XEXP (value,
5940 0), 1), 0),
5941 target, 0, OPTAB_LIB_WIDEN);
5944 op1 = force_operand (XEXP (value, 0), subtarget);
5945 op2 = force_operand (op2, NULL_RTX);
5946 switch (code)
5948 case MULT:
5949 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5950 case DIV:
5951 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5952 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5953 target, 1, OPTAB_LIB_WIDEN);
5954 else
5955 return expand_divmod (0,
5956 FLOAT_MODE_P (GET_MODE (value))
5957 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5958 GET_MODE (value), op1, op2, target, 0);
5959 break;
5960 case MOD:
5961 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5962 target, 0);
5963 break;
5964 case UDIV:
5965 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5966 target, 1);
5967 break;
5968 case UMOD:
5969 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5970 target, 1);
5971 break;
5972 case ASHIFTRT:
5973 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5974 target, 0, OPTAB_LIB_WIDEN);
5975 break;
5976 default:
5977 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5978 target, 1, OPTAB_LIB_WIDEN);
5981 if (GET_RTX_CLASS (code) == '1')
5983 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5984 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5987 #ifdef INSN_SCHEDULING
5988 /* On machines that have insn scheduling, we want all memory references to be
5989 explicit, so we need to deal with paradoxical SUBREGs of MEMs here. */
5990 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5991 && (GET_MODE_SIZE (GET_MODE (value))
5992 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5993 value
5994 = simplify_gen_subreg (GET_MODE (value),
5995 force_reg (GET_MODE (SUBREG_REG (value)),
5996 force_operand (SUBREG_REG (value),
5997 NULL_RTX)),
5998 GET_MODE (SUBREG_REG (value)),
5999 SUBREG_BYTE (value));
6000 #endif
6002 return value;
6005 /* Subroutine of expand_expr: return nonzero iff there is no way that
6006 EXP can reference X, which is being modified. TOP_P is nonzero if this
6007 call is going to be used to determine whether we need a temporary
6008 for EXP, as opposed to a recursive call to this function.
6010 It is always safe for this routine to return zero since it merely
6011 searches for optimization opportunities. */
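/* Illustrative sketch, not part of the original source: callers that are
   about to store into TARGET typically guard its reuse like this:

     if (target != 0 && safe_from_p (target, exp, 1))
       ... evaluate EXP directly into TARGET ...
     else
       ... evaluate EXP into a fresh temporary first ...

   A zero answer only forfeits the optimization; it is never incorrect.  */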
6014 safe_from_p (x, exp, top_p)
6015 rtx x;
6016 tree exp;
6017 int top_p;
6019 rtx exp_rtl = 0;
6020 int i, nops;
6021 static tree save_expr_list;
6023 if (x == 0
6024 /* If EXP has varying size, we MUST use a target since we currently
6025 have no way of allocating temporaries of variable size
6026 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6027 So we assume here that something at a higher level has prevented a
6028 clash. This is somewhat bogus, but the best we can do. Only
6029 do this when X is BLKmode and when we are at the top level. */
6030 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6031 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6032 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6033 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6034 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6035 != INTEGER_CST)
6036 && GET_MODE (x) == BLKmode)
6037 /* If X is in the outgoing argument area, it is always safe. */
6038 || (GET_CODE (x) == MEM
6039 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6040 || (GET_CODE (XEXP (x, 0)) == PLUS
6041 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6042 return 1;
6044 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6045 find the underlying pseudo. */
6046 if (GET_CODE (x) == SUBREG)
6048 x = SUBREG_REG (x);
6049 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6050 return 0;
6053 /* A SAVE_EXPR might appear many times in the expression passed to the
6054 top-level safe_from_p call, and if it has a complex subexpression,
6055 examining it multiple times could result in a combinatorial explosion.
6056 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
6057 with optimization took about 28 minutes to compile -- even though it was
6058 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
6059 and turn that off when we are done. We keep a list of the SAVE_EXPRs
6060 we have processed. Note that the only test of top_p was above. */
6062 if (top_p)
6064 int rtn;
6065 tree t;
6067 save_expr_list = 0;
6069 rtn = safe_from_p (x, exp, 0);
6071 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
6072 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
6074 return rtn;
6077 /* Now look at our tree code and possibly recurse. */
6078 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6080 case 'd':
6081 exp_rtl = DECL_RTL_IF_SET (exp);
6082 break;
6084 case 'c':
6085 return 1;
6087 case 'x':
6088 if (TREE_CODE (exp) == TREE_LIST)
6089 return ((TREE_VALUE (exp) == 0
6090 || safe_from_p (x, TREE_VALUE (exp), 0))
6091 && (TREE_CHAIN (exp) == 0
6092 || safe_from_p (x, TREE_CHAIN (exp), 0)));
6093 else if (TREE_CODE (exp) == ERROR_MARK)
6094 return 1; /* An already-visited SAVE_EXPR? */
6095 else
6096 return 0;
6098 case '1':
6099 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6101 case '2':
6102 case '<':
6103 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
6104 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
6106 case 'e':
6107 case 'r':
6108 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6109 the expression. If it is set, we conflict iff we are that rtx or
6110 both are in memory. Otherwise, we check all operands of the
6111 expression recursively. */
6113 switch (TREE_CODE (exp))
6115 case ADDR_EXPR:
6116 /* If the operand is static or we are static, we can't conflict.
6117 Likewise if we don't conflict with the operand at all. */
6118 if (staticp (TREE_OPERAND (exp, 0))
6119 || TREE_STATIC (exp)
6120 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6121 return 1;
6123 /* Otherwise, the only way this can conflict is if we are taking
6124 the address of a DECL whose address is part of X, which is
6125 very rare. */
6126 exp = TREE_OPERAND (exp, 0);
6127 if (DECL_P (exp))
6129 if (!DECL_RTL_SET_P (exp)
6130 || GET_CODE (DECL_RTL (exp)) != MEM)
6131 return 0;
6132 else
6133 exp_rtl = XEXP (DECL_RTL (exp), 0);
6135 break;
6137 case INDIRECT_REF:
6138 if (GET_CODE (x) == MEM
6139 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6140 get_alias_set (exp)))
6141 return 0;
6142 break;
6144 case CALL_EXPR:
6145 /* Assume that the call will clobber all hard registers and
6146 all of memory. */
6147 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6148 || GET_CODE (x) == MEM)
6149 return 0;
6150 break;
6152 case RTL_EXPR:
6153 /* If a sequence exists, we would have to scan every instruction
6154 in the sequence to see if it was safe. This is probably not
6155 worthwhile. */
6156 if (RTL_EXPR_SEQUENCE (exp))
6157 return 0;
6159 exp_rtl = RTL_EXPR_RTL (exp);
6160 break;
6162 case WITH_CLEANUP_EXPR:
6163 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6164 break;
6166 case CLEANUP_POINT_EXPR:
6167 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6169 case SAVE_EXPR:
6170 exp_rtl = SAVE_EXPR_RTL (exp);
6171 if (exp_rtl)
6172 break;
6174 /* If we've already scanned this, don't do it again. Otherwise,
6175 show we've scanned it and record for clearing the flag if we're
6176 going on. */
6177 if (TREE_PRIVATE (exp))
6178 return 1;
6180 TREE_PRIVATE (exp) = 1;
6181 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6183 TREE_PRIVATE (exp) = 0;
6184 return 0;
6187 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6188 return 1;
6190 case BIND_EXPR:
6191 /* The only operand we look at is operand 1. The rest aren't
6192 part of the expression. */
6193 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6195 case METHOD_CALL_EXPR:
6196 /* This takes an rtx argument, but shouldn't appear here. */
6197 abort ();
6199 default:
6200 break;
6203 /* If we have an rtx, we do not need to scan our operands. */
6204 if (exp_rtl)
6205 break;
6207 nops = first_rtl_op (TREE_CODE (exp));
6208 for (i = 0; i < nops; i++)
6209 if (TREE_OPERAND (exp, i) != 0
6210 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6211 return 0;
6213 /* If this is a language-specific tree code, it may require
6214 special handling. */
6215 if ((unsigned int) TREE_CODE (exp)
6216 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6217 && !(*lang_hooks.safe_from_p) (x, exp))
6218 return 0;
6221 /* If we have an rtl, find any enclosed object. Then see if we conflict
6222 with it. */
6223 if (exp_rtl)
6225 if (GET_CODE (exp_rtl) == SUBREG)
6227 exp_rtl = SUBREG_REG (exp_rtl);
6228 if (GET_CODE (exp_rtl) == REG
6229 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6230 return 0;
6233 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6234 are memory and they conflict. */
6235 return ! (rtx_equal_p (x, exp_rtl)
6236 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6237 && true_dependence (exp_rtl, VOIDmode, x,
6238 rtx_addr_varies_p)));
6241 /* If we reach here, it is safe. */
6242 return 1;
6245 /* Subroutine of expand_expr: return rtx if EXP is a
6246 variable or parameter; else return 0. */
6248 static rtx
6249 var_rtx (exp)
6250 tree exp;
6252 STRIP_NOPS (exp);
6253 switch (TREE_CODE (exp))
6255 case PARM_DECL:
6256 case VAR_DECL:
6257 return DECL_RTL (exp);
6258 default:
6259 return 0;
6263 #ifdef MAX_INTEGER_COMPUTATION_MODE
6265 void
6266 check_max_integer_computation_mode (exp)
6267 tree exp;
6269 enum tree_code code;
6270 enum machine_mode mode;
6272 /* Strip any NOPs that don't change the mode. */
6273 STRIP_NOPS (exp);
6274 code = TREE_CODE (exp);
6276 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6277 if (code == NOP_EXPR
6278 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6279 return;
6281 /* First check the type of the overall operation. We need only look at
6282 unary, binary and relational operations. */
6283 if (TREE_CODE_CLASS (code) == '1'
6284 || TREE_CODE_CLASS (code) == '2'
6285 || TREE_CODE_CLASS (code) == '<')
6287 mode = TYPE_MODE (TREE_TYPE (exp));
6288 if (GET_MODE_CLASS (mode) == MODE_INT
6289 && mode > MAX_INTEGER_COMPUTATION_MODE)
6290 internal_error ("unsupported wide integer operation");
6293 /* Check operand of a unary op. */
6294 if (TREE_CODE_CLASS (code) == '1')
6296 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6297 if (GET_MODE_CLASS (mode) == MODE_INT
6298 && mode > MAX_INTEGER_COMPUTATION_MODE)
6299 internal_error ("unsupported wide integer operation");
6302 /* Check operands of a binary/comparison op. */
6303 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6305 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6306 if (GET_MODE_CLASS (mode) == MODE_INT
6307 && mode > MAX_INTEGER_COMPUTATION_MODE)
6308 internal_error ("unsupported wide integer operation");
6310 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6311 if (GET_MODE_CLASS (mode) == MODE_INT
6312 && mode > MAX_INTEGER_COMPUTATION_MODE)
6313 internal_error ("unsupported wide integer operation");
6316 #endif
6318 /* Return the highest power of two that EXP is known to be a multiple of.
6319 This is used in updating alignment of MEMs in array references. */
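/* Illustrative note, not part of the original source: for
   EXP = (i * 12) + 4 the MULT_EXPR arm yields 1 * 4 = 4 (the factor of
   the variable i is unknown, hence 1, and 12 contributes 4), the constant
   4 contributes 4, and the PLUS_EXPR takes the minimum of the two, so the
   result is 4: the value is known to be a multiple of 4.  */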
6321 static HOST_WIDE_INT
6322 highest_pow2_factor (exp)
6323 tree exp;
6325 HOST_WIDE_INT c0, c1;
6327 switch (TREE_CODE (exp))
6329 case INTEGER_CST:
6330 /* We can find the lowest bit that's a one. If the low
6331 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6332 We need to handle this case since we can find it in a COND_EXPR,
6333 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6334 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6335 later ICE. */
6336 if (TREE_CONSTANT_OVERFLOW (exp))
6337 return BIGGEST_ALIGNMENT;
6338 else
6340 /* Note: tree_low_cst is intentionally not used here,
6341 we don't care about the upper bits. */
6342 c0 = TREE_INT_CST_LOW (exp);
6343 c0 &= -c0;
6344 return c0 ? c0 : BIGGEST_ALIGNMENT;
6346 break;
6348 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6349 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6350 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6351 return MIN (c0, c1);
6353 case MULT_EXPR:
6354 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6355 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6356 return c0 * c1;
6358 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6359 case CEIL_DIV_EXPR:
6360 if (integer_pow2p (TREE_OPERAND (exp, 1))
6361 && host_integerp (TREE_OPERAND (exp, 1), 1))
6363 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6364 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6365 return MAX (1, c0 / c1);
6367 break;
6369 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6370 case SAVE_EXPR: case WITH_RECORD_EXPR:
6371 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6373 case COMPOUND_EXPR:
6374 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6376 case COND_EXPR:
6377 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6378 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6379 return MIN (c0, c1);
6381 default:
6382 break;
6385 return 1;
6388 /* Similar, except that it is known that the expression must be a multiple
6389 of the alignment of TYPE. */
6391 static HOST_WIDE_INT
6392 highest_pow2_factor_for_type (type, exp)
6393 tree type;
6394 tree exp;
6396 HOST_WIDE_INT type_align, factor;
6398 factor = highest_pow2_factor (exp);
6399 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6400 return MAX (factor, type_align);
6403 /* Return an object on the placeholder list that matches EXP, a
6404 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6405 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6406 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6407 is the address of a starting position in the placeholder list (zero
6408 means the start of the list); a pointer to the placeholder-list entry
6409 at which the object is found is stored back through it.
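/* Illustrative sketch, not part of the original source: the
   PLACEHOLDER_EXPR case of expand_expr below uses it essentially like
   this (OBJECT is just a name for the example):

     tree placeholder_expr = 0;
     tree object = find_placeholder (exp, &placeholder_expr);

   OBJECT, if nonzero, is an expression of EXP's type (or an INDIRECT_REF
   of one reached through a pointer) to substitute for the placeholder.  */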
6411 tree
6412 find_placeholder (exp, plist)
6413 tree exp;
6414 tree *plist;
6416 tree type = TREE_TYPE (exp);
6417 tree placeholder_expr;
6419 for (placeholder_expr
6420 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6421 placeholder_expr != 0;
6422 placeholder_expr = TREE_CHAIN (placeholder_expr))
6424 tree need_type = TYPE_MAIN_VARIANT (type);
6425 tree elt;
6427 /* Find the outermost reference that is of the type we want. If none,
6428 see if any object has a type that is a pointer to the type we
6429 want. */
6430 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6431 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6432 || TREE_CODE (elt) == COND_EXPR)
6433 ? TREE_OPERAND (elt, 1)
6434 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6435 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6436 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6437 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6438 ? TREE_OPERAND (elt, 0) : 0))
6439 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6441 if (plist)
6442 *plist = placeholder_expr;
6443 return elt;
6446 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6448 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6449 || TREE_CODE (elt) == COND_EXPR)
6450 ? TREE_OPERAND (elt, 1)
6451 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6452 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6453 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6454 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6455 ? TREE_OPERAND (elt, 0) : 0))
6456 if (POINTER_TYPE_P (TREE_TYPE (elt))
6457 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6458 == need_type))
6460 if (plist)
6461 *plist = placeholder_expr;
6462 return build1 (INDIRECT_REF, need_type, elt);
6466 return 0;
6469 /* expand_expr: generate code for computing expression EXP.
6470 An rtx for the computed value is returned. The value is never null.
6471 In the case of a void EXP, const0_rtx is returned.
6473 The value may be stored in TARGET if TARGET is nonzero.
6474 TARGET is just a suggestion; callers must assume that
6475 the rtx returned may not be the same as TARGET.
6477 If TARGET is CONST0_RTX, it means that the value will be ignored.
6479 If TMODE is not VOIDmode, it suggests generating the
6480 result in mode TMODE. But this is done only when convenient.
6481 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6482 TMODE is just a suggestion; callers must assume that
6483 the rtx returned may not have mode TMODE.
6485 Note that TARGET may have neither TMODE nor MODE. In that case, it
6486 probably will not be used.
6488 If MODIFIER is EXPAND_SUM then when EXP is an addition
6489 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6490 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6491 products as above, or REG or MEM, or constant.
6492 Ordinarily in such cases we would output mul or add instructions
6493 and then return a pseudo reg containing the sum.
6495 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6496 it also marks a label as absolutely required (it can't be dead).
6497 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6498 This is used for outputting expressions used in initializers.
6500 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6501 with a constant address even if that address is not normally legitimate.
6502 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6504 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6505 a call parameter. Such targets require special care as we haven't yet
6506 marked TARGET so that it's safe from being trashed by libcalls. We
6507 don't want to use TARGET for anything but the final result;
6508 intermediate values must go elsewhere. Additionally, calls to
6509 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
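/* Illustrative sketch, not part of the original source: the common calling
   pattern elsewhere in the compiler is simply

     rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

   i.e. no suggested target and no suggested mode (0 is EXPAND_NORMAL); the
   special modifiers described above are passed only by callers prepared to
   handle the partially-reduced forms they permit.  */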
6512 expand_expr (exp, target, tmode, modifier)
6513 tree exp;
6514 rtx target;
6515 enum machine_mode tmode;
6516 enum expand_modifier modifier;
6518 rtx op0, op1, temp;
6519 tree type = TREE_TYPE (exp);
6520 int unsignedp = TREE_UNSIGNED (type);
6521 enum machine_mode mode;
6522 enum tree_code code = TREE_CODE (exp);
6523 optab this_optab;
6524 rtx subtarget, original_target;
6525 int ignore;
6526 tree context;
6528 /* Handle ERROR_MARK before anybody tries to access its type. */
6529 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6531 op0 = CONST0_RTX (tmode);
6532 if (op0 != 0)
6533 return op0;
6534 return const0_rtx;
6537 mode = TYPE_MODE (type);
6538 /* Use subtarget as the target for operand 0 of a binary operation. */
6539 subtarget = get_subtarget (target);
6540 original_target = target;
6541 ignore = (target == const0_rtx
6542 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6543 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6544 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6545 && TREE_CODE (type) == VOID_TYPE));
6547 /* If we are going to ignore this result, we need only do something
6548 if there is a side-effect somewhere in the expression. If there
6549 is, short-circuit the most common cases here. Note that we must
6550 not call expand_expr with anything but const0_rtx in case this
6551 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6553 if (ignore)
6555 if (! TREE_SIDE_EFFECTS (exp))
6556 return const0_rtx;
6558 /* Ensure we reference a volatile object even if value is ignored, but
6559 don't do this if all we are doing is taking its address. */
6560 if (TREE_THIS_VOLATILE (exp)
6561 && TREE_CODE (exp) != FUNCTION_DECL
6562 && mode != VOIDmode && mode != BLKmode
6563 && modifier != EXPAND_CONST_ADDRESS)
6565 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6566 if (GET_CODE (temp) == MEM)
6567 temp = copy_to_reg (temp);
6568 return const0_rtx;
6571 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6572 || code == INDIRECT_REF || code == BUFFER_REF)
6573 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6574 modifier);
6576 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6577 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6579 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6580 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6581 return const0_rtx;
6583 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6584 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6585 /* If the second operand has no side effects, just evaluate
6586 the first. */
6587 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6588 modifier);
6589 else if (code == BIT_FIELD_REF)
6591 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6592 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6593 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6594 return const0_rtx;
6597 target = 0;
6600 #ifdef MAX_INTEGER_COMPUTATION_MODE
6601 /* Only check stuff here if the mode we want is different from the mode
6602 of the expression; if it's the same, check_max_integer_computation_mode
6603 will handle it. Do we really need to check this stuff at all? */
6605 if (target
6606 && GET_MODE (target) != mode
6607 && TREE_CODE (exp) != INTEGER_CST
6608 && TREE_CODE (exp) != PARM_DECL
6609 && TREE_CODE (exp) != ARRAY_REF
6610 && TREE_CODE (exp) != ARRAY_RANGE_REF
6611 && TREE_CODE (exp) != COMPONENT_REF
6612 && TREE_CODE (exp) != BIT_FIELD_REF
6613 && TREE_CODE (exp) != INDIRECT_REF
6614 && TREE_CODE (exp) != CALL_EXPR
6615 && TREE_CODE (exp) != VAR_DECL
6616 && TREE_CODE (exp) != RTL_EXPR)
6618 enum machine_mode mode = GET_MODE (target);
6620 if (GET_MODE_CLASS (mode) == MODE_INT
6621 && mode > MAX_INTEGER_COMPUTATION_MODE)
6622 internal_error ("unsupported wide integer operation");
6625 if (tmode != mode
6626 && TREE_CODE (exp) != INTEGER_CST
6627 && TREE_CODE (exp) != PARM_DECL
6628 && TREE_CODE (exp) != ARRAY_REF
6629 && TREE_CODE (exp) != ARRAY_RANGE_REF
6630 && TREE_CODE (exp) != COMPONENT_REF
6631 && TREE_CODE (exp) != BIT_FIELD_REF
6632 && TREE_CODE (exp) != INDIRECT_REF
6633 && TREE_CODE (exp) != VAR_DECL
6634 && TREE_CODE (exp) != CALL_EXPR
6635 && TREE_CODE (exp) != RTL_EXPR
6636 && GET_MODE_CLASS (tmode) == MODE_INT
6637 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6638 internal_error ("unsupported wide integer operation");
6640 check_max_integer_computation_mode (exp);
6641 #endif
6643 /* If will do cse, generate all results into pseudo registers
6644 since 1) that allows cse to find more things
6645 and 2) otherwise cse could produce an insn the machine
6646 cannot support. An exception is a CONSTRUCTOR into a multi-word
6647 MEM: that's much more likely to be most efficient into the MEM.
6648 Another is a CALL_EXPR which must return in memory. */
6650 if (! cse_not_expected && mode != BLKmode && target
6651 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6652 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6653 && ! (code == CALL_EXPR && aggregate_value_p (exp)))
6654 target = 0;
6656 switch (code)
6658 case LABEL_DECL:
6660 tree function = decl_function_context (exp);
6661 /* Handle using a label in a containing function. */
6662 if (function != current_function_decl
6663 && function != inline_function_decl && function != 0)
6665 struct function *p = find_function_data (function);
6666 p->expr->x_forced_labels
6667 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6668 p->expr->x_forced_labels);
6670 else
6672 if (modifier == EXPAND_INITIALIZER)
6673 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6674 label_rtx (exp),
6675 forced_labels);
6678 temp = gen_rtx_MEM (FUNCTION_MODE,
6679 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6680 if (function != current_function_decl
6681 && function != inline_function_decl && function != 0)
6682 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6683 return temp;
6686 case PARM_DECL:
6687 if (!DECL_RTL_SET_P (exp))
6689 error_with_decl (exp, "prior parameter's size depends on `%s'");
6690 return CONST0_RTX (mode);
6693 /* ... fall through ... */
6695 case VAR_DECL:
6696 /* If a static var's type was incomplete when the decl was written,
6697 but the type is complete now, lay out the decl now. */
6698 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6699 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6701 rtx value = DECL_RTL_IF_SET (exp);
6703 layout_decl (exp, 0);
6705 /* If the RTL was already set, update its mode and memory
6706 attributes. */
6707 if (value != 0)
6709 PUT_MODE (value, DECL_MODE (exp));
6710 SET_DECL_RTL (exp, 0);
6711 set_mem_attributes (value, exp, 1);
6712 SET_DECL_RTL (exp, value);
6716 /* ... fall through ... */
6718 case FUNCTION_DECL:
6719 case RESULT_DECL:
6720 if (DECL_RTL (exp) == 0)
6721 abort ();
6723 /* Ensure the variable is marked as used even if it doesn't go through
6724 a parser. If it hasn't been used yet, write out an external
6725 definition. */
6726 if (! TREE_USED (exp))
6728 assemble_external (exp);
6729 TREE_USED (exp) = 1;
6732 /* Show we haven't gotten RTL for this yet. */
6733 temp = 0;
6735 /* Handle variables inherited from containing functions. */
6736 context = decl_function_context (exp);
6738 /* We treat inline_function_decl as an alias for the current function
6739 because that is the inline function whose vars, types, etc.
6740 are being merged into the current function.
6741 See expand_inline_function. */
6743 if (context != 0 && context != current_function_decl
6744 && context != inline_function_decl
6745 /* If var is static, we don't need a static chain to access it. */
6746 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6747 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6749 rtx addr;
6751 /* Mark as non-local and addressable. */
6752 DECL_NONLOCAL (exp) = 1;
6753 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6754 abort ();
6755 (*lang_hooks.mark_addressable) (exp);
6756 if (GET_CODE (DECL_RTL (exp)) != MEM)
6757 abort ();
6758 addr = XEXP (DECL_RTL (exp), 0);
6759 if (GET_CODE (addr) == MEM)
6760 addr
6761 = replace_equiv_address (addr,
6762 fix_lexical_addr (XEXP (addr, 0), exp));
6763 else
6764 addr = fix_lexical_addr (addr, exp);
6766 temp = replace_equiv_address (DECL_RTL (exp), addr);
6769 /* This is the case of an array whose size is to be determined
6770 from its initializer, while the initializer is still being parsed.
6771 See expand_decl. */
6773 else if (GET_CODE (DECL_RTL (exp)) == MEM
6774 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6775 temp = validize_mem (DECL_RTL (exp));
6777 /* If DECL_RTL is memory, we are in the normal case. If either
6778 the address is not valid, or it is not a register and -fforce-addr
6779 is specified, get the address into a register. */
6781 else if (GET_CODE (DECL_RTL (exp)) == MEM
6782 && modifier != EXPAND_CONST_ADDRESS
6783 && modifier != EXPAND_SUM
6784 && modifier != EXPAND_INITIALIZER
6785 && (! memory_address_p (DECL_MODE (exp),
6786 XEXP (DECL_RTL (exp), 0))
6787 || (flag_force_addr
6788 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6789 temp = replace_equiv_address (DECL_RTL (exp),
6790 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6792 /* If we got something, return it. But first, set the alignment
6793 if the address is a register. */
6794 if (temp != 0)
6796 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6797 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6799 return temp;
6802 /* If the mode of DECL_RTL does not match that of the decl, it
6803 must be a promoted value. We return a SUBREG of the wanted mode,
6804 but mark it so that we know that it was already extended. */
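/* Illustrative note, not part of the original source: e.g. on a target
   whose PROMOTE_MODE widens QImode variables to SImode, DECL_RTL here is
   an SImode REG while DECL_MODE is QImode; what is returned is a QImode
   lowpart SUBREG of that register with SUBREG_PROMOTED_VAR_P set, telling
   later code the value has already been sign- or zero-extended.  */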
6806 if (GET_CODE (DECL_RTL (exp)) == REG
6807 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6809 /* Get the signedness used for this variable. Ensure we get the
6810 same mode we got when the variable was declared. */
6811 if (GET_MODE (DECL_RTL (exp))
6812 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6813 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6814 abort ();
6816 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6817 SUBREG_PROMOTED_VAR_P (temp) = 1;
6818 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6819 return temp;
6822 return DECL_RTL (exp);
6824 case INTEGER_CST:
6825 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6826 TREE_INT_CST_HIGH (exp), mode);
6828 /* ??? If overflow is set, fold will have done an incomplete job,
6829 which can result in (plus xx (const_int 0)), which can get
6830 simplified by validate_replace_rtx during virtual register
6831 instantiation, which can result in unrecognizable insns.
6832 Avoid this by forcing all overflows into registers. */
6833 if (TREE_CONSTANT_OVERFLOW (exp)
6834 && modifier != EXPAND_INITIALIZER)
6835 temp = force_reg (mode, temp);
6837 return temp;
6839 case CONST_DECL:
6840 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6842 case REAL_CST:
6843 /* If optimized, generate immediate CONST_DOUBLE
6844 which will be turned into memory by reload if necessary.
6846 We used to force a register so that loop.c could see it. But
6847 this does not allow gen_* patterns to perform optimizations with
6848 the constants. It also produces two insns in cases like "x = 1.0;".
6849 On most machines, floating-point constants are not permitted in
6850 many insns, so we'd end up copying it to a register in any case.
6852 Now, we do the copying in expand_binop, if appropriate. */
6853 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6854 TYPE_MODE (TREE_TYPE (exp)));
6856 case COMPLEX_CST:
6857 case STRING_CST:
6858 if (! TREE_CST_RTL (exp))
6859 output_constant_def (exp, 1);
6861 /* TREE_CST_RTL probably contains a constant address.
6862 On RISC machines where a constant address isn't valid,
6863 make some insns to get that address into a register. */
6864 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6865 && modifier != EXPAND_CONST_ADDRESS
6866 && modifier != EXPAND_INITIALIZER
6867 && modifier != EXPAND_SUM
6868 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6869 || (flag_force_addr
6870 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6871 return replace_equiv_address (TREE_CST_RTL (exp),
6872 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6873 return TREE_CST_RTL (exp);
6875 case EXPR_WITH_FILE_LOCATION:
6877 rtx to_return;
6878 const char *saved_input_filename = input_filename;
6879 int saved_lineno = lineno;
6880 input_filename = EXPR_WFL_FILENAME (exp);
6881 lineno = EXPR_WFL_LINENO (exp);
6882 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6883 emit_line_note (input_filename, lineno);
6884 /* Possibly avoid switching back and forth here. */
6885 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6886 input_filename = saved_input_filename;
6887 lineno = saved_lineno;
6888 return to_return;
6891 case SAVE_EXPR:
6892 context = decl_function_context (exp);
6894 /* If this SAVE_EXPR was at global context, assume we are an
6895 initialization function and move it into our context. */
6896 if (context == 0)
6897 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6899 /* We treat inline_function_decl as an alias for the current function
6900 because that is the inline function whose vars, types, etc.
6901 are being merged into the current function.
6902 See expand_inline_function. */
6903 if (context == current_function_decl || context == inline_function_decl)
6904 context = 0;
6906 /* If this is non-local, handle it. */
6907 if (context)
6909 /* The following call just exists to abort if the context is
6910 not of a containing function. */
6911 find_function_data (context);
6913 temp = SAVE_EXPR_RTL (exp);
6914 if (temp && GET_CODE (temp) == REG)
6916 put_var_into_stack (exp);
6917 temp = SAVE_EXPR_RTL (exp);
6919 if (temp == 0 || GET_CODE (temp) != MEM)
6920 abort ();
6921 return
6922 replace_equiv_address (temp,
6923 fix_lexical_addr (XEXP (temp, 0), exp));
6925 if (SAVE_EXPR_RTL (exp) == 0)
6927 if (mode == VOIDmode)
6928 temp = const0_rtx;
6929 else
6930 temp = assign_temp (build_qualified_type (type,
6931 (TYPE_QUALS (type)
6932 | TYPE_QUAL_CONST)),
6933 3, 0, 0);
6935 SAVE_EXPR_RTL (exp) = temp;
6936 if (!optimize && GET_CODE (temp) == REG)
6937 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6938 save_expr_regs);
6940 /* If the mode of TEMP does not match that of the expression, it
6941 must be a promoted value. We pass store_expr a SUBREG of the
6942 wanted mode but mark it so that we know that it was already
6943 extended. */
6945 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6947 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6948 promote_mode (type, mode, &unsignedp, 0);
6949 SUBREG_PROMOTED_VAR_P (temp) = 1;
6950 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6953 if (temp == const0_rtx)
6954 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6955 else
6956 store_expr (TREE_OPERAND (exp, 0), temp,
6957 modifier == EXPAND_STACK_PARM ? 2 : 0);
6959 TREE_USED (exp) = 1;
6962 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6963 must be a promoted value. We return a SUBREG of the wanted mode,
6964 but mark it so that we know that it was already extended. */
6966 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6967 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6969 /* Compute the signedness and make the proper SUBREG. */
6970 promote_mode (type, mode, &unsignedp, 0);
6971 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6972 SUBREG_PROMOTED_VAR_P (temp) = 1;
6973 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6974 return temp;
6977 return SAVE_EXPR_RTL (exp);
6979 case UNSAVE_EXPR:
6981 rtx temp;
6982 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6983 TREE_OPERAND (exp, 0)
6984 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6985 return temp;
6988 case PLACEHOLDER_EXPR:
6990 tree old_list = placeholder_list;
6991 tree placeholder_expr = 0;
6993 exp = find_placeholder (exp, &placeholder_expr);
6994 if (exp == 0)
6995 abort ();
6997 placeholder_list = TREE_CHAIN (placeholder_expr);
6998 temp = expand_expr (exp, original_target, tmode, modifier);
6999 placeholder_list = old_list;
7000 return temp;
7003 case WITH_RECORD_EXPR:
7004 /* Put the object on the placeholder list, expand our first operand,
7005 and pop the list. */
7006 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
7007 placeholder_list);
7008 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
7009 modifier);
7010 placeholder_list = TREE_CHAIN (placeholder_list);
7011 return target;
7013 case GOTO_EXPR:
7014 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7015 expand_goto (TREE_OPERAND (exp, 0));
7016 else
7017 expand_computed_goto (TREE_OPERAND (exp, 0));
7018 return const0_rtx;
7020 case EXIT_EXPR:
7021 expand_exit_loop_if_false (NULL,
7022 invert_truthvalue (TREE_OPERAND (exp, 0)));
7023 return const0_rtx;
7025 case LABELED_BLOCK_EXPR:
7026 if (LABELED_BLOCK_BODY (exp))
7027 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
7028 /* Should perhaps use expand_label, but this is simpler and safer. */
7029 do_pending_stack_adjust ();
7030 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
7031 return const0_rtx;
7033 case EXIT_BLOCK_EXPR:
7034 if (EXIT_BLOCK_RETURN (exp))
7035 sorry ("returned value in block_exit_expr");
7036 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
7037 return const0_rtx;
7039 case LOOP_EXPR:
7040 push_temp_slots ();
7041 expand_start_loop (1);
7042 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
7043 expand_end_loop ();
7044 pop_temp_slots ();
7046 return const0_rtx;
7048 case BIND_EXPR:
7050 tree vars = TREE_OPERAND (exp, 0);
7052 /* Need to open a binding contour here because
7053 if there are any cleanups they must be contained here. */
7054 expand_start_bindings (2);
7056 /* Mark the corresponding BLOCK for output in its proper place. */
7057 if (TREE_OPERAND (exp, 2) != 0
7058 && ! TREE_USED (TREE_OPERAND (exp, 2)))
7059 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
7061 /* If VARS have not yet been expanded, expand them now. */
7062 while (vars)
7064 if (!DECL_RTL_SET_P (vars))
7065 expand_decl (vars);
7066 expand_decl_init (vars);
7067 vars = TREE_CHAIN (vars);
7070 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
7072 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7074 return temp;
7077 case RTL_EXPR:
7078 if (RTL_EXPR_SEQUENCE (exp))
7080 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
7081 abort ();
7082 emit_insn (RTL_EXPR_SEQUENCE (exp));
7083 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
7085 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
7086 free_temps_for_rtl_expr (exp);
7087 return RTL_EXPR_RTL (exp);
7089 case CONSTRUCTOR:
7090 /* If we don't need the result, just ensure we evaluate any
7091 subexpressions. */
7092 if (ignore)
7094 tree elt;
7096 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
7097 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
7099 return const0_rtx;
7102 /* All elts simple constants => refer to a constant in memory. But
7103 if this is a non-BLKmode mode, let it store a field at a time
7104 since that should make a CONST_INT or CONST_DOUBLE when we
7105 fold. Likewise, if we have a target we can use, it is best to
7106 store directly into the target unless the type is large enough
7107 that memcpy will be used. If we are making an initializer and
7108 all operands are constant, put it in memory as well.
7110 FIXME: Avoid trying to fill vector constructors piece-meal.
7111 Output them with output_constant_def below unless we're sure
7112 they're zeros. This should go away when vector initializers
7113 are treated like VECTOR_CST instead of arrays.
7115 else if ((TREE_STATIC (exp)
7116 && ((mode == BLKmode
7117 && ! (target != 0 && safe_from_p (target, exp, 1)))
7118 || TREE_ADDRESSABLE (exp)
7119 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7120 && (! MOVE_BY_PIECES_P
7121 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7122 TYPE_ALIGN (type)))
7123 && ((TREE_CODE (type) == VECTOR_TYPE
7124 && !is_zeros_p (exp))
7125 || ! mostly_zeros_p (exp)))))
7126 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
7128 rtx constructor = output_constant_def (exp, 1);
7130 if (modifier != EXPAND_CONST_ADDRESS
7131 && modifier != EXPAND_INITIALIZER
7132 && modifier != EXPAND_SUM)
7133 constructor = validize_mem (constructor);
7135 return constructor;
7137 else
7139 /* Handle calls that pass values in multiple non-contiguous
7140 locations. The Irix 6 ABI has examples of this. */
7141 if (target == 0 || ! safe_from_p (target, exp, 1)
7142 || GET_CODE (target) == PARALLEL
7143 || modifier == EXPAND_STACK_PARM)
7144 target
7145 = assign_temp (build_qualified_type (type,
7146 (TYPE_QUALS (type)
7147 | (TREE_READONLY (exp)
7148 * TYPE_QUAL_CONST))),
7149 0, TREE_ADDRESSABLE (exp), 1);
7151 store_constructor (exp, target, 0, int_expr_size (exp));
7152 return target;
7155 case INDIRECT_REF:
7157 tree exp1 = TREE_OPERAND (exp, 0);
7158 tree index;
7159 tree string = string_constant (exp1, &index);
7161 /* Try to optimize reads from const strings. */
7162 if (string
7163 && TREE_CODE (string) == STRING_CST
7164 && TREE_CODE (index) == INTEGER_CST
7165 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7166 && GET_MODE_CLASS (mode) == MODE_INT
7167 && GET_MODE_SIZE (mode) == 1
7168 && modifier != EXPAND_WRITE)
7169 return gen_int_mode (TREE_STRING_POINTER (string)
7170 [TREE_INT_CST_LOW (index)], mode);
7172 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7173 op0 = memory_address (mode, op0);
7174 temp = gen_rtx_MEM (mode, op0);
7175 set_mem_attributes (temp, exp, 0);
7177 /* If we are writing to this object and its type is a record with
7178 readonly fields, we must mark it as readonly so it will
7179 conflict with readonly references to those fields. */
7180 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7181 RTX_UNCHANGING_P (temp) = 1;
7183 return temp;
7186 case ARRAY_REF:
7187 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7188 abort ();
7191 tree array = TREE_OPERAND (exp, 0);
7192 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7193 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7194 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7195 HOST_WIDE_INT i;
7197 /* Optimize the special-case of a zero lower bound.
7199 We convert the low_bound to sizetype to avoid some problems
7200 with constant folding. (E.g. suppose the lower bound is 1,
7201 and its mode is QI. Without the conversion, (ARRAY
7202 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7203 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7205 if (! integer_zerop (low_bound))
7206 index = size_diffop (index, convert (sizetype, low_bound));
7208 /* Fold an expression like: "foo"[2].
7209 This is not done in fold so it won't happen inside &.
7210 Don't fold if this is for wide characters since it's too
7211 difficult to do correctly and this is a very rare case. */
7213 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7214 && TREE_CODE (array) == STRING_CST
7215 && TREE_CODE (index) == INTEGER_CST
7216 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7217 && GET_MODE_CLASS (mode) == MODE_INT
7218 && GET_MODE_SIZE (mode) == 1)
7219 return gen_int_mode (TREE_STRING_POINTER (array)
7220 [TREE_INT_CST_LOW (index)], mode);
7222 /* If this is a constant index into a constant array,
7223 just get the value from the array. Handle both the cases when
7224 we have an explicit constructor and when our operand is a variable
7225 that was declared const. */
7227 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7228 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
7229 && TREE_CODE (index) == INTEGER_CST
7230 && 0 > compare_tree_int (index,
7231 list_length (CONSTRUCTOR_ELTS
7232 (TREE_OPERAND (exp, 0)))))
7234 tree elem;
7236 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7237 i = TREE_INT_CST_LOW (index);
7238 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7241 if (elem)
7242 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7243 modifier);
7246 else if (optimize >= 1
7247 && modifier != EXPAND_CONST_ADDRESS
7248 && modifier != EXPAND_INITIALIZER
7249 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7250 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7251 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7253 if (TREE_CODE (index) == INTEGER_CST)
7255 tree init = DECL_INITIAL (array);
7257 if (TREE_CODE (init) == CONSTRUCTOR)
7259 tree elem;
7261 for (elem = CONSTRUCTOR_ELTS (init);
7262 (elem
7263 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7264 elem = TREE_CHAIN (elem))
7267 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7268 return expand_expr (fold (TREE_VALUE (elem)), target,
7269 tmode, modifier);
7271 else if (TREE_CODE (init) == STRING_CST
7272 && 0 > compare_tree_int (index,
7273 TREE_STRING_LENGTH (init)))
7275 tree type = TREE_TYPE (TREE_TYPE (init));
7276 enum machine_mode mode = TYPE_MODE (type);
7278 if (GET_MODE_CLASS (mode) == MODE_INT
7279 && GET_MODE_SIZE (mode) == 1)
7280 return gen_int_mode (TREE_STRING_POINTER (init)
7281 [TREE_INT_CST_LOW (index)], mode);
7286 /* Fall through. */
7288 case COMPONENT_REF:
7289 case BIT_FIELD_REF:
7290 case ARRAY_RANGE_REF:
7291 /* If the operand is a CONSTRUCTOR, we can just extract the
7292 appropriate field if it is present. Don't do this if we have
7293 already written the data since we want to refer to that copy
7294 and varasm.c assumes that's what we'll do. */
7295 if (code == COMPONENT_REF
7296 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
7297 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
7299 tree elt;
7301 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7302 elt = TREE_CHAIN (elt))
7303 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7304 /* We can normally use the value of the field in the
7305 CONSTRUCTOR. However, if this is a bitfield in
7306 an integral mode that we can fit in a HOST_WIDE_INT,
7307 we must mask only the number of bits in the bitfield,
7308 since this is done implicitly by the constructor. If
7309 the bitfield does not meet either of those conditions,
7310 we can't do this optimization. */
7311 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7312 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7313 == MODE_INT)
7314 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7315 <= HOST_BITS_PER_WIDE_INT))))
7317 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7318 && modifier == EXPAND_STACK_PARM)
7319 target = 0;
7320 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7321 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7323 HOST_WIDE_INT bitsize
7324 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7325 enum machine_mode imode
7326 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7328 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7330 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7331 op0 = expand_and (imode, op0, op1, target);
7333 else
7335 tree count
7336 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize, 0);
7339 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7340 target, 0);
7341 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7342 target, 0);
7346 return op0;
7351 enum machine_mode mode1;
7352 HOST_WIDE_INT bitsize, bitpos;
7353 tree offset;
7354 int volatilep = 0;
7355 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7356 &mode1, &unsignedp, &volatilep);
7357 rtx orig_op0;
7359 /* If we got back the original object, something is wrong. Perhaps
7360 we are evaluating an expression too early. In any event, don't
7361 infinitely recurse. */
7362 if (tem == exp)
7363 abort ();
7365 /* If TEM's type is a union of variable size, pass TARGET to the inner
7366 computation, since it will need a temporary and TARGET is known
7367 to have to do. This occurs in unchecked conversion in Ada. */
7369 orig_op0 = op0
7370 = expand_expr (tem,
7371 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7372 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7373 != INTEGER_CST)
7374 && modifier != EXPAND_STACK_PARM
7375 ? target : NULL_RTX),
7376 VOIDmode,
7377 (modifier == EXPAND_INITIALIZER
7378 || modifier == EXPAND_CONST_ADDRESS
7379 || modifier == EXPAND_STACK_PARM)
7380 ? modifier : EXPAND_NORMAL);
7382 /* If this is a constant, put it into a register if it is a
7383 legitimate constant and OFFSET is 0, and into memory if it isn't. */
7384 if (CONSTANT_P (op0))
7386 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7387 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7388 && offset == 0)
7389 op0 = force_reg (mode, op0);
7390 else
7391 op0 = validize_mem (force_const_mem (mode, op0));
7394 if (offset != 0)
7396 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7397 EXPAND_SUM);
7399 /* If this object is in a register, put it into memory.
7400 This case can't occur in C, but can in Ada if we have
7401 unchecked conversion of an expression from a scalar type to
7402 an array or record type. */
7403 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7404 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7406 /* If the operand is a SAVE_EXPR, we can deal with this by
7407 forcing the SAVE_EXPR into memory. */
7408 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7410 put_var_into_stack (TREE_OPERAND (exp, 0));
7411 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7413 else
7415 tree nt
7416 = build_qualified_type (TREE_TYPE (tem),
7417 (TYPE_QUALS (TREE_TYPE (tem))
7418 | TYPE_QUAL_CONST));
7419 rtx memloc = assign_temp (nt, 1, 1, 1);
7421 emit_move_insn (memloc, op0);
7422 op0 = memloc;
7426 if (GET_CODE (op0) != MEM)
7427 abort ();
7429 #ifdef POINTERS_EXTEND_UNSIGNED
7430 if (GET_MODE (offset_rtx) != Pmode)
7431 offset_rtx = convert_memory_address (Pmode, offset_rtx);
7432 #else
7433 if (GET_MODE (offset_rtx) != ptr_mode)
7434 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7435 #endif
7437 /* A constant address in OP0 can have VOIDmode; we must not try
7438 to call force_reg in that case, so avoid it. */
7439 if (GET_CODE (op0) == MEM
7440 && GET_MODE (op0) == BLKmode
7441 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7442 && bitsize != 0
7443 && (bitpos % bitsize) == 0
7444 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7445 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7447 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7448 bitpos = 0;
7451 op0 = offset_address (op0, offset_rtx,
7452 highest_pow2_factor (offset));
7455 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7456 record its alignment as BIGGEST_ALIGNMENT. */
7457 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7458 && is_aligning_offset (offset, tem))
7459 set_mem_align (op0, BIGGEST_ALIGNMENT);
7461 /* Don't forget about volatility even if this is a bitfield. */
7462 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7464 if (op0 == orig_op0)
7465 op0 = copy_rtx (op0);
7467 MEM_VOLATILE_P (op0) = 1;
7470 /* The following code doesn't handle CONCAT.
7471 Assume only bitpos == 0 can be used for CONCAT, due to
7472 one-element arrays having the same mode as their element. */
7473 if (GET_CODE (op0) == CONCAT)
7475 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7476 abort ();
7477 return op0;
7480 /* In cases where an aligned union has an unaligned object
7481 as a field, we might be extracting a BLKmode value from
7482 an integer-mode (e.g., SImode) object. Handle this case
7483 by doing the extract into an object as wide as the field
7484 (which we know to be the width of a basic mode), then
7485 storing into memory, and changing the mode to BLKmode. */
7486 if (mode1 == VOIDmode
7487 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7488 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7489 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7490 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7491 && modifier != EXPAND_CONST_ADDRESS
7492 && modifier != EXPAND_INITIALIZER)
7493 /* If the field isn't aligned enough to fetch as a memref,
7494 fetch it as a bit field. */
7495 || (mode1 != BLKmode
7496 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7497 && ((TYPE_ALIGN (TREE_TYPE (tem))
7498 < GET_MODE_ALIGNMENT (mode))
7499 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7500 /* If the type and the field are a constant size and the
7501 size of the type isn't the same size as the bitfield,
7502 we must use bitfield operations. */
7503 || (bitsize >= 0
7504 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7505 == INTEGER_CST)
7506 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7507 bitsize)))
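/* The value must be extracted with bit-field operations.  Choose an
   extraction mode: if the field is BLKmode, either block-copy the bytes
   below or pick an integer mode wide enough to hold BITSIZE bits.  */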
7509 enum machine_mode ext_mode = mode;
7511 if (ext_mode == BLKmode
7512 && ! (target != 0 && GET_CODE (op0) == MEM
7513 && GET_CODE (target) == MEM
7514 && bitpos % BITS_PER_UNIT == 0))
7515 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7517 if (ext_mode == BLKmode)
7519 /* In this case, BITPOS must start at a byte boundary and
7520 TARGET, if specified, must be a MEM. */
7521 if (GET_CODE (op0) != MEM
7522 || (target != 0 && GET_CODE (target) != MEM)
7523 || bitpos % BITS_PER_UNIT != 0)
7524 abort ();
7526 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7527 if (target == 0)
7528 target = assign_temp (type, 0, 1, 1);
7530 emit_block_move (target, op0,
7531 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7532 / BITS_PER_UNIT),
7533 (modifier == EXPAND_STACK_PARM
7534 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7536 return target;
7539 op0 = validize_mem (op0);
7541 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7542 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7544 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7545 (modifier == EXPAND_STACK_PARM
7546 ? NULL_RTX : target),
7547 ext_mode, ext_mode,
7548 int_size_in_bytes (TREE_TYPE (tem)));
7550 /* If the result is a record type and BITSIZE is narrower than
7551 the mode of OP0, an integral mode, and this is a big endian
7552 machine, we must put the field into the high-order bits. */
7553 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7554 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7555 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7556 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7557 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7558 - bitsize),
7559 op0, 1);
7561 if (mode == BLKmode)
7563 rtx new = assign_temp (build_qualified_type
7564 ((*lang_hooks.types.type_for_mode)
7565 (ext_mode, 0),
7566 TYPE_QUAL_CONST), 0, 1, 1);
7568 emit_move_insn (new, op0);
7569 op0 = copy_rtx (new);
7570 PUT_MODE (op0, BLKmode);
7571 set_mem_attributes (op0, exp, 1);
7574 return op0;
7577 /* If the result is BLKmode, use that to access the object
7578 now as well. */
7579 if (mode == BLKmode)
7580 mode1 = BLKmode;
7582 /* Get a reference to just this component. */
7583 if (modifier == EXPAND_CONST_ADDRESS
7584 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7585 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7586 else
7587 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7589 if (op0 == orig_op0)
7590 op0 = copy_rtx (op0);
7592 set_mem_attributes (op0, exp, 0);
7593 if (GET_CODE (XEXP (op0, 0)) == REG)
7594 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7596 MEM_VOLATILE_P (op0) |= volatilep;
7597 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7598 || modifier == EXPAND_CONST_ADDRESS
7599 || modifier == EXPAND_INITIALIZER)
7600 return op0;
7601 else if (target == 0)
7602 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7604 convert_move (target, op0, unsignedp);
7605 return target;
7608 case VTABLE_REF:
7610 rtx insn, before = get_last_insn (), vtbl_ref;
7612 /* Evaluate the interior expression. */
7613 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7614 tmode, modifier);
7616 /* Get or create an instruction off which to hang a note. */
7617 if (REG_P (subtarget))
7619 target = subtarget;
7620 insn = get_last_insn ();
7621 if (insn == before)
7622 abort ();
7623 if (! INSN_P (insn))
7624 insn = prev_nonnote_insn (insn);
7626 else
7628 target = gen_reg_rtx (GET_MODE (subtarget));
7629 insn = emit_move_insn (target, subtarget);
7632 /* Collect the data for the note. */
7633 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7634 vtbl_ref = plus_constant (vtbl_ref,
7635 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7636 /* Discard the initial CONST that was added. */
7637 vtbl_ref = XEXP (vtbl_ref, 0);
7639 REG_NOTES (insn)
7640 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7642 return target;
7645 /* Intended for a reference to a buffer of a file-object in Pascal.
7646 But it's not certain that a special tree code will really be
7647 necessary for these. INDIRECT_REF might work for them. */
7648 case BUFFER_REF:
7649 abort ();
7651 case IN_EXPR:
7653 /* Pascal set IN expression.
7655 Algorithm:
7656 rlo = set_low - (set_low%bits_per_word);
7657 the_word = set [ (index - rlo)/bits_per_word ];
7658 bit_index = index % bits_per_word;
7659 bitmask = 1 << bit_index;
7660 return !!(the_word & bitmask); */
7662 tree set = TREE_OPERAND (exp, 0);
7663 tree index = TREE_OPERAND (exp, 1);
7664 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7665 tree set_type = TREE_TYPE (set);
7666 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7667 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7668 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7669 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7670 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7671 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7672 rtx setaddr = XEXP (setval, 0);
7673 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7674 rtx rlow;
7675 rtx diff, quo, rem, addr, bit, result;
7677 /* If domain is empty, answer is no. Likewise if index is constant
7678 and out of bounds. */
7679 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7680 && TREE_CODE (set_low_bound) == INTEGER_CST
7681 && tree_int_cst_lt (set_high_bound, set_low_bound))
7682 || (TREE_CODE (index) == INTEGER_CST
7683 && TREE_CODE (set_low_bound) == INTEGER_CST
7684 && tree_int_cst_lt (index, set_low_bound))
7685 || (TREE_CODE (set_high_bound) == INTEGER_CST
7686 && TREE_CODE (index) == INTEGER_CST
7687 && tree_int_cst_lt (set_high_bound, index))))
7688 return const0_rtx;
7690 if (target == 0)
7691 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7693 /* If we get here, we have to generate the code for both cases
7694 (in range and out of range). */
7696 op0 = gen_label_rtx ();
7697 op1 = gen_label_rtx ();
7699 if (! (GET_CODE (index_val) == CONST_INT
7700 && GET_CODE (lo_r) == CONST_INT))
7701 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7702 GET_MODE (index_val), iunsignedp, op1);
7704 if (! (GET_CODE (index_val) == CONST_INT
7705 && GET_CODE (hi_r) == CONST_INT))
7706 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7707 GET_MODE (index_val), iunsignedp, op1);
7709 /* Calculate the element number of bit zero in the first word
7710 of the set. */
7711 if (GET_CODE (lo_r) == CONST_INT)
7712 rlow = GEN_INT (INTVAL (lo_r)
7713 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7714 else
7715 rlow = expand_binop (index_mode, and_optab, lo_r,
7716 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7717 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7719 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7720 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7722 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7723 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7724 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7725 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7727 addr = memory_address (byte_mode,
7728 expand_binop (index_mode, add_optab, diff,
7729 setaddr, NULL_RTX, iunsignedp,
7730 OPTAB_LIB_WIDEN));
7732 /* Extract the bit we want to examine. */
7733 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7734 gen_rtx_MEM (byte_mode, addr),
7735 make_tree (TREE_TYPE (index), rem),
7736 NULL_RTX, 1);
7737 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7738 GET_MODE (target) == byte_mode ? target : 0,
7739 1, OPTAB_LIB_WIDEN);
7741 if (result != target)
7742 convert_move (target, result, 1);
7744 /* Output the code to handle the out-of-range case. */
7745 emit_jump (op0);
7746 emit_label (op1);
7747 emit_move_insn (target, const0_rtx);
7748 emit_label (op0);
7749 return target;
7752 case WITH_CLEANUP_EXPR:
7753 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7755 WITH_CLEANUP_EXPR_RTL (exp)
7756 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7757 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7758 CLEANUP_EH_ONLY (exp));
7760 /* That's it for this cleanup. */
7761 TREE_OPERAND (exp, 1) = 0;
7763 return WITH_CLEANUP_EXPR_RTL (exp);
7765 case CLEANUP_POINT_EXPR:
7767 /* Start a new binding layer that will keep track of all cleanup
7768 actions to be performed. */
7769 expand_start_bindings (2);
7771 target_temp_slot_level = temp_slot_level;
7773 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7774 /* If we're going to use this value, load it up now. */
7775 if (! ignore)
7776 op0 = force_not_mem (op0);
7777 preserve_temp_slots (op0);
7778 expand_end_bindings (NULL_TREE, 0, 0);
7780 return op0;
7782 case CALL_EXPR:
7783 /* Check for a built-in function. */
7784 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7785 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7786 == FUNCTION_DECL)
7787 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7789 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7790 == BUILT_IN_FRONTEND)
7791 return (*lang_hooks.expand_expr) (exp, original_target,
7792 tmode, modifier);
7793 else
7794 return expand_builtin (exp, target, subtarget, tmode, ignore);
7797 return expand_call (exp, target, ignore);
7799 case NON_LVALUE_EXPR:
7800 case NOP_EXPR:
7801 case CONVERT_EXPR:
7802 case REFERENCE_EXPR:
7803 if (TREE_OPERAND (exp, 0) == error_mark_node)
7804 return const0_rtx;
7806 if (TREE_CODE (type) == UNION_TYPE)
7808 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7810 /* If both input and output are BLKmode, this conversion isn't doing
7811 anything except possibly changing memory attributes.
7812 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7814 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7815 modifier);
7817 result = copy_rtx (result);
7818 set_mem_attributes (result, exp, 0);
7819 return result;
7822 if (target == 0)
7823 target = assign_temp (type, 0, 1, 1);
7825 if (GET_CODE (target) == MEM)
7826 /* Store data into beginning of memory target. */
7827 store_expr (TREE_OPERAND (exp, 0),
7828 adjust_address (target, TYPE_MODE (valtype), 0),
7829 modifier == EXPAND_STACK_PARM ? 2 : 0);
7831 else if (GET_CODE (target) == REG)
7832 /* Store this field into a union of the proper type. */
7833 store_field (target,
7834 MIN ((int_size_in_bytes (TREE_TYPE
7835 (TREE_OPERAND (exp, 0)))
7836 * BITS_PER_UNIT),
7837 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7838 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7839 VOIDmode, 0, type, 0);
7840 else
7841 abort ();
7843 /* Return the entire union. */
7844 return target;
7847 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7849 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7850 modifier);
7852 /* If the signedness of the conversion differs and OP0 is
7853 a promoted SUBREG, clear that indication since we now
7854 have to do the proper extension. */
7855 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7856 && GET_CODE (op0) == SUBREG)
7857 SUBREG_PROMOTED_VAR_P (op0) = 0;
7859 return op0;
7862 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7863 if (GET_MODE (op0) == mode)
7864 return op0;
7866 /* If OP0 is a constant, just convert it into the proper mode. */
7867 if (CONSTANT_P (op0))
7869 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7870 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7872 if (modifier == EXPAND_INITIALIZER)
7873 return simplify_gen_subreg (mode, op0, inner_mode,
7874 subreg_lowpart_offset (mode,
7875 inner_mode));
7876 else
7877 return convert_modes (mode, inner_mode, op0,
7878 TREE_UNSIGNED (inner_type));
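/* When expanding an initializer we cannot emit conversion insns, so
   represent the extension symbolically with a SIGN_EXTEND or ZERO_EXTEND
   rtx instead.  */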
7881 if (modifier == EXPAND_INITIALIZER)
7882 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7884 if (target == 0)
7885 return
7886 convert_to_mode (mode, op0,
7887 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7888 else
7889 convert_move (target, op0,
7890 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7891 return target;
7893 case VIEW_CONVERT_EXPR:
7894 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7896 /* If the input and output modes are both the same, we are done.
7897 Otherwise, if neither mode is BLKmode and both are within a word, we
7898 can use gen_lowpart. If neither is true, make sure the operand is
7899 in memory and convert the MEM to the new mode. */
7900 if (TYPE_MODE (type) == GET_MODE (op0))
7902 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7903 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7904 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7905 op0 = gen_lowpart (TYPE_MODE (type), op0);
7906 else if (GET_CODE (op0) != MEM)
7908 /* If the operand is not a MEM, force it into memory. Since we
7909 are going to be changing the mode of the MEM, don't call
7910 force_const_mem for constants because we don't allow pool
7911 constants to change mode. */
7912 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7914 if (TREE_ADDRESSABLE (exp))
7915 abort ();
7917 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7918 target
7919 = assign_stack_temp_for_type
7920 (TYPE_MODE (inner_type),
7921 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7923 emit_move_insn (target, op0);
7924 op0 = target;
7927 /* At this point, OP0 is in the correct mode. If the output type is such
7928 that the operand is known to be aligned, indicate that it is.
7929 Otherwise, we need only be concerned about alignment for non-BLKmode
7930 results. */
7931 if (GET_CODE (op0) == MEM)
7933 op0 = copy_rtx (op0);
7935 if (TYPE_ALIGN_OK (type))
7936 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7937 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7938 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7940 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7941 HOST_WIDE_INT temp_size
7942 = MAX (int_size_in_bytes (inner_type),
7943 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7944 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7945 temp_size, 0, type);
7946 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7948 if (TREE_ADDRESSABLE (exp))
7949 abort ();
7951 if (GET_MODE (op0) == BLKmode)
7952 emit_block_move (new_with_op0_mode, op0,
7953 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7954 (modifier == EXPAND_STACK_PARM
7955 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7956 else
7957 emit_move_insn (new_with_op0_mode, op0);
7959 op0 = new;
7962 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7965 return op0;
7967 case PLUS_EXPR:
7968 this_optab = ! unsignedp && flag_trapv
7969 && (GET_MODE_CLASS (mode) == MODE_INT)
7970 ? addv_optab : add_optab;
7972 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7973 something else, make sure we add the register to the constant and
7974 then to the other thing. This case can occur during strength
7975 reduction and doing it this way will produce better code if the
7976 frame pointer or argument pointer is eliminated.
7978 fold-const.c will ensure that the constant is always in the inner
7979 PLUS_EXPR, so the only case we need to do anything about is if
7980 sp, ap, or fp is our second argument, in which case we must swap
7981 the innermost first argument and our second argument. */
7983 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7984 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7985 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7986 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7987 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7988 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7990 tree t = TREE_OPERAND (exp, 1);
7992 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7993 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7996 /* If the result is to be ptr_mode and we are adding an integer to
7997 something, we might be forming a constant. So try to use
7998 plus_constant. If it produces a sum and we can't accept it,
7999 use force_operand. This allows P = &ARR[const] to generate
8000 efficient code on machines where a SYMBOL_REF is not a valid
8001 address.
8003 If this is an EXPAND_SUM call, always return the sum. */
8004 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8005 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8007 if (modifier == EXPAND_STACK_PARM)
8008 target = 0;
8009 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8010 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8011 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8013 rtx constant_part;
8015 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8016 EXPAND_SUM);
8017 /* Use immed_double_const to ensure that the constant is
8018 truncated according to the mode of OP1, then sign extended
8019 to a HOST_WIDE_INT. Using the constant directly can result
8020 in non-canonical RTL in a 64x32 cross compile. */
8021 constant_part
8022 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8023 (HOST_WIDE_INT) 0,
8024 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8025 op1 = plus_constant (op1, INTVAL (constant_part));
8026 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8027 op1 = force_operand (op1, target);
8028 return op1;
8031 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8032 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
8033 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8035 rtx constant_part;
8037 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8038 (modifier == EXPAND_INITIALIZER
8039 ? EXPAND_INITIALIZER : EXPAND_SUM));
8040 if (! CONSTANT_P (op0))
8042 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8043 VOIDmode, modifier);
8044 /* Don't go to both_summands if modifier
8045 says it's not right to return a PLUS. */
8046 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8047 goto binop2;
8048 goto both_summands;
8050 /* Use immed_double_const to ensure that the constant is
8051 truncated according to the mode of OP1, then sign extended
8052 to a HOST_WIDE_INT. Using the constant directly can result
8053 in non-canonical RTL in a 64x32 cross compile. */
8054 constant_part
8055 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8056 (HOST_WIDE_INT) 0,
8057 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8058 op0 = plus_constant (op0, INTVAL (constant_part));
8059 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8060 op0 = force_operand (op0, target);
8061 return op0;
8065 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8066 subtarget = 0;
8068 /* No sense saving up arithmetic to be done
8069 if it's all in the wrong mode to form part of an address.
8070 And force_operand won't know whether to sign-extend or
8071 zero-extend. */
8072 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8073 || mode != ptr_mode)
8075 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8076 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8077 if (op0 == const0_rtx)
8078 return op1;
8079 if (op1 == const0_rtx)
8080 return op0;
8081 goto binop2;
8084 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8085 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8087 /* We come here from MINUS_EXPR when the second operand is a
8088 constant. */
8089 both_summands:
8090 /* Make sure any term that's a sum with a constant comes last. */
8091 if (GET_CODE (op0) == PLUS
8092 && CONSTANT_P (XEXP (op0, 1)))
8094 temp = op0;
8095 op0 = op1;
8096 op1 = temp;
8098 /* If adding to a sum including a constant,
8099 associate it to put the constant outside. */
8100 if (GET_CODE (op1) == PLUS
8101 && CONSTANT_P (XEXP (op1, 1)))
8103 rtx constant_term = const0_rtx;
8105 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
8106 if (temp != 0)
8107 op0 = temp;
8108 /* Ensure that MULT comes first if there is one. */
8109 else if (GET_CODE (op0) == MULT)
8110 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
8111 else
8112 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
8114 /* Let's also eliminate constants from op0 if possible. */
8115 op0 = eliminate_constant_term (op0, &constant_term);
8117 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
8118 their sum should be a constant. Form it into OP1, since the
8119 result we want will then be OP0 + OP1. */
8121 temp = simplify_binary_operation (PLUS, mode, constant_term,
8122 XEXP (op1, 1));
8123 if (temp != 0)
8124 op1 = temp;
8125 else
8126 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
8129 /* Put a constant term last and put a multiplication first. */
8130 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
8131 temp = op1, op1 = op0, op0 = temp;
8133 temp = simplify_binary_operation (PLUS, mode, op0, op1);
8134 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
8136 case MINUS_EXPR:
8137 /* For initializers, we are allowed to return a MINUS of two
8138 symbolic constants. Here we handle all cases when both operands
8139 are constant. */
8140 /* Handle difference of two symbolic constants,
8141 for the sake of an initializer. */
8142 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8143 && really_constant_p (TREE_OPERAND (exp, 0))
8144 && really_constant_p (TREE_OPERAND (exp, 1)))
8146 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
8147 modifier);
8148 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8149 modifier);
8151 /* If the last operand is a CONST_INT, use plus_constant of
8152 the negated constant. Else make the MINUS. */
8153 if (GET_CODE (op1) == CONST_INT)
8154 return plus_constant (op0, - INTVAL (op1));
8155 else
8156 return gen_rtx_MINUS (mode, op0, op1);
8159 this_optab = ! unsignedp && flag_trapv
8160 && (GET_MODE_CLASS(mode) == MODE_INT)
8161 ? subv_optab : sub_optab;
8163 /* No sense saving up arithmetic to be done
8164 if it's all in the wrong mode to form part of an address.
8165 And force_operand won't know whether to sign-extend or
8166 zero-extend. */
8167 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8168 || mode != ptr_mode)
8169 goto binop;
8171 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8172 subtarget = 0;
8174 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8175 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8177 /* Convert A - const to A + (-const). */
8178 if (GET_CODE (op1) == CONST_INT)
8180 op1 = negate_rtx (mode, op1);
8181 goto both_summands;
8184 goto binop2;
8186 case MULT_EXPR:
8187 /* If first operand is constant, swap them.
8188 Thus the following special case checks need only
8189 check the second operand. */
8190 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8192 tree t1 = TREE_OPERAND (exp, 0);
8193 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8194 TREE_OPERAND (exp, 1) = t1;
8197 /* Attempt to return something suitable for generating an
8198 indexed address, for machines that support that. */
8200 if (modifier == EXPAND_SUM && mode == ptr_mode
8201 && host_integerp (TREE_OPERAND (exp, 1), 0))
8203 tree exp1 = TREE_OPERAND (exp, 1);
8205 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8206 EXPAND_SUM);
8208 /* If we knew for certain that this is arithmetic for an array
8209 reference, and we knew the bounds of the array, then we could
8210 apply the distributive law across (PLUS X C) for constant C.
8211 Without such knowledge, we risk overflowing the computation
8212 when both X and C are large, but X+C isn't. */
8213 /* ??? Could perhaps special-case EXP being unsigned and C being
8214 positive. In that case we are certain that X+C is no smaller
8215 than X and so the transformed expression will overflow iff the
8216 original would have. */
8218 if (GET_CODE (op0) != REG)
8219 op0 = force_operand (op0, NULL_RTX);
8220 if (GET_CODE (op0) != REG)
8221 op0 = copy_to_mode_reg (mode, op0);
8223 return gen_rtx_MULT (mode, op0,
8224 gen_int_mode (tree_low_cst (exp1, 0),
8225 TYPE_MODE (TREE_TYPE (exp1))));
8228 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8229 subtarget = 0;
8231 if (modifier == EXPAND_STACK_PARM)
8232 target = 0;
8234 /* Check for multiplying things that have been extended
8235 from a narrower type. If this machine supports multiplying
8236 in that narrower type with a result in the desired type,
8237 do it that way, and avoid the explicit type-conversion. */
8238 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8239 && TREE_CODE (type) == INTEGER_TYPE
8240 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8241 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8242 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8243 && int_fits_type_p (TREE_OPERAND (exp, 1),
8244 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8245 /* Don't use a widening multiply if a shift will do. */
8246 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8247 > HOST_BITS_PER_WIDE_INT)
8248 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8250 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8251 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8253 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8254 /* If both operands are extended, they must either both
8255 be zero-extended or both be sign-extended. */
8256 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8258 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
8260 enum machine_mode innermode
8261 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
8262 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8263 ? smul_widen_optab : umul_widen_optab);
8264 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8265 ? umul_widen_optab : smul_widen_optab);
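/* THIS_OPTAB is the widening multiply whose signedness matches the
   operands; OTHER_OPTAB has the opposite signedness and is usable only
   with the high-part adjustment made below.  */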
8266 if (mode == GET_MODE_WIDER_MODE (innermode))
8268 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8270 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8271 NULL_RTX, VOIDmode, 0);
8272 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8273 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8274 VOIDmode, 0);
8275 else
8276 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8277 NULL_RTX, VOIDmode, 0);
8278 goto binop2;
8280 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8281 && innermode == word_mode)
8283 rtx htem;
8284 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8285 NULL_RTX, VOIDmode, 0);
8286 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8287 op1 = convert_modes (innermode, mode,
8288 expand_expr (TREE_OPERAND (exp, 1),
8289 NULL_RTX, VOIDmode, 0),
8290 unsignedp);
8291 else
8292 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8293 NULL_RTX, VOIDmode, 0);
8294 temp = expand_binop (mode, other_optab, op0, op1, target,
8295 unsignedp, OPTAB_LIB_WIDEN);
8296 htem = expand_mult_highpart_adjust (innermode,
8297 gen_highpart (innermode, temp),
8298 op0, op1,
8299 gen_highpart (innermode, temp),
8300 unsignedp);
8301 emit_move_insn (gen_highpart (innermode, temp), htem);
8302 return temp;
8306 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8307 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8308 return expand_mult (mode, op0, op1, target, unsignedp);
8310 case TRUNC_DIV_EXPR:
8311 case FLOOR_DIV_EXPR:
8312 case CEIL_DIV_EXPR:
8313 case ROUND_DIV_EXPR:
8314 case EXACT_DIV_EXPR:
8315 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8316 subtarget = 0;
8317 if (modifier == EXPAND_STACK_PARM)
8318 target = 0;
8319 /* Possible optimization: compute the dividend with EXPAND_SUM
8320 then, if the divisor is constant, we can optimize the case
8321 where some terms of the dividend have coefficients divisible by it. */
8322 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8323 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8324 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8326 case RDIV_EXPR:
8327 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal, saving an
8328 expensive divide. If not, combine will rebuild the original
8329 computation. */
8330 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8331 && TREE_CODE (type) == REAL_TYPE
8332 && !real_onep (TREE_OPERAND (exp, 0)))
8333 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8334 build (RDIV_EXPR, type,
8335 build_real (type, dconst1),
8336 TREE_OPERAND (exp, 1))),
8337 target, tmode, modifier);
8338 this_optab = sdiv_optab;
8339 goto binop;
8341 case TRUNC_MOD_EXPR:
8342 case FLOOR_MOD_EXPR:
8343 case CEIL_MOD_EXPR:
8344 case ROUND_MOD_EXPR:
8345 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8346 subtarget = 0;
8347 if (modifier == EXPAND_STACK_PARM)
8348 target = 0;
8349 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8350 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8351 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8353 case FIX_ROUND_EXPR:
8354 case FIX_FLOOR_EXPR:
8355 case FIX_CEIL_EXPR:
8356 abort (); /* Not used for C. */
8358 case FIX_TRUNC_EXPR:
8359 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8360 if (target == 0 || modifier == EXPAND_STACK_PARM)
8361 target = gen_reg_rtx (mode);
8362 expand_fix (target, op0, unsignedp);
8363 return target;
8365 case FLOAT_EXPR:
8366 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8367 if (target == 0 || modifier == EXPAND_STACK_PARM)
8368 target = gen_reg_rtx (mode);
8369 /* expand_float can't figure out what to do if FROM has VOIDmode.
8370 So give it the correct mode. With -O, cse will optimize this. */
8371 if (GET_MODE (op0) == VOIDmode)
8372 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8373 op0);
8374 expand_float (target, op0,
8375 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8376 return target;
8378 case NEGATE_EXPR:
8379 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8380 if (modifier == EXPAND_STACK_PARM)
8381 target = 0;
8382 temp = expand_unop (mode,
8383 ! unsignedp && flag_trapv
8384 && (GET_MODE_CLASS(mode) == MODE_INT)
8385 ? negv_optab : neg_optab, op0, target, 0);
8386 if (temp == 0)
8387 abort ();
8388 return temp;
8390 case ABS_EXPR:
8391 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8392 if (modifier == EXPAND_STACK_PARM)
8393 target = 0;
8395 /* Handle complex values specially. */
8396 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8397 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8398 return expand_complex_abs (mode, op0, target, unsignedp);
8400 /* Unsigned abs is simply the operand. Testing here means we don't
8401 risk generating incorrect code below. */
8402 if (TREE_UNSIGNED (type))
8403 return op0;
8405 return expand_abs (mode, op0, target, unsignedp,
8406 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8408 case MAX_EXPR:
8409 case MIN_EXPR:
8410 target = original_target;
8411 if (target == 0
8412 || modifier == EXPAND_STACK_PARM
8413 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8414 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8415 || GET_MODE (target) != mode
8416 || (GET_CODE (target) == REG
8417 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8418 target = gen_reg_rtx (mode);
8419 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8420 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8422 /* First try to do it with a special MIN or MAX instruction.
8423 If that does not win, use a conditional jump to select the proper
8424 value. */
8425 this_optab = (TREE_UNSIGNED (type)
8426 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8427 : (code == MIN_EXPR ? smin_optab : smax_optab));
8429 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8430 OPTAB_WIDEN);
8431 if (temp != 0)
8432 return temp;
8434 /* At this point, a MEM target is no longer useful; we will get better
8435 code without it. */
8437 if (GET_CODE (target) == MEM)
8438 target = gen_reg_rtx (mode);
8440 if (target != op0)
8441 emit_move_insn (target, op0);
8443 op0 = gen_label_rtx ();
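/* TARGET already holds the first operand.  Branch to OP0, skipping the
   move below, when TARGET is already the desired MIN/MAX value;
   otherwise fall through and copy the second operand into TARGET.  */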
8445 /* If this mode is an integer too wide to compare properly,
8446 compare word by word. Rely on cse to optimize constant cases. */
8447 if (GET_MODE_CLASS (mode) == MODE_INT
8448 && ! can_compare_p (GE, mode, ccp_jump))
8450 if (code == MAX_EXPR)
8451 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8452 target, op1, NULL_RTX, op0);
8453 else
8454 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8455 op1, target, NULL_RTX, op0);
8457 else
8459 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8460 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8461 unsignedp, mode, NULL_RTX, NULL_RTX,
8462 op0);
8464 emit_move_insn (target, op1);
8465 emit_label (op0);
8466 return target;
8468 case BIT_NOT_EXPR:
8469 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8470 if (modifier == EXPAND_STACK_PARM)
8471 target = 0;
8472 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8473 if (temp == 0)
8474 abort ();
8475 return temp;
8477 case FFS_EXPR:
8478 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8479 if (modifier == EXPAND_STACK_PARM)
8480 target = 0;
8481 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8482 if (temp == 0)
8483 abort ();
8484 return temp;
8486 case CLZ_EXPR:
8487 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8488 temp = expand_unop (mode, clz_optab, op0, target, 1);
8489 if (temp == 0)
8490 abort ();
8491 return temp;
8493 case CTZ_EXPR:
8494 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8495 temp = expand_unop (mode, ctz_optab, op0, target, 1);
8496 if (temp == 0)
8497 abort ();
8498 return temp;
8500 case POPCOUNT_EXPR:
8501 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8502 temp = expand_unop (mode, popcount_optab, op0, target, 1);
8503 if (temp == 0)
8504 abort ();
8505 return temp;
8507 case PARITY_EXPR:
8508 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8509 temp = expand_unop (mode, parity_optab, op0, target, 1);
8510 if (temp == 0)
8511 abort ();
8512 return temp;
8514 /* ??? Can optimize bitwise operations with one arg constant.
8515 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8516 and (a bitwise1 b) bitwise2 b (etc)
8517 but that is probably not worthwhile. */
8519 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8520 boolean values when we want in all cases to compute both of them. In
8521 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8522 as actual zero-or-1 values and then bitwise anding. In cases where
8523 there cannot be any side effects, better code would be made by
8524 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8525 how to recognize those cases. */
8527 case TRUTH_AND_EXPR:
8528 case BIT_AND_EXPR:
8529 this_optab = and_optab;
8530 goto binop;
8532 case TRUTH_OR_EXPR:
8533 case BIT_IOR_EXPR:
8534 this_optab = ior_optab;
8535 goto binop;
8537 case TRUTH_XOR_EXPR:
8538 case BIT_XOR_EXPR:
8539 this_optab = xor_optab;
8540 goto binop;
8542 case LSHIFT_EXPR:
8543 case RSHIFT_EXPR:
8544 case LROTATE_EXPR:
8545 case RROTATE_EXPR:
8546 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8547 subtarget = 0;
8548 if (modifier == EXPAND_STACK_PARM)
8549 target = 0;
8550 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8551 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8552 unsignedp);
8554 /* Could determine the answer when only additive constants differ. Also,
8555 the addition of one can be handled by changing the condition. */
8556 case LT_EXPR:
8557 case LE_EXPR:
8558 case GT_EXPR:
8559 case GE_EXPR:
8560 case EQ_EXPR:
8561 case NE_EXPR:
8562 case UNORDERED_EXPR:
8563 case ORDERED_EXPR:
8564 case UNLT_EXPR:
8565 case UNLE_EXPR:
8566 case UNGT_EXPR:
8567 case UNGE_EXPR:
8568 case UNEQ_EXPR:
8569 temp = do_store_flag (exp,
8570 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8571 tmode != VOIDmode ? tmode : mode, 0);
8572 if (temp != 0)
8573 return temp;
8575 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8576 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8577 && original_target
8578 && GET_CODE (original_target) == REG
8579 && (GET_MODE (original_target)
8580 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8582 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8583 VOIDmode, 0);
8585 /* If temp is constant, we can just compute the result. */
8586 if (GET_CODE (temp) == CONST_INT)
8588 if (INTVAL (temp) != 0)
8589 emit_move_insn (target, const1_rtx);
8590 else
8591 emit_move_insn (target, const0_rtx);
8593 return target;
8596 if (temp != original_target)
8598 enum machine_mode mode1 = GET_MODE (temp);
8599 if (mode1 == VOIDmode)
8600 mode1 = tmode != VOIDmode ? tmode : mode;
8602 temp = copy_to_mode_reg (mode1, temp);
8605 op1 = gen_label_rtx ();
8606 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8607 GET_MODE (temp), unsignedp, op1);
8608 emit_move_insn (temp, const1_rtx);
8609 emit_label (op1);
8610 return temp;
8613 /* If no set-flag instruction, must generate a conditional
8614 store into a temporary variable. Drop through
8615 and handle this like && and ||. */
8617 case TRUTH_ANDIF_EXPR:
8618 case TRUTH_ORIF_EXPR:
8619 if (! ignore
8620 && (target == 0
8621 || modifier == EXPAND_STACK_PARM
8622 || ! safe_from_p (target, exp, 1)
8623 /* Make sure we don't have a hard reg (such as function's return
8624 value) live across basic blocks, if not optimizing. */
8625 || (!optimize && GET_CODE (target) == REG
8626 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8627 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
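/* Expand as: TARGET = 0; if the expression is false, jump past the next
   insn; otherwise TARGET = 1.  jumpifnot takes care of short-circuit
   evaluation of the two operands.  */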
8629 if (target)
8630 emit_clr_insn (target);
8632 op1 = gen_label_rtx ();
8633 jumpifnot (exp, op1);
8635 if (target)
8636 emit_0_to_1_insn (target);
8638 emit_label (op1);
8639 return ignore ? const0_rtx : target;
8641 case TRUTH_NOT_EXPR:
8642 if (modifier == EXPAND_STACK_PARM)
8643 target = 0;
8644 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8645 /* The parser is careful to generate TRUTH_NOT_EXPR
8646 only with operands that are always zero or one. */
8647 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8648 target, 1, OPTAB_LIB_WIDEN);
8649 if (temp == 0)
8650 abort ();
8651 return temp;
8653 case COMPOUND_EXPR:
8654 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8655 emit_queue ();
8656 return expand_expr (TREE_OPERAND (exp, 1),
8657 (ignore ? const0_rtx : target),
8658 VOIDmode, modifier);
8660 case COND_EXPR:
8661 /* If we would have a "singleton" (see below) were it not for a
8662 conversion in each arm, bring that conversion back out. */
8663 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8664 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8665 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8666 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8668 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8669 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8671 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8672 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8673 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8674 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8675 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8676 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8677 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8678 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8679 return expand_expr (build1 (NOP_EXPR, type,
8680 build (COND_EXPR, TREE_TYPE (iftrue),
8681 TREE_OPERAND (exp, 0),
8682 iftrue, iffalse)),
8683 target, tmode, modifier);
8687 /* Note that COND_EXPRs whose type is a structure or union
8688 are required to be constructed to contain assignments of
8689 a temporary variable, so that we can evaluate them here
8690 for side effect only. If type is void, we must do likewise. */
8692 /* If an arm of the branch requires a cleanup,
8693 only that cleanup is performed. */
8695 tree singleton = 0;
8696 tree binary_op = 0, unary_op = 0;
8698 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8699 convert it to our mode, if necessary. */
8700 if (integer_onep (TREE_OPERAND (exp, 1))
8701 && integer_zerop (TREE_OPERAND (exp, 2))
8702 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8704 if (ignore)
8706 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8707 modifier);
8708 return const0_rtx;
8711 if (modifier == EXPAND_STACK_PARM)
8712 target = 0;
8713 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8714 if (GET_MODE (op0) == mode)
8715 return op0;
8717 if (target == 0)
8718 target = gen_reg_rtx (mode);
8719 convert_move (target, op0, unsignedp);
8720 return target;
8723 /* Check for X ? A + B : A. If we have this, we can copy A to the
8724 output and conditionally add B. Similarly for unary operations.
8725 Don't do this if X has side-effects because those side effects
8726 might affect A or B and the "?" operation is a sequence point in
8727 ANSI. (operand_equal_p tests for side effects.) */
8729 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8730 && operand_equal_p (TREE_OPERAND (exp, 2),
8731 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8732 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8733 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8734 && operand_equal_p (TREE_OPERAND (exp, 1),
8735 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8736 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8737 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8738 && operand_equal_p (TREE_OPERAND (exp, 2),
8739 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8740 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8741 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8742 && operand_equal_p (TREE_OPERAND (exp, 1),
8743 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8744 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8746 /* If we are not to produce a result, we have no target. Otherwise,
8747 if a target was specified use it; it will not be used as an
8748 intermediate target unless it is safe. If no target, use a
8749 temporary. */
8751 if (ignore)
8752 temp = 0;
8753 else if (modifier == EXPAND_STACK_PARM)
8754 temp = assign_temp (type, 0, 0, 1);
8755 else if (original_target
8756 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8757 || (singleton && GET_CODE (original_target) == REG
8758 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8759 && original_target == var_rtx (singleton)))
8760 && GET_MODE (original_target) == mode
8761 #ifdef HAVE_conditional_move
8762 && (! can_conditionally_move_p (mode)
8763 || GET_CODE (original_target) == REG
8764 || TREE_ADDRESSABLE (type))
8765 #endif
8766 && (GET_CODE (original_target) != MEM
8767 || TREE_ADDRESSABLE (type)))
8768 temp = original_target;
8769 else if (TREE_ADDRESSABLE (type))
8770 abort ();
8771 else
8772 temp = assign_temp (type, 0, 0, 1);
8774 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8775 do the test of X as a store-flag operation, do this as
8776 A + ((X != 0) << log C). Similarly for other simple binary
8777 operators. Only do for C == 1 if BRANCH_COST is low. */
8778 if (temp && singleton && binary_op
8779 && (TREE_CODE (binary_op) == PLUS_EXPR
8780 || TREE_CODE (binary_op) == MINUS_EXPR
8781 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8782 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8783 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8784 : integer_onep (TREE_OPERAND (binary_op, 1)))
8785 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8787 rtx result;
8788 tree cond;
8789 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8790 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8791 ? addv_optab : add_optab)
8792 : TREE_CODE (binary_op) == MINUS_EXPR
8793 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8794 ? subv_optab : sub_optab)
8795 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8796 : xor_optab);
8798 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8799 if (singleton == TREE_OPERAND (exp, 1))
8800 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8801 else
8802 cond = TREE_OPERAND (exp, 0);
8804 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8805 ? temp : NULL_RTX),
8806 mode, BRANCH_COST <= 1);
8808 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8809 result = expand_shift (LSHIFT_EXPR, mode, result,
8810 build_int_2 (tree_log2
8811 (TREE_OPERAND
8812 (binary_op, 1)),
8814 (safe_from_p (temp, singleton, 1)
8815 ? temp : NULL_RTX), 0);
8817 if (result)
8819 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8820 return expand_binop (mode, boptab, op1, result, temp,
8821 unsignedp, OPTAB_LIB_WIDEN);
8825 do_pending_stack_adjust ();
8826 NO_DEFER_POP;
8827 op0 = gen_label_rtx ();
8829 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8831 if (temp != 0)
8833 /* If the target conflicts with the other operand of the
8834 binary op, we can't use it. Also, we can't use the target
8835 if it is a hard register, because evaluating the condition
8836 might clobber it. */
8837 if ((binary_op
8838 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8839 || (GET_CODE (temp) == REG
8840 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8841 temp = gen_reg_rtx (mode);
8842 store_expr (singleton, temp,
8843 modifier == EXPAND_STACK_PARM ? 2 : 0);
8845 else
8846 expand_expr (singleton,
8847 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8848 if (singleton == TREE_OPERAND (exp, 1))
8849 jumpif (TREE_OPERAND (exp, 0), op0);
8850 else
8851 jumpifnot (TREE_OPERAND (exp, 0), op0);
8853 start_cleanup_deferral ();
8854 if (binary_op && temp == 0)
8855 /* Just touch the other operand. */
8856 expand_expr (TREE_OPERAND (binary_op, 1),
8857 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8858 else if (binary_op)
8859 store_expr (build (TREE_CODE (binary_op), type,
8860 make_tree (type, temp),
8861 TREE_OPERAND (binary_op, 1)),
8862 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8863 else
8864 store_expr (build1 (TREE_CODE (unary_op), type,
8865 make_tree (type, temp)),
8866 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8867 op1 = op0;
8869 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8870 comparison operator. If we have one of these cases, set the
8871 output to A, branch on A (cse will merge these two references),
8872 then set the output to FOO. */
8873 else if (temp
8874 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8875 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8876 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8877 TREE_OPERAND (exp, 1), 0)
8878 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8879 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8880 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8882 if (GET_CODE (temp) == REG
8883 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8884 temp = gen_reg_rtx (mode);
8885 store_expr (TREE_OPERAND (exp, 1), temp,
8886 modifier == EXPAND_STACK_PARM ? 2 : 0);
8887 jumpif (TREE_OPERAND (exp, 0), op0);
8889 start_cleanup_deferral ();
8890 store_expr (TREE_OPERAND (exp, 2), temp,
8891 modifier == EXPAND_STACK_PARM ? 2 : 0);
8892 op1 = op0;
8894 else if (temp
8895 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8896 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8897 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8898 TREE_OPERAND (exp, 2), 0)
8899 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8900 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8901 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8903 if (GET_CODE (temp) == REG
8904 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8905 temp = gen_reg_rtx (mode);
8906 store_expr (TREE_OPERAND (exp, 2), temp,
8907 modifier == EXPAND_STACK_PARM ? 2 : 0);
8908 jumpifnot (TREE_OPERAND (exp, 0), op0);
8910 start_cleanup_deferral ();
8911 store_expr (TREE_OPERAND (exp, 1), temp,
8912 modifier == EXPAND_STACK_PARM ? 2 : 0);
8913 op1 = op0;
8915 else
8917 op1 = gen_label_rtx ();
8918 jumpifnot (TREE_OPERAND (exp, 0), op0);
8920 start_cleanup_deferral ();
8922 /* One branch of the cond can be void, if it never returns. For
8923 example A ? throw : E */
8924 if (temp != 0
8925 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8926 store_expr (TREE_OPERAND (exp, 1), temp,
8927 modifier == EXPAND_STACK_PARM ? 2 : 0);
8928 else
8929 expand_expr (TREE_OPERAND (exp, 1),
8930 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8931 end_cleanup_deferral ();
8932 emit_queue ();
8933 emit_jump_insn (gen_jump (op1));
8934 emit_barrier ();
8935 emit_label (op0);
8936 start_cleanup_deferral ();
8937 if (temp != 0
8938 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8939 store_expr (TREE_OPERAND (exp, 2), temp,
8940 modifier == EXPAND_STACK_PARM ? 2 : 0);
8941 else
8942 expand_expr (TREE_OPERAND (exp, 2),
8943 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8946 end_cleanup_deferral ();
8948 emit_queue ();
8949 emit_label (op1);
8950 OK_DEFER_POP;
8952 return temp;
8955 case TARGET_EXPR:
8957 /* Something needs to be initialized, but we didn't know
8958 where that thing was when building the tree. For example,
8959 it could be the return value of a function, or a parameter
8960 to a function which is laid down on the stack, or a temporary
8961 variable which must be passed by reference.
8963 We guarantee that the expression will either be constructed
8964 or copied into our original target. */
8966 tree slot = TREE_OPERAND (exp, 0);
8967 tree cleanups = NULL_TREE;
8968 tree exp1;
8970 if (TREE_CODE (slot) != VAR_DECL)
8971 abort ();
8973 if (! ignore)
8974 target = original_target;
8976 /* Set this here so that if we get a target that refers to a
8977 register variable that's already been used, put_reg_into_stack
8978 knows that it should fix up those uses. */
8979 TREE_USED (slot) = 1;
8981 if (target == 0)
8983 if (DECL_RTL_SET_P (slot))
8985 target = DECL_RTL (slot);
8986 /* If we have already expanded the slot, don't do
8987 it again. (mrs) */
8988 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8989 return target;
8991 else
8993 target = assign_temp (type, 2, 0, 1);
8994 /* All temp slots at this level must not conflict. */
8995 preserve_temp_slots (target);
8996 SET_DECL_RTL (slot, target);
8997 if (TREE_ADDRESSABLE (slot))
8998 put_var_into_stack (slot);
9000 /* Since SLOT is not known to the called function
9001 to belong to its stack frame, we must build an explicit
9002 cleanup. This case occurs when we must build up a reference
9003 to pass the reference as an argument. In this case,
9004 it is very likely that such a reference need not be
9005 built here. */
9007 if (TREE_OPERAND (exp, 2) == 0)
9008 TREE_OPERAND (exp, 2)
9009 = (*lang_hooks.maybe_build_cleanup) (slot);
9010 cleanups = TREE_OPERAND (exp, 2);
9013 else
9015 /* This case does occur, when expanding a parameter which
9016 needs to be constructed on the stack. The target
9017 is the actual stack address that we want to initialize.
9018 The function we call will perform the cleanup in this case. */
9020 /* If we have already assigned it space, use that space,
9021 not target that we were passed in, as our target
9022 parameter is only a hint. */
9023 if (DECL_RTL_SET_P (slot))
9025 target = DECL_RTL (slot);
9026 /* If we have already expanded the slot, don't do
9027 it again. (mrs) */
9028 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9029 return target;
9031 else
9033 SET_DECL_RTL (slot, target);
9034 /* If we must have an addressable slot, then make sure that
9035 the RTL that we just stored in slot is OK. */
9036 if (TREE_ADDRESSABLE (slot))
9037 put_var_into_stack (slot);
9041 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
9042 /* Mark it as expanded. */
9043 TREE_OPERAND (exp, 1) = NULL_TREE;
9045 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
9047 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
9049 return target;
9052 case INIT_EXPR:
9054 tree lhs = TREE_OPERAND (exp, 0);
9055 tree rhs = TREE_OPERAND (exp, 1);
9057 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9058 return temp;
9061 case MODIFY_EXPR:
9063 /* If lhs is complex, expand calls in rhs before computing it.
9064 That's so we don't compute a pointer and save it over a
9065 call. If lhs is simple, compute it first so we can give it
9066 as a target if the rhs is just a call. This avoids an
9067 extra temp and copy and that prevents a partial-subsumption
9068 which makes bad code. Actually we could treat
9069 component_ref's of vars like vars. */
9071 tree lhs = TREE_OPERAND (exp, 0);
9072 tree rhs = TREE_OPERAND (exp, 1);
9074 temp = 0;
9076 /* Check for |= or &= of a bitfield of size one into another bitfield
9077 of size 1. In this case, (unless we need the result of the
9078 assignment) we can do this more efficiently with a
9079 test followed by an assignment, if necessary.
9081 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9082 things change so we do, this code should be enhanced to
9083 support it. */
9084 if (ignore
9085 && TREE_CODE (lhs) == COMPONENT_REF
9086 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9087 || TREE_CODE (rhs) == BIT_AND_EXPR)
9088 && TREE_OPERAND (rhs, 0) == lhs
9089 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9090 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9091 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9093 rtx label = gen_label_rtx ();
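/* For |= store 1 only when the source bit is set; for &= store 0 only
   when it is clear.  In the remaining cases the destination bit is
   already correct and the store is skipped.  */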
9095 do_jump (TREE_OPERAND (rhs, 1),
9096 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
9097 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
9098 expand_assignment (lhs, convert (TREE_TYPE (rhs),
9099 (TREE_CODE (rhs) == BIT_IOR_EXPR
9100 ? integer_one_node
9101 : integer_zero_node)),
9102 0, 0);
9103 do_pending_stack_adjust ();
9104 emit_label (label);
9105 return const0_rtx;
9108 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9110 return temp;
9113 case RETURN_EXPR:
9114 if (!TREE_OPERAND (exp, 0))
9115 expand_null_return ();
9116 else
9117 expand_return (TREE_OPERAND (exp, 0));
9118 return const0_rtx;
9120 case PREINCREMENT_EXPR:
9121 case PREDECREMENT_EXPR:
9122 return expand_increment (exp, 0, ignore);
9124 case POSTINCREMENT_EXPR:
9125 case POSTDECREMENT_EXPR:
9126 /* Faster to treat as pre-increment if result is not used. */
9127 return expand_increment (exp, ! ignore, ignore);
9129 case ADDR_EXPR:
9130 if (modifier == EXPAND_STACK_PARM)
9131 target = 0;
9132 /* Are we taking the address of a nested function? */
9133 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
9134 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
9135 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
9136 && ! TREE_STATIC (exp))
9138 op0 = trampoline_address (TREE_OPERAND (exp, 0));
9139 op0 = force_operand (op0, target);
9141 /* If we are taking the address of something erroneous, just
9142 return a zero. */
9143 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9144 return const0_rtx;
9145 /* If we are taking the address of a constant and are at the
9146 top level, we have to use output_constant_def since we can't
9147 call force_const_mem at top level. */
9148 else if (cfun == 0
9149 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9150 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
9151 == 'c')))
9152 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
9153 else
9155 /* We make sure to pass const0_rtx down if we came in with
9156 ignore set, to avoid doing the cleanups twice. */
9157 op0 = expand_expr (TREE_OPERAND (exp, 0),
9158 ignore ? const0_rtx : NULL_RTX, VOIDmode,
9159 (modifier == EXPAND_INITIALIZER
9160 ? modifier : EXPAND_CONST_ADDRESS));
9162 /* If we are going to ignore the result, OP0 will have been set
9163 to const0_rtx, so just return it. Don't get confused and
9164 think we are taking the address of the constant. */
9165 if (ignore)
9166 return op0;
9168 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
9169 clever and return a REG when given a MEM. */
9170 op0 = protect_from_queue (op0, 1);
9172 /* We would like the object in memory. If it is a constant, we can
9173 have it be statically allocated into memory. For a non-constant,
9174 we need to allocate some memory and store the value into it. */
9176 if (CONSTANT_P (op0))
9177 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
9178 op0);
9179 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9180 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
9181 || GET_CODE (op0) == PARALLEL)
9183 /* If the operand is a SAVE_EXPR, we can deal with this by
9184 forcing the SAVE_EXPR into memory. */
9185 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
9187 put_var_into_stack (TREE_OPERAND (exp, 0));
9188 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
9190 else
9192 /* If this object is in a register, it can't be BLKmode. */
9193 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9194 rtx memloc = assign_temp (inner_type, 1, 1, 1);
9196 if (GET_CODE (op0) == PARALLEL)
9197 /* Handle calls that pass values in multiple
9198 non-contiguous locations. The Irix 6 ABI has examples
9199 of this. */
9200 emit_group_store (memloc, op0,
9201 int_size_in_bytes (inner_type));
9202 else
9203 emit_move_insn (memloc, op0);
9205 op0 = memloc;
9209 if (GET_CODE (op0) != MEM)
9210 abort ();
9212 mark_temp_addr_taken (op0);
9213 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9215 op0 = XEXP (op0, 0);
9216 #ifdef POINTERS_EXTEND_UNSIGNED
9217 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9218 && mode == ptr_mode)
9219 op0 = convert_memory_address (ptr_mode, op0);
9220 #endif
9221 return op0;
9224 /* If OP0 is not aligned at least as much as the type requires, we
9225 need to make a temporary, copy OP0 to it, and take the address of
9226 the temporary. We want to use the alignment of the type, not of
9227 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9228 the test for BLKmode means that can't happen. The test for
9229 BLKmode is because we never make mis-aligned MEMs with
9230 non-BLKmode.
9232 We don't need to do this at all if the machine doesn't have
9233 strict alignment. */
9234 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9235 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9236 > MEM_ALIGN (op0))
9237 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9239 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9240 rtx new
9241 = assign_stack_temp_for_type
9242 (TYPE_MODE (inner_type),
9243 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9244 : int_size_in_bytes (inner_type),
9245 1, build_qualified_type (inner_type,
9246 (TYPE_QUALS (inner_type)
9247 | TYPE_QUAL_CONST)));
9249 if (TYPE_ALIGN_OK (inner_type))
9250 abort ();
9252 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9253 (modifier == EXPAND_STACK_PARM
9254 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9255 op0 = new;
9258 op0 = force_operand (XEXP (op0, 0), target);
9261 if (flag_force_addr
9262 && GET_CODE (op0) != REG
9263 && modifier != EXPAND_CONST_ADDRESS
9264 && modifier != EXPAND_INITIALIZER
9265 && modifier != EXPAND_SUM)
9266 op0 = force_reg (Pmode, op0);
9268 if (GET_CODE (op0) == REG
9269 && ! REG_USERVAR_P (op0))
9270 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9272 #ifdef POINTERS_EXTEND_UNSIGNED
9273 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9274 && mode == ptr_mode)
9275 op0 = convert_memory_address (ptr_mode, op0);
9276 #endif
9278 return op0;
9280 case ENTRY_VALUE_EXPR:
9281 abort ();
9283 /* COMPLEX type for Extended Pascal & Fortran */
9284 case COMPLEX_EXPR:
9286 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9287 rtx insns;
9289 /* Get the rtx code of the operands. */
9290 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9291 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9293 if (! target)
9294 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9296 start_sequence ();
9298 /* Move the real (op0) and imaginary (op1) parts to their location. */
9299 emit_move_insn (gen_realpart (mode, target), op0);
9300 emit_move_insn (gen_imagpart (mode, target), op1);
9302 insns = get_insns ();
9303 end_sequence ();
9305 /* Complex construction should appear as a single unit. */
9306 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9307 each with a separate pseudo as destination.
9308 It's not correct for flow to treat them as a unit. */
9309 if (GET_CODE (target) != CONCAT)
9310 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9311 else
9312 emit_insn (insns);
9314 return target;
9317 case REALPART_EXPR:
9318 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9319 return gen_realpart (mode, op0);
9321 case IMAGPART_EXPR:
9322 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9323 return gen_imagpart (mode, op0);
9325 case CONJ_EXPR:
9327 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9328 rtx imag_t;
9329 rtx insns;
9331 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9333 if (! target)
9334 target = gen_reg_rtx (mode);
9336 start_sequence ();
9338 /* Store the realpart and the negated imagpart to target. */
9339 emit_move_insn (gen_realpart (partmode, target),
9340 gen_realpart (partmode, op0));
9342 imag_t = gen_imagpart (partmode, target);
9343 temp = expand_unop (partmode,
9344 ! unsignedp && flag_trapv
9345 && (GET_MODE_CLASS(partmode) == MODE_INT)
9346 ? negv_optab : neg_optab,
9347 gen_imagpart (partmode, op0), imag_t, 0);
9348 if (temp != imag_t)
9349 emit_move_insn (imag_t, temp);
9351 insns = get_insns ();
9352 end_sequence ();
9354 /* Conjugate should appear as a single unit.
9355 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9356 each with a separate pseudo as destination.
9357 It's not correct for flow to treat them as a unit. */
9358 if (GET_CODE (target) != CONCAT)
9359 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9360 else
9361 emit_insn (insns);
9363 return target;
9366 case TRY_CATCH_EXPR:
9368 tree handler = TREE_OPERAND (exp, 1);
9370 expand_eh_region_start ();
9372 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9374 expand_eh_region_end_cleanup (handler);
9376 return op0;
9379 case TRY_FINALLY_EXPR:
9381 tree try_block = TREE_OPERAND (exp, 0);
9382 tree finally_block = TREE_OPERAND (exp, 1);
9384 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9386 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9387 is not sufficient, so we cannot expand the block twice.
9388 Instead we play games with GOTO_SUBROUTINE_EXPR to let us
9389 expand the thing only once. */
9390 /* When not optimizing, we go ahead with this form since
9391 (1) user breakpoints operate more predictably without
9392 code duplication, and
9393 (2) we're not running any of the global optimizers
9394 that would explode in time/space with the highly
9395 connected CFG created by the indirect branching. */
9397 rtx finally_label = gen_label_rtx ();
9398 rtx done_label = gen_label_rtx ();
9399 rtx return_link = gen_reg_rtx (Pmode);
9400 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9401 (tree) finally_label, (tree) return_link);
9402 TREE_SIDE_EFFECTS (cleanup) = 1;
9404 /* Start a new binding layer that will keep track of all cleanup
9405 actions to be performed. */
9406 expand_start_bindings (2);
9407 target_temp_slot_level = temp_slot_level;
9409 expand_decl_cleanup (NULL_TREE, cleanup);
9410 op0 = expand_expr (try_block, target, tmode, modifier);
9412 preserve_temp_slots (op0);
9413 expand_end_bindings (NULL_TREE, 0, 0);
9414 emit_jump (done_label);
9415 emit_label (finally_label);
9416 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9417 emit_indirect_jump (return_link);
9418 emit_label (done_label);
9420 else
9422 expand_start_bindings (2);
9423 target_temp_slot_level = temp_slot_level;
9425 expand_decl_cleanup (NULL_TREE, finally_block);
9426 op0 = expand_expr (try_block, target, tmode, modifier);
9428 preserve_temp_slots (op0);
9429 expand_end_bindings (NULL_TREE, 0, 0);
9432 return op0;
9435 case GOTO_SUBROUTINE_EXPR:
9437 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9438 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9439 rtx return_address = gen_label_rtx ();
9440 emit_move_insn (return_link,
9441 gen_rtx_LABEL_REF (Pmode, return_address));
9442 emit_jump (subr);
9443 emit_label (return_address);
9444 return const0_rtx;
9447 case VA_ARG_EXPR:
9448 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9450 case EXC_PTR_EXPR:
9451 return get_exception_pointer (cfun);
9453 case FDESC_EXPR:
9454 /* Function descriptors are not valid except for as
9455 initialization constants, and should not be expanded. */
9456 abort ();
9458 default:
9459 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9462 /* Here to do an ordinary binary operator, generating an instruction
9463 from the optab already placed in `this_optab'. */
9464 binop:
9465 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9466 subtarget = 0;
9467 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9468 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9469 binop2:
9470 if (modifier == EXPAND_STACK_PARM)
9471 target = 0;
9472 temp = expand_binop (mode, this_optab, op0, op1, target,
9473 unsignedp, OPTAB_LIB_WIDEN);
9474 if (temp == 0)
9475 abort ();
9476 return temp;
9479 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9480 when applied to the address of EXP produces an address known to be
9481 aligned more than BIGGEST_ALIGNMENT. */
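/* For illustration: an OFFSET of the form (-&EXP) & (ALIGN - 1),
   where ALIGN is a power of 2 and ALIGN - 1 is larger than
   BIGGEST_ALIGNMENT, is exactly the padding that rounds the address
   of EXP up to a multiple of ALIGN, so EXP plus that offset is known
   to be more strictly aligned than BIGGEST_ALIGNMENT.  */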
9483 static int
9484 is_aligning_offset (offset, exp)
9485 tree offset;
9486 tree exp;
9488 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9489 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9490 || TREE_CODE (offset) == NOP_EXPR
9491 || TREE_CODE (offset) == CONVERT_EXPR
9492 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9493 offset = TREE_OPERAND (offset, 0);
9495 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9496 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9497 if (TREE_CODE (offset) != BIT_AND_EXPR
9498 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9499 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9500 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9501 return 0;
9503 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9504 It must be NEGATE_EXPR. Then strip any more conversions. */
9505 offset = TREE_OPERAND (offset, 0);
9506 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9507 || TREE_CODE (offset) == NOP_EXPR
9508 || TREE_CODE (offset) == CONVERT_EXPR)
9509 offset = TREE_OPERAND (offset, 0);
9511 if (TREE_CODE (offset) != NEGATE_EXPR)
9512 return 0;
9514 offset = TREE_OPERAND (offset, 0);
9515 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9516 || TREE_CODE (offset) == NOP_EXPR
9517 || TREE_CODE (offset) == CONVERT_EXPR)
9518 offset = TREE_OPERAND (offset, 0);
9520 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9521 whose type is the same as EXP. */
9522 return (TREE_CODE (offset) == ADDR_EXPR
9523 && (TREE_OPERAND (offset, 0) == exp
9524 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9525 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9526 == TREE_TYPE (exp)))));
9529 /* Return the tree node if ARG corresponds to a string constant or zero
9530 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9531 in bytes within the string that ARG is accessing. The type of the
9532 offset will be `sizetype'. */
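/* For illustration: if ARG is the ADDR_EXPR of the STRING_CST
   "hello", that STRING_CST is returned and *PTR_OFFSET is set to
   zero; if ARG is a PLUS_EXPR adding 2 to that address, the same
   STRING_CST is returned and *PTR_OFFSET is set to 2.  */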
9534 tree
9535 string_constant (arg, ptr_offset)
9536 tree arg;
9537 tree *ptr_offset;
9539 STRIP_NOPS (arg);
9541 if (TREE_CODE (arg) == ADDR_EXPR
9542 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9544 *ptr_offset = size_zero_node;
9545 return TREE_OPERAND (arg, 0);
9547 else if (TREE_CODE (arg) == PLUS_EXPR)
9549 tree arg0 = TREE_OPERAND (arg, 0);
9550 tree arg1 = TREE_OPERAND (arg, 1);
9552 STRIP_NOPS (arg0);
9553 STRIP_NOPS (arg1);
9555 if (TREE_CODE (arg0) == ADDR_EXPR
9556 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9558 *ptr_offset = convert (sizetype, arg1);
9559 return TREE_OPERAND (arg0, 0);
9561 else if (TREE_CODE (arg1) == ADDR_EXPR
9562 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9564 *ptr_offset = convert (sizetype, arg0);
9565 return TREE_OPERAND (arg1, 0);
9569 return 0;
9572 /* Expand code for a post- or pre- increment or decrement
9573 and return the RTX for the result.
9574 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
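/* For illustration: for `y = x++' POST is 1 and the returned rtx is
   the value of x before the increment; for `y = ++x' POST is 0 and
   the incremented value is returned.  When the result is ignored,
   the caller above treats post- and pre-increment alike.  */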
9576 static rtx
9577 expand_increment (exp, post, ignore)
9578 tree exp;
9579 int post, ignore;
9581 rtx op0, op1;
9582 rtx temp, value;
9583 tree incremented = TREE_OPERAND (exp, 0);
9584 optab this_optab = add_optab;
9585 int icode;
9586 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9587 int op0_is_copy = 0;
9588 int single_insn = 0;
9589 /* 1 means we can't store into OP0 directly,
9590 because it is a subreg narrower than a word,
9591 and we don't dare clobber the rest of the word. */
9592 int bad_subreg = 0;
9594 /* Stabilize any component ref that might need to be
9595 evaluated more than once below. */
9596 if (!post
9597 || TREE_CODE (incremented) == BIT_FIELD_REF
9598 || (TREE_CODE (incremented) == COMPONENT_REF
9599 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9600 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9601 incremented = stabilize_reference (incremented);
9602 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9603 ones into save exprs so that they don't accidentally get evaluated
9604 more than once by the code below. */
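/* For illustration: in C++ `++x' is an lvalue, so an expression such
   as `++++x' nests one PREINCREMENT_EXPR inside another.  */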
9605 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9606 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9607 incremented = save_expr (incremented);
9609 /* Compute the operands as RTX.
9610 Note whether OP0 is the actual lvalue or a copy of it:
9611 I believe it is a copy iff it is a register or subreg
9612 and insns were generated in computing it. */
9614 temp = get_last_insn ();
9615 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9617 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9618 in place but instead must do sign- or zero-extension during assignment,
9619 so we copy it into a new register and let the code below use it as
9620 a copy.
9622 Note that we can safely modify this SUBREG since it is known not to be
9623 shared (it was made by the expand_expr call above). */
9625 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9627 if (post)
9628 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9629 else
9630 bad_subreg = 1;
9632 else if (GET_CODE (op0) == SUBREG
9633 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9635 /* We cannot increment this SUBREG in place. If we are
9636 post-incrementing, get a copy of the old value. Otherwise,
9637 just mark that we cannot increment in place. */
9638 if (post)
9639 op0 = copy_to_reg (op0);
9640 else
9641 bad_subreg = 1;
9644 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9645 && temp != get_last_insn ());
9646 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9648 /* Decide whether incrementing or decrementing. */
9649 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9650 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9651 this_optab = sub_optab;
9653 /* Convert decrement by a constant into a negative increment. */
9654 if (this_optab == sub_optab
9655 && GET_CODE (op1) == CONST_INT)
9657 op1 = GEN_INT (-INTVAL (op1));
9658 this_optab = add_optab;
9661 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9662 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9664 /* For a preincrement, see if we can do this with a single instruction. */
9665 if (!post)
9667 icode = (int) this_optab->handlers[(int) mode].insn_code;
9668 if (icode != (int) CODE_FOR_nothing
9669 /* Make sure that OP0 is valid for operands 0 and 1
9670 of the insn we want to queue. */
9671 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9672 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9673 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9674 single_insn = 1;
9677 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9678 then we cannot just increment OP0. We must therefore contrive to
9679 increment the original value. Then, for postincrement, we can return
9680 OP0 since it is a copy of the old value. For preincrement, expand here
9681 unless we can do it with a single insn.
9683 Likewise if storing directly into OP0 would clobber high bits
9684 we need to preserve (bad_subreg). */
9685 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9687 /* This is the easiest way to increment the value wherever it is.
9688 Problems with multiple evaluation of INCREMENTED are prevented
9689 because either (1) it is a component_ref or preincrement,
9690 in which case it was stabilized above, or (2) it is an array_ref
9691 with constant index in an array in a register, which is
9692 safe to reevaluate. */
9693 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9694 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9695 ? MINUS_EXPR : PLUS_EXPR),
9696 TREE_TYPE (exp),
9697 incremented,
9698 TREE_OPERAND (exp, 1));
9700 while (TREE_CODE (incremented) == NOP_EXPR
9701 || TREE_CODE (incremented) == CONVERT_EXPR)
9703 newexp = convert (TREE_TYPE (incremented), newexp);
9704 incremented = TREE_OPERAND (incremented, 0);
9707 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9708 return post ? op0 : temp;
9711 if (post)
9713 /* We have a true reference to the value in OP0.
9714 If there is an insn to add or subtract in this mode, queue it.
9715 Queueing the increment insn avoids the register shuffling
9716 that often results if we must increment now and first save
9717 the old value for subsequent use. */
9719 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9720 op0 = stabilize (op0);
9721 #endif
9723 icode = (int) this_optab->handlers[(int) mode].insn_code;
9724 if (icode != (int) CODE_FOR_nothing
9725 /* Make sure that OP0 is valid for operands 0 and 1
9726 of the insn we want to queue. */
9727 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9728 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9730 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9731 op1 = force_reg (mode, op1);
9733 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9735 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9737 rtx addr = (general_operand (XEXP (op0, 0), mode)
9738 ? force_reg (Pmode, XEXP (op0, 0))
9739 : copy_to_reg (XEXP (op0, 0)));
9740 rtx temp, result;
9742 op0 = replace_equiv_address (op0, addr);
9743 temp = force_reg (GET_MODE (op0), op0);
9744 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9745 op1 = force_reg (mode, op1);
9747 /* The increment queue is LIFO, thus we have to `queue'
9748 the instructions in reverse order. */
9749 enqueue_insn (op0, gen_move_insn (op0, temp));
9750 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9751 return result;
9755 /* Preincrement, or we can't increment with one simple insn. */
9756 if (post)
9757 /* Save a copy of the value before inc or dec, to return it later. */
9758 temp = value = copy_to_reg (op0);
9759 else
9760 /* Arrange to return the incremented value. */
9761 /* Copy the rtx because expand_binop will protect from the queue,
9762 and the results of that would be invalid for us to return
9763 if our caller does emit_queue before using our result. */
9764 temp = copy_rtx (value = op0);
9766 /* Increment however we can. */
9767 op1 = expand_binop (mode, this_optab, value, op1, op0,
9768 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9770 /* Make sure the value is stored into OP0. */
9771 if (op1 != op0)
9772 emit_move_insn (op0, op1);
9774 return temp;
9777 /* At the start of a function, record that we have no previously-pushed
9778 arguments waiting to be popped. */
9780 void
9781 init_pending_stack_adjust ()
9783 pending_stack_adjust = 0;
9786 /* When exiting from function, if safe, clear out any pending stack adjust
9787 so the adjustment won't get done.
9789 Note, if the current function calls alloca, then it must have a
9790 frame pointer regardless of the value of flag_omit_frame_pointer. */
9792 void
9793 clear_pending_stack_adjust ()
9795 #ifdef EXIT_IGNORE_STACK
9796 if (optimize > 0
9797 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9798 && EXIT_IGNORE_STACK
9799 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9800 && ! flag_inline_functions)
9802 stack_pointer_delta -= pending_stack_adjust,
9803 pending_stack_adjust = 0;
9805 #endif
9808 /* Pop any previously-pushed arguments that have not been popped yet. */
9810 void
9811 do_pending_stack_adjust ()
9813 if (inhibit_defer_pop == 0)
9815 if (pending_stack_adjust != 0)
9816 adjust_stack (GEN_INT (pending_stack_adjust));
9817 pending_stack_adjust = 0;
9821 /* Expand conditional expressions. */
9823 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9824 LABEL is an rtx of code CODE_LABEL, in this function and all the
9825 functions here. */
9827 void
9828 jumpifnot (exp, label)
9829 tree exp;
9830 rtx label;
9832 do_jump (exp, label, NULL_RTX);
9835 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9837 void
9838 jumpif (exp, label)
9839 tree exp;
9840 rtx label;
9842 do_jump (exp, NULL_RTX, label);
9845 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9846 the result is zero, or IF_TRUE_LABEL if the result is one.
9847 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9848 meaning fall through in that case.
9850 do_jump always does any pending stack adjust except when it does not
9851 actually perform a jump. An example where there is no jump
9852 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9854 This function is responsible for optimizing cases such as
9855 &&, || and comparison operators in EXP. */
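/* For illustration: for EXP `a && b' with both labels supplied, the
   TRUTH_ANDIF_EXPR case below jumps to IF_FALSE_LABEL if `a' is zero
   and otherwise tests `b', jumping to IF_FALSE_LABEL or
   IF_TRUE_LABEL; no boolean value is ever computed into a register.  */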
9857 void
9858 do_jump (exp, if_false_label, if_true_label)
9859 tree exp;
9860 rtx if_false_label, if_true_label;
9862 enum tree_code code = TREE_CODE (exp);
9863 /* Some cases need to create a label to jump to
9864 in order to properly fall through.
9865 These cases set DROP_THROUGH_LABEL nonzero. */
9866 rtx drop_through_label = 0;
9867 rtx temp;
9868 int i;
9869 tree type;
9870 enum machine_mode mode;
9872 #ifdef MAX_INTEGER_COMPUTATION_MODE
9873 check_max_integer_computation_mode (exp);
9874 #endif
9876 emit_queue ();
9878 switch (code)
9880 case ERROR_MARK:
9881 break;
9883 case INTEGER_CST:
9884 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9885 if (temp)
9886 emit_jump (temp);
9887 break;
9889 #if 0
9890 /* This is not true with #pragma weak */
9891 case ADDR_EXPR:
9892 /* The address of something can never be zero. */
9893 if (if_true_label)
9894 emit_jump (if_true_label);
9895 break;
9896 #endif
9898 case NOP_EXPR:
9899 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9900 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9901 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9902 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9903 goto normal;
9904 case CONVERT_EXPR:
9905 /* If we are narrowing the operand, we have to do the compare in the
9906 narrower mode. */
9907 if ((TYPE_PRECISION (TREE_TYPE (exp))
9908 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9909 goto normal;
9910 case NON_LVALUE_EXPR:
9911 case REFERENCE_EXPR:
9912 case ABS_EXPR:
9913 case NEGATE_EXPR:
9914 case LROTATE_EXPR:
9915 case RROTATE_EXPR:
9916 /* These cannot change zero->nonzero or vice versa. */
9917 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9918 break;
9920 case WITH_RECORD_EXPR:
9921 /* Put the object on the placeholder list, recurse through our first
9922 operand, and pop the list. */
9923 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9924 placeholder_list);
9925 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9926 placeholder_list = TREE_CHAIN (placeholder_list);
9927 break;
9929 #if 0
9930 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9931 a test and can be longer if the test is eliminated. */
9932 case PLUS_EXPR:
9933 /* Reduce to minus. */
9934 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9935 TREE_OPERAND (exp, 0),
9936 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9937 TREE_OPERAND (exp, 1))));
9938 /* Process as MINUS. */
9939 #endif
9941 case MINUS_EXPR:
9942 /* Nonzero iff operands of minus differ. */
9943 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9944 TREE_OPERAND (exp, 0),
9945 TREE_OPERAND (exp, 1)),
9946 NE, NE, if_false_label, if_true_label);
9947 break;
9949 case BIT_AND_EXPR:
9950 /* If we are AND'ing with a small constant, do this comparison in the
9951 smallest type that fits. If the machine doesn't have comparisons
9952 that small, it will be converted back to the wider comparison.
9953 This helps if we are testing the sign bit of a narrower object.
9954 combine can't do this for us because it can't know whether a
9955 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
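/* For illustration: for `(x & 0x80) != 0' with a 32-bit int x, the
   constant fits in 8 bits, so the test is converted to the unsigned
   type for QImode and done as a QImode comparison, which can let the
   backend use a byte-sized test of the sign bit.  */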
9957 if (! SLOW_BYTE_ACCESS
9958 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9959 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9960 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9961 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9962 && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
9963 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9964 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9965 != CODE_FOR_nothing))
9967 do_jump (convert (type, exp), if_false_label, if_true_label);
9968 break;
9970 goto normal;
9972 case TRUTH_NOT_EXPR:
9973 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9974 break;
9976 case TRUTH_ANDIF_EXPR:
9977 if (if_false_label == 0)
9978 if_false_label = drop_through_label = gen_label_rtx ();
9979 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9980 start_cleanup_deferral ();
9981 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9982 end_cleanup_deferral ();
9983 break;
9985 case TRUTH_ORIF_EXPR:
9986 if (if_true_label == 0)
9987 if_true_label = drop_through_label = gen_label_rtx ();
9988 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9989 start_cleanup_deferral ();
9990 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9991 end_cleanup_deferral ();
9992 break;
9994 case COMPOUND_EXPR:
9995 push_temp_slots ();
9996 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9997 preserve_temp_slots (NULL_RTX);
9998 free_temp_slots ();
9999 pop_temp_slots ();
10000 emit_queue ();
10001 do_pending_stack_adjust ();
10002 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10003 break;
10005 case COMPONENT_REF:
10006 case BIT_FIELD_REF:
10007 case ARRAY_REF:
10008 case ARRAY_RANGE_REF:
10010 HOST_WIDE_INT bitsize, bitpos;
10011 int unsignedp;
10012 enum machine_mode mode;
10013 tree type;
10014 tree offset;
10015 int volatilep = 0;
10017 /* Get description of this reference. We don't actually care
10018 about the underlying object here. */
10019 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
10020 &unsignedp, &volatilep);
10022 type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp);
10023 if (! SLOW_BYTE_ACCESS
10024 && type != 0 && bitsize >= 0
10025 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10026 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10027 != CODE_FOR_nothing))
10029 do_jump (convert (type, exp), if_false_label, if_true_label);
10030 break;
10032 goto normal;
10035 case COND_EXPR:
10036 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10037 if (integer_onep (TREE_OPERAND (exp, 1))
10038 && integer_zerop (TREE_OPERAND (exp, 2)))
10039 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10041 else if (integer_zerop (TREE_OPERAND (exp, 1))
10042 && integer_onep (TREE_OPERAND (exp, 2)))
10043 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10045 else
10047 rtx label1 = gen_label_rtx ();
10048 drop_through_label = gen_label_rtx ();
10050 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10052 start_cleanup_deferral ();
10053 /* Now the THEN-expression. */
10054 do_jump (TREE_OPERAND (exp, 1),
10055 if_false_label ? if_false_label : drop_through_label,
10056 if_true_label ? if_true_label : drop_through_label);
10057 /* In case the do_jump just above never jumps. */
10058 do_pending_stack_adjust ();
10059 emit_label (label1);
10061 /* Now the ELSE-expression. */
10062 do_jump (TREE_OPERAND (exp, 2),
10063 if_false_label ? if_false_label : drop_through_label,
10064 if_true_label ? if_true_label : drop_through_label);
10065 end_cleanup_deferral ();
10067 break;
10069 case EQ_EXPR:
10071 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10073 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10074 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10076 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10077 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10078 do_jump
10079 (fold
10080 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10081 fold (build (EQ_EXPR, TREE_TYPE (exp),
10082 fold (build1 (REALPART_EXPR,
10083 TREE_TYPE (inner_type),
10084 exp0)),
10085 fold (build1 (REALPART_EXPR,
10086 TREE_TYPE (inner_type),
10087 exp1)))),
10088 fold (build (EQ_EXPR, TREE_TYPE (exp),
10089 fold (build1 (IMAGPART_EXPR,
10090 TREE_TYPE (inner_type),
10091 exp0)),
10092 fold (build1 (IMAGPART_EXPR,
10093 TREE_TYPE (inner_type),
10094 exp1)))))),
10095 if_false_label, if_true_label);
10098 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10099 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10101 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10102 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
10103 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10104 else
10105 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
10106 break;
10109 case NE_EXPR:
10111 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10113 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10114 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10116 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10117 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10118 do_jump
10119 (fold
10120 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10121 fold (build (NE_EXPR, TREE_TYPE (exp),
10122 fold (build1 (REALPART_EXPR,
10123 TREE_TYPE (inner_type),
10124 exp0)),
10125 fold (build1 (REALPART_EXPR,
10126 TREE_TYPE (inner_type),
10127 exp1)))),
10128 fold (build (NE_EXPR, TREE_TYPE (exp),
10129 fold (build1 (IMAGPART_EXPR,
10130 TREE_TYPE (inner_type),
10131 exp0)),
10132 fold (build1 (IMAGPART_EXPR,
10133 TREE_TYPE (inner_type),
10134 exp1)))))),
10135 if_false_label, if_true_label);
10138 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10139 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10141 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10142 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
10143 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10144 else
10145 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
10146 break;
10149 case LT_EXPR:
10150 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10151 if (GET_MODE_CLASS (mode) == MODE_INT
10152 && ! can_compare_p (LT, mode, ccp_jump))
10153 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10154 else
10155 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
10156 break;
10158 case LE_EXPR:
10159 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10160 if (GET_MODE_CLASS (mode) == MODE_INT
10161 && ! can_compare_p (LE, mode, ccp_jump))
10162 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10163 else
10164 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
10165 break;
10167 case GT_EXPR:
10168 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10169 if (GET_MODE_CLASS (mode) == MODE_INT
10170 && ! can_compare_p (GT, mode, ccp_jump))
10171 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10172 else
10173 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
10174 break;
10176 case GE_EXPR:
10177 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10178 if (GET_MODE_CLASS (mode) == MODE_INT
10179 && ! can_compare_p (GE, mode, ccp_jump))
10180 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10181 else
10182 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
10183 break;
10185 case UNORDERED_EXPR:
10186 case ORDERED_EXPR:
10188 enum rtx_code cmp, rcmp;
10189 int do_rev;
10191 if (code == UNORDERED_EXPR)
10192 cmp = UNORDERED, rcmp = ORDERED;
10193 else
10194 cmp = ORDERED, rcmp = UNORDERED;
10195 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10197 do_rev = 0;
10198 if (! can_compare_p (cmp, mode, ccp_jump)
10199 && (can_compare_p (rcmp, mode, ccp_jump)
10200 /* If the target doesn't provide either UNORDERED or ORDERED
10201 comparisons, canonicalize on UNORDERED for the library. */
10202 || rcmp == UNORDERED))
10203 do_rev = 1;
10205 if (! do_rev)
10206 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
10207 else
10208 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
10210 break;
10213 enum rtx_code rcode1;
10214 enum tree_code tcode2;
10216 case UNLT_EXPR:
10217 rcode1 = UNLT;
10218 tcode2 = LT_EXPR;
10219 goto unordered_bcc;
10220 case UNLE_EXPR:
10221 rcode1 = UNLE;
10222 tcode2 = LE_EXPR;
10223 goto unordered_bcc;
10224 case UNGT_EXPR:
10225 rcode1 = UNGT;
10226 tcode2 = GT_EXPR;
10227 goto unordered_bcc;
10228 case UNGE_EXPR:
10229 rcode1 = UNGE;
10230 tcode2 = GE_EXPR;
10231 goto unordered_bcc;
10232 case UNEQ_EXPR:
10233 rcode1 = UNEQ;
10234 tcode2 = EQ_EXPR;
10235 goto unordered_bcc;
10237 unordered_bcc:
10238 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10239 if (can_compare_p (rcode1, mode, ccp_jump))
10240 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
10241 if_true_label);
10242 else
10244 tree op0 = save_expr (TREE_OPERAND (exp, 0));
10245 tree op1 = save_expr (TREE_OPERAND (exp, 1));
10246 tree cmp0, cmp1;
10248 /* If the target doesn't support combined unordered
10249 compares, decompose into UNORDERED + comparison. */
10250 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
10251 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
10252 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
10253 do_jump (exp, if_false_label, if_true_label);
10256 break;
10258 /* Special case:
10259 __builtin_expect (<test>, 0) and
10260 __builtin_expect (<test>, 1)
10262 We need to do this here, so that <test> is not converted to an SCC
10263 operation on machines that use condition code registers and COMPARE
10264 like the PowerPC, and then the jump is done based on whether the SCC
10265 operation produced a 1 or 0. */
10266 case CALL_EXPR:
10267 /* Check for a built-in function. */
10268 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
10270 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
10271 tree arglist = TREE_OPERAND (exp, 1);
10273 if (TREE_CODE (fndecl) == FUNCTION_DECL
10274 && DECL_BUILT_IN (fndecl)
10275 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
10276 && arglist != NULL_TREE
10277 && TREE_CHAIN (arglist) != NULL_TREE)
10279 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
10280 if_true_label);
10282 if (seq != NULL_RTX)
10284 emit_insn (seq);
10285 return;
10289 /* fall through and generate the normal code. */
10291 default:
10292 normal:
10293 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10294 #if 0
10295 /* This is not needed any more and causes poor code since it causes
10296 comparisons and tests from non-SI objects to have different code
10297 sequences. */
10298 /* Copy to register to avoid generating bad insns by cse
10299 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10300 if (!cse_not_expected && GET_CODE (temp) == MEM)
10301 temp = copy_to_reg (temp);
10302 #endif
10303 do_pending_stack_adjust ();
10304 /* Do any postincrements in the expression that was tested. */
10305 emit_queue ();
10307 if (GET_CODE (temp) == CONST_INT
10308 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
10309 || GET_CODE (temp) == LABEL_REF)
10311 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
10312 if (target)
10313 emit_jump (target);
10315 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10316 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
10317 /* Note swapping the labels gives us not-equal. */
10318 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10319 else if (GET_MODE (temp) != VOIDmode)
10320 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
10321 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10322 GET_MODE (temp), NULL_RTX,
10323 if_false_label, if_true_label);
10324 else
10325 abort ();
10328 if (drop_through_label)
10330 /* If do_jump produces code that might be jumped around,
10331 do any stack adjusts from that code, before the place
10332 where control merges in. */
10333 do_pending_stack_adjust ();
10334 emit_label (drop_through_label);
10338 /* Given a comparison expression EXP for values too wide to be compared
10339 with one insn, test the comparison and jump to the appropriate label.
10340 The code of EXP is ignored; we always test GT if SWAP is 0,
10341 and LT if SWAP is 1. */
10343 static void
10344 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10345 tree exp;
10346 int swap;
10347 rtx if_false_label, if_true_label;
10349 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10350 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10351 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10352 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10354 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
10357 /* Compare OP0 with OP1, word at a time, in mode MODE.
10358 UNSIGNEDP says to do unsigned comparison.
10359 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
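/* For illustration: comparing two DImode values on a 32-bit target
   compares the high-order words first; if they differ the result is
   decided there, otherwise the low-order words are compared, and all
   words below the high-order one are compared as unsigned.  */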
10361 void
10362 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10363 enum machine_mode mode;
10364 int unsignedp;
10365 rtx op0, op1;
10366 rtx if_false_label, if_true_label;
10368 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10369 rtx drop_through_label = 0;
10370 int i;
10372 if (! if_true_label || ! if_false_label)
10373 drop_through_label = gen_label_rtx ();
10374 if (! if_true_label)
10375 if_true_label = drop_through_label;
10376 if (! if_false_label)
10377 if_false_label = drop_through_label;
10379 /* Compare a word at a time, high order first. */
10380 for (i = 0; i < nwords; i++)
10382 rtx op0_word, op1_word;
10384 if (WORDS_BIG_ENDIAN)
10386 op0_word = operand_subword_force (op0, i, mode);
10387 op1_word = operand_subword_force (op1, i, mode);
10389 else
10391 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10392 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10395 /* All but high-order word must be compared as unsigned. */
10396 do_compare_rtx_and_jump (op0_word, op1_word, GT,
10397 (unsignedp || i > 0), word_mode, NULL_RTX,
10398 NULL_RTX, if_true_label);
10400 /* Consider lower words only if these are equal. */
10401 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
10402 NULL_RTX, NULL_RTX, if_false_label);
10405 if (if_false_label)
10406 emit_jump (if_false_label);
10407 if (drop_through_label)
10408 emit_label (drop_through_label);
10411 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10412 with one insn, test the comparison and jump to the appropriate label. */
10414 static void
10415 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10416 tree exp;
10417 rtx if_false_label, if_true_label;
10419 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10420 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10421 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10422 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10423 int i;
10424 rtx drop_through_label = 0;
10426 if (! if_false_label)
10427 drop_through_label = if_false_label = gen_label_rtx ();
10429 for (i = 0; i < nwords; i++)
10430 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10431 operand_subword_force (op1, i, mode),
10432 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10433 word_mode, NULL_RTX, if_false_label, NULL_RTX);
10435 if (if_true_label)
10436 emit_jump (if_true_label);
10437 if (drop_through_label)
10438 emit_label (drop_through_label);
10441 /* Jump according to whether OP0 is 0.
10442 We assume that OP0 has an integer mode that is too wide
10443 for the available compare insns. */
10445 void
10446 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10447 rtx op0;
10448 rtx if_false_label, if_true_label;
10450 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10451 rtx part;
10452 int i;
10453 rtx drop_through_label = 0;
10455 /* The fastest way of doing this comparison on almost any machine is to
10456 "or" all the words and compare the result. If all have to be loaded
10457 from memory and this is a very wide item, it's possible this may
10458 be slower, but that's highly unlikely. */
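/* For illustration: for a DImode OP0 on a 32-bit target, the two
   SImode words are ORed into one register and that single result is
   compared against zero, instead of emitting a separate compare and
   jump for each word.  */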
10460 part = gen_reg_rtx (word_mode);
10461 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10462 for (i = 1; i < nwords && part != 0; i++)
10463 part = expand_binop (word_mode, ior_optab, part,
10464 operand_subword_force (op0, i, GET_MODE (op0)),
10465 part, 1, OPTAB_WIDEN);
10467 if (part != 0)
10469 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
10470 NULL_RTX, if_false_label, if_true_label);
10472 return;
10475 /* If we couldn't do the "or" simply, do this with a series of compares. */
10476 if (! if_false_label)
10477 drop_through_label = if_false_label = gen_label_rtx ();
10479 for (i = 0; i < nwords; i++)
10480 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10481 const0_rtx, EQ, 1, word_mode, NULL_RTX,
10482 if_false_label, NULL_RTX);
10484 if (if_true_label)
10485 emit_jump (if_true_label);
10487 if (drop_through_label)
10488 emit_label (drop_through_label);
10491 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
10492 (including code to compute the values to be compared)
10493 and set (CC0) according to the result.
10494 The decision as to signed or unsigned comparison must be made by the caller.
10496 We force a stack adjustment unless there are currently
10497 things pushed on the stack that aren't yet used.
10499 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10500 compared. */
10502 rtx
10503 compare_from_rtx (op0, op1, code, unsignedp, mode, size)
10504 rtx op0, op1;
10505 enum rtx_code code;
10506 int unsignedp;
10507 enum machine_mode mode;
10508 rtx size;
10510 enum rtx_code ucode;
10511 rtx tem;
10513 /* If one operand is constant, make it the second one. Only do this
10514 if the other operand is not constant as well. */
10516 if (swap_commutative_operands_p (op0, op1))
10518 tem = op0;
10519 op0 = op1;
10520 op1 = tem;
10521 code = swap_condition (code);
10524 if (flag_force_mem)
10526 op0 = force_not_mem (op0);
10527 op1 = force_not_mem (op1);
10530 do_pending_stack_adjust ();
10532 ucode = unsignedp ? unsigned_condition (code) : code;
10533 if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
10534 return tem;
10536 #if 0
10537 /* There's no need to do this now that combine.c can eliminate lots of
10538 sign extensions. This can be less efficient in certain cases on other
10539 machines. */
10541 /* If this is a signed equality comparison, we can do it as an
10542 unsigned comparison since zero-extension is cheaper than sign
10543 extension and comparisons with zero are done as unsigned. This is
10544 the case even on machines that can do fast sign extension, since
10545 zero-extension is easier to combine with other operations than
10546 sign-extension is. If we are comparing against a constant, we must
10547 convert it to what it would look like unsigned. */
10548 if ((code == EQ || code == NE) && ! unsignedp
10549 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10551 if (GET_CODE (op1) == CONST_INT
10552 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10553 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10554 unsignedp = 1;
10556 #endif
10558 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
10560 #if HAVE_cc0
10561 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10562 #else
10563 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
10564 #endif
10567 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10568 The decision as to signed or unsigned comparison must be made by the caller.
10570 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10571 compared. */
10573 void
10574 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
10575 if_false_label, if_true_label)
10576 rtx op0, op1;
10577 enum rtx_code code;
10578 int unsignedp;
10579 enum machine_mode mode;
10580 rtx size;
10581 rtx if_false_label, if_true_label;
10583 enum rtx_code ucode;
10584 rtx tem;
10585 int dummy_true_label = 0;
10587 /* Reverse the comparison if that is safe and we want to jump if it is
10588 false. */
10589 if (! if_true_label && ! FLOAT_MODE_P (mode))
10591 if_true_label = if_false_label;
10592 if_false_label = 0;
10593 code = reverse_condition (code);
10596 /* If one operand is constant, make it the second one. Only do this
10597 if the other operand is not constant as well. */
10599 if (swap_commutative_operands_p (op0, op1))
10601 tem = op0;
10602 op0 = op1;
10603 op1 = tem;
10604 code = swap_condition (code);
10607 if (flag_force_mem)
10609 op0 = force_not_mem (op0);
10610 op1 = force_not_mem (op1);
10613 do_pending_stack_adjust ();
10615 ucode = unsignedp ? unsigned_condition (code) : code;
10616 if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
10618 if (tem == const_true_rtx)
10620 if (if_true_label)
10621 emit_jump (if_true_label);
10623 else
10625 if (if_false_label)
10626 emit_jump (if_false_label);
10628 return;
10631 #if 0
10632 /* There's no need to do this now that combine.c can eliminate lots of
10633 sign extensions. This can be less efficient in certain cases on other
10634 machines. */
10636 /* If this is a signed equality comparison, we can do it as an
10637 unsigned comparison since zero-extension is cheaper than sign
10638 extension and comparisons with zero are done as unsigned. This is
10639 the case even on machines that can do fast sign extension, since
10640 zero-extension is easier to combine with other operations than
10641 sign-extension is. If we are comparing against a constant, we must
10642 convert it to what it would look like unsigned. */
10643 if ((code == EQ || code == NE) && ! unsignedp
10644 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10646 if (GET_CODE (op1) == CONST_INT
10647 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10648 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10649 unsignedp = 1;
10651 #endif
10653 if (! if_true_label)
10655 dummy_true_label = 1;
10656 if_true_label = gen_label_rtx ();
10659 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
10660 if_true_label);
10662 if (if_false_label)
10663 emit_jump (if_false_label);
10664 if (dummy_true_label)
10665 emit_label (if_true_label);
10668 /* Generate code for a comparison expression EXP (including code to compute
10669 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10670 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10671 generated code will drop through.
10672 SIGNED_CODE should be the rtx operation for this comparison for
10673 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10675 We force a stack adjustment unless there are currently
10676 things pushed on the stack that aren't yet used. */
10678 static void
10679 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10680 if_true_label)
10681 tree exp;
10682 enum rtx_code signed_code, unsigned_code;
10683 rtx if_false_label, if_true_label;
10685 rtx op0, op1;
10686 tree type;
10687 enum machine_mode mode;
10688 int unsignedp;
10689 enum rtx_code code;
10691 /* Don't crash if the comparison was erroneous. */
10692 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10693 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10694 return;
10696 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10697 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10698 return;
10700 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10701 mode = TYPE_MODE (type);
10702 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10703 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10704 || (GET_MODE_BITSIZE (mode)
10705 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10706 1)))))))
10708 /* op0 might have been replaced by a promoted constant, in which
10709 case the type of the second argument should be used. */
10710 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10711 mode = TYPE_MODE (type);
10713 unsignedp = TREE_UNSIGNED (type);
10714 code = unsignedp ? unsigned_code : signed_code;
10716 #ifdef HAVE_canonicalize_funcptr_for_compare
10717 /* If function pointers need to be "canonicalized" before they can
10718 be reliably compared, then canonicalize them. */
10719 if (HAVE_canonicalize_funcptr_for_compare
10720 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10721 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10722 == FUNCTION_TYPE))
10724 rtx new_op0 = gen_reg_rtx (mode);
10726 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10727 op0 = new_op0;
10730 if (HAVE_canonicalize_funcptr_for_compare
10731 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10732 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10733 == FUNCTION_TYPE))
10735 rtx new_op1 = gen_reg_rtx (mode);
10737 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10738 op1 = new_op1;
10740 #endif
10742 /* Do any postincrements in the expression that was tested. */
10743 emit_queue ();
10745 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10746 ((mode == BLKmode)
10747 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10748 if_false_label, if_true_label);
10751 /* Generate code to calculate EXP using a store-flag instruction
10752 and return an rtx for the result. EXP is either a comparison
10753 or a TRUTH_NOT_EXPR whose operand is a comparison.
10755 If TARGET is nonzero, store the result there if convenient.
10757 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
10758 cheap.
10760 Return zero if there is no suitable set-flag instruction
10761 available on this machine.
10763 Once expand_expr has been called on the arguments of the comparison,
10764 we are committed to doing the store flag, since it is not safe to
10765 re-evaluate the expression. We emit the store-flag insn by calling
10766 emit_store_flag, but only expand the arguments if we have a reason
10767 to believe that emit_store_flag will be successful. If we think that
10768 it will, but it isn't, we have to simulate the store-flag with a
10769 set/jump/set sequence. */
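/* For illustration, the set/jump/set fallback mentioned above is
   roughly `target = 1; if (<comparison>) goto L; target = 0; L:;',
   i.e. the flag value is produced with a conditional jump when no
   usable store-flag instruction exists.  */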
10771 static rtx
10772 do_store_flag (exp, target, mode, only_cheap)
10773 tree exp;
10774 rtx target;
10775 enum machine_mode mode;
10776 int only_cheap;
10778 enum rtx_code code;
10779 tree arg0, arg1, type;
10780 tree tem;
10781 enum machine_mode operand_mode;
10782 int invert = 0;
10783 int unsignedp;
10784 rtx op0, op1;
10785 enum insn_code icode;
10786 rtx subtarget = target;
10787 rtx result, label;
10789 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10790 result at the end. We can't simply invert the test since it would
10791 have already been inverted if it were valid. This case occurs for
10792 some floating-point comparisons. */
10794 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10795 invert = 1, exp = TREE_OPERAND (exp, 0);
10797 arg0 = TREE_OPERAND (exp, 0);
10798 arg1 = TREE_OPERAND (exp, 1);
10800 /* Don't crash if the comparison was erroneous. */
10801 if (arg0 == error_mark_node || arg1 == error_mark_node)
10802 return const0_rtx;
10804 type = TREE_TYPE (arg0);
10805 operand_mode = TYPE_MODE (type);
10806 unsignedp = TREE_UNSIGNED (type);
10808 /* We won't bother with BLKmode store-flag operations because it would mean
10809 passing a lot of information to emit_store_flag. */
10810 if (operand_mode == BLKmode)
10811 return 0;
10813 /* We won't bother with store-flag operations involving function pointers
10814 when function pointers must be canonicalized before comparisons. */
10815 #ifdef HAVE_canonicalize_funcptr_for_compare
10816 if (HAVE_canonicalize_funcptr_for_compare
10817 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10818 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10819 == FUNCTION_TYPE))
10820 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10821 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10822 == FUNCTION_TYPE))))
10823 return 0;
10824 #endif
10826 STRIP_NOPS (arg0);
10827 STRIP_NOPS (arg1);
10829 /* Get the rtx comparison code to use. We know that EXP is a comparison
10830 operation of some type. Some comparisons against 1 and -1 can be
10831 converted to comparisons with zero. Do so here so that the tests
10832 below will be aware that we have a comparison with zero. These
10833 tests will not catch constants in the first operand, but constants
10834 are rarely passed as the first operand. */
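/* For illustration: a signed `x < 1' becomes `x <= 0' and a signed
   `x > -1' becomes `x >= 0', so the tests below only have to
   recognize comparisons against zero.  */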
10836 switch (TREE_CODE (exp))
10838 case EQ_EXPR:
10839 code = EQ;
10840 break;
10841 case NE_EXPR:
10842 code = NE;
10843 break;
10844 case LT_EXPR:
10845 if (integer_onep (arg1))
10846 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10847 else
10848 code = unsignedp ? LTU : LT;
10849 break;
10850 case LE_EXPR:
10851 if (! unsignedp && integer_all_onesp (arg1))
10852 arg1 = integer_zero_node, code = LT;
10853 else
10854 code = unsignedp ? LEU : LE;
10855 break;
10856 case GT_EXPR:
10857 if (! unsignedp && integer_all_onesp (arg1))
10858 arg1 = integer_zero_node, code = GE;
10859 else
10860 code = unsignedp ? GTU : GT;
10861 break;
10862 case GE_EXPR:
10863 if (integer_onep (arg1))
10864 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10865 else
10866 code = unsignedp ? GEU : GE;
10867 break;
10869 case UNORDERED_EXPR:
10870 code = UNORDERED;
10871 break;
10872 case ORDERED_EXPR:
10873 code = ORDERED;
10874 break;
10875 case UNLT_EXPR:
10876 code = UNLT;
10877 break;
10878 case UNLE_EXPR:
10879 code = UNLE;
10880 break;
10881 case UNGT_EXPR:
10882 code = UNGT;
10883 break;
10884 case UNGE_EXPR:
10885 code = UNGE;
10886 break;
10887 case UNEQ_EXPR:
10888 code = UNEQ;
10889 break;
10891 default:
10892 abort ();
10893 }
10895 /* Put a constant second. */
10896 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10897 {
10898 tem = arg0; arg0 = arg1; arg1 = tem;
10899 code = swap_condition (code);
10900 }
10902 /* If this is an equality or inequality test of a single bit, we can
10903 do this by shifting the bit being tested to the low-order bit and
10904 masking the result with the constant 1. If the condition was EQ,
10905 we xor it with 1. This does not require an scc insn and is faster
10906 than an scc insn even if we have it. */
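/* Illustrative example of this transformation, for a 32-bit unsigned X:

	(x & 0x10) != 0		becomes		(x >> 4) & 1
	(x & 0x10) == 0		becomes		((x >> 4) & 1) ^ 1

   and when the tested bit is the most significant bit of the type the
   final AND is not needed.  A hypothetical source-level sketch of the
   same rewrite, shown only for illustration and compiled out:  */
#if 0
static int
single_bit_test_example (unsigned int x)
{
  /* Equivalent to (x & 0x10) != 0, written in the shifted-and-masked
     form that the code below produces.  */
  return (x >> 4) & 1;
}
#endif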
10908 if ((code == NE || code == EQ)
10909 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10910 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10911 {
10912 tree inner = TREE_OPERAND (arg0, 0);
10913 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10914 int ops_unsignedp;
10916 /* If INNER is a right shift of a constant and it plus BITNUM does
10917 not overflow, adjust BITNUM and INNER. */
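/* Worked example: for ((x >> 3) & 4) != 0 we start with INNER = x >> 3 and
   BITNUM = 2; folding the shift into the bit number gives INNER = x and
   BITNUM = 5, i.e. a direct test of bit 5 of X.  */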
10919 if (TREE_CODE (inner) == RSHIFT_EXPR
10920 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10921 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10922 && bitnum < TYPE_PRECISION (type)
10923 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10924 bitnum - TYPE_PRECISION (type)))
10925 {
10926 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10927 inner = TREE_OPERAND (inner, 0);
10928 }
10930 /* If we are going to be able to omit the AND below, we must do our
10931 operations as unsigned. If we must use the AND, we have a choice.
10932 Normally unsigned is faster, but for some machines signed is. */
10933 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10934 #ifdef LOAD_EXTEND_OP
10935 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10936 #else
10937 : 1
10938 #endif
10939 );
10941 if (! get_subtarget (subtarget)
10942 || GET_MODE (subtarget) != operand_mode
10943 || ! safe_from_p (subtarget, inner, 1))
10944 subtarget = 0;
10946 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10948 if (bitnum != 0)
10949 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10950 size_int (bitnum), subtarget, ops_unsignedp);
10952 if (GET_MODE (op0) != mode)
10953 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10955 if ((code == EQ && ! invert) || (code == NE && invert))
10956 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10957 ops_unsignedp, OPTAB_LIB_WIDEN);
10959 /* Put the AND last so it can combine with more things. */
10960 if (bitnum != TYPE_PRECISION (type) - 1)
10961 op0 = expand_and (mode, op0, const1_rtx, subtarget);
10963 return op0;
10964 }
10966 /* Now see if we are likely to be able to do this. Return if not. */
10967 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10968 return 0;
10970 icode = setcc_gen_code[(int) code];
10971 if (icode == CODE_FOR_nothing
10972 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10973 {
10974 /* We can only do this if it is one of the special cases that
10975 can be handled without an scc insn. */
10976 if ((code == LT && integer_zerop (arg1))
10977 || (! only_cheap && code == GE && integer_zerop (arg1)))
10978 ;
10979 else if (BRANCH_COST >= 0
10980 && ! only_cheap && (code == NE || code == EQ)
10981 && TREE_CODE (type) != REAL_TYPE
10982 && ((abs_optab->handlers[(int) operand_mode].insn_code
10983 != CODE_FOR_nothing)
10984 || (ffs_optab->handlers[(int) operand_mode].insn_code
10985 != CODE_FOR_nothing)))
10986 ;
10987 else
10988 return 0;
10989 }
10991 if (! get_subtarget (target)
10992 || GET_MODE (subtarget) != operand_mode
10993 || ! safe_from_p (subtarget, arg1, 1))
10994 subtarget = 0;
10996 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10997 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10999 if (target == 0)
11000 target = gen_reg_rtx (mode);
11002 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
11003 because, if the emit_store_flag does anything it will succeed and
11004 OP0 and OP1 will not be used subsequently. */
11006 result = emit_store_flag (target, code,
11007 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
11008 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
11009 operand_mode, unsignedp, 1);
11011 if (result)
11012 {
11013 if (invert)
11014 result = expand_binop (mode, xor_optab, result, const1_rtx,
11015 result, 0, OPTAB_LIB_WIDEN);
11016 return result;
11017 }
11019 /* If this failed, we have to do this with set/compare/jump/set code. */
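/* Roughly, the fallback emitted below is, in C-like pseudo-code for the
   non-inverted case:

	target = 1;
	if (op0 <cond> op1)
	  goto label;
	target = 0;
      label:

   which costs a branch but requires no scc pattern.  */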
11020 if (GET_CODE (target) != REG
11021 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
11022 target = gen_reg_rtx (GET_MODE (target));
11024 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
11025 result = compare_from_rtx (op0, op1, code, unsignedp,
11026 operand_mode, NULL_RTX);
11027 if (GET_CODE (result) == CONST_INT)
11028 return (((result == const0_rtx && ! invert)
11029 || (result != const0_rtx && invert))
11030 ? const0_rtx : const1_rtx);
11032 /* The code of RESULT may not match CODE if compare_from_rtx
11033 decided to swap its operands and reverse the original code.
11035 We know that compare_from_rtx returns either a CONST_INT or
11036 a new comparison code, so it is safe to just extract the
11037 code from RESULT. */
11038 code = GET_CODE (result);
11040 label = gen_label_rtx ();
11041 if (bcc_gen_fctn[(int) code] == 0)
11042 abort ();
11044 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
11045 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
11046 emit_label (label);
11048 return target;
11049 }
11052 /* Stubs in case we haven't got a casesi insn. */
11053 #ifndef HAVE_casesi
11054 # define HAVE_casesi 0
11055 # define gen_casesi(a, b, c, d, e) (0)
11056 # define CODE_FOR_casesi CODE_FOR_nothing
11057 #endif
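/* With these stubs, a target that lacks a casesi pattern simply sees
   HAVE_casesi == 0, so the generic code below can test it with an ordinary
   `if (! HAVE_casesi) return 0;' instead of wrapping everything in #ifdef
   HAVE_casesi.  */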
11059 /* If the machine does not have a case insn that compares the bounds,
11060 this means extra overhead for dispatch tables, which raises the
11061 threshold for using them. */
11062 #ifndef CASE_VALUES_THRESHOLD
11063 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
11064 #endif /* CASE_VALUES_THRESHOLD */
11066 unsigned int
11067 case_values_threshold ()
11068 {
11069 return CASE_VALUES_THRESHOLD;
11070 }
11072 /* Attempt to generate a casesi instruction. Returns 1 if successful,
11073 0 otherwise (i.e. if there is no casesi instruction). */
11074 int
11075 try_casesi (index_type, index_expr, minval, range,
11076 table_label, default_label)
11077 tree index_type, index_expr, minval, range;
11078 rtx table_label ATTRIBUTE_UNUSED;
11079 rtx default_label;
11080 {
11081 enum machine_mode index_mode = SImode;
11082 int index_bits = GET_MODE_BITSIZE (index_mode);
11083 rtx op1, op2, index;
11084 enum machine_mode op_mode;
11086 if (! HAVE_casesi)
11087 return 0;
11089 /* Convert the index to SImode. */
11090 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
11091 {
11092 enum machine_mode omode = TYPE_MODE (index_type);
11093 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
11095 /* We must handle the endpoints in the original mode. */
11096 index_expr = build (MINUS_EXPR, index_type,
11097 index_expr, minval);
11098 minval = integer_zero_node;
11099 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
11100 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
11101 omode, 1, default_label);
11102 /* Now we can safely truncate. */
11103 index = convert_to_mode (index_mode, index, 0);
11104 }
11105 else
11106 {
11107 if (TYPE_MODE (index_type) != index_mode)
11108 {
11109 index_expr = convert ((*lang_hooks.types.type_for_size)
11110 (index_bits, 0), index_expr);
11111 index_type = TREE_TYPE (index_expr);
11112 }
11114 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
11115 }
11116 emit_queue ();
11117 index = protect_from_queue (index, 0);
11118 do_pending_stack_adjust ();
11120 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
11121 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
11122 (index, op_mode))
11123 index = copy_to_mode_reg (op_mode, index);
11125 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
11127 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
11128 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
11129 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
11130 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
11131 (op1, op_mode))
11132 op1 = copy_to_mode_reg (op_mode, op1);
11134 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
11136 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
11137 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
11138 op2, TREE_UNSIGNED (TREE_TYPE (range)));
11139 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
11140 (op2, op_mode))
11141 op2 = copy_to_mode_reg (op_mode, op2);
11143 emit_jump_insn (gen_casesi (index, op1, op2,
11144 table_label, default_label));
11145 return 1;
11146 }
11148 /* Attempt to generate a tablejump instruction; same concept. */
11149 #ifndef HAVE_tablejump
11150 #define HAVE_tablejump 0
11151 #define gen_tablejump(x, y) (0)
11152 #endif
11154 /* Subroutine of the next function.
11156 INDEX is the value being switched on, with the lowest value
11157 in the table already subtracted.
11158 MODE is its expected mode (needed if INDEX is constant).
11159 RANGE is the length of the jump table.
11160 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11162 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11163 index value is out of range. */
11165 static void
11166 do_tablejump (index, mode, range, table_label, default_label)
11167 rtx index, range, table_label, default_label;
11168 enum machine_mode mode;
11169 {
11170 rtx temp, vector;
11172 if (INTVAL (range) > cfun->max_jumptable_ents)
11173 cfun->max_jumptable_ents = INTVAL (range);
11175 /* Do an unsigned comparison (in the proper mode) between the index
11176 expression and the value which represents the length of the range.
11177 Since we just finished subtracting the lower bound of the range
11178 from the index expression, this comparison allows us to simultaneously
11179 check that the original index expression value is both greater than
11180 or equal to the minimum value of the range and less than or equal to
11181 the maximum value of the range. */
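/* Worked example: for case values 3 ... 10, RANGE is 7 and INDEX has
   already had 3 subtracted.  An original value of 1 becomes a very large
   unsigned number after the subtraction wraps around, and an original
   value of 12 becomes 9; both compare GTU against 7, so this single
   unsigned comparison sends below-range and above-range values alike to
   DEFAULT_LABEL.  */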
11183 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11184 default_label);
11186 /* If index is in range, it must fit in Pmode.
11187 Convert to Pmode so we can index with it. */
11188 if (mode != Pmode)
11189 index = convert_to_mode (Pmode, index, 1);
11191 /* Don't let a MEM slip thru, because then INDEX that comes
11192 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11193 and break_out_memory_refs will go to work on it and mess it up. */
11194 #ifdef PIC_CASE_VECTOR_ADDRESS
11195 if (flag_pic && GET_CODE (index) != REG)
11196 index = copy_to_mode_reg (Pmode, index);
11197 #endif
11199 /* If flag_force_addr were to affect this address
11200 it could interfere with the tricky assumptions made
11201 about addresses that contain label-refs,
11202 which may be valid only very near the tablejump itself. */
11203 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11204 GET_MODE_SIZE, because this indicates how large insns are. The other
11205 uses should all be Pmode, because they are addresses. This code
11206 could fail if addresses and insns are not the same size. */
11207 index = gen_rtx_PLUS (Pmode,
11208 gen_rtx_MULT (Pmode, index,
11209 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11210 gen_rtx_LABEL_REF (Pmode, table_label));
11211 #ifdef PIC_CASE_VECTOR_ADDRESS
11212 if (flag_pic)
11213 index = PIC_CASE_VECTOR_ADDRESS (index);
11214 else
11215 #endif
11216 index = memory_address_noforce (CASE_VECTOR_MODE, index);
11217 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11218 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
11219 RTX_UNCHANGING_P (vector) = 1;
11220 convert_move (temp, vector, 0);
11222 emit_jump_insn (gen_tablejump (temp, table_label));
11224 /* If we are generating PIC code or if the table is PC-relative, the
11225 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11226 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11227 emit_barrier ();
11228 }
11230 int
11231 try_tablejump (index_type, index_expr, minval, range,
11232 table_label, default_label)
11233 tree index_type, index_expr, minval, range;
11234 rtx table_label, default_label;
11235 {
11236 rtx index;
11238 if (! HAVE_tablejump)
11239 return 0;
11241 index_expr = fold (build (MINUS_EXPR, index_type,
11242 convert (index_type, index_expr),
11243 convert (index_type, minval)));
11244 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
11245 emit_queue ();
11246 index = protect_from_queue (index, 0);
11247 do_pending_stack_adjust ();
11249 do_tablejump (index, TYPE_MODE (index_type),
11250 convert_modes (TYPE_MODE (index_type),
11251 TYPE_MODE (TREE_TYPE (range)),
11252 expand_expr (range, NULL_RTX,
11253 VOIDmode, 0),
11254 TREE_UNSIGNED (TREE_TYPE (range))),
11255 table_label, default_label);
11256 return 1;
11257 }
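/* For illustration, the net effect of try_tablejump on a dense switch is
   roughly the following pseudo-code (hypothetical sketch only):

	index = value - minval;
	if ((unsigned) index > range)
	  goto default_label;
	goto *table[index];

   with the bounds check and the indirect jump emitted by do_tablejump
   above.  */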
11259 /* Nonzero if the mode is a valid vector mode for this architecture.
11260 This returns nonzero even if there is no hardware support for the
11261 vector mode, but we can emulate with narrower modes. */
11263 int
11264 vector_mode_valid_p (mode)
11265 enum machine_mode mode;
11266 {
11267 enum mode_class class = GET_MODE_CLASS (mode);
11268 enum machine_mode innermode;
11270 /* Doh! What's going on? */
11271 if (class != MODE_VECTOR_INT
11272 && class != MODE_VECTOR_FLOAT)
11273 return 0;
11275 /* Hardware support. Woo hoo! */
11276 if (VECTOR_MODE_SUPPORTED_P (mode))
11277 return 1;
11279 innermode = GET_MODE_INNER (mode);
11281 /* We should probably return 1 if requesting V4DI and we have no DI,
11282 but do have V2DI; still, this case is probably very unlikely. */
11284 /* If we have support for the inner mode, we can safely emulate it.
11285 We may not have V2DI, but we can emulate with a pair of DIs. */
11286 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
11287 }
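/* Illustrative note: answering "valid" here even without a hardware vector
   pattern is what lets, say, a V2DI value be kept in that mode and then
   moved as two consecutive DImode pieces when no V2DI instructions exist;
   only the inner mode needs a move pattern.  */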
11289 #include "gt-expr.h"