/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};
static rtx enqueue_insn PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
				      struct move_by_pieces *));
static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
static tree emit_block_move_libcall_fn PARAMS ((int));
static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
				      enum machine_mode));
static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
				     unsigned int));
static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
				       unsigned int));
static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
				       enum machine_mode,
				       struct store_by_pieces *));
static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
static tree clear_storage_libcall_fn PARAMS ((int));
static rtx compress_float_constant PARAMS ((rtx, rtx));
static rtx get_subtarget PARAMS ((rtx));
static int is_zeros_p PARAMS ((tree));
static int mostly_zeros_p PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, int, int));
static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
				HOST_WIDE_INT, enum machine_mode,
				tree, enum machine_mode, int, tree,
				int));
static rtx var_rtx PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree));
static int is_aligning_offset PARAMS ((tree, tree));
static rtx expand_increment PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
#ifdef PUSH_ROUNDING
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
#endif
static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
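/* Worked example (illustrative only, not from the original sources):
   copying a 16-byte object with word alignment on a hypothetical 32-bit
   target takes four SImode moves, so move_by_pieces_ninsns returns 4;
   with the default MOVE_RATIO of 15 the copy is expanded inline, whereas
   a 256-byte copy (64 insns) instead uses a movstr pattern or a libcall.  */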
/* If a clear memory operation would take CLEAR_RATIO or more simple
   move-instruction sequences, we will do a clrstr or libcall instead.  */

#ifndef CLEAR_RATIO
#if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
#define CLEAR_RATIO 2
#else
/* If we are optimizing for space, cut down the default clear ratio.  */
#define CLEAR_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            REGNO (reg) = regno;

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
                                  body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     rtx x;
     int modify;
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
         use of autoincrement.  Make a copy of the contents of the memory
         location rather than a copy of the address, but not if the value is
         of mode BLKmode.  Don't modify X in place since it might be
         shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          rtx y = XEXP (x, 0);
          rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

          if (QUEUED_INSN (y))
            {
              rtx temp = gen_reg_rtx (GET_MODE (x));

              emit_insn_before (gen_move_insn (temp, new),
                                QUEUED_INSN (y));
              return temp;
            }

          /* Copy the address into a pseudo, so that the returned value
             remains correct across calls to emit_queue.  */
          return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
        }

      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        {
          rtx tem = protect_from_queue (XEXP (x, 0), 0);
          if (tem != XEXP (x, 0))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = tem;
            }
        }
      else if (code == PLUS || code == MULT)
        {
          rtx new0 = protect_from_queue (XEXP (x, 0), 0);
          rtx new1 = protect_from_queue (XEXP (x, 1), 0);
          if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = new0;
              XEXP (x, 1) = new1;
            }
        }
      return x;
    }
  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
              || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
        {
        case INSN:
        case JUMP_INSN:
        case CALL_INSN:
        case CODE_LABEL:
        case BARRIER:
        case NOTE:
          QUEUED_INSN (p) = body;
          emit_insn (body);
          break;

#ifdef ENABLE_CHECKING
        case SEQUENCE:
          abort ();
          break;
#endif

        default:
          QUEUED_INSN (p) = emit_insn (body);
          break;
        }

      pending_chain = QUEUED_NEXT (p);
    }
}
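/* Illustrative sketch (not part of the original sources): when expanding a
   post-increment such as "a[i++] = 0", the expander first calls enqueue_insn
   to defer the increment of I, passes every rtx that might mention the
   QUEUED value through protect_from_queue, and finally calls emit_queue once
   the use has been emitted, so the increment takes effect after the use.  */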
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
        abort ();

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (to_real != from_real)
    abort ();
  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
        {
          /* Try converting directly if the insn is supported.  */
          if ((code = can_extend_p (to_mode, from_mode, 0))
              != CODE_FOR_nothing)
            {
              emit_unop_insn (code, to, from, UNKNOWN);
              return;
            }
        }

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
          return;
        }
#endif
      libcall = (rtx) 0;
      switch (from_mode)
        {
        case SFmode:
          switch (to_mode)
            {
            case DFmode:
              libcall = extendsfdf2_libfunc;
              break;

            case XFmode:
              libcall = extendsfxf2_libfunc;
              break;

            case TFmode:
              libcall = extendsftf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case DFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncdfsf2_libfunc;
              break;

            case XFmode:
              libcall = extenddfxf2_libfunc;
              break;

            case TFmode:
              libcall = extenddftf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case XFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncxfsf2_libfunc;
              break;

            case DFmode:
              libcall = truncxfdf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case TFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = trunctfsf2_libfunc;
              break;

            case DFmode:
              libcall = trunctfdf2_libfunc;
              break;

            default:
              break;
            }
          break;

        default:
          break;
        }

      if (libcall == (rtx) 0)
        /* This conversion is not implemented yet.  */
        abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                    from));
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (GET_CODE (to) == REG)
            emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
        from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
        {
          emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
        {
          from = convert_to_mode (QImode, from, unsignedp);
          from_mode = QImode;
        }
      else
        {
#ifdef HAVE_extendpqiqi2
          if (HAVE_extendpqiqi2)
            {
              emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpqiqi2 */
          abort ();
        }
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
        {
          emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
        {
          from = convert_to_mode (SImode, from, unsignedp);
          from_mode = SImode;
        }
      else
        {
#ifdef HAVE_extendpsisi2
          if (! unsignedp && HAVE_extendpsisi2)
            {
              emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
          if (unsignedp && HAVE_zero_extendpsisi2)
            {
              emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_zero_extendpsisi2 */
          abort ();
        }
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
        from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
        {
          emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
        {
          from = convert_to_mode (DImode, from, unsignedp);
          from_mode = DImode;
        }
      else
        {
#ifdef HAVE_extendpdidi2
          if (HAVE_extendpdidi2)
            {
              emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpdidi2 */
          abort ();
        }
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          if (flag_force_mem)
            from = force_not_mem (from);

          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
                                      - GET_MODE_BITSIZE (from_mode), 0);
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
        {
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
        {
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
        {
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
        {
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
        {
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
        {
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
        {
          emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
        {
          emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
        {
          emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
        {
          emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
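/* Illustrative example (not from the original sources): to sign-extend a
   SImode pseudo SRC into a DImode pseudo DST one would write
     convert_move (dst, src, 0);
   passing a nonzero last argument instead requests zero-extension.  */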
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (GET_CODE (x) == REG
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return gen_int_mode (val, mode);
        }

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
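/* Illustrative example (not from the original sources): converting the
   constant -1 from QImode to an unsigned SImode value with
     convert_modes (SImode, QImode, GEN_INT (-1), 1)
   returns (const_int 255), since the CONST_INT path above zero-extends
   the value from the old (narrower) mode.  */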
/* This macro is used to determine what the largest unit size that
   move_by_pieces can use is.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
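/* For instance (illustrative only): on a target where MOVE_MAX_PIECES is 8
   and HOST_WIDE_INT is 64 bits wide, STORE_MAX_PIECES is MIN (8, 16) = 8,
   so store_by_pieces never emits a piece wider than one DImode word.  */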
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          abort ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, method)
     rtx x, y, size;
     enum block_op_methods method;
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      abort ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();
  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else if (emit_block_move_via_movstr (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
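/* Illustrative use (not from the original sources): copying N bytes between
   two BLKmode MEMs DST and SRC where a library call is acceptable is
     emit_block_move (dst, src, GEN_INT (n), BLOCK_OP_NORMAL);
   code that is setting up outgoing call arguments passes BLOCK_OP_CALL_PARM
   instead, so that no libcall can clobber parameters already on the stack.  */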
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm ()
{
  if (PUSH_ARGS)
    return true;
  else
    {
      /* Check to see whether memcpy takes all register arguments.  */
      static enum {
        takes_regs_uninit, takes_regs_no, takes_regs_yes
      } takes_regs = takes_regs_uninit;

      switch (takes_regs)
        {
        case takes_regs_uninit:
          {
            CUMULATIVE_ARGS args_so_far;
            tree fn, arg;

            fn = emit_block_move_libcall_fn (false);
            INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);

            arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
            for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
              {
                enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
                rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
                if (!tmp || !REG_P (tmp))
                  goto fail_takes_regs;
#ifdef FUNCTION_ARG_PARTIAL_NREGS
                if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
                                                NULL_TREE, 1))
                  goto fail_takes_regs;
#endif
                FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
              }
          }
          takes_regs = takes_regs_yes;
          /* FALLTHRU */

        case takes_regs_yes:
          return true;

        fail_takes_regs:
          takes_regs = takes_regs_no;
          /* FALLTHRU */
        case takes_regs_no:
          return false;

        default:
          abort ();
        }
    }
}
/* A subroutine of emit_block_move.  Expand a movstr pattern;
   return true if successful.  */

static bool
emit_block_move_via_movstr (x, y, size, align)
     rtx x, y, size;
     unsigned int align;
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (x, BLKmode))
          && ((pred = insn_data[(int) code].operand[1].predicate) == 0
              || (*pred) (y, BLKmode))
          && ((pred = insn_data[(int) code].operand[3].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx op2;
          rtx last = get_last_insn ();
          rtx pat;

          op2 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[2].predicate;
          if (pred != 0 && ! (*pred) (op2, mode))
            op2 = copy_to_mode_reg (mode, op2);

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */

          pat = GEN_FCN ((int) code) (x, y, op2, opalign);
          if (pat)
            {
              emit_insn (pat);
              volatile_ok = 0;
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  volatile_ok = 0;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy or bcopy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (dst, src, size)
     rtx dst, src, size;
{
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* DST, SRC, or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue
     and reuse it later.  Consider what happens if emit_queue is
     called before the return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before
     we are finished emitting RTL for argument setup.  So if we are
     not careful we could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy X, Y &
     SIZE into new pseudos.  We can then place those new pseudos
     into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they
     do not call emit_queue before loading their arguments.  However,
     we may need to have library calls call emit_queue in the future
     since failing to do so could cause problems for targets which
     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

  dst = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src = copy_to_mode_reg (Pmode, XEXP (src, 0));

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
  else
    size_mode = TYPE_MODE (unsigned_type_node);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bcopy this way as well.  */

  dst_tree = make_tree (ptr_type_node, dst);
  src_tree = make_tree (ptr_type_node, src);
  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
  else
    size_tree = make_tree (unsigned_type_node, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
    {
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
    }
  else
    {
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
    }

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                     call_expr, arg_list, NULL_TREE);
  TREE_SIDE_EFFECTS (call_expr) = 1;

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call
     clobbered it.  Otherwise, a load from it may erroneously be
     hoisted from a loop.  */
  if (RTX_UNCHANGING_P (dst))
    emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));

  return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

static tree
emit_block_move_libcall_fn (for_call)
     int for_call;
{
  static bool emitted_extern;
  tree fn = block_move_fn, args;

  if (!fn)
    {
      if (TARGET_MEM_FUNCTIONS)
        {
          fn = get_identifier ("memcpy");
          args = build_function_type_list (ptr_type_node, ptr_type_node,
                                           const_ptr_type_node, sizetype,
                                           NULL_TREE);
        }
      else
        {
          fn = get_identifier ("bcopy");
          args = build_function_type_list (void_type_node, const_ptr_type_node,
                                           ptr_type_node, unsigned_type_node,
                                           NULL_TREE);
        }

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (fn, NULL);
      assemble_external (fn);
    }

  return fn;
}
2016 /* A subroutine of emit_block_move. Copy the data via an explicit
2017 loop. This is used only when libcalls are forbidden. */
2018 /* ??? It'd be nice to copy in hunks larger than QImode. */
2020 static void
2021 emit_block_move_via_loop (x, y, size, align)
2022 rtx x, y, size;
2023 unsigned int align ATTRIBUTE_UNUSED;
2025 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
2026 enum machine_mode iter_mode;
2028 iter_mode = GET_MODE (size);
2029 if (iter_mode == VOIDmode)
2030 iter_mode = word_mode;
2032 top_label = gen_label_rtx ();
2033 cmp_label = gen_label_rtx ();
2034 iter = gen_reg_rtx (iter_mode);
2036 emit_move_insn (iter, const0_rtx);
2038 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
2039 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
2040 do_pending_stack_adjust ();
2042 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2044 emit_jump (cmp_label);
2045 emit_label (top_label);
2047 tmp = convert_modes (Pmode, iter_mode, iter, true);
2048 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
2049 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
2050 x = change_address (x, QImode, x_addr);
2051 y = change_address (y, QImode, y_addr);
2053 emit_move_insn (x, y);
2055 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
2056 true, OPTAB_LIB_WIDEN);
2057 if (tmp != iter)
2058 emit_move_insn (iter, tmp);
2060 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2061 emit_label (cmp_label);
2063 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
2064 true, top_label);
2066 emit_note (NULL, NOTE_INSN_LOOP_END);
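/* Editorial sketch, not part of the original file: the loop generated by
   emit_block_move_via_loop above, written as ordinary C.  The emitted RTL
   amounts to a byte-at-a-time (QImode) copy driven by an explicit counter,
   which is why the ??? note above wishes for larger hunks.  Wrapped in
   #if 0 so it cannot affect a build.  */
#if 0
static void
block_move_via_loop_example (unsigned char *x, const unsigned char *y,
			     unsigned long size)
{
  unsigned long iter = 0;

  goto cmp_label;			/* emit_jump (cmp_label) */
 top_label:
  x[iter] = y[iter];			/* QImode move */
  iter += 1;
 cmp_label:
  if (iter < size)			/* emit_cmp_and_jump_insns */
    goto top_label;
}
#endif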
2069 /* Copy all or part of a value X into registers starting at REGNO.
2070 The number of registers to be filled is NREGS. */
2072 void
2073 move_block_to_reg (regno, x, nregs, mode)
2074 int regno;
2075 rtx x;
2076 int nregs;
2077 enum machine_mode mode;
2079 int i;
2080 #ifdef HAVE_load_multiple
2081 rtx pat;
2082 rtx last;
2083 #endif
2085 if (nregs == 0)
2086 return;
2088 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2089 x = validize_mem (force_const_mem (mode, x));
2091 /* See if the machine can do this with a load multiple insn. */
2092 #ifdef HAVE_load_multiple
2093 if (HAVE_load_multiple)
2095 last = get_last_insn ();
2096 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
2097 GEN_INT (nregs));
2098 if (pat)
2100 emit_insn (pat);
2101 return;
2103 else
2104 delete_insns_since (last);
2106 #endif
2108 for (i = 0; i < nregs; i++)
2109 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
2110 operand_subword_force (x, i, mode));
2113 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2114 The number of registers to be filled is NREGS. SIZE indicates the number
2115 of bytes in the object X. */
2117 void
2118 move_block_from_reg (regno, x, nregs, size)
2119 int regno;
2120 rtx x;
2121 int nregs;
2122 int size;
2124 int i;
2125 #ifdef HAVE_store_multiple
2126 rtx pat;
2127 rtx last;
2128 #endif
2129 enum machine_mode mode;
2131 if (nregs == 0)
2132 return;
2134 /* If SIZE is that of a mode no bigger than a word, just use that
2135 mode's store operation. */
2136 if (size <= UNITS_PER_WORD
2137 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
2138 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
2140 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
2141 return;
2144 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
2145 to the left before storing to memory. Note that the previous test
2146 doesn't handle all cases (e.g. SIZE == 3). */
2147 if (size < UNITS_PER_WORD
2148 && BYTES_BIG_ENDIAN
2149 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
2151 rtx tem = operand_subword (x, 0, 1, BLKmode);
2152 rtx shift;
2154 if (tem == 0)
2155 abort ();
2157 shift = expand_shift (LSHIFT_EXPR, word_mode,
2158 gen_rtx_REG (word_mode, regno),
2159 build_int_2 ((UNITS_PER_WORD - size)
2160 * BITS_PER_UNIT, 0), NULL_RTX, 0);
2161 emit_move_insn (tem, shift);
2162 return;
2165 /* See if the machine can do this with a store multiple insn. */
2166 #ifdef HAVE_store_multiple
2167 if (HAVE_store_multiple)
2169 last = get_last_insn ();
2170 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
2171 GEN_INT (nregs));
2172 if (pat)
2174 emit_insn (pat);
2175 return;
2177 else
2178 delete_insns_since (last);
2180 #endif
2182 for (i = 0; i < nregs; i++)
2184 rtx tem = operand_subword (x, i, 1, BLKmode);
2186 if (tem == 0)
2187 abort ();
2189 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
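/* Editorial sketch, not part of the original file: the left-justification
   performed above for small blocks on a big-endian target, as a worked
   example.  With UNITS_PER_WORD == 4 and SIZE == 3 the shift count is
   (4 - 3) * BITS_PER_UNIT == 8, which moves the three significant bytes
   into the high-order end of the word so they come first when the word
   is stored to memory.  Wrapped in #if 0 so it cannot affect a build.  */
#if 0
#include <stdint.h>

/* Assumes a 32-bit word and 1 <= size <= 3.  */
static uint32_t
left_justify_example (uint32_t value, unsigned size)
{
  return value << ((4 - size) * 8);
}
#endif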
2193 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
2194 registers represented by a PARALLEL. SSIZE represents the total size of
2195 block SRC in bytes, or -1 if not known. */
2196 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
2197 the balance will be in what would be the low-order memory addresses, i.e.
2198 left justified for big endian, right justified for little endian. This
2199 happens to be true for the targets currently using this support. If this
2200 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
2201 would be needed. */
2203 void
2204 emit_group_load (dst, orig_src, ssize)
2205 rtx dst, orig_src;
2206 int ssize;
2208 rtx *tmps, src;
2209 int start, i;
2211 if (GET_CODE (dst) != PARALLEL)
2212 abort ();
2214 /* Check for a NULL entry, used to indicate that the parameter goes
2215 both on the stack and in registers. */
2216 if (XEXP (XVECEXP (dst, 0, 0), 0))
2217 start = 0;
2218 else
2219 start = 1;
2221 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
2223 /* Process the pieces. */
2224 for (i = start; i < XVECLEN (dst, 0); i++)
2226 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2227 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2228 unsigned int bytelen = GET_MODE_SIZE (mode);
2229 int shift = 0;
2231 /* Handle trailing fragments that run over the size of the struct. */
2232 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2234 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2235 bytelen = ssize - bytepos;
2236 if (bytelen <= 0)
2237 abort ();
2240 /* If we won't be loading directly from memory, protect the real source
2241 from strange tricks we might play; but make sure that the source can
2242 be loaded directly into the destination. */
2243 src = orig_src;
2244 if (GET_CODE (orig_src) != MEM
2245 && (!CONSTANT_P (orig_src)
2246 || (GET_MODE (orig_src) != mode
2247 && GET_MODE (orig_src) != VOIDmode)))
2249 if (GET_MODE (orig_src) == VOIDmode)
2250 src = gen_reg_rtx (mode);
2251 else
2252 src = gen_reg_rtx (GET_MODE (orig_src));
2254 emit_move_insn (src, orig_src);
2257 /* Optimize the access just a bit. */
2258 if (GET_CODE (src) == MEM
2259 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2260 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2261 && bytelen == GET_MODE_SIZE (mode))
2263 tmps[i] = gen_reg_rtx (mode);
2264 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2266 else if (GET_CODE (src) == CONCAT)
2268 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2269 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2271 if ((bytepos == 0 && bytelen == slen0)
2272 || (bytepos != 0 && bytepos + bytelen <= slen))
2274 /* The following assumes that the concatenated objects all
2275 have the same size. In this case, a simple calculation
2276 can be used to determine the object and the bit field
2277 to be extracted. */
2278 tmps[i] = XEXP (src, bytepos / slen0);
2279 if (! CONSTANT_P (tmps[i])
2280 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2281 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2282 (bytepos % slen0) * BITS_PER_UNIT,
2283 1, NULL_RTX, mode, mode, ssize);
2285 else if (bytepos == 0)
2287 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
2288 emit_move_insn (mem, src);
2289 tmps[i] = adjust_address (mem, mode, 0);
2291 else
2292 abort ();
2294 else if (CONSTANT_P (src)
2295 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2296 tmps[i] = src;
2297 else
2298 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2299 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2300 mode, mode, ssize);
2302 if (BYTES_BIG_ENDIAN && shift)
2303 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2304 tmps[i], 0, OPTAB_WIDEN);
2307 emit_queue ();
2309 /* Copy the extracted pieces into the proper (probable) hard regs. */
2310 for (i = start; i < XVECLEN (dst, 0); i++)
2311 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
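/* Editorial sketch, not part of the original file: the splitting that
   emit_group_load performs, modeled with ordinary C.  The source block is
   cut at the byte offsets recorded in the PARALLEL, a trailing fragment is
   clamped to the bytes that are really there, and each piece lands in a
   word-sized temporary which the real code then moves into its hard
   register.  The struct and function names are made up, and each piece is
   assumed to fit in an unsigned long.  Wrapped in #if 0 so it cannot
   affect a build.  */
#if 0
#include <string.h>

struct group_piece { unsigned bytepos; unsigned bytelen; };

static void
group_load_example (unsigned long *tmps, const unsigned char *src,
		    long ssize, const struct group_piece *pieces, int n)
{
  int i;

  for (i = 0; i < n; i++)
    {
      unsigned bytelen = pieces[i].bytelen;
      unsigned long piece = 0;

      /* A trailing fragment may run past the end of the block; only
	 SSIZE - BYTEPOS bytes of it are really there.  */
      if (ssize >= 0 && pieces[i].bytepos + bytelen > (unsigned long) ssize)
	bytelen = ssize - pieces[i].bytepos;

      /* Pull the piece into a word-sized temporary (host byte order).  */
      memcpy (&piece, src + pieces[i].bytepos, bytelen);
      tmps[i] = piece;
    }
}
#endif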
2314 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2315 registers represented by a PARALLEL. SSIZE represents the total size of
2316 block DST, or -1 if not known. */
2318 void
2319 emit_group_store (orig_dst, src, ssize)
2320 rtx orig_dst, src;
2321 int ssize;
2323 rtx *tmps, dst;
2324 int start, i;
2326 if (GET_CODE (src) != PARALLEL)
2327 abort ();
2329 /* Check for a NULL entry, used to indicate that the parameter goes
2330 both on the stack and in registers. */
2331 if (XEXP (XVECEXP (src, 0, 0), 0))
2332 start = 0;
2333 else
2334 start = 1;
2336 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2338 /* Copy the (probable) hard regs into pseudos. */
2339 for (i = start; i < XVECLEN (src, 0); i++)
2341 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2342 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2343 emit_move_insn (tmps[i], reg);
2345 emit_queue ();
2347 /* If we won't be storing directly into memory, protect the real destination
2348 from strange tricks we might play. */
2349 dst = orig_dst;
2350 if (GET_CODE (dst) == PARALLEL)
2352 rtx temp;
2354 /* We can get a PARALLEL dst if there is a conditional expression in
2355 a return statement. In that case, the dst and src are the same,
2356 so no action is necessary. */
2357 if (rtx_equal_p (dst, src))
2358 return;
2360 /* It is unclear if we can ever reach here, but we may as well handle
2361 it. Allocate a temporary, and split this into a store/load to/from
2362 the temporary. */
2364 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2365 emit_group_store (temp, src, ssize);
2366 emit_group_load (dst, temp, ssize);
2367 return;
2369 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2371 dst = gen_reg_rtx (GET_MODE (orig_dst));
2372 /* Make life a bit easier for combine. */
2373 emit_move_insn (dst, const0_rtx);
2376 /* Process the pieces. */
2377 for (i = start; i < XVECLEN (src, 0); i++)
2379 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2380 enum machine_mode mode = GET_MODE (tmps[i]);
2381 unsigned int bytelen = GET_MODE_SIZE (mode);
2382 rtx dest = dst;
2384 /* Handle trailing fragments that run over the size of the struct. */
2385 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2387 if (BYTES_BIG_ENDIAN)
2389 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2390 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2391 tmps[i], 0, OPTAB_WIDEN);
2393 bytelen = ssize - bytepos;
2396 if (GET_CODE (dst) == CONCAT)
2398 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2399 dest = XEXP (dst, 0);
2400 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2402 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2403 dest = XEXP (dst, 1);
2405 else
2406 abort ();
2409 /* Optimize the access just a bit. */
2410 if (GET_CODE (dest) == MEM
2411 && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2412 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2413 && bytelen == GET_MODE_SIZE (mode))
2414 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2415 else
2416 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2417 mode, tmps[i], ssize);
2420 emit_queue ();
2422 /* Copy from the pseudo into the (probable) hard reg. */
2423 if (GET_CODE (dst) == REG)
2424 emit_move_insn (orig_dst, dst);
2427 /* Generate code to copy a BLKmode object of TYPE out of a
2428 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2429 is null, a stack temporary is created. TGTBLK is returned.
2431 The primary purpose of this routine is to handle functions
2432 that return BLKmode structures in registers. Some machines
2433 (the PA for example) want to return all small structures
2434 in registers regardless of the structure's alignment. */
2437 copy_blkmode_from_reg (tgtblk, srcreg, type)
2438 rtx tgtblk;
2439 rtx srcreg;
2440 tree type;
2442 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2443 rtx src = NULL, dst = NULL;
2444 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2445 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2447 if (tgtblk == 0)
2449 tgtblk = assign_temp (build_qualified_type (type,
2450 (TYPE_QUALS (type)
2451 | TYPE_QUAL_CONST)),
2452 0, 1, 1);
2453 preserve_temp_slots (tgtblk);
2456 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2457 into a new pseudo which is a full word.
2459 If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
2460 the wrong part of the register gets copied so we fake a type conversion
2461 in place. */
2462 if (GET_MODE (srcreg) != BLKmode
2463 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2465 if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
2466 srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
2467 else
2468 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2471 /* Structures whose size is not a multiple of a word are aligned
2472 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2473 machine, this means we must skip the empty high order bytes when
2474 calculating the bit offset. */
2475 if (BYTES_BIG_ENDIAN
2476 && !FUNCTION_ARG_REG_LITTLE_ENDIAN
2477 && bytes % UNITS_PER_WORD)
2478 big_endian_correction
2479 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2481 /* Copy the structure BITSIZE bits at a time.
2483 We could probably emit more efficient code for machines which do not use
2484 strict alignment, but it doesn't seem worth the effort at the current
2485 time. */
2486 for (bitpos = 0, xbitpos = big_endian_correction;
2487 bitpos < bytes * BITS_PER_UNIT;
2488 bitpos += bitsize, xbitpos += bitsize)
2490 /* We need a new source operand each time xbitpos is on a
2491 word boundary and when xbitpos == big_endian_correction
2492 (the first time through). */
2493 if (xbitpos % BITS_PER_WORD == 0
2494 || xbitpos == big_endian_correction)
2495 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2496 GET_MODE (srcreg));
2498 /* We need a new destination operand each time bitpos is on
2499 a word boundary. */
2500 if (bitpos % BITS_PER_WORD == 0)
2501 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2503 /* Use xbitpos for the source extraction (right justified) and
2504 bitpos for the destination store (left justified). */
2505 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2506 extract_bit_field (src, bitsize,
2507 xbitpos % BITS_PER_WORD, 1,
2508 NULL_RTX, word_mode, word_mode,
2509 BITS_PER_WORD),
2510 BITS_PER_WORD);
2513 return tgtblk;
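/* Editorial sketch, not part of the original file: the correction computed
   above, as a worked example assuming BITS_PER_WORD == 32 and
   UNITS_PER_WORD == 4.  For a 6-byte structure the correction is
   32 - (6 % 4) * 8 == 16, so the extraction position XBITPOS starts 16
   bits ahead of the store position BITPOS and the empty high-order bytes
   are skipped, as the comment above describes.  Wrapped in #if 0 so it
   cannot affect a build.  */
#if 0
/* Assumes 32-bit words and 8-bit units.  */
static unsigned
big_endian_correction_example (unsigned bytes)
{
  if (bytes % 4 == 0)
    return 0;
  return 32 - (bytes % 4) * 8;
}
#endif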
2516 /* Add a USE expression for REG to the (possibly empty) list pointed
2517 to by CALL_FUSAGE. REG must denote a hard register. */
2519 void
2520 use_reg (call_fusage, reg)
2521 rtx *call_fusage, reg;
2523 if (GET_CODE (reg) != REG
2524 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2525 abort ();
2527 *call_fusage
2528 = gen_rtx_EXPR_LIST (VOIDmode,
2529 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2532 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2533 starting at REGNO. All of these registers must be hard registers. */
2535 void
2536 use_regs (call_fusage, regno, nregs)
2537 rtx *call_fusage;
2538 int regno;
2539 int nregs;
2541 int i;
2543 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2544 abort ();
2546 for (i = 0; i < nregs; i++)
2547 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2550 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2551 PARALLEL REGS. This is for calls that pass values in multiple
2552 non-contiguous locations. The Irix 6 ABI has examples of this. */
2554 void
2555 use_group_regs (call_fusage, regs)
2556 rtx *call_fusage;
2557 rtx regs;
2559 int i;
2561 for (i = 0; i < XVECLEN (regs, 0); i++)
2563 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2565 /* A NULL entry means the parameter goes both on the stack and in
2566 registers. This can also be a MEM for targets that pass values
2567 partially on the stack and partially in registers. */
2568 if (reg != 0 && GET_CODE (reg) == REG)
2569 use_reg (call_fusage, reg);
2574 /* Determine whether the LEN bytes generated by CONSTFUN can be
2575 stored to memory using several move instructions. CONSTFUNDATA is
2576 a pointer which will be passed as argument in every CONSTFUN call.
2577 ALIGN is maximum alignment we can assume. Return nonzero if a
2578 call to store_by_pieces should succeed. */
2581 can_store_by_pieces (len, constfun, constfundata, align)
2582 unsigned HOST_WIDE_INT len;
2583 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2584 PTR constfundata;
2585 unsigned int align;
2587 unsigned HOST_WIDE_INT max_size, l;
2588 HOST_WIDE_INT offset = 0;
2589 enum machine_mode mode, tmode;
2590 enum insn_code icode;
2591 int reverse;
2592 rtx cst;
2594 if (! MOVE_BY_PIECES_P (len, align))
2595 return 0;
2597 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2598 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2599 align = MOVE_MAX * BITS_PER_UNIT;
2601 /* We would first store what we can in the largest integer mode, then go to
2602 successively smaller modes. */
2604 for (reverse = 0;
2605 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2606 reverse++)
2608 l = len;
2609 mode = VOIDmode;
2610 max_size = STORE_MAX_PIECES + 1;
2611 while (max_size > 1)
2613 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2614 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2615 if (GET_MODE_SIZE (tmode) < max_size)
2616 mode = tmode;
2618 if (mode == VOIDmode)
2619 break;
2621 icode = mov_optab->handlers[(int) mode].insn_code;
2622 if (icode != CODE_FOR_nothing
2623 && align >= GET_MODE_ALIGNMENT (mode))
2625 unsigned int size = GET_MODE_SIZE (mode);
2627 while (l >= size)
2629 if (reverse)
2630 offset -= size;
2632 cst = (*constfun) (constfundata, offset, mode);
2633 if (!LEGITIMATE_CONSTANT_P (cst))
2634 return 0;
2636 if (!reverse)
2637 offset += size;
2639 l -= size;
2643 max_size = GET_MODE_SIZE (mode);
2646 /* The code above should have handled everything. */
2647 if (l != 0)
2648 abort ();
2651 return 1;
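/* Editorial sketch, not part of the original file: the chunking strategy
   shared by can_store_by_pieces and store_by_pieces_1, in ordinary C.
   Pieces are emitted in the widest available integer size first and then
   in successively narrower sizes, so with 8/4/2/1-byte accesses an 11-byte
   block becomes one 8-byte, one 2-byte and one 1-byte store.  The sizes
   are invented rather than taken from any target, and CHUNKS must have
   room for one entry per emitted piece.  Wrapped in #if 0 so it cannot
   affect a build.  */
#if 0
static int
by_pieces_chunks_example (unsigned long len, unsigned chunks[])
{
  static const unsigned sizes[] = { 8, 4, 2, 1 };
  unsigned i;
  int n = 0;

  for (i = 0; i < sizeof sizes / sizeof sizes[0]; i++)
    while (len >= sizes[i])
      {
	chunks[n++] = sizes[i];		/* one store of SIZES[i] bytes */
	len -= sizes[i];
      }
  return n;
}
#endif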
2654 /* Generate several move instructions to store LEN bytes generated by
2655 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2656 pointer which will be passed as argument in every CONSTFUN call.
2657 ALIGN is maximum alignment we can assume. */
2659 void
2660 store_by_pieces (to, len, constfun, constfundata, align)
2661 rtx to;
2662 unsigned HOST_WIDE_INT len;
2663 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2664 PTR constfundata;
2665 unsigned int align;
2667 struct store_by_pieces data;
2669 if (! MOVE_BY_PIECES_P (len, align))
2670 abort ();
2671 to = protect_from_queue (to, 1);
2672 data.constfun = constfun;
2673 data.constfundata = constfundata;
2674 data.len = len;
2675 data.to = to;
2676 store_by_pieces_1 (&data, align);
2679 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2680 rtx with BLKmode). The caller must pass TO through protect_from_queue
2681 before calling. ALIGN is maximum alignment we can assume. */
2683 static void
2684 clear_by_pieces (to, len, align)
2685 rtx to;
2686 unsigned HOST_WIDE_INT len;
2687 unsigned int align;
2689 struct store_by_pieces data;
2691 data.constfun = clear_by_pieces_1;
2692 data.constfundata = NULL;
2693 data.len = len;
2694 data.to = to;
2695 store_by_pieces_1 (&data, align);
2698 /* Callback routine for clear_by_pieces.
2699 Return const0_rtx unconditionally. */
2701 static rtx
2702 clear_by_pieces_1 (data, offset, mode)
2703 PTR data ATTRIBUTE_UNUSED;
2704 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2705 enum machine_mode mode ATTRIBUTE_UNUSED;
2707 return const0_rtx;
2710 /* Subroutine of clear_by_pieces and store_by_pieces.
2711 Generate several move instructions to store LEN bytes of block TO. (A MEM
2712 rtx with BLKmode). The caller must pass TO through protect_from_queue
2713 before calling. ALIGN is maximum alignment we can assume. */
2715 static void
2716 store_by_pieces_1 (data, align)
2717 struct store_by_pieces *data;
2718 unsigned int align;
2720 rtx to_addr = XEXP (data->to, 0);
2721 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2722 enum machine_mode mode = VOIDmode, tmode;
2723 enum insn_code icode;
2725 data->offset = 0;
2726 data->to_addr = to_addr;
2727 data->autinc_to
2728 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2729 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2731 data->explicit_inc_to = 0;
2732 data->reverse
2733 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2734 if (data->reverse)
2735 data->offset = data->len;
2737 /* If storing requires more than two move insns,
2738 copy addresses to registers (to make displacements shorter)
2739 and use post-increment if available. */
2740 if (!data->autinc_to
2741 && move_by_pieces_ninsns (data->len, align) > 2)
2743 /* Determine the main mode we'll be using. */
2744 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2745 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2746 if (GET_MODE_SIZE (tmode) < max_size)
2747 mode = tmode;
2749 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2751 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2752 data->autinc_to = 1;
2753 data->explicit_inc_to = -1;
2756 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2757 && ! data->autinc_to)
2759 data->to_addr = copy_addr_to_reg (to_addr);
2760 data->autinc_to = 1;
2761 data->explicit_inc_to = 1;
2764 if ( !data->autinc_to && CONSTANT_P (to_addr))
2765 data->to_addr = copy_addr_to_reg (to_addr);
2768 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2769 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2770 align = MOVE_MAX * BITS_PER_UNIT;
2772 /* First store what we can in the largest integer mode, then go to
2773 successively smaller modes. */
2775 while (max_size > 1)
2777 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2778 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2779 if (GET_MODE_SIZE (tmode) < max_size)
2780 mode = tmode;
2782 if (mode == VOIDmode)
2783 break;
2785 icode = mov_optab->handlers[(int) mode].insn_code;
2786 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2787 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2789 max_size = GET_MODE_SIZE (mode);
2792 /* The code above should have handled everything. */
2793 if (data->len != 0)
2794 abort ();
2797 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2798 with move instructions for mode MODE. GENFUN is the gen_... function
2799 to make a move insn for that mode. DATA has all the other info. */
2801 static void
2802 store_by_pieces_2 (genfun, mode, data)
2803 rtx (*genfun) PARAMS ((rtx, ...));
2804 enum machine_mode mode;
2805 struct store_by_pieces *data;
2807 unsigned int size = GET_MODE_SIZE (mode);
2808 rtx to1, cst;
2810 while (data->len >= size)
2812 if (data->reverse)
2813 data->offset -= size;
2815 if (data->autinc_to)
2816 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2817 data->offset);
2818 else
2819 to1 = adjust_address (data->to, mode, data->offset);
2821 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2822 emit_insn (gen_add2_insn (data->to_addr,
2823 GEN_INT (-(HOST_WIDE_INT) size)));
2825 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2826 emit_insn ((*genfun) (to1, cst));
2828 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2829 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2831 if (! data->reverse)
2832 data->offset += size;
2834 data->len -= size;
2838 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2839 its length in bytes. */
2842 clear_storage (object, size)
2843 rtx object;
2844 rtx size;
2846 rtx retval = 0;
2847 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2848 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2850 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2851 just move a zero. Otherwise, do this a piece at a time. */
2852 if (GET_MODE (object) != BLKmode
2853 && GET_CODE (size) == CONST_INT
2854 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2855 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2856 else
2858 object = protect_from_queue (object, 1);
2859 size = protect_from_queue (size, 0);
2861 if (GET_CODE (size) == CONST_INT
2862 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2863 clear_by_pieces (object, INTVAL (size), align);
2864 else if (clear_storage_via_clrstr (object, size, align))
2866 else
2867 retval = clear_storage_via_libcall (object, size);
2870 return retval;
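/* Editorial sketch, not part of the original file: a standalone analogue
   of the dispatch in clear_storage above -- short, known-length blocks are
   cleared with a few direct stores (the by-pieces case) and everything
   else falls back to the library call.  The 16-byte threshold is invented
   for the example; the real decision is made by CLEAR_BY_PIECES_P and the
   clrstr patterns.  Wrapped in #if 0 so it cannot affect a build.  */
#if 0
#include <string.h>

static void
clear_storage_example (unsigned char *p, size_t n)
{
  if (n <= 16)
    {
      size_t i;

      for (i = 0; i < n; i++)	/* clear by pieces */
	p[i] = 0;
    }
  else
    memset (p, 0, n);		/* clear via libcall */
}
#endif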
2873 /* A subroutine of clear_storage. Expand a clrstr pattern;
2874 return true if successful. */
2876 static bool
2877 clear_storage_via_clrstr (object, size, align)
2878 rtx object, size;
2879 unsigned int align;
2881 /* Try the most limited insn first, because there's no point
2882 including more than one in the machine description unless
2883 the more limited one has some advantage. */
2885 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2886 enum machine_mode mode;
2888 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2889 mode = GET_MODE_WIDER_MODE (mode))
2891 enum insn_code code = clrstr_optab[(int) mode];
2892 insn_operand_predicate_fn pred;
2894 if (code != CODE_FOR_nothing
2895 /* We don't need MODE to be narrower than
2896 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2897 the mode mask, as it is returned by the macro, it will
2898 definitely be less than the actual mode mask. */
2899 && ((GET_CODE (size) == CONST_INT
2900 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2901 <= (GET_MODE_MASK (mode) >> 1)))
2902 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2903 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2904 || (*pred) (object, BLKmode))
2905 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2906 || (*pred) (opalign, VOIDmode)))
2908 rtx op1;
2909 rtx last = get_last_insn ();
2910 rtx pat;
2912 op1 = convert_to_mode (mode, size, 1);
2913 pred = insn_data[(int) code].operand[1].predicate;
2914 if (pred != 0 && ! (*pred) (op1, mode))
2915 op1 = copy_to_mode_reg (mode, op1);
2917 pat = GEN_FCN ((int) code) (object, op1, opalign);
2918 if (pat)
2920 emit_insn (pat);
2921 return true;
2923 else
2924 delete_insns_since (last);
2928 return false;
2931 /* A subroutine of clear_storage. Expand a call to memset or bzero.
2932 Return the return value of memset, 0 otherwise. */
2934 static rtx
2935 clear_storage_via_libcall (object, size)
2936 rtx object, size;
2938 tree call_expr, arg_list, fn, object_tree, size_tree;
2939 enum machine_mode size_mode;
2940 rtx retval;
2942 /* OBJECT or SIZE may have been passed through protect_from_queue.
2944 It is unsafe to save the value generated by protect_from_queue
2945 and reuse it later. Consider what happens if emit_queue is
2946 called before the return value from protect_from_queue is used.
2948 Expansion of the CALL_EXPR below will call emit_queue before
2949 we are finished emitting RTL for argument setup. So if we are
2950 not careful we could get the wrong value for an argument.
2952 To avoid this problem we go ahead and emit code to copy OBJECT
2953 and SIZE into new pseudos. We can then place those new pseudos
2954 into an RTL_EXPR and use them later, even after a call to
2955 emit_queue.
2957 Note this is not strictly needed for library calls since they
2958 do not call emit_queue before loading their arguments. However,
2959 we may need to have library calls call emit_queue in the future
2960 since failing to do so could cause problems for targets which
2961 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2963 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2965 if (TARGET_MEM_FUNCTIONS)
2966 size_mode = TYPE_MODE (sizetype);
2967 else
2968 size_mode = TYPE_MODE (unsigned_type_node);
2969 size = convert_to_mode (size_mode, size, 1);
2970 size = copy_to_mode_reg (size_mode, size);
2972 /* It is incorrect to use the libcall calling conventions to call
2973 memset in this context. This could be a user call to memset and
2974 the user may wish to examine the return value from memset. For
2975 targets where libcalls and normal calls have different conventions
2976 for returning pointers, we could end up generating incorrect code.
2978 For convenience, we generate the call to bzero this way as well. */
2980 object_tree = make_tree (ptr_type_node, object);
2981 if (TARGET_MEM_FUNCTIONS)
2982 size_tree = make_tree (sizetype, size);
2983 else
2984 size_tree = make_tree (unsigned_type_node, size);
2986 fn = clear_storage_libcall_fn (true);
2987 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2988 if (TARGET_MEM_FUNCTIONS)
2989 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2990 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2992 /* Now we have to build up the CALL_EXPR itself. */
2993 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2994 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2995 call_expr, arg_list, NULL_TREE);
2996 TREE_SIDE_EFFECTS (call_expr) = 1;
2998 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3000 /* If we are initializing a readonly value, show the above call
3001 clobbered it. Otherwise, a load from it may erroneously be
3002 hoisted from a loop. */
3003 if (RTX_UNCHANGING_P (object))
3004 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3006 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
3009 /* A subroutine of clear_storage_via_libcall. Create the tree node
3010 for the function we use for block clears. The first time FOR_CALL
3011 is true, we call assemble_external. */
3013 static GTY(()) tree block_clear_fn;
3015 static tree
3016 clear_storage_libcall_fn (for_call)
3017 int for_call;
3019 static bool emitted_extern;
3020 tree fn = block_clear_fn, args;
3022 if (!fn)
3024 if (TARGET_MEM_FUNCTIONS)
3026 fn = get_identifier ("memset");
3027 args = build_function_type_list (ptr_type_node, ptr_type_node,
3028 integer_type_node, sizetype,
3029 NULL_TREE);
3031 else
3033 fn = get_identifier ("bzero");
3034 args = build_function_type_list (void_type_node, ptr_type_node,
3035 unsigned_type_node, NULL_TREE);
3038 fn = build_decl (FUNCTION_DECL, fn, args);
3039 DECL_EXTERNAL (fn) = 1;
3040 TREE_PUBLIC (fn) = 1;
3041 DECL_ARTIFICIAL (fn) = 1;
3042 TREE_NOTHROW (fn) = 1;
3044 block_clear_fn = fn;
3047 if (for_call && !emitted_extern)
3049 emitted_extern = true;
3050 make_decl_rtl (fn, NULL);
3051 assemble_external (fn);
3054 return fn;
3057 /* Generate code to copy Y into X.
3058 Both Y and X must have the same mode, except that
3059 Y can be a constant with VOIDmode.
3060 This mode cannot be BLKmode; use emit_block_move for that.
3062 Return the last instruction emitted. */
3065 emit_move_insn (x, y)
3066 rtx x, y;
3068 enum machine_mode mode = GET_MODE (x);
3069 rtx y_cst = NULL_RTX;
3070 rtx last_insn;
3072 x = protect_from_queue (x, 1);
3073 y = protect_from_queue (y, 0);
3075 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3076 abort ();
3078 /* Never force constant_p_rtx to memory. */
3079 if (GET_CODE (y) == CONSTANT_P_RTX)
3081 else if (CONSTANT_P (y))
3083 if (optimize
3084 && FLOAT_MODE_P (GET_MODE (x))
3085 && (last_insn = compress_float_constant (x, y)))
3086 return last_insn;
3088 if (!LEGITIMATE_CONSTANT_P (y))
3090 y_cst = y;
3091 y = force_const_mem (mode, y);
3095 /* If X or Y are memory references, verify that their addresses are valid
3096 for the machine. */
3097 if (GET_CODE (x) == MEM
3098 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3099 && ! push_operand (x, GET_MODE (x)))
3100 || (flag_force_addr
3101 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3102 x = validize_mem (x);
3104 if (GET_CODE (y) == MEM
3105 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3106 || (flag_force_addr
3107 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3108 y = validize_mem (y);
3110 if (mode == BLKmode)
3111 abort ();
3113 last_insn = emit_move_insn_1 (x, y);
3115 if (y_cst && GET_CODE (x) == REG)
3116 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3118 return last_insn;
3121 /* Low level part of emit_move_insn.
3122 Called just like emit_move_insn, but assumes X and Y
3123 are basically valid. */
3126 emit_move_insn_1 (x, y)
3127 rtx x, y;
3129 enum machine_mode mode = GET_MODE (x);
3130 enum machine_mode submode;
3131 enum mode_class class = GET_MODE_CLASS (mode);
3133 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3134 abort ();
3136 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3137 return
3138 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3140 /* Expand complex moves by moving real part and imag part, if possible. */
3141 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3142 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
3143 * BITS_PER_UNIT),
3144 (class == MODE_COMPLEX_INT
3145 ? MODE_INT : MODE_FLOAT),
3147 && (mov_optab->handlers[(int) submode].insn_code
3148 != CODE_FOR_nothing))
3150 /* Don't split destination if it is a stack push. */
3151 int stack = push_operand (x, GET_MODE (x));
3153 #ifdef PUSH_ROUNDING
3154 /* In case we output to the stack, but the size is smaller than the
3155 machine can push exactly, we need to use move instructions. */
3156 if (stack
3157 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3158 != GET_MODE_SIZE (submode)))
3160 rtx temp;
3161 HOST_WIDE_INT offset1, offset2;
3163 /* Do not use anti_adjust_stack, since we don't want to update
3164 stack_pointer_delta. */
3165 temp = expand_binop (Pmode,
3166 #ifdef STACK_GROWS_DOWNWARD
3167 sub_optab,
3168 #else
3169 add_optab,
3170 #endif
3171 stack_pointer_rtx,
3172 GEN_INT
3173 (PUSH_ROUNDING
3174 (GET_MODE_SIZE (GET_MODE (x)))),
3175 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3177 if (temp != stack_pointer_rtx)
3178 emit_move_insn (stack_pointer_rtx, temp);
3180 #ifdef STACK_GROWS_DOWNWARD
3181 offset1 = 0;
3182 offset2 = GET_MODE_SIZE (submode);
3183 #else
3184 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3185 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3186 + GET_MODE_SIZE (submode));
3187 #endif
3189 emit_move_insn (change_address (x, submode,
3190 gen_rtx_PLUS (Pmode,
3191 stack_pointer_rtx,
3192 GEN_INT (offset1))),
3193 gen_realpart (submode, y));
3194 emit_move_insn (change_address (x, submode,
3195 gen_rtx_PLUS (Pmode,
3196 stack_pointer_rtx,
3197 GEN_INT (offset2))),
3198 gen_imagpart (submode, y));
3200 else
3201 #endif
3202 /* If this is a stack, push the highpart first, so it
3203 will be in the argument order.
3205 In that case, change_address is used only to convert
3206 the mode, not to change the address. */
3207 if (stack)
3209 /* Note that the real part always precedes the imag part in memory
3210 regardless of machine's endianness. */
3211 #ifdef STACK_GROWS_DOWNWARD
3212 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3213 (gen_rtx_MEM (submode, XEXP (x, 0)),
3214 gen_imagpart (submode, y)));
3215 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3216 (gen_rtx_MEM (submode, XEXP (x, 0)),
3217 gen_realpart (submode, y)));
3218 #else
3219 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3220 (gen_rtx_MEM (submode, XEXP (x, 0)),
3221 gen_realpart (submode, y)));
3222 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3223 (gen_rtx_MEM (submode, XEXP (x, 0)),
3224 gen_imagpart (submode, y)));
3225 #endif
3227 else
3229 rtx realpart_x, realpart_y;
3230 rtx imagpart_x, imagpart_y;
3232 /* If this is a complex value with each part being smaller than a
3233 word, the usual calling sequence will likely pack the pieces into
3234 a single register. Unfortunately, SUBREG of hard registers only
3235 deals in terms of words, so we have a problem converting input
3236 arguments to the CONCAT of two registers that is used elsewhere
3237 for complex values. If this is before reload, we can copy it into
3238 memory and reload. FIXME, we should see about using extract and
3239 insert on integer registers, but complex short and complex char
3240 variables should be rarely used. */
3241 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3242 && (reload_in_progress | reload_completed) == 0)
3244 int packed_dest_p
3245 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3246 int packed_src_p
3247 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3249 if (packed_dest_p || packed_src_p)
3251 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3252 ? MODE_FLOAT : MODE_INT);
3254 enum machine_mode reg_mode
3255 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3257 if (reg_mode != BLKmode)
3259 rtx mem = assign_stack_temp (reg_mode,
3260 GET_MODE_SIZE (mode), 0);
3261 rtx cmem = adjust_address (mem, mode, 0);
3263 cfun->cannot_inline
3264 = N_("function using short complex types cannot be inline");
3266 if (packed_dest_p)
3268 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3270 emit_move_insn_1 (cmem, y);
3271 return emit_move_insn_1 (sreg, mem);
3273 else
3275 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3277 emit_move_insn_1 (mem, sreg);
3278 return emit_move_insn_1 (x, cmem);
3284 realpart_x = gen_realpart (submode, x);
3285 realpart_y = gen_realpart (submode, y);
3286 imagpart_x = gen_imagpart (submode, x);
3287 imagpart_y = gen_imagpart (submode, y);
3289 /* Show the output dies here. This is necessary for SUBREGs
3290 of pseudos since we cannot track their lifetimes correctly;
3291 hard regs shouldn't appear here except as return values.
3292 We never want to emit such a clobber after reload. */
3293 if (x != y
3294 && ! (reload_in_progress || reload_completed)
3295 && (GET_CODE (realpart_x) == SUBREG
3296 || GET_CODE (imagpart_x) == SUBREG))
3297 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3299 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3300 (realpart_x, realpart_y));
3301 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3302 (imagpart_x, imagpart_y));
3305 return get_last_insn ();
3308 /* This will handle any multi-word or full-word mode that lacks a move_insn
3309 pattern. However, you will get better code if you define such patterns,
3310 even if they must turn into multiple assembler instructions. */
3311 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3313 rtx last_insn = 0;
3314 rtx seq, inner;
3315 int need_clobber;
3316 int i;
3318 #ifdef PUSH_ROUNDING
3320 /* If X is a push on the stack, do the push now and replace
3321 X with a reference to the stack pointer. */
3322 if (push_operand (x, GET_MODE (x)))
3324 rtx temp;
3325 enum rtx_code code;
3327 /* Do not use anti_adjust_stack, since we don't want to update
3328 stack_pointer_delta. */
3329 temp = expand_binop (Pmode,
3330 #ifdef STACK_GROWS_DOWNWARD
3331 sub_optab,
3332 #else
3333 add_optab,
3334 #endif
3335 stack_pointer_rtx,
3336 GEN_INT
3337 (PUSH_ROUNDING
3338 (GET_MODE_SIZE (GET_MODE (x)))),
3339 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3341 if (temp != stack_pointer_rtx)
3342 emit_move_insn (stack_pointer_rtx, temp);
3344 code = GET_CODE (XEXP (x, 0));
3346 /* Just hope that small offsets off SP are OK. */
3347 if (code == POST_INC)
3348 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3349 GEN_INT (-((HOST_WIDE_INT)
3350 GET_MODE_SIZE (GET_MODE (x)))));
3351 else if (code == POST_DEC)
3352 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3353 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3354 else
3355 temp = stack_pointer_rtx;
3357 x = change_address (x, VOIDmode, temp);
3359 #endif
3361 /* If we are in reload, see if either operand is a MEM whose address
3362 is scheduled for replacement. */
3363 if (reload_in_progress && GET_CODE (x) == MEM
3364 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3365 x = replace_equiv_address_nv (x, inner);
3366 if (reload_in_progress && GET_CODE (y) == MEM
3367 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3368 y = replace_equiv_address_nv (y, inner);
3370 start_sequence ();
3372 need_clobber = 0;
3373 for (i = 0;
3374 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3375 i++)
3377 rtx xpart = operand_subword (x, i, 1, mode);
3378 rtx ypart = operand_subword (y, i, 1, mode);
3380 /* If we can't get a part of Y, put Y into memory if it is a
3381 constant. Otherwise, force it into a register. If we still
3382 can't get a part of Y, abort. */
3383 if (ypart == 0 && CONSTANT_P (y))
3385 y = force_const_mem (mode, y);
3386 ypart = operand_subword (y, i, 1, mode);
3388 else if (ypart == 0)
3389 ypart = operand_subword_force (y, i, mode);
3391 if (xpart == 0 || ypart == 0)
3392 abort ();
3394 need_clobber |= (GET_CODE (xpart) == SUBREG);
3396 last_insn = emit_move_insn (xpart, ypart);
3399 seq = get_insns ();
3400 end_sequence ();
3402 /* Show the output dies here. This is necessary for SUBREGs
3403 of pseudos since we cannot track their lifetimes correctly;
3404 hard regs shouldn't appear here except as return values.
3405 We never want to emit such a clobber after reload. */
3406 if (x != y
3407 && ! (reload_in_progress || reload_completed)
3408 && need_clobber != 0)
3409 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3411 emit_insn (seq);
3413 return last_insn;
3415 else
3416 abort ();
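/* Editorial sketch, not part of the original file: the word-by-word
   fallback above, as ordinary C.  When a multi-word mode has no move
   pattern of its own, the value is moved one word_mode piece at a time
   via operand_subword, which for a simple in-memory case amounts to the
   loop below.  Wrapped in #if 0 so it cannot affect a build.  */
#if 0
static void
multiword_move_example (unsigned long *x, const unsigned long *y,
			unsigned nwords)
{
  unsigned i;

  for (i = 0; i < nwords; i++)
    x[i] = y[i];		/* one word_mode move per word */
}
#endif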
3419 /* If Y is representable exactly in a narrower mode, and the target can
3420 perform the extension directly from constant or memory, then emit the
3421 move as an extension. */
3423 static rtx
3424 compress_float_constant (x, y)
3425 rtx x, y;
3427 enum machine_mode dstmode = GET_MODE (x);
3428 enum machine_mode orig_srcmode = GET_MODE (y);
3429 enum machine_mode srcmode;
3430 REAL_VALUE_TYPE r;
3432 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3434 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3435 srcmode != orig_srcmode;
3436 srcmode = GET_MODE_WIDER_MODE (srcmode))
3438 enum insn_code ic;
3439 rtx trunc_y, last_insn;
3441 /* Skip if the target can't extend this way. */
3442 ic = can_extend_p (dstmode, srcmode, 0);
3443 if (ic == CODE_FOR_nothing)
3444 continue;
3446 /* Skip if the narrowed value isn't exact. */
3447 if (! exact_real_truncate (srcmode, &r))
3448 continue;
3450 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3452 if (LEGITIMATE_CONSTANT_P (trunc_y))
3454 /* Skip if the target needs extra instructions to perform
3455 the extension. */
3456 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3457 continue;
3459 else if (float_extend_from_mem[dstmode][srcmode])
3460 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3461 else
3462 continue;
3464 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3465 last_insn = get_last_insn ();
3467 if (GET_CODE (x) == REG)
3468 REG_NOTES (last_insn)
3469 = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
3471 return last_insn;
3474 return NULL_RTX;
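/* Editorial sketch, not part of the original file: the host-arithmetic
   analogue of the exact_real_truncate test used above.  A double constant
   may be loaded as a float-to-double extension only if narrowing it to
   float and widening it back reproduces the original value exactly
   (0.5 does, 0.1 does not).  Wrapped in #if 0 so it cannot affect a
   build.  */
#if 0
static int
exact_truncate_example (double d)
{
  return (double) (float) d == d;
}
#endif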
3477 /* Pushing data onto the stack. */
3479 /* Push a block of length SIZE (perhaps variable)
3480 and return an rtx to address the beginning of the block.
3481 Note that it is not possible for the value returned to be a QUEUED.
3482 The value may be virtual_outgoing_args_rtx.
3484 EXTRA is the number of bytes of padding to push in addition to SIZE.
3485 BELOW nonzero means this padding comes at low addresses;
3486 otherwise, the padding comes at high addresses. */
3489 push_block (size, extra, below)
3490 rtx size;
3491 int extra, below;
3493 rtx temp;
3495 size = convert_modes (Pmode, ptr_mode, size, 1);
3496 if (CONSTANT_P (size))
3497 anti_adjust_stack (plus_constant (size, extra));
3498 else if (GET_CODE (size) == REG && extra == 0)
3499 anti_adjust_stack (size);
3500 else
3502 temp = copy_to_mode_reg (Pmode, size);
3503 if (extra != 0)
3504 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3505 temp, 0, OPTAB_LIB_WIDEN);
3506 anti_adjust_stack (temp);
3509 #ifndef STACK_GROWS_DOWNWARD
3510 if (0)
3511 #else
3512 if (1)
3513 #endif
3515 temp = virtual_outgoing_args_rtx;
3516 if (extra != 0 && below)
3517 temp = plus_constant (temp, extra);
3519 else
3521 if (GET_CODE (size) == CONST_INT)
3522 temp = plus_constant (virtual_outgoing_args_rtx,
3523 -INTVAL (size) - (below ? 0 : extra));
3524 else if (extra != 0 && !below)
3525 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3526 negate_rtx (Pmode, plus_constant (size, extra)));
3527 else
3528 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3529 negate_rtx (Pmode, size));
3532 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3535 #ifdef PUSH_ROUNDING
3537 /* Emit single push insn. */
3539 static void
3540 emit_single_push_insn (mode, x, type)
3541 rtx x;
3542 enum machine_mode mode;
3543 tree type;
3545 rtx dest_addr;
3546 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3547 rtx dest;
3548 enum insn_code icode;
3549 insn_operand_predicate_fn pred;
3551 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3552 /* If there is a push pattern, use it. Otherwise try the old way of
3553 throwing a MEM representing the push operation to the move expander. */
3554 icode = push_optab->handlers[(int) mode].insn_code;
3555 if (icode != CODE_FOR_nothing)
3557 if (((pred = insn_data[(int) icode].operand[0].predicate)
3558 && !((*pred) (x, mode))))
3559 x = force_reg (mode, x);
3560 emit_insn (GEN_FCN (icode) (x));
3561 return;
3563 if (GET_MODE_SIZE (mode) == rounded_size)
3564 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3565 else
3567 #ifdef STACK_GROWS_DOWNWARD
3568 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3569 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3570 #else
3571 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3572 GEN_INT (rounded_size));
3573 #endif
3574 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3577 dest = gen_rtx_MEM (mode, dest_addr);
3579 if (type != 0)
3581 set_mem_attributes (dest, type, 1);
3583 if (flag_optimize_sibling_calls)
3584 /* Function incoming arguments may overlap with sibling call
3585 outgoing arguments and we cannot allow reordering of reads
3586 from function arguments with stores to outgoing arguments
3587 of sibling calls. */
3588 set_mem_alias_set (dest, 0);
3590 emit_move_insn (dest, x);
3592 #endif
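/* Editorial sketch, not part of the original file: what a single push
   amounts to on a STACK_GROWS_DOWNWARD target when no push pattern exists
   -- the stack pointer is pre-decremented by the rounded size and the
   value is stored at the new stack pointer.  Rounding to a 4-byte multiple
   here stands in for PUSH_ROUNDING, which each target defines itself.
   Wrapped in #if 0 so it cannot affect a build.  */
#if 0
#include <string.h>

static unsigned char *
single_push_example (unsigned char *sp, const void *x, unsigned size)
{
  unsigned rounded_size = (size + 3) & ~3u;	/* stand-in for PUSH_ROUNDING */

  sp -= rounded_size;		/* pre-decrement the stack pointer */
  memcpy (sp, x, size);		/* store the value at the new SP */
  return sp;
}
#endif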
3594 /* Generate code to push X onto the stack, assuming it has mode MODE and
3595 type TYPE.
3596 MODE is redundant except when X is a CONST_INT (since they don't
3597 carry mode info).
3598 SIZE is an rtx for the size of data to be copied (in bytes),
3599 needed only if X is BLKmode.
3601 ALIGN (in bits) is maximum alignment we can assume.
3603 If PARTIAL and REG are both nonzero, then copy that many of the first
3604 words of X into registers starting with REG, and push the rest of X.
3605 The amount of space pushed is decreased by PARTIAL words,
3606 rounded *down* to a multiple of PARM_BOUNDARY.
3607 REG must be a hard register in this case.
3608 If REG is zero but PARTIAL is not, take all other actions for an
3609 argument partially in registers, but do not actually load any
3610 registers.
3612 EXTRA is the amount in bytes of extra space to leave next to this arg.
3613 This is ignored if an argument block has already been allocated.
3615 On a machine that lacks real push insns, ARGS_ADDR is the address of
3616 the bottom of the argument block for this call. We use indexing off there
3617 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3618 argument block has not been preallocated.
3620 ARGS_SO_FAR is the size of args previously pushed for this call.
3622 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3623 for arguments passed in registers. If nonzero, it will be the number
3624 of bytes required. */
3626 void
3627 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3628 args_addr, args_so_far, reg_parm_stack_space,
3629 alignment_pad)
3630 rtx x;
3631 enum machine_mode mode;
3632 tree type;
3633 rtx size;
3634 unsigned int align;
3635 int partial;
3636 rtx reg;
3637 int extra;
3638 rtx args_addr;
3639 rtx args_so_far;
3640 int reg_parm_stack_space;
3641 rtx alignment_pad;
3643 rtx xinner;
3644 enum direction stack_direction
3645 #ifdef STACK_GROWS_DOWNWARD
3646 = downward;
3647 #else
3648 = upward;
3649 #endif
3651 /* Decide where to pad the argument: `downward' for below,
3652 `upward' for above, or `none' for don't pad it.
3653 Default is below for small data on big-endian machines; else above. */
3654 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3656 /* Invert direction if stack is post-decrement.
3657 FIXME: why? */
3658 if (STACK_PUSH_CODE == POST_DEC)
3659 if (where_pad != none)
3660 where_pad = (where_pad == downward ? upward : downward);
3662 xinner = x = protect_from_queue (x, 0);
3664 if (mode == BLKmode)
3666 /* Copy a block into the stack, entirely or partially. */
3668 rtx temp;
3669 int used = partial * UNITS_PER_WORD;
3670 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3671 int skip;
3673 if (size == 0)
3674 abort ();
3676 used -= offset;
3678 /* USED is now the # of bytes we need not copy to the stack
3679 because registers will take care of them. */
3681 if (partial != 0)
3682 xinner = adjust_address (xinner, BLKmode, used);
3684 /* If the partial register-part of the arg counts in its stack size,
3685 skip the part of stack space corresponding to the registers.
3686 Otherwise, start copying to the beginning of the stack space,
3687 by setting SKIP to 0. */
3688 skip = (reg_parm_stack_space == 0) ? 0 : used;
3690 #ifdef PUSH_ROUNDING
3691 /* Do it with several push insns if that doesn't take lots of insns
3692 and if there is no difficulty with push insns that skip bytes
3693 on the stack for alignment purposes. */
3694 if (args_addr == 0
3695 && PUSH_ARGS
3696 && GET_CODE (size) == CONST_INT
3697 && skip == 0
3698 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3699 /* Here we avoid the case of a structure whose weak alignment
3700 forces many pushes of a small amount of data,
3701 and such small pushes do rounding that causes trouble. */
3702 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3703 || align >= BIGGEST_ALIGNMENT
3704 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3705 == (align / BITS_PER_UNIT)))
3706 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3708 /* Push padding now if padding above and stack grows down,
3709 or if padding below and stack grows up.
3710 But if space already allocated, this has already been done. */
3711 if (extra && args_addr == 0
3712 && where_pad != none && where_pad != stack_direction)
3713 anti_adjust_stack (GEN_INT (extra));
3715 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3717 else
3718 #endif /* PUSH_ROUNDING */
3720 rtx target;
3722 /* Otherwise make space on the stack and copy the data
3723 to the address of that space. */
3725 /* Deduct words put into registers from the size we must copy. */
3726 if (partial != 0)
3728 if (GET_CODE (size) == CONST_INT)
3729 size = GEN_INT (INTVAL (size) - used);
3730 else
3731 size = expand_binop (GET_MODE (size), sub_optab, size,
3732 GEN_INT (used), NULL_RTX, 0,
3733 OPTAB_LIB_WIDEN);
3736 /* Get the address of the stack space.
3737 In this case, we do not deal with EXTRA separately.
3738 A single stack adjust will do. */
3739 if (! args_addr)
3741 temp = push_block (size, extra, where_pad == downward);
3742 extra = 0;
3744 else if (GET_CODE (args_so_far) == CONST_INT)
3745 temp = memory_address (BLKmode,
3746 plus_constant (args_addr,
3747 skip + INTVAL (args_so_far)));
3748 else
3749 temp = memory_address (BLKmode,
3750 plus_constant (gen_rtx_PLUS (Pmode,
3751 args_addr,
3752 args_so_far),
3753 skip));
3755 if (!ACCUMULATE_OUTGOING_ARGS)
3757 /* If the source is referenced relative to the stack pointer,
3758 copy it to another register to stabilize it. We do not need
3759 to do this if we know that we won't be changing sp. */
3761 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3762 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3763 temp = copy_to_reg (temp);
3766 target = gen_rtx_MEM (BLKmode, temp);
3768 if (type != 0)
3770 set_mem_attributes (target, type, 1);
3771 /* Function incoming arguments may overlap with sibling call
3772 outgoing arguments and we cannot allow reordering of reads
3773 from function arguments with stores to outgoing arguments
3774 of sibling calls. */
3775 set_mem_alias_set (target, 0);
3778 /* ALIGN may well be better aligned than TYPE, e.g. due to
3779 PARM_BOUNDARY. Assume the caller isn't lying. */
3780 set_mem_align (target, align);
3782 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3785 else if (partial > 0)
3787 /* Scalar partly in registers. */
3789 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3790 int i;
3791 int not_stack;
3792 /* # words of start of argument
3793 that we must make space for but need not store. */
3794 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3795 int args_offset = INTVAL (args_so_far);
3796 int skip;
3798 /* Push padding now if padding above and stack grows down,
3799 or if padding below and stack grows up.
3800 But if space already allocated, this has already been done. */
3801 if (extra && args_addr == 0
3802 && where_pad != none && where_pad != stack_direction)
3803 anti_adjust_stack (GEN_INT (extra));
3805 /* If we make space by pushing it, we might as well push
3806 the real data. Otherwise, we can leave OFFSET nonzero
3807 and leave the space uninitialized. */
3808 if (args_addr == 0)
3809 offset = 0;
3811 /* Now NOT_STACK gets the number of words that we don't need to
3812 allocate on the stack. */
3813 not_stack = partial - offset;
3815 /* If the partial register-part of the arg counts in its stack size,
3816 skip the part of stack space corresponding to the registers.
3817 Otherwise, start copying to the beginning of the stack space,
3818 by setting SKIP to 0. */
3819 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3821 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3822 x = validize_mem (force_const_mem (mode, x));
3824 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3825 SUBREGs of such registers are not allowed. */
3826 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3827 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3828 x = copy_to_reg (x);
3830 /* Loop over all the words allocated on the stack for this arg. */
3831 /* We can do it by words, because any scalar bigger than a word
3832 has a size a multiple of a word. */
3833 #ifndef PUSH_ARGS_REVERSED
3834 for (i = not_stack; i < size; i++)
3835 #else
3836 for (i = size - 1; i >= not_stack; i--)
3837 #endif
3838 if (i >= not_stack + offset)
3839 emit_push_insn (operand_subword_force (x, i, mode),
3840 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3841 0, args_addr,
3842 GEN_INT (args_offset + ((i - not_stack + skip)
3843 * UNITS_PER_WORD)),
3844 reg_parm_stack_space, alignment_pad);
3846 else
3848 rtx addr;
3849 rtx target = NULL_RTX;
3850 rtx dest;
3852 /* Push padding now if padding above and stack grows down,
3853 or if padding below and stack grows up.
3854 But if space already allocated, this has already been done. */
3855 if (extra && args_addr == 0
3856 && where_pad != none && where_pad != stack_direction)
3857 anti_adjust_stack (GEN_INT (extra));
3859 #ifdef PUSH_ROUNDING
3860 if (args_addr == 0 && PUSH_ARGS)
3861 emit_single_push_insn (mode, x, type);
3862 else
3863 #endif
3865 if (GET_CODE (args_so_far) == CONST_INT)
3866 addr
3867 = memory_address (mode,
3868 plus_constant (args_addr,
3869 INTVAL (args_so_far)));
3870 else
3871 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3872 args_so_far));
3873 target = addr;
3874 dest = gen_rtx_MEM (mode, addr);
3875 if (type != 0)
3877 set_mem_attributes (dest, type, 1);
3878 /* Function incoming arguments may overlap with sibling call
3879 outgoing arguments and we cannot allow reordering of reads
3880 from function arguments with stores to outgoing arguments
3881 of sibling calls. */
3882 set_mem_alias_set (dest, 0);
3885 emit_move_insn (dest, x);
3889 /* If part should go in registers, copy that part
3890 into the appropriate registers. Do this now, at the end,
3891 since mem-to-mem copies above may do function calls. */
3892 if (partial > 0 && reg != 0)
3894 /* Handle calls that pass values in multiple non-contiguous locations.
3895 The Irix 6 ABI has examples of this. */
3896 if (GET_CODE (reg) == PARALLEL)
3897 emit_group_load (reg, x, -1); /* ??? size? */
3898 else
3899 move_block_to_reg (REGNO (reg), x, partial, mode);
3902 if (extra && args_addr == 0 && where_pad == stack_direction)
3903 anti_adjust_stack (GEN_INT (extra));
3905 if (alignment_pad && args_addr == 0)
3906 anti_adjust_stack (alignment_pad);
3909 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3910 operations. */
3912 static rtx
3913 get_subtarget (x)
3914 rtx x;
3916 return ((x == 0
3917 /* Only registers can be subtargets. */
3918 || GET_CODE (x) != REG
3919 /* If the register is readonly, it can't be set more than once. */
3920 || RTX_UNCHANGING_P (x)
3921 /* Don't use hard regs to avoid extending their life. */
3922 || REGNO (x) < FIRST_PSEUDO_REGISTER
3923 /* Avoid subtargets inside loops,
3924 since they hide some invariant expressions. */
3925 || preserve_subexpressions_p ())
3926 ? 0 : x);
3929 /* Expand an assignment that stores the value of FROM into TO.
3930 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3931 (This may contain a QUEUED rtx;
3932 if the value is constant, this rtx is a constant.)
3933 Otherwise, the returned value is NULL_RTX.
3935 SUGGEST_REG is no longer actually used.
3936 It used to mean, copy the value through a register
3937 and return that register, if that is possible.
3938 We now use WANT_VALUE to decide whether to do this. */
3940 rtx
3941 expand_assignment (to, from, want_value, suggest_reg)
3942 tree to, from;
3943 int want_value;
3944 int suggest_reg ATTRIBUTE_UNUSED;
3946 rtx to_rtx = 0;
3947 rtx result;
3949 /* Don't crash if the lhs of the assignment was erroneous. */
3951 if (TREE_CODE (to) == ERROR_MARK)
3953 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3954 return want_value ? result : NULL_RTX;
3957 /* Assignment of a structure component needs special treatment
3958 if the structure component's rtx is not simply a MEM.
3959 Assignment of an array element at a constant index, and assignment of
3960 an array element in an unaligned packed structure field, has the same
3961 problem. */
3963 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3964 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3966 enum machine_mode mode1;
3967 HOST_WIDE_INT bitsize, bitpos;
3968 rtx orig_to_rtx;
3969 tree offset;
3970 int unsignedp;
3971 int volatilep = 0;
3972 tree tem;
3974 push_temp_slots ();
3975 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3976 &unsignedp, &volatilep);
3978 /* If we are going to use store_bit_field and extract_bit_field,
3979 make sure to_rtx will be safe for multiple use. */
3981 if (mode1 == VOIDmode && want_value)
3982 tem = stabilize_reference (tem);
3984 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3986 if (offset != 0)
3988 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3990 if (GET_CODE (to_rtx) != MEM)
3991 abort ();
3993 #ifdef POINTERS_EXTEND_UNSIGNED
3994 if (GET_MODE (offset_rtx) != Pmode)
3995 offset_rtx = convert_memory_address (Pmode, offset_rtx);
3996 #else
3997 if (GET_MODE (offset_rtx) != ptr_mode)
3998 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3999 #endif
4001 /* A constant address in TO_RTX can have VOIDmode; we must not try
4002 to call force_reg for that case. Avoid that case. */
4003 if (GET_CODE (to_rtx) == MEM
4004 && GET_MODE (to_rtx) == BLKmode
4005 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4006 && bitsize > 0
4007 && (bitpos % bitsize) == 0
4008 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4009 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4011 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4012 bitpos = 0;
4015 to_rtx = offset_address (to_rtx, offset_rtx,
4016 highest_pow2_factor_for_type (TREE_TYPE (to),
4017 offset));
4020 if (GET_CODE (to_rtx) == MEM)
4022 /* If the field is at offset zero, we could have been given the
4023 DECL_RTX of the parent struct. Don't munge it. */
4024 to_rtx = shallow_copy_rtx (to_rtx);
4026 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4029 /* Deal with volatile and readonly fields. The former is only done
4030 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4031 if (volatilep && GET_CODE (to_rtx) == MEM)
4033 if (to_rtx == orig_to_rtx)
4034 to_rtx = copy_rtx (to_rtx);
4035 MEM_VOLATILE_P (to_rtx) = 1;
4038 if (TREE_CODE (to) == COMPONENT_REF
4039 && TREE_READONLY (TREE_OPERAND (to, 1)))
4041 if (to_rtx == orig_to_rtx)
4042 to_rtx = copy_rtx (to_rtx);
4043 RTX_UNCHANGING_P (to_rtx) = 1;
4046 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4048 if (to_rtx == orig_to_rtx)
4049 to_rtx = copy_rtx (to_rtx);
4050 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4053 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4054 (want_value
4055 /* Spurious cast for HPUX compiler. */
4056 ? ((enum machine_mode)
4057 TYPE_MODE (TREE_TYPE (to)))
4058 : VOIDmode),
4059 unsignedp, TREE_TYPE (tem), get_alias_set (to));
4061 preserve_temp_slots (result);
4062 free_temp_slots ();
4063 pop_temp_slots ();
4065 /* If the value is meaningful, convert RESULT to the proper mode.
4066 Otherwise, return nothing. */
4067 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4068 TYPE_MODE (TREE_TYPE (from)),
4069 result,
4070 TREE_UNSIGNED (TREE_TYPE (to)))
4071 : NULL_RTX);
4074 /* If the rhs is a function call and its value is not an aggregate,
4075 call the function before we start to compute the lhs.
4076 This is needed for correct code for cases such as
4077 val = setjmp (buf) on machines where reference to val
4078 requires loading up part of an address in a separate insn.
4080 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4081 since it might be a promoted variable where the zero- or sign- extension
4082 needs to be done. Handling this in the normal way is safe because no
4083 computation is done before the call. */
4084 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
4085 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4086 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4087 && GET_CODE (DECL_RTL (to)) == REG))
4089 rtx value;
4091 push_temp_slots ();
4092 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4093 if (to_rtx == 0)
4094 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4096 /* Handle calls that return values in multiple non-contiguous locations.
4097 The Irix 6 ABI has examples of this. */
4098 if (GET_CODE (to_rtx) == PARALLEL)
4099 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
4100 else if (GET_MODE (to_rtx) == BLKmode)
4101 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4102 else
4104 #ifdef POINTERS_EXTEND_UNSIGNED
4105 if (POINTER_TYPE_P (TREE_TYPE (to))
4106 && GET_MODE (to_rtx) != GET_MODE (value))
4107 value = convert_memory_address (GET_MODE (to_rtx), value);
4108 #endif
4109 emit_move_insn (to_rtx, value);
4111 preserve_temp_slots (to_rtx);
4112 free_temp_slots ();
4113 pop_temp_slots ();
4114 return want_value ? to_rtx : NULL_RTX;
4117 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4118 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4120 if (to_rtx == 0)
4121 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4123 /* Don't move directly into a return register. */
4124 if (TREE_CODE (to) == RESULT_DECL
4125 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
4127 rtx temp;
4129 push_temp_slots ();
4130 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4132 if (GET_CODE (to_rtx) == PARALLEL)
4133 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
4134 else
4135 emit_move_insn (to_rtx, temp);
4137 preserve_temp_slots (to_rtx);
4138 free_temp_slots ();
4139 pop_temp_slots ();
4140 return want_value ? to_rtx : NULL_RTX;
4143 /* In case we are returning the contents of an object which overlaps
4144 the place the value is being stored, use a safe function when copying
4145 a value through a pointer into a structure value return block. */
4146 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4147 && current_function_returns_struct
4148 && !current_function_returns_pcc_struct)
4150 rtx from_rtx, size;
4152 push_temp_slots ();
4153 size = expr_size (from);
4154 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4156 if (TARGET_MEM_FUNCTIONS)
4157 emit_library_call (memmove_libfunc, LCT_NORMAL,
4158 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4159 XEXP (from_rtx, 0), Pmode,
4160 convert_to_mode (TYPE_MODE (sizetype),
4161 size, TREE_UNSIGNED (sizetype)),
4162 TYPE_MODE (sizetype));
4163 else
4164 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4165 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4166 XEXP (to_rtx, 0), Pmode,
4167 convert_to_mode (TYPE_MODE (integer_type_node),
4168 size,
4169 TREE_UNSIGNED (integer_type_node)),
4170 TYPE_MODE (integer_type_node));
4172 preserve_temp_slots (to_rtx);
4173 free_temp_slots ();
4174 pop_temp_slots ();
4175 return want_value ? to_rtx : NULL_RTX;
4178 /* Compute FROM and store the value in the rtx we got. */
4180 push_temp_slots ();
4181 result = store_expr (from, to_rtx, want_value);
4182 preserve_temp_slots (result);
4183 free_temp_slots ();
4184 pop_temp_slots ();
4185 return want_value ? result : NULL_RTX;
4188 /* Generate code for computing expression EXP,
4189 and storing the value into TARGET.
4190 TARGET may contain a QUEUED rtx.
4192 If WANT_VALUE is nonzero, return a copy of the value
4193 not in TARGET, so that we can be sure to use the proper
4194 value in a containing expression even if TARGET has something
4195 else stored in it. If possible, we copy the value through a pseudo
4196 and return that pseudo. Or, if the value is constant, we try to
4197 return the constant. In some cases, we return a pseudo
4198 copied *from* TARGET.
4200 If the mode is BLKmode then we may return TARGET itself.
4201 It turns out that in BLKmode it doesn't cause a problem,
4202 because C has no operators that could combine two different
4203 assignments into the same BLKmode object with different values
4204 with no sequence point. Will other languages need this to
4205 be more thorough?
4207 If WANT_VALUE is 0, we return NULL, to make sure
4208 to catch quickly any cases where the caller uses the value
4209 and fails to set WANT_VALUE. */
4211 rtx
4212 store_expr (exp, target, want_value)
4213 tree exp;
4214 rtx target;
4215 int want_value;
4217 rtx temp;
4218 int dont_return_target = 0;
4219 int dont_store_target = 0;
4221 if (TREE_CODE (exp) == COMPOUND_EXPR)
4223 /* Perform first part of compound expression, then assign from second
4224 part. */
4225 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4226 emit_queue ();
4227 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4229 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4231 /* For conditional expression, get safe form of the target. Then
4232 test the condition, doing the appropriate assignment on either
4233 side. This avoids the creation of unnecessary temporaries.
4234 For non-BLKmode, it is more efficient not to do this. */
4236 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4238 emit_queue ();
4239 target = protect_from_queue (target, 1);
4241 do_pending_stack_adjust ();
4242 NO_DEFER_POP;
4243 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4244 start_cleanup_deferral ();
4245 store_expr (TREE_OPERAND (exp, 1), target, 0);
4246 end_cleanup_deferral ();
4247 emit_queue ();
4248 emit_jump_insn (gen_jump (lab2));
4249 emit_barrier ();
4250 emit_label (lab1);
4251 start_cleanup_deferral ();
4252 store_expr (TREE_OPERAND (exp, 2), target, 0);
4253 end_cleanup_deferral ();
4254 emit_queue ();
4255 emit_label (lab2);
4256 OK_DEFER_POP;
4258 return want_value ? target : NULL_RTX;
4260 else if (queued_subexp_p (target))
4261 /* If target contains a postincrement, let's not risk
4262 using it as the place to generate the rhs. */
4264 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4266 /* Expand EXP into a new pseudo. */
4267 temp = gen_reg_rtx (GET_MODE (target));
4268 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4270 else
4271 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4273 /* If target is volatile, ANSI requires accessing the value
4274 *from* the target, if it is accessed. So make that happen.
4275 In no case return the target itself. */
4276 if (! MEM_VOLATILE_P (target) && want_value)
4277 dont_return_target = 1;
4279 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4280 && GET_MODE (target) != BLKmode)
4281 /* If target is in memory and caller wants value in a register instead,
4282 arrange that. Pass TARGET as target for expand_expr so that,
4283 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4284 We know expand_expr will not use the target in that case.
4285 Don't do this if TARGET is volatile because we are supposed
4286 to write it and then read it. */
4288 temp = expand_expr (exp, target, GET_MODE (target), 0);
4289 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4291 /* If TEMP is already in the desired TARGET, only copy it from
4292 memory and don't store it there again. */
4293 if (temp == target
4294 || (rtx_equal_p (temp, target)
4295 && ! side_effects_p (temp) && ! side_effects_p (target)))
4296 dont_store_target = 1;
4297 temp = copy_to_reg (temp);
4299 dont_return_target = 1;
4301 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4302 /* If this is a scalar in a register that is stored in a wider mode
4303 than the declared mode, compute the result into its declared mode
4304 and then convert to the wider mode. Our value is the computed
4305 expression. */
4307 rtx inner_target = 0;
4309 /* If we don't want a value, we can do the conversion inside EXP,
4310 which will often result in some optimizations. Do the conversion
4311 in two steps: first change the signedness, if needed, then
4312 the extend. But don't do this if the type of EXP is a subtype
4313 of something else since then the conversion might involve
4314 more than just converting modes. */
4315 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4316 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4318 if (TREE_UNSIGNED (TREE_TYPE (exp))
4319 != SUBREG_PROMOTED_UNSIGNED_P (target))
4320 exp = convert
4321 ((*lang_hooks.types.signed_or_unsigned_type)
4322 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4324 exp = convert ((*lang_hooks.types.type_for_mode)
4325 (GET_MODE (SUBREG_REG (target)),
4326 SUBREG_PROMOTED_UNSIGNED_P (target)),
4327 exp);
4329 inner_target = SUBREG_REG (target);
4332 temp = expand_expr (exp, inner_target, VOIDmode, 0);
4334 /* If TEMP is a volatile MEM and we want a result value, make
4335 the access now so it gets done only once. Likewise if
4336 it contains TARGET. */
4337 if (GET_CODE (temp) == MEM && want_value
4338 && (MEM_VOLATILE_P (temp)
4339 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4340 temp = copy_to_reg (temp);
4342 /* If TEMP is a VOIDmode constant, use convert_modes to make
4343 sure that we properly convert it. */
4344 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4346 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4347 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4348 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4349 GET_MODE (target), temp,
4350 SUBREG_PROMOTED_UNSIGNED_P (target));
4353 convert_move (SUBREG_REG (target), temp,
4354 SUBREG_PROMOTED_UNSIGNED_P (target));
4356 /* If we promoted a constant, change the mode back down to match
4357 target. Otherwise, the caller might get confused by a result whose
4358 mode is larger than expected. */
4360 if (want_value && GET_MODE (temp) != GET_MODE (target))
4362 if (GET_MODE (temp) != VOIDmode)
4364 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4365 SUBREG_PROMOTED_VAR_P (temp) = 1;
4366 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4367 SUBREG_PROMOTED_UNSIGNED_P (target));
4369 else
4370 temp = convert_modes (GET_MODE (target),
4371 GET_MODE (SUBREG_REG (target)),
4372 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4375 return want_value ? temp : NULL_RTX;
4377 else
4379 temp = expand_expr (exp, target, GET_MODE (target), 0);
4380 /* Return TARGET if it's a specified hardware register.
4381 If TARGET is a volatile mem ref, either return TARGET
4382 or return a reg copied *from* TARGET; ANSI requires this.
4384 Otherwise, if TEMP is not TARGET, return TEMP
4385 if it is constant (for efficiency),
4386 or if we really want the correct value. */
4387 if (!(target && GET_CODE (target) == REG
4388 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4389 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4390 && ! rtx_equal_p (temp, target)
4391 && (CONSTANT_P (temp) || want_value))
4392 dont_return_target = 1;
4395 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4396 the same as that of TARGET, adjust the constant. This is needed, for
4397 example, in case it is a CONST_DOUBLE and we want only a word-sized
4398 value. */
4399 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4400 && TREE_CODE (exp) != ERROR_MARK
4401 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4402 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4403 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4405 /* If value was not generated in the target, store it there.
4406 Convert the value to TARGET's type first if necessary.
4407 If TEMP and TARGET compare equal according to rtx_equal_p, but
4408 one or both of them are volatile memory refs, we have to distinguish
4409 two cases:
4410 - expand_expr has used TARGET. In this case, we must not generate
4411 another copy. This can be detected by TARGET being equal according
4412 to == .
4413 - expand_expr has not used TARGET - that means that the source just
4414 happens to have the same RTX form. Since temp will have been created
4415 by expand_expr, it will compare unequal according to == .
4416 We must generate a copy in this case, to reach the correct number
4417 of volatile memory references. */
4419 if ((! rtx_equal_p (temp, target)
4420 || (temp != target && (side_effects_p (temp)
4421 || side_effects_p (target))))
4422 && TREE_CODE (exp) != ERROR_MARK
4423 && ! dont_store_target
4424 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4425 but TARGET is not a valid memory reference, TEMP will differ
4426 from TARGET although it is really the same location. */
4427 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4428 || target != DECL_RTL_IF_SET (exp))
4429 /* If there's nothing to copy, don't bother. Don't call expr_size
4430 unless necessary, because some front ends' (C++) expr_size hook
4431 aborts on objects that are not supposed to be bit-copied or
4432 bit-initialized. */
4433 && expr_size (exp) != const0_rtx)
4435 target = protect_from_queue (target, 1);
4436 if (GET_MODE (temp) != GET_MODE (target)
4437 && GET_MODE (temp) != VOIDmode)
4439 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4440 if (dont_return_target)
4442 /* In this case, we will return TEMP,
4443 so make sure it has the proper mode.
4444 But don't forget to store the value into TARGET. */
4445 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4446 emit_move_insn (target, temp);
4448 else
4449 convert_move (target, temp, unsignedp);
4452 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4454 /* Handle copying a string constant into an array. The string
4455 constant may be shorter than the array. So copy just the string's
4456 actual length, and clear the rest. First get the size of the data
4457 type of the string, which is actually the size of the target. */
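/* For example, if TARGET is 10 bytes and the STRING_CST's
   TREE_STRING_LENGTH is 3, the first 3 bytes are copied and the
   remaining 7 are cleared.  */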
4458 rtx size = expr_size (exp);
4460 if (GET_CODE (size) == CONST_INT
4461 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4462 emit_block_move (target, temp, size, BLOCK_OP_NORMAL);
4463 else
4465 /* Compute the size of the data to copy from the string. */
4466 tree copy_size
4467 = size_binop (MIN_EXPR,
4468 make_tree (sizetype, size),
4469 size_int (TREE_STRING_LENGTH (exp)));
4470 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4471 VOIDmode, 0);
4472 rtx label = 0;
4474 /* Copy that much. */
4475 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4476 emit_block_move (target, temp, copy_size_rtx, BLOCK_OP_NORMAL);
4478 /* Figure out how much is left in TARGET that we have to clear.
4479 Do all calculations in ptr_mode. */
4480 if (GET_CODE (copy_size_rtx) == CONST_INT)
4482 size = plus_constant (size, -INTVAL (copy_size_rtx));
4483 target = adjust_address (target, BLKmode,
4484 INTVAL (copy_size_rtx));
4486 else
4488 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4489 copy_size_rtx, NULL_RTX, 0,
4490 OPTAB_LIB_WIDEN);
4492 #ifdef POINTERS_EXTEND_UNSIGNED
4493 if (GET_MODE (copy_size_rtx) != Pmode)
4494 copy_size_rtx = convert_memory_address (Pmode,
4495 copy_size_rtx);
4496 #endif
4498 target = offset_address (target, copy_size_rtx,
4499 highest_pow2_factor (copy_size));
4500 label = gen_label_rtx ();
4501 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4502 GET_MODE (size), 0, label);
4505 if (size != const0_rtx)
4506 clear_storage (target, size);
4508 if (label)
4509 emit_label (label);
4512 /* Handle calls that return values in multiple non-contiguous locations.
4513 The Irix 6 ABI has examples of this. */
4514 else if (GET_CODE (target) == PARALLEL)
4515 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4516 else if (GET_MODE (temp) == BLKmode)
4517 emit_block_move (target, temp, expr_size (exp), BLOCK_OP_NORMAL);
4518 else
4519 emit_move_insn (target, temp);
4522 /* If we don't want a value, return NULL_RTX. */
4523 if (! want_value)
4524 return NULL_RTX;
4526 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4527 ??? The latter test doesn't seem to make sense. */
4528 else if (dont_return_target && GET_CODE (temp) != MEM)
4529 return temp;
4531 /* Return TARGET itself if it is a hard register. */
4532 else if (want_value && GET_MODE (target) != BLKmode
4533 && ! (GET_CODE (target) == REG
4534 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4535 return copy_to_reg (target);
4537 else
4538 return target;
4541 /* Return 1 if EXP just contains zeros. */
4543 static int
4544 is_zeros_p (exp)
4545 tree exp;
4547 tree elt;
4549 switch (TREE_CODE (exp))
4551 case CONVERT_EXPR:
4552 case NOP_EXPR:
4553 case NON_LVALUE_EXPR:
4554 case VIEW_CONVERT_EXPR:
4555 return is_zeros_p (TREE_OPERAND (exp, 0));
4557 case INTEGER_CST:
4558 return integer_zerop (exp);
4560 case COMPLEX_CST:
4561 return
4562 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4564 case REAL_CST:
4565 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4567 case VECTOR_CST:
4568 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4569 elt = TREE_CHAIN (elt))
4570 if (!is_zeros_p (TREE_VALUE (elt)))
4571 return 0;
4573 return 1;
4575 case CONSTRUCTOR:
4576 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4577 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4578 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4579 if (! is_zeros_p (TREE_VALUE (elt)))
4580 return 0;
4582 return 1;
4584 default:
4585 return 0;
4589 /* Return 1 if EXP contains mostly (3/4) zeros. */
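/* For example, a constructor with 6 zero elements out of 8 qualifies,
   since 4 * 6 >= 3 * 8; one with only 5 zero elements out of 8 does not.  */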
4591 static int
4592 mostly_zeros_p (exp)
4593 tree exp;
4595 if (TREE_CODE (exp) == CONSTRUCTOR)
4597 int elts = 0, zeros = 0;
4598 tree elt = CONSTRUCTOR_ELTS (exp);
4599 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4601 /* If there are no ranges of true bits, it is all zero. */
4602 return elt == NULL_TREE;
4604 for (; elt; elt = TREE_CHAIN (elt))
4606 /* We do not handle the case where the index is a RANGE_EXPR,
4607 so the statistic will be somewhat inaccurate.
4608 We do make a more accurate count in store_constructor itself,
4609 and since this function is only used for nested array elements,
4610 this should be close enough. */
4611 if (mostly_zeros_p (TREE_VALUE (elt)))
4612 zeros++;
4613 elts++;
4616 return 4 * zeros >= 3 * elts;
4619 return is_zeros_p (exp);
4622 /* Helper function for store_constructor.
4623 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4624 TYPE is the type of the CONSTRUCTOR, not the element type.
4625 CLEARED is as for store_constructor.
4626 ALIAS_SET is the alias set to use for any stores.
4628 This provides a recursive shortcut back to store_constructor when it isn't
4629 necessary to go through store_field. This is so that we can pass through
4630 the cleared field to let store_constructor know that we may not have to
4631 clear a substructure if the outer structure has already been cleared. */
4633 static void
4634 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4635 alias_set)
4636 rtx target;
4637 unsigned HOST_WIDE_INT bitsize;
4638 HOST_WIDE_INT bitpos;
4639 enum machine_mode mode;
4640 tree exp, type;
4641 int cleared;
4642 int alias_set;
4644 if (TREE_CODE (exp) == CONSTRUCTOR
4645 && bitpos % BITS_PER_UNIT == 0
4646 /* If we have a non-zero bitpos for a register target, then we just
4647 let store_field do the bitfield handling. This is unlikely to
4648 generate unnecessary clear instructions anyway. */
4649 && (bitpos == 0 || GET_CODE (target) == MEM))
4651 if (GET_CODE (target) == MEM)
4652 target
4653 = adjust_address (target,
4654 GET_MODE (target) == BLKmode
4655 || 0 != (bitpos
4656 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4657 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4660 /* Update the alias set, if required. */
4661 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4662 && MEM_ALIAS_SET (target) != 0)
4664 target = copy_rtx (target);
4665 set_mem_alias_set (target, alias_set);
4668 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4670 else
4671 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4672 alias_set);
4675 /* Store the value of constructor EXP into the rtx TARGET.
4676 TARGET is either a REG or a MEM; we know it cannot conflict, since
4677 safe_from_p has been called.
4678 CLEARED is true if TARGET is known to have been zeroed.
4679 SIZE is the number of bytes of TARGET we are allowed to modify: this
4680 may not be the same as the size of EXP if we are assigning to a field
4681 which has been packed to exclude padding bits. */
4683 static void
4684 store_constructor (exp, target, cleared, size)
4685 tree exp;
4686 rtx target;
4687 int cleared;
4688 HOST_WIDE_INT size;
4690 tree type = TREE_TYPE (exp);
4691 #ifdef WORD_REGISTER_OPERATIONS
4692 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4693 #endif
4695 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4696 || TREE_CODE (type) == QUAL_UNION_TYPE)
4698 tree elt;
4700 /* We either clear the aggregate or indicate the value is dead. */
4701 if ((TREE_CODE (type) == UNION_TYPE
4702 || TREE_CODE (type) == QUAL_UNION_TYPE)
4703 && ! cleared
4704 && ! CONSTRUCTOR_ELTS (exp))
4705 /* If the constructor is empty, clear the union. */
4707 clear_storage (target, expr_size (exp));
4708 cleared = 1;
4711 /* If we are building a static constructor into a register,
4712 set the initial value as zero so we can fold the value into
4713 a constant. But if more than one register is involved,
4714 this probably loses. */
4715 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4716 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4718 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4719 cleared = 1;
4722 /* If the constructor has fewer fields than the structure
4723 or if we are initializing the structure to mostly zeros,
4724 clear the whole structure first. Don't do this if TARGET is a
4725 register whose mode size isn't equal to SIZE since clear_storage
4726 can't handle this case. */
4727 else if (! cleared && size > 0
4728 && ((list_length (CONSTRUCTOR_ELTS (exp))
4729 != fields_length (type))
4730 || mostly_zeros_p (exp))
4731 && (GET_CODE (target) != REG
4732 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4733 == size)))
4735 clear_storage (target, GEN_INT (size));
4736 cleared = 1;
4739 if (! cleared)
4740 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4742 /* Store each element of the constructor into
4743 the corresponding field of TARGET. */
4745 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4747 tree field = TREE_PURPOSE (elt);
4748 tree value = TREE_VALUE (elt);
4749 enum machine_mode mode;
4750 HOST_WIDE_INT bitsize;
4751 HOST_WIDE_INT bitpos = 0;
4752 int unsignedp;
4753 tree offset;
4754 rtx to_rtx = target;
4756 /* Just ignore missing fields.
4757 We cleared the whole structure, above,
4758 if any fields are missing. */
4759 if (field == 0)
4760 continue;
4762 if (cleared && is_zeros_p (value))
4763 continue;
4765 if (host_integerp (DECL_SIZE (field), 1))
4766 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4767 else
4768 bitsize = -1;
4770 unsignedp = TREE_UNSIGNED (field);
4771 mode = DECL_MODE (field);
4772 if (DECL_BIT_FIELD (field))
4773 mode = VOIDmode;
4775 offset = DECL_FIELD_OFFSET (field);
4776 if (host_integerp (offset, 0)
4777 && host_integerp (bit_position (field), 0))
4779 bitpos = int_bit_position (field);
4780 offset = 0;
4782 else
4783 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4785 if (offset)
4787 rtx offset_rtx;
4789 if (contains_placeholder_p (offset))
4790 offset = build (WITH_RECORD_EXPR, sizetype,
4791 offset, make_tree (TREE_TYPE (exp), target));
4793 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4794 if (GET_CODE (to_rtx) != MEM)
4795 abort ();
4797 #ifdef POINTERS_EXTEND_UNSIGNED
4798 if (GET_MODE (offset_rtx) != Pmode)
4799 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4800 #else
4801 if (GET_MODE (offset_rtx) != ptr_mode)
4802 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4803 #endif
4805 to_rtx = offset_address (to_rtx, offset_rtx,
4806 highest_pow2_factor (offset));
4809 if (TREE_READONLY (field))
4811 if (GET_CODE (to_rtx) == MEM)
4812 to_rtx = copy_rtx (to_rtx);
4814 RTX_UNCHANGING_P (to_rtx) = 1;
4817 #ifdef WORD_REGISTER_OPERATIONS
4818 /* If this initializes a field that is smaller than a word, at the
4819 start of a word, try to widen it to a full word.
4820 This special case allows us to output C++ member function
4821 initializations in a form that the optimizers can understand. */
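/* For instance, an 8-bit integer field at bit position 0 of a 32-bit word
   register on a big-endian target (example parameters) is converted to a
   word-sized type, shifted left by BITS_PER_WORD - bitsize = 24, and then
   stored as a full word.  */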
4822 if (GET_CODE (target) == REG
4823 && bitsize < BITS_PER_WORD
4824 && bitpos % BITS_PER_WORD == 0
4825 && GET_MODE_CLASS (mode) == MODE_INT
4826 && TREE_CODE (value) == INTEGER_CST
4827 && exp_size >= 0
4828 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4830 tree type = TREE_TYPE (value);
4832 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4834 type = (*lang_hooks.types.type_for_size)
4835 (BITS_PER_WORD, TREE_UNSIGNED (type));
4836 value = convert (type, value);
4839 if (BYTES_BIG_ENDIAN)
4840 value
4841 = fold (build (LSHIFT_EXPR, type, value,
4842 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4843 bitsize = BITS_PER_WORD;
4844 mode = word_mode;
4846 #endif
4848 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4849 && DECL_NONADDRESSABLE_P (field))
4851 to_rtx = copy_rtx (to_rtx);
4852 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4855 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4856 value, type, cleared,
4857 get_alias_set (TREE_TYPE (field)));
4860 else if (TREE_CODE (type) == ARRAY_TYPE
4861 || TREE_CODE (type) == VECTOR_TYPE)
4863 tree elt;
4864 int i;
4865 int need_to_clear;
4866 tree domain = TYPE_DOMAIN (type);
4867 tree elttype = TREE_TYPE (type);
4868 int const_bounds_p;
4869 HOST_WIDE_INT minelt = 0;
4870 HOST_WIDE_INT maxelt = 0;
4872 /* Vectors are like arrays, but the domain is stored via an array
4873 type indirectly. */
4874 if (TREE_CODE (type) == VECTOR_TYPE)
4876 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4877 the same field as TYPE_DOMAIN, we are not guaranteed that
4878 it always will. */
4879 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4880 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4883 const_bounds_p = (TYPE_MIN_VALUE (domain)
4884 && TYPE_MAX_VALUE (domain)
4885 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4886 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4888 /* If we have constant bounds for the range of the type, get them. */
4889 if (const_bounds_p)
4891 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4892 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4895 /* If the constructor has fewer elements than the array,
4896 clear the whole array first. Similarly if this is
4897 a static constructor of a non-BLKmode object. */
4898 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4899 need_to_clear = 1;
4900 else
4902 HOST_WIDE_INT count = 0, zero_count = 0;
4903 need_to_clear = ! const_bounds_p;
4905 /* This loop is a more accurate version of the loop in
4906 mostly_zeros_p (it handles RANGE_EXPR in an index).
4907 It is also needed to check for missing elements. */
4908 for (elt = CONSTRUCTOR_ELTS (exp);
4909 elt != NULL_TREE && ! need_to_clear;
4910 elt = TREE_CHAIN (elt))
4912 tree index = TREE_PURPOSE (elt);
4913 HOST_WIDE_INT this_node_count;
4915 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4917 tree lo_index = TREE_OPERAND (index, 0);
4918 tree hi_index = TREE_OPERAND (index, 1);
4920 if (! host_integerp (lo_index, 1)
4921 || ! host_integerp (hi_index, 1))
4923 need_to_clear = 1;
4924 break;
4927 this_node_count = (tree_low_cst (hi_index, 1)
4928 - tree_low_cst (lo_index, 1) + 1);
4930 else
4931 this_node_count = 1;
4933 count += this_node_count;
4934 if (mostly_zeros_p (TREE_VALUE (elt)))
4935 zero_count += this_node_count;
4938 /* Clear the entire array first if there are any missing elements,
4939 or if the incidence of zero elements is >= 75%. */
4940 if (! need_to_clear
4941 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4942 need_to_clear = 1;
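/* For example, a 100-element array with only 20 explicit initializers has
   COUNT = 20 < 100, so the whole array is cleared before the stores; the
   same happens if at least 75% of the initializers are themselves zero.  */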
4945 if (need_to_clear && size > 0)
4947 if (! cleared)
4949 if (REG_P (target))
4950 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4951 else
4952 clear_storage (target, GEN_INT (size));
4954 cleared = 1;
4956 else if (REG_P (target))
4957 /* Inform later passes that the old value is dead. */
4958 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4960 /* Store each element of the constructor into
4961 the corresponding element of TARGET, determined
4962 by counting the elements. */
4963 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4964 elt;
4965 elt = TREE_CHAIN (elt), i++)
4967 enum machine_mode mode;
4968 HOST_WIDE_INT bitsize;
4969 HOST_WIDE_INT bitpos;
4970 int unsignedp;
4971 tree value = TREE_VALUE (elt);
4972 tree index = TREE_PURPOSE (elt);
4973 rtx xtarget = target;
4975 if (cleared && is_zeros_p (value))
4976 continue;
4978 unsignedp = TREE_UNSIGNED (elttype);
4979 mode = TYPE_MODE (elttype);
4980 if (mode == BLKmode)
4981 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4982 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4983 : -1);
4984 else
4985 bitsize = GET_MODE_BITSIZE (mode);
4987 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4989 tree lo_index = TREE_OPERAND (index, 0);
4990 tree hi_index = TREE_OPERAND (index, 1);
4991 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4992 struct nesting *loop;
4993 HOST_WIDE_INT lo, hi, count;
4994 tree position;
4996 /* If the range is constant and "small", unroll the loop. */
4997 if (const_bounds_p
4998 && host_integerp (lo_index, 0)
4999 && host_integerp (hi_index, 0)
5000 && (lo = tree_low_cst (lo_index, 0),
5001 hi = tree_low_cst (hi_index, 0),
5002 count = hi - lo + 1,
5003 (GET_CODE (target) != MEM
5004 || count <= 2
5005 || (host_integerp (TYPE_SIZE (elttype), 1)
5006 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5007 <= 40 * 8)))))
5009 lo -= minelt; hi -= minelt;
5010 for (; lo <= hi; lo++)
5012 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5014 if (GET_CODE (target) == MEM
5015 && !MEM_KEEP_ALIAS_SET_P (target)
5016 && TREE_CODE (type) == ARRAY_TYPE
5017 && TYPE_NONALIASED_COMPONENT (type))
5019 target = copy_rtx (target);
5020 MEM_KEEP_ALIAS_SET_P (target) = 1;
5023 store_constructor_field
5024 (target, bitsize, bitpos, mode, value, type, cleared,
5025 get_alias_set (elttype));
5028 else
5030 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5031 loop_top = gen_label_rtx ();
5032 loop_end = gen_label_rtx ();
5034 unsignedp = TREE_UNSIGNED (domain);
5036 index = build_decl (VAR_DECL, NULL_TREE, domain);
5038 index_r
5039 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5040 &unsignedp, 0));
5041 SET_DECL_RTL (index, index_r);
5042 if (TREE_CODE (value) == SAVE_EXPR
5043 && SAVE_EXPR_RTL (value) == 0)
5045 /* Make sure value gets expanded once before the
5046 loop. */
5047 expand_expr (value, const0_rtx, VOIDmode, 0);
5048 emit_queue ();
5050 store_expr (lo_index, index_r, 0);
5051 loop = expand_start_loop (0);
5053 /* Assign value to element index. */
5054 position
5055 = convert (ssizetype,
5056 fold (build (MINUS_EXPR, TREE_TYPE (index),
5057 index, TYPE_MIN_VALUE (domain))));
5058 position = size_binop (MULT_EXPR, position,
5059 convert (ssizetype,
5060 TYPE_SIZE_UNIT (elttype)));
5062 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5063 xtarget = offset_address (target, pos_rtx,
5064 highest_pow2_factor (position));
5065 xtarget = adjust_address (xtarget, mode, 0);
5066 if (TREE_CODE (value) == CONSTRUCTOR)
5067 store_constructor (value, xtarget, cleared,
5068 bitsize / BITS_PER_UNIT);
5069 else
5070 store_expr (value, xtarget, 0);
5072 expand_exit_loop_if_false (loop,
5073 build (LT_EXPR, integer_type_node,
5074 index, hi_index));
5076 expand_increment (build (PREINCREMENT_EXPR,
5077 TREE_TYPE (index),
5078 index, integer_one_node), 0, 0);
5079 expand_end_loop ();
5080 emit_label (loop_end);
5083 else if ((index != 0 && ! host_integerp (index, 0))
5084 || ! host_integerp (TYPE_SIZE (elttype), 1))
5086 tree position;
5088 if (index == 0)
5089 index = ssize_int (1);
5091 if (minelt)
5092 index = convert (ssizetype,
5093 fold (build (MINUS_EXPR, index,
5094 TYPE_MIN_VALUE (domain))));
5096 position = size_binop (MULT_EXPR, index,
5097 convert (ssizetype,
5098 TYPE_SIZE_UNIT (elttype)));
5099 xtarget = offset_address (target,
5100 expand_expr (position, 0, VOIDmode, 0),
5101 highest_pow2_factor (position));
5102 xtarget = adjust_address (xtarget, mode, 0);
5103 store_expr (value, xtarget, 0);
5105 else
5107 if (index != 0)
5108 bitpos = ((tree_low_cst (index, 0) - minelt)
5109 * tree_low_cst (TYPE_SIZE (elttype), 1));
5110 else
5111 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5113 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5114 && TREE_CODE (type) == ARRAY_TYPE
5115 && TYPE_NONALIASED_COMPONENT (type))
5117 target = copy_rtx (target);
5118 MEM_KEEP_ALIAS_SET_P (target) = 1;
5121 store_constructor_field (target, bitsize, bitpos, mode, value,
5122 type, cleared, get_alias_set (elttype));
5128 /* Set constructor assignments. */
5129 else if (TREE_CODE (type) == SET_TYPE)
5131 tree elt = CONSTRUCTOR_ELTS (exp);
5132 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5133 tree domain = TYPE_DOMAIN (type);
5134 tree domain_min, domain_max, bitlength;
5136 /* The default implementation strategy is to extract the constant
5137 parts of the constructor, use that to initialize the target,
5138 and then "or" in whatever non-constant ranges we need in addition.
5140 If a large set is all zero or all ones, it is
5141 probably better to set it using memset (if available) or bzero.
5142 Also, if a large set has just a single range, it may also be
5143 better to first clear the whole set (using
5144 bzero/memset), and then set the bits we want. */
5146 /* Check for all zeros. */
5147 if (elt == NULL_TREE && size > 0)
5149 if (!cleared)
5150 clear_storage (target, GEN_INT (size));
5151 return;
5154 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5155 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5156 bitlength = size_binop (PLUS_EXPR,
5157 size_diffop (domain_max, domain_min),
5158 ssize_int (1));
5160 nbits = tree_low_cst (bitlength, 1);
5162 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5163 are "complicated" (more than one range), initialize (the
5164 constant parts) by copying from a constant. */
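/* As an illustration, with BITS_PER_WORD of 32 (an example value), any set
   of up to 64 bits, and any set of up to 32 bytes whose constructor has
   more than one range, is initialized here by building the constant words
   directly from the constructor's bits.  */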
5165 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5166 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5168 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5169 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5170 char *bit_buffer = (char *) alloca (nbits);
5171 HOST_WIDE_INT word = 0;
5172 unsigned int bit_pos = 0;
5173 unsigned int ibit = 0;
5174 unsigned int offset = 0; /* In bytes from beginning of set. */
5176 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5177 for (;;)
5179 if (bit_buffer[ibit])
5181 if (BYTES_BIG_ENDIAN)
5182 word |= (1 << (set_word_size - 1 - bit_pos));
5183 else
5184 word |= 1 << bit_pos;
5187 bit_pos++; ibit++;
5188 if (bit_pos >= set_word_size || ibit == nbits)
5190 if (word != 0 || ! cleared)
5192 rtx datum = GEN_INT (word);
5193 rtx to_rtx;
5195 /* The assumption here is that it is safe to use
5196 XEXP if the set is multi-word, but not if
5197 it's single-word. */
5198 if (GET_CODE (target) == MEM)
5199 to_rtx = adjust_address (target, mode, offset);
5200 else if (offset == 0)
5201 to_rtx = target;
5202 else
5203 abort ();
5204 emit_move_insn (to_rtx, datum);
5207 if (ibit == nbits)
5208 break;
5209 word = 0;
5210 bit_pos = 0;
5211 offset += set_word_size / BITS_PER_UNIT;
5215 else if (!cleared)
5216 /* Don't bother clearing storage if the set is all ones. */
5217 if (TREE_CHAIN (elt) != NULL_TREE
5218 || (TREE_PURPOSE (elt) == NULL_TREE
5219 ? nbits != 1
5220 : ( ! host_integerp (TREE_VALUE (elt), 0)
5221 || ! host_integerp (TREE_PURPOSE (elt), 0)
5222 || (tree_low_cst (TREE_VALUE (elt), 0)
5223 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5224 != (HOST_WIDE_INT) nbits))))
5225 clear_storage (target, expr_size (exp));
5227 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5229 /* Start of range of element or NULL. */
5230 tree startbit = TREE_PURPOSE (elt);
5231 /* End of range of element, or element value. */
5232 tree endbit = TREE_VALUE (elt);
5233 HOST_WIDE_INT startb, endb;
5234 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5236 bitlength_rtx = expand_expr (bitlength,
5237 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5239 /* Handle non-range tuple element like [ expr ]. */
5240 if (startbit == NULL_TREE)
5242 startbit = save_expr (endbit);
5243 endbit = startbit;
5246 startbit = convert (sizetype, startbit);
5247 endbit = convert (sizetype, endbit);
5248 if (! integer_zerop (domain_min))
5250 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5251 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5253 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5254 EXPAND_CONST_ADDRESS);
5255 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5256 EXPAND_CONST_ADDRESS);
5258 if (REG_P (target))
5260 targetx
5261 = assign_temp
5262 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5263 (GET_MODE (target), 0),
5264 TYPE_QUAL_CONST)),
5265 0, 1, 1);
5266 emit_move_insn (targetx, target);
5269 else if (GET_CODE (target) == MEM)
5270 targetx = target;
5271 else
5272 abort ();
5274 /* Optimization: If startbit and endbit are constants divisible
5275 by BITS_PER_UNIT, call memset instead. */
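/* For example, with BITS_PER_UNIT of 8, the constant range [8, 23] gives
   STARTB = 8 and ENDB = 24, both multiples of a byte, so the two bytes at
   offset 1 are simply memset to all-ones.  */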
5276 if (TARGET_MEM_FUNCTIONS
5277 && TREE_CODE (startbit) == INTEGER_CST
5278 && TREE_CODE (endbit) == INTEGER_CST
5279 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5280 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5282 emit_library_call (memset_libfunc, LCT_NORMAL,
5283 VOIDmode, 3,
5284 plus_constant (XEXP (targetx, 0),
5285 startb / BITS_PER_UNIT),
5286 Pmode,
5287 constm1_rtx, TYPE_MODE (integer_type_node),
5288 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5289 TYPE_MODE (sizetype));
5291 else
5292 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5293 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5294 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5295 startbit_rtx, TYPE_MODE (sizetype),
5296 endbit_rtx, TYPE_MODE (sizetype));
5298 if (REG_P (target))
5299 emit_move_insn (target, targetx);
5303 else
5304 abort ();
5307 /* Store the value of EXP (an expression tree)
5308 into a subfield of TARGET which has mode MODE and occupies
5309 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5310 If MODE is VOIDmode, it means that we are storing into a bit-field.
5312 If VALUE_MODE is VOIDmode, return nothing in particular.
5313 UNSIGNEDP is not used in this case.
5315 Otherwise, return an rtx for the value stored. This rtx
5316 has mode VALUE_MODE if that is convenient to do.
5317 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5319 TYPE is the type of the underlying object,
5321 ALIAS_SET is the alias set for the destination. This value will
5322 (in general) be different from that for TARGET, since TARGET is a
5323 reference to the containing structure. */
5325 static rtx
5326 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5327 alias_set)
5328 rtx target;
5329 HOST_WIDE_INT bitsize;
5330 HOST_WIDE_INT bitpos;
5331 enum machine_mode mode;
5332 tree exp;
5333 enum machine_mode value_mode;
5334 int unsignedp;
5335 tree type;
5336 int alias_set;
5338 HOST_WIDE_INT width_mask = 0;
5340 if (TREE_CODE (exp) == ERROR_MARK)
5341 return const0_rtx;
5343 /* If we have nothing to store, do nothing unless the expression has
5344 side-effects. */
5345 if (bitsize == 0)
5346 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5347 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5348 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
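/* E.g. a BITSIZE of 5 gives a WIDTH_MASK of 0x1f; it is used below to mask
   or sign-extend the value handed back to the caller without refetching
   the bit-field.  */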
5350 /* If we are storing into an unaligned field of an aligned union that is
5351 in a register, we may have the mode of TARGET being an integer mode but
5352 MODE == BLKmode. In that case, get an aligned object whose size and
5353 alignment are the same as TARGET and store TARGET into it (we can avoid
5354 the store if the field being stored is the entire width of TARGET). Then
5355 call ourselves recursively to store the field into a BLKmode version of
5356 that object. Finally, load from the object into TARGET. This is not
5357 very efficient in general, but should only be slightly more expensive
5358 than the otherwise-required unaligned accesses. Perhaps this can be
5359 cleaned up later. */
5361 if (mode == BLKmode
5362 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5364 rtx object
5365 = assign_temp
5366 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5367 0, 1, 1);
5368 rtx blk_object = adjust_address (object, BLKmode, 0);
5370 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5371 emit_move_insn (object, target);
5373 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5374 alias_set);
5376 emit_move_insn (target, object);
5378 /* We want to return the BLKmode version of the data. */
5379 return blk_object;
5382 if (GET_CODE (target) == CONCAT)
5384 /* We're storing into a struct containing a single __complex. */
5386 if (bitpos != 0)
5387 abort ();
5388 return store_expr (exp, target, 0);
5391 /* If the structure is in a register or if the component
5392 is a bit field, we cannot use addressing to access it.
5393 Use bit-field techniques or SUBREG to store in it. */
5395 if (mode == VOIDmode
5396 || (mode != BLKmode && ! direct_store[(int) mode]
5397 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5398 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5399 || GET_CODE (target) == REG
5400 || GET_CODE (target) == SUBREG
5401 /* If the field isn't aligned enough to store as an ordinary memref,
5402 store it as a bit field. */
5403 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5404 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5405 || bitpos % GET_MODE_ALIGNMENT (mode)))
5406 /* If the RHS and field are a constant size and the size of the
5407 RHS isn't the same size as the bitfield, we must use bitfield
5408 operations. */
5409 || (bitsize >= 0
5410 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5411 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5413 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5415 /* If BITSIZE is narrower than the size of the type of EXP
5416 we will be narrowing TEMP. Normally, what's wanted are the
5417 low-order bits. However, if EXP's type is a record and this is
5418 a big-endian machine, we want the upper BITSIZE bits. */
5419 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5420 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5421 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5422 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5423 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5424 - bitsize),
5425 temp, 1);
5427 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5428 MODE. */
5429 if (mode != VOIDmode && mode != BLKmode
5430 && mode != TYPE_MODE (TREE_TYPE (exp)))
5431 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5433 /* If the modes of TARGET and TEMP are both BLKmode, both
5434 must be in memory and BITPOS must be aligned on a byte
5435 boundary. If so, we simply do a block copy. */
5436 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5438 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5439 || bitpos % BITS_PER_UNIT != 0)
5440 abort ();
5442 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5443 emit_block_move (target, temp,
5444 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5445 / BITS_PER_UNIT),
5446 BLOCK_OP_NORMAL);
5448 return value_mode == VOIDmode ? const0_rtx : target;
5451 /* Store the value in the bitfield. */
5452 store_bit_field (target, bitsize, bitpos, mode, temp,
5453 int_size_in_bytes (type));
5455 if (value_mode != VOIDmode)
5457 /* The caller wants an rtx for the value.
5458 If possible, avoid refetching from the bitfield itself. */
5459 if (width_mask != 0
5460 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5462 tree count;
5463 enum machine_mode tmode;
5465 tmode = GET_MODE (temp);
5466 if (tmode == VOIDmode)
5467 tmode = value_mode;
5469 if (unsignedp)
5470 return expand_and (tmode, temp,
5471 gen_int_mode (width_mask, tmode),
5472 NULL_RTX);
5474 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5475 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5476 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5479 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5480 NULL_RTX, value_mode, VOIDmode,
5481 int_size_in_bytes (type));
5483 return const0_rtx;
5485 else
5487 rtx addr = XEXP (target, 0);
5488 rtx to_rtx = target;
5490 /* If a value is wanted, it must be the lhs;
5491 so make the address stable for multiple use. */
5493 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5494 && ! CONSTANT_ADDRESS_P (addr)
5495 /* A frame-pointer reference is already stable. */
5496 && ! (GET_CODE (addr) == PLUS
5497 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5498 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5499 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5500 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5502 /* Now build a reference to just the desired component. */
5504 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5506 if (to_rtx == target)
5507 to_rtx = copy_rtx (to_rtx);
5509 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5510 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5511 set_mem_alias_set (to_rtx, alias_set);
5513 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5517 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5518 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5519 codes and find the ultimate containing object, which we return.
5521 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5522 bit position, and *PUNSIGNEDP to the signedness of the field.
5523 If the position of the field is variable, we store a tree
5524 giving the variable offset (in units) in *POFFSET.
5525 This offset is in addition to the bit position.
5526 If the position is not variable, we store 0 in *POFFSET.
5528 If any of the extraction expressions is volatile,
5529 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5531 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5532 is a mode that can be used to access the field. In that case, *PBITSIZE
5533 is redundant.
5535 If the field describes a variable-sized object, *PMODE is set to
5536 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5537 this case, but the address of the object can be found. */
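/* As an example, for a COMPONENT_REF accessing a 5-bit bit-field that
   starts 3 bits into its containing record, this returns the expression
   for the record itself with *PBITSIZE = 5, *PBITPOS = 3, *POFFSET = 0
   and *PMODE = VOIDmode.  */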
5539 tree
5540 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5541 punsignedp, pvolatilep)
5542 tree exp;
5543 HOST_WIDE_INT *pbitsize;
5544 HOST_WIDE_INT *pbitpos;
5545 tree *poffset;
5546 enum machine_mode *pmode;
5547 int *punsignedp;
5548 int *pvolatilep;
5550 tree size_tree = 0;
5551 enum machine_mode mode = VOIDmode;
5552 tree offset = size_zero_node;
5553 tree bit_offset = bitsize_zero_node;
5554 tree placeholder_ptr = 0;
5555 tree tem;
5557 /* First get the mode, signedness, and size. We do this from just the
5558 outermost expression. */
5559 if (TREE_CODE (exp) == COMPONENT_REF)
5561 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5562 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5563 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5565 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5567 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5569 size_tree = TREE_OPERAND (exp, 1);
5570 *punsignedp = TREE_UNSIGNED (exp);
5572 else
5574 mode = TYPE_MODE (TREE_TYPE (exp));
5575 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5577 if (mode == BLKmode)
5578 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5579 else
5580 *pbitsize = GET_MODE_BITSIZE (mode);
5583 if (size_tree != 0)
5585 if (! host_integerp (size_tree, 1))
5586 mode = BLKmode, *pbitsize = -1;
5587 else
5588 *pbitsize = tree_low_cst (size_tree, 1);
5591 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5592 and find the ultimate containing object. */
5593 while (1)
5595 if (TREE_CODE (exp) == BIT_FIELD_REF)
5596 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5597 else if (TREE_CODE (exp) == COMPONENT_REF)
5599 tree field = TREE_OPERAND (exp, 1);
5600 tree this_offset = DECL_FIELD_OFFSET (field);
5602 /* If this field hasn't been filled in yet, don't go
5603 past it. This should only happen when folding expressions
5604 made during type construction. */
5605 if (this_offset == 0)
5606 break;
5607 else if (! TREE_CONSTANT (this_offset)
5608 && contains_placeholder_p (this_offset))
5609 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5611 offset = size_binop (PLUS_EXPR, offset, this_offset);
5612 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5613 DECL_FIELD_BIT_OFFSET (field));
5615 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5618 else if (TREE_CODE (exp) == ARRAY_REF
5619 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5621 tree index = TREE_OPERAND (exp, 1);
5622 tree array = TREE_OPERAND (exp, 0);
5623 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5624 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5625 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5627 /* We assume all arrays have sizes that are a multiple of a byte.
5628 First subtract the lower bound, if any, in the type of the
5629 index, then convert to sizetype and multiply by the size of the
5630 array element. */
5631 if (low_bound != 0 && ! integer_zerop (low_bound))
5632 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5633 index, low_bound));
5635 /* If the index has a self-referential type, pass it to a
5636 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5637 component to one. */
5638 if (! TREE_CONSTANT (index)
5639 && contains_placeholder_p (index))
5640 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5641 if (! TREE_CONSTANT (unit_size)
5642 && contains_placeholder_p (unit_size))
5643 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5645 offset = size_binop (PLUS_EXPR, offset,
5646 size_binop (MULT_EXPR,
5647 convert (sizetype, index),
5648 unit_size));
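/* For instance, an ARRAY_REF A[I] whose domain has a lower bound of 1 and
   whose elements are 4 bytes (example figures) contributes
   (I - 1) * 4 bytes to OFFSET here.  */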
5651 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5653 tree new = find_placeholder (exp, &placeholder_ptr);
5655 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5656 We might have been called from tree optimization where we
5657 haven't set up an object yet. */
5658 if (new == 0)
5659 break;
5660 else
5661 exp = new;
5663 continue;
5665 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5666 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5667 && ! ((TREE_CODE (exp) == NOP_EXPR
5668 || TREE_CODE (exp) == CONVERT_EXPR)
5669 && (TYPE_MODE (TREE_TYPE (exp))
5670 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5671 break;
5673 /* If any reference in the chain is volatile, the effect is volatile. */
5674 if (TREE_THIS_VOLATILE (exp))
5675 *pvolatilep = 1;
5677 exp = TREE_OPERAND (exp, 0);
5680 /* If OFFSET is constant, see if we can return the whole thing as a
5681 constant bit position. Otherwise, split it up. */
5682 if (host_integerp (offset, 0)
5683 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5684 bitsize_unit_node))
5685 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5686 && host_integerp (tem, 0))
5687 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5688 else
5689 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5691 *pmode = mode;
5692 return exp;
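/* As an illustrative sketch only (the local names below are hypothetical),
a typical caller decomposes a reference the same way the COMPONENT_REF
handling in expand_expr further down does:

HOST_WIDE_INT bitsize, bitpos;
tree offset;
enum machine_mode mode1;
int unsignedp, volatilep = 0;
tree inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
&mode1, &unsignedp, &volatilep);

INNER is then the ultimate containing object, BITPOS and BITSIZE give the
constant part of the position in bits, and OFFSET holds any variable part
of the byte offset (or 0 if the whole position is constant). */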
5695 /* Return 1 if T is an expression that get_inner_reference handles. */
5698 handled_component_p (t)
5699 tree t;
5701 switch (TREE_CODE (t))
5703 case BIT_FIELD_REF:
5704 case COMPONENT_REF:
5705 case ARRAY_REF:
5706 case ARRAY_RANGE_REF:
5707 case NON_LVALUE_EXPR:
5708 case VIEW_CONVERT_EXPR:
5709 return 1;
5711 case NOP_EXPR:
5712 case CONVERT_EXPR:
5713 return (TYPE_MODE (TREE_TYPE (t))
5714 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5716 default:
5717 return 0;
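/* Purely as a hypothetical sketch (this loop is not taken from this file),
a caller that wants the object underneath a chain of such references
could strip them with:

while (handled_component_p (t))
t = TREE_OPERAND (t, 0);

get_inner_reference above performs the equivalent walk while also
accumulating the bit and byte offsets. */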
5721 /* Given an rtx VALUE that may contain additions and multiplications, return
5722 an equivalent value that just refers to a register, memory, or constant.
5723 This is done by generating instructions to perform the arithmetic and
5724 returning a pseudo-register containing the value.
5726 The returned value may be a REG, SUBREG, MEM or constant. */
5729 force_operand (value, target)
5730 rtx value, target;
5732 rtx op1, op2;
5733 /* Use subtarget as the target for operand 0 of a binary operation. */
5734 rtx subtarget = get_subtarget (target);
5735 enum rtx_code code = GET_CODE (value);
5737 /* Check for a PIC address load. */
5738 if ((code == PLUS || code == MINUS)
5739 && XEXP (value, 0) == pic_offset_table_rtx
5740 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5741 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5742 || GET_CODE (XEXP (value, 1)) == CONST))
5744 if (!subtarget)
5745 subtarget = gen_reg_rtx (GET_MODE (value));
5746 emit_move_insn (subtarget, value);
5747 return subtarget;
5750 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5752 if (!target)
5753 target = gen_reg_rtx (GET_MODE (value));
5754 convert_move (target, force_operand (XEXP (value, 0), NULL),
5755 code == ZERO_EXTEND);
5756 return target;
5759 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5761 op2 = XEXP (value, 1);
5762 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5763 subtarget = 0;
5764 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5766 code = PLUS;
5767 op2 = negate_rtx (GET_MODE (value), op2);
5770 /* Check for an addition with OP2 a constant integer and our first
5771 operand a PLUS of a virtual register and something else. In that
5772 case, we want to emit the sum of the virtual register and the
5773 constant first and then add the other value. This allows virtual
5774 register instantiation to simply modify the constant rather than
5775 creating another one around this addition. */
5776 if (code == PLUS && GET_CODE (op2) == CONST_INT
5777 && GET_CODE (XEXP (value, 0)) == PLUS
5778 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5779 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5780 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5782 rtx temp = expand_simple_binop (GET_MODE (value), code,
5783 XEXP (XEXP (value, 0), 0), op2,
5784 subtarget, 0, OPTAB_LIB_WIDEN);
5785 return expand_simple_binop (GET_MODE (value), code, temp,
5786 force_operand (XEXP (XEXP (value,
5787 0), 1), 0),
5788 target, 0, OPTAB_LIB_WIDEN);
5791 op1 = force_operand (XEXP (value, 0), subtarget);
5792 op2 = force_operand (op2, NULL_RTX);
5793 switch (code)
5795 case MULT:
5796 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5797 case DIV:
5798 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5799 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5800 target, 1, OPTAB_LIB_WIDEN);
5801 else
5802 return expand_divmod (0,
5803 FLOAT_MODE_P (GET_MODE (value))
5804 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5805 GET_MODE (value), op1, op2, target, 0);
5806 break;
5807 case MOD:
5808 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5809 target, 0);
5810 break;
5811 case UDIV:
5812 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5813 target, 1);
5814 break;
5815 case UMOD:
5816 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5817 target, 1);
5818 break;
5819 case ASHIFTRT:
5820 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5821 target, 0, OPTAB_LIB_WIDEN);
5822 break;
5823 default:
5824 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5825 target, 1, OPTAB_LIB_WIDEN);
5828 if (GET_RTX_CLASS (code) == '1')
5830 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5831 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5834 #ifdef INSN_SCHEDULING
5835 /* On machines that have insn scheduling, we want all memory references to be
5836 explicit, so we need to deal with such paradoxical SUBREGs. */
5837 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5838 && (GET_MODE_SIZE (GET_MODE (value))
5839 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5840 value
5841 = simplify_gen_subreg (GET_MODE (value),
5842 force_reg (GET_MODE (SUBREG_REG (value)),
5843 force_operand (SUBREG_REG (value),
5844 NULL_RTX)),
5845 GET_MODE (SUBREG_REG (value)),
5846 SUBREG_BYTE (value));
5847 #endif
5849 return value;
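/* A minimal usage sketch, with hypothetical operand names: to materialize
the sum of two values as a single operand, build the arithmetic as an rtx
and let force_operand emit the insns for it:

rtx sum = gen_rtx_PLUS (SImode, reg_a, reg_b);
rtx op = force_operand (sum, NULL_RTX);

OP is then a register (or other simple operand) holding the computed
value. */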
5852 /* Subroutine of expand_expr: return nonzero iff there is no way that
5853 EXP can reference X, which is being modified. TOP_P is nonzero if this
5854 call is going to be used to determine whether we need a temporary
5855 for EXP, as opposed to a recursive call to this function.
5857 It is always safe for this routine to return zero since it merely
5858 searches for optimization opportunities. */
5861 safe_from_p (x, exp, top_p)
5862 rtx x;
5863 tree exp;
5864 int top_p;
5866 rtx exp_rtl = 0;
5867 int i, nops;
5868 static tree save_expr_list;
5870 if (x == 0
5871 /* If EXP has varying size, we MUST use a target since we currently
5872 have no way of allocating temporaries of variable size
5873 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5874 So we assume here that something at a higher level has prevented a
5875 clash. This is somewhat bogus, but the best we can do. Only
5876 do this when X is BLKmode and when we are at the top level. */
5877 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5878 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5879 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5880 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5881 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5882 != INTEGER_CST)
5883 && GET_MODE (x) == BLKmode)
5884 /* If X is in the outgoing argument area, it is always safe. */
5885 || (GET_CODE (x) == MEM
5886 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5887 || (GET_CODE (XEXP (x, 0)) == PLUS
5888 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5889 return 1;
5891 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5892 find the underlying pseudo. */
5893 if (GET_CODE (x) == SUBREG)
5895 x = SUBREG_REG (x);
5896 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5897 return 0;
5900 /* A SAVE_EXPR might appear many times in the expression passed to the
5901 top-level safe_from_p call, and if it has a complex subexpression,
5902 examining it multiple times could result in a combinatorial explosion.
5903 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5904 with optimization took about 28 minutes to compile -- even though it was
5905 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5906 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5907 we have processed. Note that the only test of top_p was above. */
5909 if (top_p)
5911 int rtn;
5912 tree t;
5914 save_expr_list = 0;
5916 rtn = safe_from_p (x, exp, 0);
5918 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5919 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5921 return rtn;
5924 /* Now look at our tree code and possibly recurse. */
5925 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5927 case 'd':
5928 exp_rtl = DECL_RTL_IF_SET (exp);
5929 break;
5931 case 'c':
5932 return 1;
5934 case 'x':
5935 if (TREE_CODE (exp) == TREE_LIST)
5936 return ((TREE_VALUE (exp) == 0
5937 || safe_from_p (x, TREE_VALUE (exp), 0))
5938 && (TREE_CHAIN (exp) == 0
5939 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5940 else if (TREE_CODE (exp) == ERROR_MARK)
5941 return 1; /* An already-visited SAVE_EXPR? */
5942 else
5943 return 0;
5945 case '1':
5946 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5948 case '2':
5949 case '<':
5950 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5951 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5953 case 'e':
5954 case 'r':
5955 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5956 the expression. If it is set, we conflict iff we are that rtx or
5957 both are in memory. Otherwise, we check all operands of the
5958 expression recursively. */
5960 switch (TREE_CODE (exp))
5962 case ADDR_EXPR:
5963 /* If the operand is static or we are static, we can't conflict.
5964 Likewise if we don't conflict with the operand at all. */
5965 if (staticp (TREE_OPERAND (exp, 0))
5966 || TREE_STATIC (exp)
5967 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5968 return 1;
5970 /* Otherwise, the only way this can conflict is if we are taking
5971 the address of a DECL whose address is part of X, which is
5972 very rare. */
5973 exp = TREE_OPERAND (exp, 0);
5974 if (DECL_P (exp))
5976 if (!DECL_RTL_SET_P (exp)
5977 || GET_CODE (DECL_RTL (exp)) != MEM)
5978 return 0;
5979 else
5980 exp_rtl = XEXP (DECL_RTL (exp), 0);
5982 break;
5984 case INDIRECT_REF:
5985 if (GET_CODE (x) == MEM
5986 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5987 get_alias_set (exp)))
5988 return 0;
5989 break;
5991 case CALL_EXPR:
5992 /* Assume that the call will clobber all hard registers and
5993 all of memory. */
5994 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5995 || GET_CODE (x) == MEM)
5996 return 0;
5997 break;
5999 case RTL_EXPR:
6000 /* If a sequence exists, we would have to scan every instruction
6001 in the sequence to see if it was safe. This is probably not
6002 worthwhile. */
6003 if (RTL_EXPR_SEQUENCE (exp))
6004 return 0;
6006 exp_rtl = RTL_EXPR_RTL (exp);
6007 break;
6009 case WITH_CLEANUP_EXPR:
6010 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6011 break;
6013 case CLEANUP_POINT_EXPR:
6014 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6016 case SAVE_EXPR:
6017 exp_rtl = SAVE_EXPR_RTL (exp);
6018 if (exp_rtl)
6019 break;
6021 /* If we've already scanned this, don't do it again. Otherwise,
6022 show we've scanned it and record it for clearing the flag if we're
6023 going on. */
6024 if (TREE_PRIVATE (exp))
6025 return 1;
6027 TREE_PRIVATE (exp) = 1;
6028 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6030 TREE_PRIVATE (exp) = 0;
6031 return 0;
6034 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6035 return 1;
6037 case BIND_EXPR:
6038 /* The only operand we look at is operand 1. The rest aren't
6039 part of the expression. */
6040 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6042 case METHOD_CALL_EXPR:
6043 /* This takes an rtx argument, but shouldn't appear here. */
6044 abort ();
6046 default:
6047 break;
6050 /* If we have an rtx, we do not need to scan our operands. */
6051 if (exp_rtl)
6052 break;
6054 nops = first_rtl_op (TREE_CODE (exp));
6055 for (i = 0; i < nops; i++)
6056 if (TREE_OPERAND (exp, i) != 0
6057 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6058 return 0;
6060 /* If this is a language-specific tree code, it may require
6061 special handling. */
6062 if ((unsigned int) TREE_CODE (exp)
6063 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6064 && !(*lang_hooks.safe_from_p) (x, exp))
6065 return 0;
6068 /* If we have an rtl, find any enclosed object. Then see if we conflict
6069 with it. */
6070 if (exp_rtl)
6072 if (GET_CODE (exp_rtl) == SUBREG)
6074 exp_rtl = SUBREG_REG (exp_rtl);
6075 if (GET_CODE (exp_rtl) == REG
6076 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6077 return 0;
6080 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6081 are memory and they conflict. */
6082 return ! (rtx_equal_p (x, exp_rtl)
6083 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6084 && true_dependence (exp_rtl, VOIDmode, x,
6085 rtx_addr_varies_p)));
6088 /* If we reach here, it is safe. */
6089 return 1;
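/* For example, the CONSTRUCTOR handling in expand_expr below uses this to
decide whether an existing TARGET may be stored into while the constructor
is being evaluated, roughly (simplified sketch; the real call also checks
for PARALLEL and uses a qualified type):

if (target == 0 || ! safe_from_p (target, exp, 1))
target = assign_temp (type, 0, 1, 1);
*/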
6092 /* Subroutine of expand_expr: return rtx if EXP is a
6093 variable or parameter; else return 0. */
6095 static rtx
6096 var_rtx (exp)
6097 tree exp;
6099 STRIP_NOPS (exp);
6100 switch (TREE_CODE (exp))
6102 case PARM_DECL:
6103 case VAR_DECL:
6104 return DECL_RTL (exp);
6105 default:
6106 return 0;
6110 #ifdef MAX_INTEGER_COMPUTATION_MODE
6112 void
6113 check_max_integer_computation_mode (exp)
6114 tree exp;
6116 enum tree_code code;
6117 enum machine_mode mode;
6119 /* Strip any NOPs that don't change the mode. */
6120 STRIP_NOPS (exp);
6121 code = TREE_CODE (exp);
6123 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6124 if (code == NOP_EXPR
6125 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6126 return;
6128 /* First check the type of the overall operation. We need only look at
6129 unary, binary and relational operations. */
6130 if (TREE_CODE_CLASS (code) == '1'
6131 || TREE_CODE_CLASS (code) == '2'
6132 || TREE_CODE_CLASS (code) == '<')
6134 mode = TYPE_MODE (TREE_TYPE (exp));
6135 if (GET_MODE_CLASS (mode) == MODE_INT
6136 && mode > MAX_INTEGER_COMPUTATION_MODE)
6137 internal_error ("unsupported wide integer operation");
6140 /* Check operand of a unary op. */
6141 if (TREE_CODE_CLASS (code) == '1')
6143 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6144 if (GET_MODE_CLASS (mode) == MODE_INT
6145 && mode > MAX_INTEGER_COMPUTATION_MODE)
6146 internal_error ("unsupported wide integer operation");
6149 /* Check operands of a binary/comparison op. */
6150 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6152 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6153 if (GET_MODE_CLASS (mode) == MODE_INT
6154 && mode > MAX_INTEGER_COMPUTATION_MODE)
6155 internal_error ("unsupported wide integer operation");
6157 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6158 if (GET_MODE_CLASS (mode) == MODE_INT
6159 && mode > MAX_INTEGER_COMPUTATION_MODE)
6160 internal_error ("unsupported wide integer operation");
6163 #endif
6165 /* Return the highest power of two that EXP is known to be a multiple of.
6166 This is used in updating alignment of MEMs in array references. */
6168 static HOST_WIDE_INT
6169 highest_pow2_factor (exp)
6170 tree exp;
6172 HOST_WIDE_INT c0, c1;
6174 switch (TREE_CODE (exp))
6176 case INTEGER_CST:
6177 /* We can find the lowest bit that's a one. If the low
6178 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6179 We need to handle this case since we can find it in a COND_EXPR,
6180 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6181 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6182 later ICE. */
6183 if (TREE_CONSTANT_OVERFLOW (exp))
6184 return BIGGEST_ALIGNMENT;
6185 else
6187 /* Note: tree_low_cst is intentionally not used here;
6188 we don't care about the upper bits. */
6189 c0 = TREE_INT_CST_LOW (exp);
6190 c0 &= -c0;
6191 return c0 ? c0 : BIGGEST_ALIGNMENT;
6193 break;
6195 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6196 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6197 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6198 return MIN (c0, c1);
6200 case MULT_EXPR:
6201 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6202 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6203 return c0 * c1;
6205 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6206 case CEIL_DIV_EXPR:
6207 if (integer_pow2p (TREE_OPERAND (exp, 1))
6208 && host_integerp (TREE_OPERAND (exp, 1), 1))
6210 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6211 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6212 return MAX (1, c0 / c1);
6214 break;
6216 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6217 case SAVE_EXPR: case WITH_RECORD_EXPR:
6218 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6220 case COMPOUND_EXPR:
6221 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6223 case COND_EXPR:
6224 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6225 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6226 return MIN (c0, c1);
6228 default:
6229 break;
6232 return 1;
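/* Two worked examples: for the constant 24 this returns 8 (24 & -24), the
largest power of two dividing 24; for a MULT_EXPR such as I * 8, where
nothing is known about I, it returns 1 * 8 = 8. A result of 1 means no
useful alignment information is known. */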
6235 /* Similar, except that it is known that the expression must be a multiple
6236 of the alignment of TYPE. */
6238 static HOST_WIDE_INT
6239 highest_pow2_factor_for_type (type, exp)
6240 tree type;
6241 tree exp;
6243 HOST_WIDE_INT type_align, factor;
6245 factor = highest_pow2_factor (exp);
6246 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6247 return MAX (factor, type_align);
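/* For instance, if EXP itself yields only a factor of 1 but TYPE is known
to be 8-byte aligned (TYPE_ALIGN of 64 bits), this returns 8. */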
6250 /* Return an object on the placeholder list that matches EXP, a
6251 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6252 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6253 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6254 is a location that initially points to a starting position in the
6255 placeholder list (zero means the start of the list); on return it is
6256 set to point to the placeholder list entry at which the object was found. */
6258 tree
6259 find_placeholder (exp, plist)
6260 tree exp;
6261 tree *plist;
6263 tree type = TREE_TYPE (exp);
6264 tree placeholder_expr;
6266 for (placeholder_expr
6267 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6268 placeholder_expr != 0;
6269 placeholder_expr = TREE_CHAIN (placeholder_expr))
6271 tree need_type = TYPE_MAIN_VARIANT (type);
6272 tree elt;
6274 /* Find the outermost reference that is of the type we want. If none,
6275 see if any object has a type that is a pointer to the type we
6276 want. */
6277 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6278 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6279 || TREE_CODE (elt) == COND_EXPR)
6280 ? TREE_OPERAND (elt, 1)
6281 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6282 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6283 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6284 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6285 ? TREE_OPERAND (elt, 0) : 0))
6286 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6288 if (plist)
6289 *plist = placeholder_expr;
6290 return elt;
6293 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6294 elt
6295 = ((TREE_CODE (elt) == COMPOUND_EXPR
6296 || TREE_CODE (elt) == COND_EXPR)
6297 ? TREE_OPERAND (elt, 1)
6298 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6299 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6300 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6301 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6302 ? TREE_OPERAND (elt, 0) : 0))
6303 if (POINTER_TYPE_P (TREE_TYPE (elt))
6304 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6305 == need_type))
6307 if (plist)
6308 *plist = placeholder_expr;
6309 return build1 (INDIRECT_REF, need_type, elt);
6313 return 0;
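/* As an illustrative sketch, the PLACEHOLDER_EXPR case of expand_expr
below uses it roughly like this:

tree placeholder_expr = 0;
tree object = find_placeholder (exp, &placeholder_expr);

and then expands OBJECT in place of the PLACEHOLDER_EXPR, aborting if no
replacement was found. */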
6316 /* expand_expr: generate code for computing expression EXP.
6317 An rtx for the computed value is returned. The value is never null.
6318 In the case of a void EXP, const0_rtx is returned.
6320 The value may be stored in TARGET if TARGET is nonzero.
6321 TARGET is just a suggestion; callers must assume that
6322 the rtx returned may not be the same as TARGET.
6324 If TARGET is CONST0_RTX, it means that the value will be ignored.
6326 If TMODE is not VOIDmode, it suggests generating the
6327 result in mode TMODE. But this is done only when convenient.
6328 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6329 TMODE is just a suggestion; callers must assume that
6330 the rtx returned may not have mode TMODE.
6332 Note that TARGET may have neither TMODE nor MODE. In that case, it
6333 probably will not be used.
6335 If MODIFIER is EXPAND_SUM then when EXP is an addition
6336 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6337 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6338 products as above, or REG or MEM, or constant.
6339 Ordinarily in such cases we would output mul or add instructions
6340 and then return a pseudo reg containing the sum.
6342 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6343 it also marks a label as absolutely required (it can't be dead).
6344 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6345 This is used for outputting expressions used in initializers.
6347 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6348 with a constant address even if that address is not normally legitimate.
6349 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
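/* A minimal usage sketch (EXP here stands for any expression tree): to
compute a value in whatever mode and register is convenient, a caller
writes

rtx val = expand_expr (exp, NULL_RTX, VOIDmode, 0);

and when only the side effects matter, as several places below do,

expand_expr (exp, const0_rtx, VOIDmode, 0); */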
6352 expand_expr (exp, target, tmode, modifier)
6353 tree exp;
6354 rtx target;
6355 enum machine_mode tmode;
6356 enum expand_modifier modifier;
6358 rtx op0, op1, temp;
6359 tree type = TREE_TYPE (exp);
6360 int unsignedp = TREE_UNSIGNED (type);
6361 enum machine_mode mode;
6362 enum tree_code code = TREE_CODE (exp);
6363 optab this_optab;
6364 rtx subtarget, original_target;
6365 int ignore;
6366 tree context;
6368 /* Handle ERROR_MARK before anybody tries to access its type. */
6369 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6371 op0 = CONST0_RTX (tmode);
6372 if (op0 != 0)
6373 return op0;
6374 return const0_rtx;
6377 mode = TYPE_MODE (type);
6378 /* Use subtarget as the target for operand 0 of a binary operation. */
6379 subtarget = get_subtarget (target);
6380 original_target = target;
6381 ignore = (target == const0_rtx
6382 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6383 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6384 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6385 && TREE_CODE (type) == VOID_TYPE));
6387 /* If we are going to ignore this result, we need only do something
6388 if there is a side-effect somewhere in the expression. If there
6389 is, short-circuit the most common cases here. Note that we must
6390 not call expand_expr with anything but const0_rtx in case this
6391 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6393 if (ignore)
6395 if (! TREE_SIDE_EFFECTS (exp))
6396 return const0_rtx;
6398 /* Ensure we reference a volatile object even if value is ignored, but
6399 don't do this if all we are doing is taking its address. */
6400 if (TREE_THIS_VOLATILE (exp)
6401 && TREE_CODE (exp) != FUNCTION_DECL
6402 && mode != VOIDmode && mode != BLKmode
6403 && modifier != EXPAND_CONST_ADDRESS)
6405 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6406 if (GET_CODE (temp) == MEM)
6407 temp = copy_to_reg (temp);
6408 return const0_rtx;
6411 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6412 || code == INDIRECT_REF || code == BUFFER_REF)
6413 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6414 modifier);
6416 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6417 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6419 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6420 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6421 return const0_rtx;
6423 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6424 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6425 /* If the second operand has no side effects, just evaluate
6426 the first. */
6427 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6428 modifier);
6429 else if (code == BIT_FIELD_REF)
6431 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6432 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6433 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6434 return const0_rtx;
6437 target = 0;
6440 #ifdef MAX_INTEGER_COMPUTATION_MODE
6441 /* Only check stuff here if the mode we want is different from the mode
6442 of the expression; if it's the same, check_max_integer_computation_mode
6443 will handle it. Do we really need to check this stuff at all? */
6445 if (target
6446 && GET_MODE (target) != mode
6447 && TREE_CODE (exp) != INTEGER_CST
6448 && TREE_CODE (exp) != PARM_DECL
6449 && TREE_CODE (exp) != ARRAY_REF
6450 && TREE_CODE (exp) != ARRAY_RANGE_REF
6451 && TREE_CODE (exp) != COMPONENT_REF
6452 && TREE_CODE (exp) != BIT_FIELD_REF
6453 && TREE_CODE (exp) != INDIRECT_REF
6454 && TREE_CODE (exp) != CALL_EXPR
6455 && TREE_CODE (exp) != VAR_DECL
6456 && TREE_CODE (exp) != RTL_EXPR)
6458 enum machine_mode mode = GET_MODE (target);
6460 if (GET_MODE_CLASS (mode) == MODE_INT
6461 && mode > MAX_INTEGER_COMPUTATION_MODE)
6462 internal_error ("unsupported wide integer operation");
6465 if (tmode != mode
6466 && TREE_CODE (exp) != INTEGER_CST
6467 && TREE_CODE (exp) != PARM_DECL
6468 && TREE_CODE (exp) != ARRAY_REF
6469 && TREE_CODE (exp) != ARRAY_RANGE_REF
6470 && TREE_CODE (exp) != COMPONENT_REF
6471 && TREE_CODE (exp) != BIT_FIELD_REF
6472 && TREE_CODE (exp) != INDIRECT_REF
6473 && TREE_CODE (exp) != VAR_DECL
6474 && TREE_CODE (exp) != CALL_EXPR
6475 && TREE_CODE (exp) != RTL_EXPR
6476 && GET_MODE_CLASS (tmode) == MODE_INT
6477 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6478 internal_error ("unsupported wide integer operation");
6480 check_max_integer_computation_mode (exp);
6481 #endif
6483 /* If we will do cse, generate all results into pseudo registers
6484 since 1) that allows cse to find more things
6485 and 2) otherwise cse could produce an insn the machine
6486 cannot support. An exception is a CONSTRUCTOR into a multi-word
6487 MEM: that's much more likely to be most efficient into the MEM. */
6489 if (! cse_not_expected && mode != BLKmode && target
6490 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6491 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6492 target = subtarget;
6494 switch (code)
6496 case LABEL_DECL:
6498 tree function = decl_function_context (exp);
6499 /* Handle using a label in a containing function. */
6500 if (function != current_function_decl
6501 && function != inline_function_decl && function != 0)
6503 struct function *p = find_function_data (function);
6504 p->expr->x_forced_labels
6505 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6506 p->expr->x_forced_labels);
6508 else
6510 if (modifier == EXPAND_INITIALIZER)
6511 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6512 label_rtx (exp),
6513 forced_labels);
6516 temp = gen_rtx_MEM (FUNCTION_MODE,
6517 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6518 if (function != current_function_decl
6519 && function != inline_function_decl && function != 0)
6520 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6521 return temp;
6524 case PARM_DECL:
6525 if (DECL_RTL (exp) == 0)
6527 error_with_decl (exp, "prior parameter's size depends on `%s'");
6528 return CONST0_RTX (mode);
6531 /* ... fall through ... */
6533 case VAR_DECL:
6534 /* If a static var's type was incomplete when the decl was written,
6535 but the type is complete now, lay out the decl now. */
6536 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6537 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6539 rtx value = DECL_RTL_IF_SET (exp);
6541 layout_decl (exp, 0);
6543 /* If the RTL was already set, update its mode and memory
6544 attributes. */
6545 if (value != 0)
6547 PUT_MODE (value, DECL_MODE (exp));
6548 SET_DECL_RTL (exp, 0);
6549 set_mem_attributes (value, exp, 1);
6550 SET_DECL_RTL (exp, value);
6554 /* ... fall through ... */
6556 case FUNCTION_DECL:
6557 case RESULT_DECL:
6558 if (DECL_RTL (exp) == 0)
6559 abort ();
6561 /* Ensure the variable is marked as used even if it doesn't go through
6562 a parser. If it hasn't been used yet, write out an external
6563 definition. */
6564 if (! TREE_USED (exp))
6566 assemble_external (exp);
6567 TREE_USED (exp) = 1;
6570 /* Show we haven't gotten RTL for this yet. */
6571 temp = 0;
6573 /* Handle variables inherited from containing functions. */
6574 context = decl_function_context (exp);
6576 /* We treat inline_function_decl as an alias for the current function
6577 because that is the inline function whose vars, types, etc.
6578 are being merged into the current function.
6579 See expand_inline_function. */
6581 if (context != 0 && context != current_function_decl
6582 && context != inline_function_decl
6583 /* If var is static, we don't need a static chain to access it. */
6584 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6585 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6587 rtx addr;
6589 /* Mark as non-local and addressable. */
6590 DECL_NONLOCAL (exp) = 1;
6591 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6592 abort ();
6593 (*lang_hooks.mark_addressable) (exp);
6594 if (GET_CODE (DECL_RTL (exp)) != MEM)
6595 abort ();
6596 addr = XEXP (DECL_RTL (exp), 0);
6597 if (GET_CODE (addr) == MEM)
6598 addr
6599 = replace_equiv_address (addr,
6600 fix_lexical_addr (XEXP (addr, 0), exp));
6601 else
6602 addr = fix_lexical_addr (addr, exp);
6604 temp = replace_equiv_address (DECL_RTL (exp), addr);
6607 /* This is the case of an array whose size is to be determined
6608 from its initializer, while the initializer is still being parsed.
6609 See expand_decl. */
6611 else if (GET_CODE (DECL_RTL (exp)) == MEM
6612 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6613 temp = validize_mem (DECL_RTL (exp));
6615 /* If DECL_RTL is memory, we are in the normal case: if either the
6616 address is not valid, or it is not a register and -fforce-addr is
6617 specified, get the address into a register. */
6619 else if (GET_CODE (DECL_RTL (exp)) == MEM
6620 && modifier != EXPAND_CONST_ADDRESS
6621 && modifier != EXPAND_SUM
6622 && modifier != EXPAND_INITIALIZER
6623 && (! memory_address_p (DECL_MODE (exp),
6624 XEXP (DECL_RTL (exp), 0))
6625 || (flag_force_addr
6626 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6627 temp = replace_equiv_address (DECL_RTL (exp),
6628 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6630 /* If we got something, return it. But first, set the alignment
6631 if the address is a register. */
6632 if (temp != 0)
6634 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6635 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6637 return temp;
6640 /* If the mode of DECL_RTL does not match that of the decl, it
6641 must be a promoted value. We return a SUBREG of the wanted mode,
6642 but mark it so that we know that it was already extended. */
6644 if (GET_CODE (DECL_RTL (exp)) == REG
6645 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6647 /* Get the signedness used for this variable. Ensure we get the
6648 same mode we got when the variable was declared. */
6649 if (GET_MODE (DECL_RTL (exp))
6650 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6651 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6652 abort ();
6654 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6655 SUBREG_PROMOTED_VAR_P (temp) = 1;
6656 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6657 return temp;
6660 return DECL_RTL (exp);
6662 case INTEGER_CST:
6663 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6664 TREE_INT_CST_HIGH (exp), mode);
6666 /* ??? If overflow is set, fold will have done an incomplete job,
6667 which can result in (plus xx (const_int 0)), which can get
6668 simplified by validate_replace_rtx during virtual register
6669 instantiation, which can result in unrecognizable insns.
6670 Avoid this by forcing all overflows into registers. */
6671 if (TREE_CONSTANT_OVERFLOW (exp)
6672 && modifier != EXPAND_INITIALIZER)
6673 temp = force_reg (mode, temp);
6675 return temp;
6677 case CONST_DECL:
6678 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
6680 case REAL_CST:
6681 /* If optimized, generate immediate CONST_DOUBLE
6682 which will be turned into memory by reload if necessary.
6684 We used to force a register so that loop.c could see it. But
6685 this does not allow gen_* patterns to perform optimizations with
6686 the constants. It also produces two insns in cases like "x = 1.0;".
6687 On most machines, floating-point constants are not permitted in
6688 many insns, so we'd end up copying it to a register in any case.
6690 Now, we do the copying in expand_binop, if appropriate. */
6691 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6692 TYPE_MODE (TREE_TYPE (exp)));
6694 case COMPLEX_CST:
6695 case STRING_CST:
6696 if (! TREE_CST_RTL (exp))
6697 output_constant_def (exp, 1);
6699 /* TREE_CST_RTL probably contains a constant address.
6700 On RISC machines where a constant address isn't valid,
6701 make some insns to get that address into a register. */
6702 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6703 && modifier != EXPAND_CONST_ADDRESS
6704 && modifier != EXPAND_INITIALIZER
6705 && modifier != EXPAND_SUM
6706 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6707 || (flag_force_addr
6708 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6709 return replace_equiv_address (TREE_CST_RTL (exp),
6710 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6711 return TREE_CST_RTL (exp);
6713 case EXPR_WITH_FILE_LOCATION:
6715 rtx to_return;
6716 const char *saved_input_filename = input_filename;
6717 int saved_lineno = lineno;
6718 input_filename = EXPR_WFL_FILENAME (exp);
6719 lineno = EXPR_WFL_LINENO (exp);
6720 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6721 emit_line_note (input_filename, lineno);
6722 /* Possibly avoid switching back and forth here. */
6723 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6724 input_filename = saved_input_filename;
6725 lineno = saved_lineno;
6726 return to_return;
6729 case SAVE_EXPR:
6730 context = decl_function_context (exp);
6732 /* If this SAVE_EXPR was at global context, assume we are an
6733 initialization function and move it into our context. */
6734 if (context == 0)
6735 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6737 /* We treat inline_function_decl as an alias for the current function
6738 because that is the inline function whose vars, types, etc.
6739 are being merged into the current function.
6740 See expand_inline_function. */
6741 if (context == current_function_decl || context == inline_function_decl)
6742 context = 0;
6744 /* If this is non-local, handle it. */
6745 if (context)
6747 /* The following call just exists to abort if the context is
6748 not of a containing function. */
6749 find_function_data (context);
6751 temp = SAVE_EXPR_RTL (exp);
6752 if (temp && GET_CODE (temp) == REG)
6754 put_var_into_stack (exp);
6755 temp = SAVE_EXPR_RTL (exp);
6757 if (temp == 0 || GET_CODE (temp) != MEM)
6758 abort ();
6759 return
6760 replace_equiv_address (temp,
6761 fix_lexical_addr (XEXP (temp, 0), exp));
6763 if (SAVE_EXPR_RTL (exp) == 0)
6765 if (mode == VOIDmode)
6766 temp = const0_rtx;
6767 else
6768 temp = assign_temp (build_qualified_type (type,
6769 (TYPE_QUALS (type)
6770 | TYPE_QUAL_CONST)),
6771 3, 0, 0);
6773 SAVE_EXPR_RTL (exp) = temp;
6774 if (!optimize && GET_CODE (temp) == REG)
6775 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6776 save_expr_regs);
6778 /* If the mode of TEMP does not match that of the expression, it
6779 must be a promoted value. We pass store_expr a SUBREG of the
6780 wanted mode but mark it so that we know that it was already
6781 extended. Note that `unsignedp' was modified above in
6782 this case. */
6784 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6786 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6787 SUBREG_PROMOTED_VAR_P (temp) = 1;
6788 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6791 if (temp == const0_rtx)
6792 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6793 else
6794 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6796 TREE_USED (exp) = 1;
6799 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6800 must be a promoted value. We return a SUBREG of the wanted mode,
6801 but mark it so that we know that it was already extended. */
6803 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6804 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6806 /* Compute the signedness and make the proper SUBREG. */
6807 promote_mode (type, mode, &unsignedp, 0);
6808 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6809 SUBREG_PROMOTED_VAR_P (temp) = 1;
6810 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6811 return temp;
6814 return SAVE_EXPR_RTL (exp);
6816 case UNSAVE_EXPR:
6818 rtx temp;
6819 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6820 TREE_OPERAND (exp, 0)
6821 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6822 return temp;
6825 case PLACEHOLDER_EXPR:
6827 tree old_list = placeholder_list;
6828 tree placeholder_expr = 0;
6830 exp = find_placeholder (exp, &placeholder_expr);
6831 if (exp == 0)
6832 abort ();
6834 placeholder_list = TREE_CHAIN (placeholder_expr);
6835 temp = expand_expr (exp, original_target, tmode, modifier);
6836 placeholder_list = old_list;
6837 return temp;
6840 case WITH_RECORD_EXPR:
6841 /* Put the object on the placeholder list, expand our first operand,
6842 and pop the list. */
6843 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6844 placeholder_list);
6845 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6846 modifier);
6847 placeholder_list = TREE_CHAIN (placeholder_list);
6848 return target;
6850 case GOTO_EXPR:
6851 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6852 expand_goto (TREE_OPERAND (exp, 0));
6853 else
6854 expand_computed_goto (TREE_OPERAND (exp, 0));
6855 return const0_rtx;
6857 case EXIT_EXPR:
6858 expand_exit_loop_if_false (NULL,
6859 invert_truthvalue (TREE_OPERAND (exp, 0)));
6860 return const0_rtx;
6862 case LABELED_BLOCK_EXPR:
6863 if (LABELED_BLOCK_BODY (exp))
6864 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6865 /* Should perhaps use expand_label, but this is simpler and safer. */
6866 do_pending_stack_adjust ();
6867 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6868 return const0_rtx;
6870 case EXIT_BLOCK_EXPR:
6871 if (EXIT_BLOCK_RETURN (exp))
6872 sorry ("returned value in block_exit_expr");
6873 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6874 return const0_rtx;
6876 case LOOP_EXPR:
6877 push_temp_slots ();
6878 expand_start_loop (1);
6879 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6880 expand_end_loop ();
6881 pop_temp_slots ();
6883 return const0_rtx;
6885 case BIND_EXPR:
6887 tree vars = TREE_OPERAND (exp, 0);
6888 int vars_need_expansion = 0;
6890 /* Need to open a binding contour here because
6891 if there are any cleanups they must be contained here. */
6892 expand_start_bindings (2);
6894 /* Mark the corresponding BLOCK for output in its proper place. */
6895 if (TREE_OPERAND (exp, 2) != 0
6896 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6897 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6899 /* If VARS have not yet been expanded, expand them now. */
6900 while (vars)
6902 if (!DECL_RTL_SET_P (vars))
6904 vars_need_expansion = 1;
6905 expand_decl (vars);
6907 expand_decl_init (vars);
6908 vars = TREE_CHAIN (vars);
6911 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6913 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6915 return temp;
6918 case RTL_EXPR:
6919 if (RTL_EXPR_SEQUENCE (exp))
6921 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6922 abort ();
6923 emit_insn (RTL_EXPR_SEQUENCE (exp));
6924 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6926 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6927 free_temps_for_rtl_expr (exp);
6928 return RTL_EXPR_RTL (exp);
6930 case CONSTRUCTOR:
6931 /* If we don't need the result, just ensure we evaluate any
6932 subexpressions. */
6933 if (ignore)
6935 tree elt;
6937 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6938 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6940 return const0_rtx;
6943 /* All elts simple constants => refer to a constant in memory. But
6944 if this is a non-BLKmode mode, let it store a field at a time
6945 since that should make a CONST_INT or CONST_DOUBLE when we
6946 fold. Likewise, if we have a target we can use, it is best to
6947 store directly into the target unless the type is large enough
6948 that memcpy will be used. If we are making an initializer and
6949 all operands are constant, put it in memory as well.
6951 FIXME: Avoid trying to fill vector constructors piece-meal.
6952 Output them with output_constant_def below unless we're sure
6953 they're zeros. This should go away when vector initializers
6954 are treated like VECTOR_CST instead of arrays.
6956 else if ((TREE_STATIC (exp)
6957 && ((mode == BLKmode
6958 && ! (target != 0 && safe_from_p (target, exp, 1)))
6959 || TREE_ADDRESSABLE (exp)
6960 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6961 && (! MOVE_BY_PIECES_P
6962 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6963 TYPE_ALIGN (type)))
6964 && ((TREE_CODE (type) == VECTOR_TYPE
6965 && !is_zeros_p (exp))
6966 || ! mostly_zeros_p (exp)))))
6967 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6969 rtx constructor = output_constant_def (exp, 1);
6971 if (modifier != EXPAND_CONST_ADDRESS
6972 && modifier != EXPAND_INITIALIZER
6973 && modifier != EXPAND_SUM)
6974 constructor = validize_mem (constructor);
6976 return constructor;
6978 else
6980 /* Handle calls that pass values in multiple non-contiguous
6981 locations. The Irix 6 ABI has examples of this. */
6982 if (target == 0 || ! safe_from_p (target, exp, 1)
6983 || GET_CODE (target) == PARALLEL)
6984 target
6985 = assign_temp (build_qualified_type (type,
6986 (TYPE_QUALS (type)
6987 | (TREE_READONLY (exp)
6988 * TYPE_QUAL_CONST))),
6989 0, TREE_ADDRESSABLE (exp), 1);
6991 store_constructor (exp, target, 0, int_expr_size (exp));
6992 return target;
6995 case INDIRECT_REF:
6997 tree exp1 = TREE_OPERAND (exp, 0);
6998 tree index;
6999 tree string = string_constant (exp1, &index);
7001 /* Try to optimize reads from const strings. */
7002 if (string
7003 && TREE_CODE (string) == STRING_CST
7004 && TREE_CODE (index) == INTEGER_CST
7005 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7006 && GET_MODE_CLASS (mode) == MODE_INT
7007 && GET_MODE_SIZE (mode) == 1
7008 && modifier != EXPAND_WRITE)
7009 return gen_int_mode (TREE_STRING_POINTER (string)
7010 [TREE_INT_CST_LOW (index)], mode);
7012 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7013 op0 = memory_address (mode, op0);
7014 temp = gen_rtx_MEM (mode, op0);
7015 set_mem_attributes (temp, exp, 0);
7017 /* If we are writing to this object and its type is a record with
7018 readonly fields, we must mark it as readonly so it will
7019 conflict with readonly references to those fields. */
7020 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7021 RTX_UNCHANGING_P (temp) = 1;
7023 return temp;
7026 case ARRAY_REF:
7027 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7028 abort ();
7031 tree array = TREE_OPERAND (exp, 0);
7032 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7033 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7034 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7035 HOST_WIDE_INT i;
7037 /* Optimize the special-case of a zero lower bound.
7039 We convert the low_bound to sizetype to avoid some problems
7040 with constant folding. (E.g. suppose the lower bound is 1,
7041 and its mode is QI. Without the conversion, (ARRAY
7042 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7043 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7045 if (! integer_zerop (low_bound))
7046 index = size_diffop (index, convert (sizetype, low_bound));
7048 /* Fold an expression like: "foo"[2].
7049 This is not done in fold so it won't happen inside &.
7050 Don't fold if this is for wide characters since it's too
7051 difficult to do correctly and this is a very rare case. */
7053 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7054 && TREE_CODE (array) == STRING_CST
7055 && TREE_CODE (index) == INTEGER_CST
7056 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7057 && GET_MODE_CLASS (mode) == MODE_INT
7058 && GET_MODE_SIZE (mode) == 1)
7059 return gen_int_mode (TREE_STRING_POINTER (array)
7060 [TREE_INT_CST_LOW (index)], mode);
7062 /* If this is a constant index into a constant array,
7063 just get the value from the array. Handle both the cases when
7064 we have an explicit constructor and when our operand is a variable
7065 that was declared const. */
7067 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7068 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
7069 && TREE_CODE (index) == INTEGER_CST
7070 && 0 > compare_tree_int (index,
7071 list_length (CONSTRUCTOR_ELTS
7072 (TREE_OPERAND (exp, 0)))))
7074 tree elem;
7076 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7077 i = TREE_INT_CST_LOW (index);
7078 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7081 if (elem)
7082 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7083 modifier);
7086 else if (optimize >= 1
7087 && modifier != EXPAND_CONST_ADDRESS
7088 && modifier != EXPAND_INITIALIZER
7089 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7090 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7091 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7093 if (TREE_CODE (index) == INTEGER_CST)
7095 tree init = DECL_INITIAL (array);
7097 if (TREE_CODE (init) == CONSTRUCTOR)
7099 tree elem;
7101 for (elem = CONSTRUCTOR_ELTS (init);
7102 (elem
7103 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7104 elem = TREE_CHAIN (elem))
7107 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7108 return expand_expr (fold (TREE_VALUE (elem)), target,
7109 tmode, modifier);
7111 else if (TREE_CODE (init) == STRING_CST
7112 && 0 > compare_tree_int (index,
7113 TREE_STRING_LENGTH (init)))
7115 tree type = TREE_TYPE (TREE_TYPE (init));
7116 enum machine_mode mode = TYPE_MODE (type);
7118 if (GET_MODE_CLASS (mode) == MODE_INT
7119 && GET_MODE_SIZE (mode) == 1)
7120 return gen_int_mode (TREE_STRING_POINTER (init)
7121 [TREE_INT_CST_LOW (index)], mode);
7126 /* Fall through. */
7128 case COMPONENT_REF:
7129 case BIT_FIELD_REF:
7130 case ARRAY_RANGE_REF:
7131 /* If the operand is a CONSTRUCTOR, we can just extract the
7132 appropriate field if it is present. Don't do this if we have
7133 already written the data since we want to refer to that copy
7134 and varasm.c assumes that's what we'll do. */
7135 if (code == COMPONENT_REF
7136 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
7137 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
7139 tree elt;
7141 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7142 elt = TREE_CHAIN (elt))
7143 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7144 /* We can normally use the value of the field in the
7145 CONSTRUCTOR. However, if this is a bitfield in
7146 an integral mode that we can fit in a HOST_WIDE_INT,
7147 we must mask only the number of bits in the bitfield,
7148 since this is done implicitly by the constructor. If
7149 the bitfield does not meet either of those conditions,
7150 we can't do this optimization. */
7151 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7152 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7153 == MODE_INT)
7154 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7155 <= HOST_BITS_PER_WIDE_INT))))
7157 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7158 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7160 HOST_WIDE_INT bitsize
7161 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7162 enum machine_mode imode
7163 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7165 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7167 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7168 op0 = expand_and (imode, op0, op1, target);
7170 else
7172 tree count
7173 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7174 0);
7176 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7177 target, 0);
7178 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7179 target, 0);
7183 return op0;
7188 enum machine_mode mode1;
7189 HOST_WIDE_INT bitsize, bitpos;
7190 tree offset;
7191 int volatilep = 0;
7192 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7193 &mode1, &unsignedp, &volatilep);
7194 rtx orig_op0;
7196 /* If we got back the original object, something is wrong. Perhaps
7197 we are evaluating an expression too early. In any event, don't
7198 infinitely recurse. */
7199 if (tem == exp)
7200 abort ();
7202 /* If TEM's type is a union of variable size, pass TARGET to the inner
7203 computation, since it will need a temporary and TARGET is known
7204 to suffice. This occurs in unchecked conversion in Ada. */
7206 orig_op0 = op0
7207 = expand_expr (tem,
7208 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7209 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7210 != INTEGER_CST)
7211 ? target : NULL_RTX),
7212 VOIDmode,
7213 (modifier == EXPAND_INITIALIZER
7214 || modifier == EXPAND_CONST_ADDRESS)
7215 ? modifier : EXPAND_NORMAL);
7217 /* If this is a constant, put it into a register if it is a
7218 legitimate constant and OFFSET is 0, and into memory if it isn't. */
7219 if (CONSTANT_P (op0))
7221 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7222 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7223 && offset == 0)
7224 op0 = force_reg (mode, op0);
7225 else
7226 op0 = validize_mem (force_const_mem (mode, op0));
7229 if (offset != 0)
7231 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
7233 /* If this object is in a register, put it into memory.
7234 This case can't occur in C, but can in Ada if we have
7235 unchecked conversion of an expression from a scalar type to
7236 an array or record type. */
7237 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7238 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7240 /* If the operand is a SAVE_EXPR, we can deal with this by
7241 forcing the SAVE_EXPR into memory. */
7242 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7244 put_var_into_stack (TREE_OPERAND (exp, 0));
7245 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7247 else
7249 tree nt
7250 = build_qualified_type (TREE_TYPE (tem),
7251 (TYPE_QUALS (TREE_TYPE (tem))
7252 | TYPE_QUAL_CONST));
7253 rtx memloc = assign_temp (nt, 1, 1, 1);
7255 emit_move_insn (memloc, op0);
7256 op0 = memloc;
7260 if (GET_CODE (op0) != MEM)
7261 abort ();
7263 #ifdef POINTERS_EXTEND_UNSIGNED
7264 if (GET_MODE (offset_rtx) != Pmode)
7265 offset_rtx = convert_memory_address (Pmode, offset_rtx);
7266 #else
7267 if (GET_MODE (offset_rtx) != ptr_mode)
7268 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7269 #endif
7271 /* A constant address in OP0 can have VOIDmode; we must not try
7272 to call force_reg in that case, so avoid it. */
7273 if (GET_CODE (op0) == MEM
7274 && GET_MODE (op0) == BLKmode
7275 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7276 && bitsize != 0
7277 && (bitpos % bitsize) == 0
7278 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7279 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7281 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7282 bitpos = 0;
7285 op0 = offset_address (op0, offset_rtx,
7286 highest_pow2_factor (offset));
7289 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7290 record its alignment as BIGGEST_ALIGNMENT. */
7291 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7292 && is_aligning_offset (offset, tem))
7293 set_mem_align (op0, BIGGEST_ALIGNMENT);
7295 /* Don't forget about volatility even if this is a bitfield. */
7296 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7298 if (op0 == orig_op0)
7299 op0 = copy_rtx (op0);
7301 MEM_VOLATILE_P (op0) = 1;
7304 /* The following code doesn't handle CONCAT.
7305 Assume only bitpos == 0 can be used for CONCAT, due to
7306 one-element arrays having the same mode as their element. */
7307 if (GET_CODE (op0) == CONCAT)
7309 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7310 abort ();
7311 return op0;
7314 /* In cases where an aligned union has an unaligned object
7315 as a field, we might be extracting a BLKmode value from
7316 an integer-mode (e.g., SImode) object. Handle this case
7317 by doing the extract into an object as wide as the field
7318 (which we know to be the width of a basic mode), then
7319 storing into memory, and changing the mode to BLKmode. */
7320 if (mode1 == VOIDmode
7321 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7322 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7323 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7324 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7325 && modifier != EXPAND_CONST_ADDRESS
7326 && modifier != EXPAND_INITIALIZER)
7327 /* If the field isn't aligned enough to fetch as a memref,
7328 fetch it as a bit field. */
7329 || (mode1 != BLKmode
7330 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7331 && ((TYPE_ALIGN (TREE_TYPE (tem))
7332 < GET_MODE_ALIGNMENT (mode))
7333 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7334 /* If the type and the field are a constant size and the
7335 size of the type isn't the same size as the bitfield,
7336 we must use bitfield operations. */
7337 || (bitsize >= 0
7338 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7339 == INTEGER_CST)
7340 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7341 bitsize)))
7343 enum machine_mode ext_mode = mode;
7345 if (ext_mode == BLKmode
7346 && ! (target != 0 && GET_CODE (op0) == MEM
7347 && GET_CODE (target) == MEM
7348 && bitpos % BITS_PER_UNIT == 0))
7349 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7351 if (ext_mode == BLKmode)
7353 /* In this case, BITPOS must start at a byte boundary and
7354 TARGET, if specified, must be a MEM. */
7355 if (GET_CODE (op0) != MEM
7356 || (target != 0 && GET_CODE (target) != MEM)
7357 || bitpos % BITS_PER_UNIT != 0)
7358 abort ();
7360 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7361 if (target == 0)
7362 target = assign_temp (type, 0, 1, 1);
7364 emit_block_move (target, op0,
7365 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7366 / BITS_PER_UNIT),
7367 BLOCK_OP_NORMAL);
7369 return target;
7372 op0 = validize_mem (op0);
7374 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7375 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7377 op0 = extract_bit_field (op0, bitsize, bitpos,
7378 unsignedp, target, ext_mode, ext_mode,
7379 int_size_in_bytes (TREE_TYPE (tem)));
7381 /* If the result is a record type and BITSIZE is narrower than
7382 the mode of OP0, an integral mode, and this is a big endian
7383 machine, we must put the field into the high-order bits. */
7384 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7385 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7386 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7387 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7388 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7389 - bitsize),
7390 op0, 1);
7392 if (mode == BLKmode)
7394 rtx new = assign_temp (build_qualified_type
7395 ((*lang_hooks.types.type_for_mode)
7396 (ext_mode, 0),
7397 TYPE_QUAL_CONST), 0, 1, 1);
7399 emit_move_insn (new, op0);
7400 op0 = copy_rtx (new);
7401 PUT_MODE (op0, BLKmode);
7402 set_mem_attributes (op0, exp, 1);
7405 return op0;
7408 /* If the result is BLKmode, use that to access the object
7409 now as well. */
7410 if (mode == BLKmode)
7411 mode1 = BLKmode;
7413 /* Get a reference to just this component. */
7414 if (modifier == EXPAND_CONST_ADDRESS
7415 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7416 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7417 else
7418 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7420 if (op0 == orig_op0)
7421 op0 = copy_rtx (op0);
7423 set_mem_attributes (op0, exp, 0);
7424 if (GET_CODE (XEXP (op0, 0)) == REG)
7425 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7427 MEM_VOLATILE_P (op0) |= volatilep;
7428 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7429 || modifier == EXPAND_CONST_ADDRESS
7430 || modifier == EXPAND_INITIALIZER)
7431 return op0;
7432 else if (target == 0)
7433 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7435 convert_move (target, op0, unsignedp);
7436 return target;
7439 case VTABLE_REF:
7441 rtx insn, before = get_last_insn (), vtbl_ref;
7443 /* Evaluate the interior expression. */
7444 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7445 tmode, modifier);
7447 /* Get or create an instruction off which to hang a note. */
7448 if (REG_P (subtarget))
7450 target = subtarget;
7451 insn = get_last_insn ();
7452 if (insn == before)
7453 abort ();
7454 if (! INSN_P (insn))
7455 insn = prev_nonnote_insn (insn);
7457 else
7459 target = gen_reg_rtx (GET_MODE (subtarget));
7460 insn = emit_move_insn (target, subtarget);
7463 /* Collect the data for the note. */
7464 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7465 vtbl_ref = plus_constant (vtbl_ref,
7466 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7467 /* Discard the initial CONST that was added. */
7468 vtbl_ref = XEXP (vtbl_ref, 0);
7470 REG_NOTES (insn)
7471 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7473 return target;
7476 /* Intended for a reference to a buffer of a file-object in Pascal.
7477 But it's not certain that a special tree code will really be
7478 necessary for these. INDIRECT_REF might work for them. */
7479 case BUFFER_REF:
7480 abort ();
7482 case IN_EXPR:
7484 /* Pascal set IN expression.
7486 Algorithm:
7487 rlo = set_low - (set_low%bits_per_word);
7488 the_word = set [ (index - rlo)/bits_per_word ];
7489 bit_index = index % bits_per_word;
7490 bitmask = 1 << bit_index;
7491 return !!(the_word & bitmask); */
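/* A worked instance of the algorithm above, with assumed constants
   that are only illustrative: for a set whose low bound is 10 and an
   index of 21, using 8-bit units,
     rlo       = 10 - (10 % 8) = 8
     the_word  = set [(21 - 8) / 8] = set [1]
     bit_index = 21 % 8        = 5
     bitmask   = 1 << 5        = 0x20
   so the test examines bit 5 of the second byte of the set.  */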
7493 tree set = TREE_OPERAND (exp, 0);
7494 tree index = TREE_OPERAND (exp, 1);
7495 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7496 tree set_type = TREE_TYPE (set);
7497 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7498 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7499 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7500 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7501 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7502 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7503 rtx setaddr = XEXP (setval, 0);
7504 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7505 rtx rlow;
7506 rtx diff, quo, rem, addr, bit, result;
7508 /* If domain is empty, answer is no. Likewise if index is constant
7509 and out of bounds. */
7510 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7511 && TREE_CODE (set_low_bound) == INTEGER_CST
7512 && tree_int_cst_lt (set_high_bound, set_low_bound))
7513 || (TREE_CODE (index) == INTEGER_CST
7514 && TREE_CODE (set_low_bound) == INTEGER_CST
7515 && tree_int_cst_lt (index, set_low_bound))
7516 || (TREE_CODE (set_high_bound) == INTEGER_CST
7517 && TREE_CODE (index) == INTEGER_CST
7518 && tree_int_cst_lt (set_high_bound, index))))
7519 return const0_rtx;
7521 if (target == 0)
7522 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7524 /* If we get here, we have to generate the code for both cases
7525 (in range and out of range). */
7527 op0 = gen_label_rtx ();
7528 op1 = gen_label_rtx ();
7530 if (! (GET_CODE (index_val) == CONST_INT
7531 && GET_CODE (lo_r) == CONST_INT))
7532 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7533 GET_MODE (index_val), iunsignedp, op1);
7535 if (! (GET_CODE (index_val) == CONST_INT
7536 && GET_CODE (hi_r) == CONST_INT))
7537 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7538 GET_MODE (index_val), iunsignedp, op1);
7540 /* Calculate the element number of bit zero in the first word
7541 of the set. */
7542 if (GET_CODE (lo_r) == CONST_INT)
7543 rlow = GEN_INT (INTVAL (lo_r)
7544 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7545 else
7546 rlow = expand_binop (index_mode, and_optab, lo_r,
7547 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7548 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7550 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7551 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7553 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7554 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7555 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7556 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7558 addr = memory_address (byte_mode,
7559 expand_binop (index_mode, add_optab, diff,
7560 setaddr, NULL_RTX, iunsignedp,
7561 OPTAB_LIB_WIDEN));
7563 /* Extract the bit we want to examine. */
7564 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7565 gen_rtx_MEM (byte_mode, addr),
7566 make_tree (TREE_TYPE (index), rem),
7567 NULL_RTX, 1);
7568 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7569 GET_MODE (target) == byte_mode ? target : 0,
7570 1, OPTAB_LIB_WIDEN);
7572 if (result != target)
7573 convert_move (target, result, 1);
7575 /* Output the code to handle the out-of-range case. */
7576 emit_jump (op0);
7577 emit_label (op1);
7578 emit_move_insn (target, const0_rtx);
7579 emit_label (op0);
7580 return target;
7583 case WITH_CLEANUP_EXPR:
7584 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7586 WITH_CLEANUP_EXPR_RTL (exp)
7587 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7588 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7589 CLEANUP_EH_ONLY (exp));
7591 /* That's it for this cleanup. */
7592 TREE_OPERAND (exp, 1) = 0;
7594 return WITH_CLEANUP_EXPR_RTL (exp);
7596 case CLEANUP_POINT_EXPR:
7598 /* Start a new binding layer that will keep track of all cleanup
7599 actions to be performed. */
7600 expand_start_bindings (2);
7602 target_temp_slot_level = temp_slot_level;
7604 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7605 /* If we're going to use this value, load it up now. */
7606 if (! ignore)
7607 op0 = force_not_mem (op0);
7608 preserve_temp_slots (op0);
7609 expand_end_bindings (NULL_TREE, 0, 0);
7611 return op0;
7613 case CALL_EXPR:
7614 /* Check for a built-in function. */
7615 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7616 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7617 == FUNCTION_DECL)
7618 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7620 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7621 == BUILT_IN_FRONTEND)
7622 return (*lang_hooks.expand_expr)
7623 (exp, original_target, tmode, modifier);
7624 else
7625 return expand_builtin (exp, target, subtarget, tmode, ignore);
7628 return expand_call (exp, target, ignore);
7630 case NON_LVALUE_EXPR:
7631 case NOP_EXPR:
7632 case CONVERT_EXPR:
7633 case REFERENCE_EXPR:
7634 if (TREE_OPERAND (exp, 0) == error_mark_node)
7635 return const0_rtx;
7637 if (TREE_CODE (type) == UNION_TYPE)
7639 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7641 /* If both input and output are BLKmode, this conversion isn't doing
7642 anything except possibly changing memory attributes. */
7643 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7645 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7646 modifier);
7648 result = copy_rtx (result);
7649 set_mem_attributes (result, exp, 0);
7650 return result;
7653 if (target == 0)
7654 target = assign_temp (type, 0, 1, 1);
7656 if (GET_CODE (target) == MEM)
7657 /* Store data into beginning of memory target. */
7658 store_expr (TREE_OPERAND (exp, 0),
7659 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7661 else if (GET_CODE (target) == REG)
7662 /* Store this field into a union of the proper type. */
7663 store_field (target,
7664 MIN ((int_size_in_bytes (TREE_TYPE
7665 (TREE_OPERAND (exp, 0)))
7666 * BITS_PER_UNIT),
7667 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7668 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7669 VOIDmode, 0, type, 0);
7670 else
7671 abort ();
7673 /* Return the entire union. */
7674 return target;
7677 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7679 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7680 modifier);
7682 /* If the signedness of the conversion differs and OP0 is
7683 a promoted SUBREG, clear that indication since we now
7684 have to do the proper extension. */
7685 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7686 && GET_CODE (op0) == SUBREG)
7687 SUBREG_PROMOTED_VAR_P (op0) = 0;
7689 return op0;
7692 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7693 if (GET_MODE (op0) == mode)
7694 return op0;
7696 /* If OP0 is a constant, just convert it into the proper mode. */
7697 if (CONSTANT_P (op0))
7699 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7700 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7702 if (modifier == EXPAND_INITIALIZER)
7703 return simplify_gen_subreg (mode, op0, inner_mode,
7704 subreg_lowpart_offset (mode,
7705 inner_mode));
7706 else
7707 return convert_modes (mode, inner_mode, op0,
7708 TREE_UNSIGNED (inner_type));
7711 if (modifier == EXPAND_INITIALIZER)
7712 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7714 if (target == 0)
7715 return
7716 convert_to_mode (mode, op0,
7717 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7718 else
7719 convert_move (target, op0,
7720 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7721 return target;
7723 case VIEW_CONVERT_EXPR:
7724 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7726 /* If the input and output modes are both the same, we are done.
7727 Otherwise, if neither mode is BLKmode and both are within a word, we
7728 can use gen_lowpart. If neither is true, make sure the operand is
7729 in memory and convert the MEM to the new mode. */
7730 if (TYPE_MODE (type) == GET_MODE (op0))
7732 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7733 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7734 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7735 op0 = gen_lowpart (TYPE_MODE (type), op0);
7736 else if (GET_CODE (op0) != MEM)
7738 /* If the operand is not a MEM, force it into memory. Since we
7739 are going to be changing the mode of the MEM, don't call
7740 force_const_mem for constants because we don't allow pool
7741 constants to change mode. */
7742 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7744 if (TREE_ADDRESSABLE (exp))
7745 abort ();
7747 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7748 target
7749 = assign_stack_temp_for_type
7750 (TYPE_MODE (inner_type),
7751 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7753 emit_move_insn (target, op0);
7754 op0 = target;
7757 /* At this point, OP0 is in the correct mode. If the output type is such
7758 that the operand is known to be aligned, indicate that it is.
7759 Otherwise, we need only be concerned about alignment for non-BLKmode
7760 results. */
7761 if (GET_CODE (op0) == MEM)
7763 op0 = copy_rtx (op0);
7765 if (TYPE_ALIGN_OK (type))
7766 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7767 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7768 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7770 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7771 HOST_WIDE_INT temp_size
7772 = MAX (int_size_in_bytes (inner_type),
7773 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7774 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7775 temp_size, 0, type);
7776 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7778 if (TREE_ADDRESSABLE (exp))
7779 abort ();
7781 if (GET_MODE (op0) == BLKmode)
7782 emit_block_move (new_with_op0_mode, op0,
7783 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7784 BLOCK_OP_NORMAL);
7785 else
7786 emit_move_insn (new_with_op0_mode, op0);
7788 op0 = new;
7791 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7794 return op0;
7796 case PLUS_EXPR:
7797 /* We come here from MINUS_EXPR when the second operand is a
7798 constant. */
7799 plus_expr:
7800 this_optab = ! unsignedp && flag_trapv
7801 && (GET_MODE_CLASS (mode) == MODE_INT)
7802 ? addv_optab : add_optab;
7804 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7805 something else, make sure we add the register to the constant and
7806 then to the other thing. This case can occur during strength
7807 reduction and doing it this way will produce better code if the
7808 frame pointer or argument pointer is eliminated.
7810 fold-const.c will ensure that the constant is always in the inner
7811 PLUS_EXPR, so the only case we need to do anything about is if
7812 sp, ap, or fp is our second argument, in which case we must swap
7813 the innermost first argument and our second argument. */
7815 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7816 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7817 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7818 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7819 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7820 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7822 tree t = TREE_OPERAND (exp, 1);
7824 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7825 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7828 /* If the result is to be ptr_mode and we are adding an integer to
7829 something, we might be forming a constant. So try to use
7830 plus_constant. If it produces a sum and we can't accept it,
7831 use force_operand. This allows P = &ARR[const] to generate
7832 efficient code on machines where a SYMBOL_REF is not a valid
7833 address.
7835 If this is an EXPAND_SUM call, always return the sum. */
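/* Illustrative example (the array and element size are assumptions,
   not taken from this file): expanding "&arr[9]" for a global array
   of 4-byte elements can yield (plus (symbol_ref "arr") (const_int 36))
   directly via plus_constant; force_operand is only needed when such
   a sum is not itself acceptable as an address.  */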
7836 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7837 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7839 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7840 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7841 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7843 rtx constant_part;
7845 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7846 EXPAND_SUM);
7847 /* Use immed_double_const to ensure that the constant is
7848 truncated according to the mode of OP1, then sign extended
7849 to a HOST_WIDE_INT. Using the constant directly can result
7850 in non-canonical RTL in a 64x32 cross compile. */
7851 constant_part
7852 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7853 (HOST_WIDE_INT) 0,
7854 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7855 op1 = plus_constant (op1, INTVAL (constant_part));
7856 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7857 op1 = force_operand (op1, target);
7858 return op1;
7861 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7862 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7863 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7865 rtx constant_part;
7867 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7868 (modifier == EXPAND_INITIALIZER
7869 ? EXPAND_INITIALIZER : EXPAND_SUM));
7870 if (! CONSTANT_P (op0))
7872 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7873 VOIDmode, modifier);
7874 /* Don't go to both_summands if modifier
7875 says it's not right to return a PLUS. */
7876 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7877 goto binop2;
7878 goto both_summands;
7880 /* Use immed_double_const to ensure that the constant is
7881 truncated according to the mode of OP0, then sign extended
7882 to a HOST_WIDE_INT. Using the constant directly can result
7883 in non-canonical RTL in a 64x32 cross compile. */
7884 constant_part
7885 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7886 (HOST_WIDE_INT) 0,
7887 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7888 op0 = plus_constant (op0, INTVAL (constant_part));
7889 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7890 op0 = force_operand (op0, target);
7891 return op0;
7895 /* No sense saving up arithmetic to be done
7896 if it's all in the wrong mode to form part of an address.
7897 And force_operand won't know whether to sign-extend or
7898 zero-extend. */
7899 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7900 || mode != ptr_mode)
7901 goto binop;
7903 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7904 subtarget = 0;
7906 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7907 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
7909 both_summands:
7910 /* Make sure any term that's a sum with a constant comes last. */
7911 if (GET_CODE (op0) == PLUS
7912 && CONSTANT_P (XEXP (op0, 1)))
7914 temp = op0;
7915 op0 = op1;
7916 op1 = temp;
7918 /* If adding to a sum including a constant,
7919 associate it to put the constant outside. */
7920 if (GET_CODE (op1) == PLUS
7921 && CONSTANT_P (XEXP (op1, 1)))
7923 rtx constant_term = const0_rtx;
7925 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7926 if (temp != 0)
7927 op0 = temp;
7928 /* Ensure that MULT comes first if there is one. */
7929 else if (GET_CODE (op0) == MULT)
7930 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7931 else
7932 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7934 /* Let's also eliminate constants from op0 if possible. */
7935 op0 = eliminate_constant_term (op0, &constant_term);
7937 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7938 their sum should be a constant. Form it into OP1, since the
7939 result we want will then be OP0 + OP1. */
7941 temp = simplify_binary_operation (PLUS, mode, constant_term,
7942 XEXP (op1, 1));
7943 if (temp != 0)
7944 op1 = temp;
7945 else
7946 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7949 /* Put a constant term last and put a multiplication first. */
7950 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7951 temp = op1, op1 = op0, op0 = temp;
7953 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7954 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7956 case MINUS_EXPR:
7957 /* For initializers, we are allowed to return a MINUS of two
7958 symbolic constants. Here we handle all cases when both operands
7959 are constant. */
7960 /* Handle difference of two symbolic constants,
7961 for the sake of an initializer. */
7962 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7963 && really_constant_p (TREE_OPERAND (exp, 0))
7964 && really_constant_p (TREE_OPERAND (exp, 1)))
7966 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
7967 modifier);
7968 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
7969 modifier);
7971 /* If the last operand is a CONST_INT, use plus_constant of
7972 the negated constant. Else make the MINUS. */
7973 if (GET_CODE (op1) == CONST_INT)
7974 return plus_constant (op0, - INTVAL (op1));
7975 else
7976 return gen_rtx_MINUS (mode, op0, op1);
7978 /* Convert A - const to A + (-const). */
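/* Illustrative example: "x - 7" is rebuilt here as "x + (-7)" so that
   the PLUS_EXPR code above, including its plus_constant handling, is
   reused for subtraction of a constant.  */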
7979 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7981 tree negated = fold (build1 (NEGATE_EXPR, type,
7982 TREE_OPERAND (exp, 1)));
7984 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7985 /* If we can't negate the constant in TYPE, leave it alone and
7986 expand_binop will negate it for us. We used to try to do it
7987 here in the signed version of TYPE, but that doesn't work
7988 on POINTER_TYPEs. */;
7989 else
7991 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7992 goto plus_expr;
7995 this_optab = ! unsignedp && flag_trapv
7996 && (GET_MODE_CLASS(mode) == MODE_INT)
7997 ? subv_optab : sub_optab;
7998 goto binop;
8000 case MULT_EXPR:
8001 /* If first operand is constant, swap them.
8002 Thus the following special case checks need only
8003 check the second operand. */
8004 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8006 tree t1 = TREE_OPERAND (exp, 0);
8007 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8008 TREE_OPERAND (exp, 1) = t1;
8011 /* Attempt to return something suitable for generating an
8012 indexed address, for machines that support that. */
8014 if (modifier == EXPAND_SUM && mode == ptr_mode
8015 && host_integerp (TREE_OPERAND (exp, 1), 0))
8017 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8018 EXPAND_SUM);
8020 /* If we knew for certain that this is arithmetic for an array
8021 reference, and we knew the bounds of the array, then we could
8022 apply the distributive law across (PLUS X C) for constant C.
8023 Without such knowledge, we risk overflowing the computation
8024 when both X and C are large, but X+C isn't. */
8025 /* ??? Could perhaps special-case EXP being unsigned and C being
8026 positive. In that case we are certain that X+C is no smaller
8027 than X and so the transformed expression will overflow iff the
8028 original would have. */
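/* Illustrative example, assuming 32-bit int: with x == 0x40000000 and
   c == -0x3ffffff8, (x + c) * 4 is just 32, yet distributing it to
   x*4 + c*4 would overflow both products; hence the caution above.  */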
8030 if (GET_CODE (op0) != REG)
8031 op0 = force_operand (op0, NULL_RTX);
8032 if (GET_CODE (op0) != REG)
8033 op0 = copy_to_mode_reg (mode, op0);
8035 return
8036 gen_rtx_MULT (mode, op0,
8037 GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
8040 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8041 subtarget = 0;
8043 /* Check for multiplying things that have been extended
8044 from a narrower type. If this machine supports multiplying
8045 in that narrower type with a result in the desired type,
8046 do it that way, and avoid the explicit type-conversion. */
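/* Illustrative example: for "(long long) a * (long long) b" with int
   operands, a target that provides a 32x32->64 widening multiply
   pattern can multiply A and B directly, instead of first extending
   each operand to 64 bits and doing a full 64x64 multiply.  */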
8047 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8048 && TREE_CODE (type) == INTEGER_TYPE
8049 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8050 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8051 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8052 && int_fits_type_p (TREE_OPERAND (exp, 1),
8053 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8054 /* Don't use a widening multiply if a shift will do. */
8055 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8056 > HOST_BITS_PER_WIDE_INT)
8057 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8059 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8060 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8062 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8063 /* If both operands are extended, they must either both
8064 be zero-extended or both be sign-extended. */
8065 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8067 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
8069 enum machine_mode innermode
8070 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
8071 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8072 ? smul_widen_optab : umul_widen_optab);
8073 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8074 ? umul_widen_optab : smul_widen_optab);
8075 if (mode == GET_MODE_WIDER_MODE (innermode))
8077 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8079 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8080 NULL_RTX, VOIDmode, 0);
8081 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8082 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8083 VOIDmode, 0);
8084 else
8085 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8086 NULL_RTX, VOIDmode, 0);
8087 goto binop2;
8089 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8090 && innermode == word_mode)
8092 rtx htem;
8093 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8094 NULL_RTX, VOIDmode, 0);
8095 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8096 op1 = convert_modes (innermode, mode,
8097 expand_expr (TREE_OPERAND (exp, 1),
8098 NULL_RTX, VOIDmode, 0),
8099 unsignedp);
8100 else
8101 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8102 NULL_RTX, VOIDmode, 0);
8103 temp = expand_binop (mode, other_optab, op0, op1, target,
8104 unsignedp, OPTAB_LIB_WIDEN);
8105 htem = expand_mult_highpart_adjust (innermode,
8106 gen_highpart (innermode, temp),
8107 op0, op1,
8108 gen_highpart (innermode, temp),
8109 unsignedp);
8110 emit_move_insn (gen_highpart (innermode, temp), htem);
8111 return temp;
8115 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8116 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8117 return expand_mult (mode, op0, op1, target, unsignedp);
8119 case TRUNC_DIV_EXPR:
8120 case FLOOR_DIV_EXPR:
8121 case CEIL_DIV_EXPR:
8122 case ROUND_DIV_EXPR:
8123 case EXACT_DIV_EXPR:
8124 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8125 subtarget = 0;
8126 /* Possible optimization: compute the dividend with EXPAND_SUM;
8127 then, if the divisor is constant, we can optimize the case
8128 where some terms of the dividend have coefficients divisible by it. */
8129 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8130 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8131 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8133 case RDIV_EXPR:
8134 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal,
8135 saving an expensive divide. If not, combine will rebuild the
8136 original computation. */
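/* Illustrative source-level view: under -funsafe-math-optimizations,
   "a / b" is expanded as if written "a * (1.0 / b)", so several
   divisions by the same B can share one reciprocal after CSE.  */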
8137 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8138 && TREE_CODE (type) == REAL_TYPE
8139 && !real_onep (TREE_OPERAND (exp, 0)))
8140 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8141 build (RDIV_EXPR, type,
8142 build_real (type, dconst1),
8143 TREE_OPERAND (exp, 1))),
8144 target, tmode, unsignedp);
8145 this_optab = sdiv_optab;
8146 goto binop;
8148 case TRUNC_MOD_EXPR:
8149 case FLOOR_MOD_EXPR:
8150 case CEIL_MOD_EXPR:
8151 case ROUND_MOD_EXPR:
8152 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8153 subtarget = 0;
8154 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8155 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8156 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8158 case FIX_ROUND_EXPR:
8159 case FIX_FLOOR_EXPR:
8160 case FIX_CEIL_EXPR:
8161 abort (); /* Not used for C. */
8163 case FIX_TRUNC_EXPR:
8164 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8165 if (target == 0)
8166 target = gen_reg_rtx (mode);
8167 expand_fix (target, op0, unsignedp);
8168 return target;
8170 case FLOAT_EXPR:
8171 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8172 if (target == 0)
8173 target = gen_reg_rtx (mode);
8174 /* expand_float can't figure out what to do if FROM has VOIDmode.
8175 So give it the correct mode. With -O, cse will optimize this. */
8176 if (GET_MODE (op0) == VOIDmode)
8177 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8178 op0);
8179 expand_float (target, op0,
8180 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8181 return target;
8183 case NEGATE_EXPR:
8184 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8185 temp = expand_unop (mode,
8186 ! unsignedp && flag_trapv
8187 && (GET_MODE_CLASS(mode) == MODE_INT)
8188 ? negv_optab : neg_optab, op0, target, 0);
8189 if (temp == 0)
8190 abort ();
8191 return temp;
8193 case ABS_EXPR:
8194 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8196 /* Handle complex values specially. */
8197 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8198 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8199 return expand_complex_abs (mode, op0, target, unsignedp);
8201 /* Unsigned abs is simply the operand. Testing here means we don't
8202 risk generating incorrect code below. */
8203 if (TREE_UNSIGNED (type))
8204 return op0;
8206 return expand_abs (mode, op0, target, unsignedp,
8207 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8209 case MAX_EXPR:
8210 case MIN_EXPR:
8211 target = original_target;
8212 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8213 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8214 || GET_MODE (target) != mode
8215 || (GET_CODE (target) == REG
8216 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8217 target = gen_reg_rtx (mode);
8218 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8219 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8221 /* First try to do it with a special MIN or MAX instruction.
8222 If that does not win, use a conditional jump to select the proper
8223 value. */
8224 this_optab = (TREE_UNSIGNED (type)
8225 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8226 : (code == MIN_EXPR ? smin_optab : smax_optab));
8228 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8229 OPTAB_WIDEN);
8230 if (temp != 0)
8231 return temp;
8233 /* At this point, a MEM target is no longer useful; we will get better
8234 code without it. */
8236 if (GET_CODE (target) == MEM)
8237 target = gen_reg_rtx (mode);
8239 if (target != op0)
8240 emit_move_insn (target, op0);
8242 op0 = gen_label_rtx ();
8244 /* If this mode is an integer too wide to compare properly,
8245 compare word by word. Rely on cse to optimize constant cases. */
8246 if (GET_MODE_CLASS (mode) == MODE_INT
8247 && ! can_compare_p (GE, mode, ccp_jump))
8249 if (code == MAX_EXPR)
8250 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8251 target, op1, NULL_RTX, op0);
8252 else
8253 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8254 op1, target, NULL_RTX, op0);
8256 else
8258 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8259 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8260 unsignedp, mode, NULL_RTX, NULL_RTX,
8261 op0);
8263 emit_move_insn (target, op1);
8264 emit_label (op0);
8265 return target;
8267 case BIT_NOT_EXPR:
8268 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8269 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8270 if (temp == 0)
8271 abort ();
8272 return temp;
8274 case FFS_EXPR:
8275 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8276 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8277 if (temp == 0)
8278 abort ();
8279 return temp;
8281 /* ??? Can optimize bitwise operations with one arg constant.
8282 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8283 and (a bitwise1 b) bitwise2 b (etc.),
8284 but that is probably not worthwhile. */
8286 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8287 boolean values when we want in all cases to compute both of them. In
8288 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8289 as actual zero-or-1 values and then bitwise anding. In cases where
8290 there cannot be any side effects, better code would be made by
8291 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8292 how to recognize those cases. */
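/* Illustrative example: for "f () && g ()" the front end normally
   builds TRUTH_ANDIF_EXPR, so g () is skipped when f () is zero;
   TRUTH_AND_EXPR, handled here, always evaluates both operands as
   0-or-1 values and then ANDs them, as described above.  */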
8294 case TRUTH_AND_EXPR:
8295 case BIT_AND_EXPR:
8296 this_optab = and_optab;
8297 goto binop;
8299 case TRUTH_OR_EXPR:
8300 case BIT_IOR_EXPR:
8301 this_optab = ior_optab;
8302 goto binop;
8304 case TRUTH_XOR_EXPR:
8305 case BIT_XOR_EXPR:
8306 this_optab = xor_optab;
8307 goto binop;
8309 case LSHIFT_EXPR:
8310 case RSHIFT_EXPR:
8311 case LROTATE_EXPR:
8312 case RROTATE_EXPR:
8313 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8314 subtarget = 0;
8315 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8316 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8317 unsignedp);
8319 /* Could determine the answer when only additive constants differ. Also,
8320 the addition of one can be handled by changing the condition. */
8321 case LT_EXPR:
8322 case LE_EXPR:
8323 case GT_EXPR:
8324 case GE_EXPR:
8325 case EQ_EXPR:
8326 case NE_EXPR:
8327 case UNORDERED_EXPR:
8328 case ORDERED_EXPR:
8329 case UNLT_EXPR:
8330 case UNLE_EXPR:
8331 case UNGT_EXPR:
8332 case UNGE_EXPR:
8333 case UNEQ_EXPR:
8334 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8335 if (temp != 0)
8336 return temp;
8338 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
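/* Illustrative example: for "flag = (x != 0);" where FLAG already
   occupies a register of the right mode, X is loaded into that
   register and, only if the loaded value is nonzero, overwritten
   with 1, avoiding a separate store-flag instruction.  */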
8339 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8340 && original_target
8341 && GET_CODE (original_target) == REG
8342 && (GET_MODE (original_target)
8343 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8345 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8346 VOIDmode, 0);
8348 /* If temp is constant, we can just compute the result. */
8349 if (GET_CODE (temp) == CONST_INT)
8351 if (INTVAL (temp) != 0)
8352 emit_move_insn (target, const1_rtx);
8353 else
8354 emit_move_insn (target, const0_rtx);
8356 return target;
8359 if (temp != original_target)
8361 enum machine_mode mode1 = GET_MODE (temp);
8362 if (mode1 == VOIDmode)
8363 mode1 = tmode != VOIDmode ? tmode : mode;
8365 temp = copy_to_mode_reg (mode1, temp);
8368 op1 = gen_label_rtx ();
8369 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8370 GET_MODE (temp), unsignedp, op1);
8371 emit_move_insn (temp, const1_rtx);
8372 emit_label (op1);
8373 return temp;
8376 /* If no set-flag instruction, must generate a conditional
8377 store into a temporary variable. Drop through
8378 and handle this like && and ||. */
8380 case TRUTH_ANDIF_EXPR:
8381 case TRUTH_ORIF_EXPR:
8382 if (! ignore
8383 && (target == 0 || ! safe_from_p (target, exp, 1)
8384 /* Make sure we don't have a hard reg (such as function's return
8385 value) live across basic blocks, if not optimizing. */
8386 || (!optimize && GET_CODE (target) == REG
8387 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8388 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8390 if (target)
8391 emit_clr_insn (target);
8393 op1 = gen_label_rtx ();
8394 jumpifnot (exp, op1);
8396 if (target)
8397 emit_0_to_1_insn (target);
8399 emit_label (op1);
8400 return ignore ? const0_rtx : target;
8402 case TRUTH_NOT_EXPR:
8403 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8404 /* The parser is careful to generate TRUTH_NOT_EXPR
8405 only with operands that are always zero or one. */
8406 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8407 target, 1, OPTAB_LIB_WIDEN);
8408 if (temp == 0)
8409 abort ();
8410 return temp;
8412 case COMPOUND_EXPR:
8413 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8414 emit_queue ();
8415 return expand_expr (TREE_OPERAND (exp, 1),
8416 (ignore ? const0_rtx : target),
8417 VOIDmode, 0);
8419 case COND_EXPR:
8420 /* If we would have a "singleton" (see below) were it not for a
8421 conversion in each arm, bring that conversion back out. */
8422 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8423 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8424 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8425 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8427 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8428 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8430 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8431 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8432 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8433 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8434 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8435 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8436 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8437 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8438 return expand_expr (build1 (NOP_EXPR, type,
8439 build (COND_EXPR, TREE_TYPE (iftrue),
8440 TREE_OPERAND (exp, 0),
8441 iftrue, iffalse)),
8442 target, tmode, modifier);
8446 /* Note that COND_EXPRs whose type is a structure or union
8447 are required to be constructed to contain assignments of
8448 a temporary variable, so that we can evaluate them here
8449 for side effect only. If type is void, we must do likewise. */
8451 /* If an arm of the branch requires a cleanup,
8452 only that cleanup is performed. */
8454 tree singleton = 0;
8455 tree binary_op = 0, unary_op = 0;
8457 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8458 convert it to our mode, if necessary. */
8459 if (integer_onep (TREE_OPERAND (exp, 1))
8460 && integer_zerop (TREE_OPERAND (exp, 2))
8461 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8463 if (ignore)
8465 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8466 modifier);
8467 return const0_rtx;
8470 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8471 if (GET_MODE (op0) == mode)
8472 return op0;
8474 if (target == 0)
8475 target = gen_reg_rtx (mode);
8476 convert_move (target, op0, unsignedp);
8477 return target;
8480 /* Check for X ? A + B : A. If we have this, we can copy A to the
8481 output and conditionally add B. Similarly for unary operations.
8482 Don't do this if X has side-effects because those side effects
8483 might affect A or B and the "?" operation is a sequence point in
8484 ANSI. (operand_equal_p tests for side effects.) */
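/* Illustrative example: in "cond ? total + 4 : total", TOTAL is the
   "singleton"; it is copied to the output unconditionally and the
   "+ 4" is applied only on the branch where COND is true.  */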
8486 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8487 && operand_equal_p (TREE_OPERAND (exp, 2),
8488 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8489 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8490 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8491 && operand_equal_p (TREE_OPERAND (exp, 1),
8492 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8493 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8494 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8495 && operand_equal_p (TREE_OPERAND (exp, 2),
8496 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8497 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8498 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8499 && operand_equal_p (TREE_OPERAND (exp, 1),
8500 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8501 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8503 /* If we are not to produce a result, we have no target. Otherwise,
8504 if a target was specified use it; it will not be used as an
8505 intermediate target unless it is safe. If no target, use a
8506 temporary. */
8508 if (ignore)
8509 temp = 0;
8510 else if (original_target
8511 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8512 || (singleton && GET_CODE (original_target) == REG
8513 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8514 && original_target == var_rtx (singleton)))
8515 && GET_MODE (original_target) == mode
8516 #ifdef HAVE_conditional_move
8517 && (! can_conditionally_move_p (mode)
8518 || GET_CODE (original_target) == REG
8519 || TREE_ADDRESSABLE (type))
8520 #endif
8521 && (GET_CODE (original_target) != MEM
8522 || TREE_ADDRESSABLE (type)))
8523 temp = original_target;
8524 else if (TREE_ADDRESSABLE (type))
8525 abort ();
8526 else
8527 temp = assign_temp (type, 0, 0, 1);
8529 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8530 do the test of X as a store-flag operation, do this as
8531 A + ((X != 0) << log C). Similarly for other simple binary
8532 operators. Only do for C == 1 if BRANCH_COST is low. */
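/* Worked example, assuming the branch cost makes it profitable: for
   "x ? a + 8 : a" this emits a + ((x != 0) << 3), replacing the
   conditional jump with a store-flag and a shift; log2 (8) == 3.  */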
8533 if (temp && singleton && binary_op
8534 && (TREE_CODE (binary_op) == PLUS_EXPR
8535 || TREE_CODE (binary_op) == MINUS_EXPR
8536 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8537 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8538 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8539 : integer_onep (TREE_OPERAND (binary_op, 1)))
8540 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8542 rtx result;
8543 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8544 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8545 ? addv_optab : add_optab)
8546 : TREE_CODE (binary_op) == MINUS_EXPR
8547 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8548 ? subv_optab : sub_optab)
8549 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8550 : xor_optab);
8552 /* If we had X ? A : A + 1, do this as A + (X == 0).
8554 We have to invert the truth value here and then put it
8555 back later if do_store_flag fails. We cannot simply copy
8556 TREE_OPERAND (exp, 0) to another variable and modify that
8557 because invert_truthvalue can modify the tree pointed to
8558 by its argument. */
8559 if (singleton == TREE_OPERAND (exp, 1))
8560 TREE_OPERAND (exp, 0)
8561 = invert_truthvalue (TREE_OPERAND (exp, 0));
8563 result = do_store_flag (TREE_OPERAND (exp, 0),
8564 (safe_from_p (temp, singleton, 1)
8565 ? temp : NULL_RTX),
8566 mode, BRANCH_COST <= 1);
8568 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8569 result = expand_shift (LSHIFT_EXPR, mode, result,
8570 build_int_2 (tree_log2
8571 (TREE_OPERAND
8572 (binary_op, 1)),
8574 (safe_from_p (temp, singleton, 1)
8575 ? temp : NULL_RTX), 0);
8577 if (result)
8579 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8580 return expand_binop (mode, boptab, op1, result, temp,
8581 unsignedp, OPTAB_LIB_WIDEN);
8583 else if (singleton == TREE_OPERAND (exp, 1))
8584 TREE_OPERAND (exp, 0)
8585 = invert_truthvalue (TREE_OPERAND (exp, 0));
8588 do_pending_stack_adjust ();
8589 NO_DEFER_POP;
8590 op0 = gen_label_rtx ();
8592 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8594 if (temp != 0)
8596 /* If the target conflicts with the other operand of the
8597 binary op, we can't use it. Also, we can't use the target
8598 if it is a hard register, because evaluating the condition
8599 might clobber it. */
8600 if ((binary_op
8601 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8602 || (GET_CODE (temp) == REG
8603 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8604 temp = gen_reg_rtx (mode);
8605 store_expr (singleton, temp, 0);
8607 else
8608 expand_expr (singleton,
8609 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8610 if (singleton == TREE_OPERAND (exp, 1))
8611 jumpif (TREE_OPERAND (exp, 0), op0);
8612 else
8613 jumpifnot (TREE_OPERAND (exp, 0), op0);
8615 start_cleanup_deferral ();
8616 if (binary_op && temp == 0)
8617 /* Just touch the other operand. */
8618 expand_expr (TREE_OPERAND (binary_op, 1),
8619 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8620 else if (binary_op)
8621 store_expr (build (TREE_CODE (binary_op), type,
8622 make_tree (type, temp),
8623 TREE_OPERAND (binary_op, 1)),
8624 temp, 0);
8625 else
8626 store_expr (build1 (TREE_CODE (unary_op), type,
8627 make_tree (type, temp)),
8628 temp, 0);
8629 op1 = op0;
8631 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8632 comparison operator. If we have one of these cases, set the
8633 output to A, branch on A (cse will merge these two references),
8634 then set the output to FOO. */
8635 else if (temp
8636 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8637 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8638 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8639 TREE_OPERAND (exp, 1), 0)
8640 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8641 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8642 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8644 if (GET_CODE (temp) == REG
8645 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8646 temp = gen_reg_rtx (mode);
8647 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8648 jumpif (TREE_OPERAND (exp, 0), op0);
8650 start_cleanup_deferral ();
8651 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8652 op1 = op0;
8654 else if (temp
8655 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8656 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8657 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8658 TREE_OPERAND (exp, 2), 0)
8659 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8660 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8661 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8663 if (GET_CODE (temp) == REG
8664 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8665 temp = gen_reg_rtx (mode);
8666 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8667 jumpifnot (TREE_OPERAND (exp, 0), op0);
8669 start_cleanup_deferral ();
8670 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8671 op1 = op0;
8673 else
8675 op1 = gen_label_rtx ();
8676 jumpifnot (TREE_OPERAND (exp, 0), op0);
8678 start_cleanup_deferral ();
8680 /* One branch of the cond can be void, if it never returns. For
8681 example A ? throw : E. */
8682 if (temp != 0
8683 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8684 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8685 else
8686 expand_expr (TREE_OPERAND (exp, 1),
8687 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8688 end_cleanup_deferral ();
8689 emit_queue ();
8690 emit_jump_insn (gen_jump (op1));
8691 emit_barrier ();
8692 emit_label (op0);
8693 start_cleanup_deferral ();
8694 if (temp != 0
8695 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8696 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8697 else
8698 expand_expr (TREE_OPERAND (exp, 2),
8699 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8702 end_cleanup_deferral ();
8704 emit_queue ();
8705 emit_label (op1);
8706 OK_DEFER_POP;
8708 return temp;
8711 case TARGET_EXPR:
8713 /* Something needs to be initialized, but we didn't know
8714 where that thing was when building the tree. For example,
8715 it could be the return value of a function, or a parameter
8716 to a function which is laid down on the stack, or a temporary
8717 variable which must be passed by reference.
8719 We guarantee that the expression will either be constructed
8720 or copied into our original target. */
8722 tree slot = TREE_OPERAND (exp, 0);
8723 tree cleanups = NULL_TREE;
8724 tree exp1;
8726 if (TREE_CODE (slot) != VAR_DECL)
8727 abort ();
8729 if (! ignore)
8730 target = original_target;
8732 /* Set this here so that if we get a target that refers to a
8733 register variable that's already been used, put_reg_into_stack
8734 knows that it should fix up those uses. */
8735 TREE_USED (slot) = 1;
8737 if (target == 0)
8739 if (DECL_RTL_SET_P (slot))
8741 target = DECL_RTL (slot);
8742 /* If we have already expanded the slot, don't do
8743 it again. (mrs) */
8744 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8745 return target;
8747 else
8749 target = assign_temp (type, 2, 0, 1);
8750 /* All temp slots at this level must not conflict. */
8751 preserve_temp_slots (target);
8752 SET_DECL_RTL (slot, target);
8753 if (TREE_ADDRESSABLE (slot))
8754 put_var_into_stack (slot);
8756 /* Since SLOT is not known to the called function
8757 to belong to its stack frame, we must build an explicit
8758 cleanup. This case occurs when we must build up a reference
8759 to pass the reference as an argument. In this case,
8760 it is very likely that such a reference need not be
8761 built here. */
8763 if (TREE_OPERAND (exp, 2) == 0)
8764 TREE_OPERAND (exp, 2)
8765 = (*lang_hooks.maybe_build_cleanup) (slot);
8766 cleanups = TREE_OPERAND (exp, 2);
8769 else
8771 /* This case does occur, when expanding a parameter which
8772 needs to be constructed on the stack. The target
8773 is the actual stack address that we want to initialize.
8774 The function we call will perform the cleanup in this case. */
8776 /* If we have already assigned it space, use that space,
8777 not the target that we were passed in, as our target
8778 parameter is only a hint. */
8779 if (DECL_RTL_SET_P (slot))
8781 target = DECL_RTL (slot);
8782 /* If we have already expanded the slot, don't do
8783 it again. (mrs) */
8784 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8785 return target;
8787 else
8789 SET_DECL_RTL (slot, target);
8790 /* If we must have an addressable slot, then make sure that
8791 the RTL that we just stored in slot is OK. */
8792 if (TREE_ADDRESSABLE (slot))
8793 put_var_into_stack (slot);
8797 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8798 /* Mark it as expanded. */
8799 TREE_OPERAND (exp, 1) = NULL_TREE;
8801 store_expr (exp1, target, 0);
8803 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8805 return target;
8808 case INIT_EXPR:
8810 tree lhs = TREE_OPERAND (exp, 0);
8811 tree rhs = TREE_OPERAND (exp, 1);
8813 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8814 return temp;
8817 case MODIFY_EXPR:
8819 /* If lhs is complex, expand calls in rhs before computing it.
8820 That's so we don't compute a pointer and save it over a
8821 call. If lhs is simple, compute it first so we can give it
8822 as a target if the rhs is just a call. This avoids an
8823 extra temp and copy and that prevents a partial-subsumption
8824 which makes bad code. Actually we could treat
8825 component_ref's of vars like vars. */
8827 tree lhs = TREE_OPERAND (exp, 0);
8828 tree rhs = TREE_OPERAND (exp, 1);
8830 temp = 0;
8832 /* Check for |= or &= of a bitfield of size 1 into another bitfield
8833 of size 1. In this case, (unless we need the result of the
8834 assignment) we can do this more efficiently with a
8835 test followed by an assignment, if necessary.
8837 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8838 things change so we do, this code should be enhanced to
8839 support it. */
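/* Illustrative source form of the case handled here, with an assumed
   struct: given "struct s { unsigned a : 1, b : 1; } *p;", the
   statement "p->a |= p->b;" can be expanded as "if (p->b) p->a = 1;",
   a test followed by a conditional store, when the value of the
   assignment itself is not needed.  */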
8840 if (ignore
8841 && TREE_CODE (lhs) == COMPONENT_REF
8842 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8843 || TREE_CODE (rhs) == BIT_AND_EXPR)
8844 && TREE_OPERAND (rhs, 0) == lhs
8845 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8846 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8847 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8849 rtx label = gen_label_rtx ();
8851 do_jump (TREE_OPERAND (rhs, 1),
8852 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8853 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8854 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8855 (TREE_CODE (rhs) == BIT_IOR_EXPR
8856 ? integer_one_node
8857 : integer_zero_node)),
8858 0, 0);
8859 do_pending_stack_adjust ();
8860 emit_label (label);
8861 return const0_rtx;
8864 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8866 return temp;
8869 case RETURN_EXPR:
8870 if (!TREE_OPERAND (exp, 0))
8871 expand_null_return ();
8872 else
8873 expand_return (TREE_OPERAND (exp, 0));
8874 return const0_rtx;
8876 case PREINCREMENT_EXPR:
8877 case PREDECREMENT_EXPR:
8878 return expand_increment (exp, 0, ignore);
8880 case POSTINCREMENT_EXPR:
8881 case POSTDECREMENT_EXPR:
8882 /* Faster to treat as pre-increment if result is not used. */
8883 return expand_increment (exp, ! ignore, ignore);
8885 case ADDR_EXPR:
8886 /* Are we taking the address of a nested function? */
8887 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8888 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8889 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8890 && ! TREE_STATIC (exp))
8892 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8893 op0 = force_operand (op0, target);
8895 /* If we are taking the address of something erroneous, just
8896 return a zero. */
8897 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8898 return const0_rtx;
8899 /* If we are taking the address of a constant and are at the
8900 top level, we have to use output_constant_def since we can't
8901 call force_const_mem at top level. */
8902 else if (cfun == 0
8903 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8904 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8905 == 'c')))
8906 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8907 else
8909 /* We make sure to pass const0_rtx down if we came in with
8910 ignore set, to avoid doing the cleanups twice for something. */
8911 op0 = expand_expr (TREE_OPERAND (exp, 0),
8912 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8913 (modifier == EXPAND_INITIALIZER
8914 ? modifier : EXPAND_CONST_ADDRESS));
8916 /* If we are going to ignore the result, OP0 will have been set
8917 to const0_rtx, so just return it. Don't get confused and
8918 think we are taking the address of the constant. */
8919 if (ignore)
8920 return op0;
8922 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8923 clever and return a REG when given a MEM. */
8924 op0 = protect_from_queue (op0, 1);
8926 /* We would like the object in memory. If it is a constant, we can
8927 have it be statically allocated into memory. For a non-constant,
8928 we need to allocate some memory and store the value into it. */
8930 if (CONSTANT_P (op0))
8931 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8932 op0);
8933 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8934 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8935 || GET_CODE (op0) == PARALLEL)
8937 /* If the operand is a SAVE_EXPR, we can deal with this by
8938 forcing the SAVE_EXPR into memory. */
8939 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8941 put_var_into_stack (TREE_OPERAND (exp, 0));
8942 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8944 else
8946 /* If this object is in a register, it can't be BLKmode. */
8947 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8948 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8950 if (GET_CODE (op0) == PARALLEL)
8951 /* Handle calls that pass values in multiple
8952 non-contiguous locations. The Irix 6 ABI has examples
8953 of this. */
8954 emit_group_store (memloc, op0,
8955 int_size_in_bytes (inner_type));
8956 else
8957 emit_move_insn (memloc, op0);
8959 op0 = memloc;
8963 if (GET_CODE (op0) != MEM)
8964 abort ();
8966 mark_temp_addr_taken (op0);
8967 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8969 op0 = XEXP (op0, 0);
8970 #ifdef POINTERS_EXTEND_UNSIGNED
8971 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8972 && mode == ptr_mode)
8973 op0 = convert_memory_address (ptr_mode, op0);
8974 #endif
8975 return op0;
8978 /* If OP0 is not aligned at least as much as the type requires, we
8979 need to make a temporary, copy OP0 to it, and take the address of
8980 the temporary. We want to use the alignment of the type, not of
8981 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8982 the test for BLKmode means that can't happen. The test for
8983 BLKmode is because we never make mis-aligned MEMs with
8984 non-BLKmode.
8986 We don't need to do this at all if the machine doesn't have
8987 strict alignment. */
8988 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8989 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8990 > MEM_ALIGN (op0))
8991 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8993 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8994 rtx new
8995 = assign_stack_temp_for_type
8996 (TYPE_MODE (inner_type),
8997 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8998 : int_size_in_bytes (inner_type),
8999 1, build_qualified_type (inner_type,
9000 (TYPE_QUALS (inner_type)
9001 | TYPE_QUAL_CONST)));
9003 if (TYPE_ALIGN_OK (inner_type))
9004 abort ();
9006 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9007 BLOCK_OP_NORMAL);
9008 op0 = new;
9011 op0 = force_operand (XEXP (op0, 0), target);
9014 if (flag_force_addr
9015 && GET_CODE (op0) != REG
9016 && modifier != EXPAND_CONST_ADDRESS
9017 && modifier != EXPAND_INITIALIZER
9018 && modifier != EXPAND_SUM)
9019 op0 = force_reg (Pmode, op0);
9021 if (GET_CODE (op0) == REG
9022 && ! REG_USERVAR_P (op0))
9023 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9025 #ifdef POINTERS_EXTEND_UNSIGNED
9026 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9027 && mode == ptr_mode)
9028 op0 = convert_memory_address (ptr_mode, op0);
9029 #endif
9031 return op0;
9033 case ENTRY_VALUE_EXPR:
9034 abort ();
9036 /* COMPLEX type for Extended Pascal & Fortran */
9037 case COMPLEX_EXPR:
9039 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9040 rtx insns;
9042 /* Get the rtx code of the operands. */
9043 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9044 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9046 if (! target)
9047 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9049 start_sequence ();
9051 /* Move the real (op0) and imaginary (op1) parts to their location. */
9052 emit_move_insn (gen_realpart (mode, target), op0);
9053 emit_move_insn (gen_imagpart (mode, target), op1);
9055 insns = get_insns ();
9056 end_sequence ();
9058 /* Complex construction should appear as a single unit. */
9059 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9060 each with a separate pseudo as destination.
9061 It's not correct for flow to treat them as a unit. */
9062 if (GET_CODE (target) != CONCAT)
9063 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9064 else
9065 emit_insn (insns);
9067 return target;
9070 case REALPART_EXPR:
9071 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9072 return gen_realpart (mode, op0);
9074 case IMAGPART_EXPR:
9075 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9076 return gen_imagpart (mode, op0);
9078 case CONJ_EXPR:
9080 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9081 rtx imag_t;
9082 rtx insns;
9084 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9086 if (! target)
9087 target = gen_reg_rtx (mode);
9089 start_sequence ();
9091 /* Store the realpart and the negated imagpart to target. */
9092 emit_move_insn (gen_realpart (partmode, target),
9093 gen_realpart (partmode, op0));
9095 imag_t = gen_imagpart (partmode, target);
9096 temp = expand_unop (partmode,
9097 ! unsignedp && flag_trapv
9098 && (GET_MODE_CLASS(partmode) == MODE_INT)
9099 ? negv_optab : neg_optab,
9100 gen_imagpart (partmode, op0), imag_t, 0);
9101 if (temp != imag_t)
9102 emit_move_insn (imag_t, temp);
9104 insns = get_insns ();
9105 end_sequence ();
9107 /* Conjugate should appear as a single unit.
9108 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9109 each with a separate pseudo as destination.
9110 It's not correct for flow to treat them as a unit. */
9111 if (GET_CODE (target) != CONCAT)
9112 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9113 else
9114 emit_insn (insns);
9116 return target;
9119 case TRY_CATCH_EXPR:
9121 tree handler = TREE_OPERAND (exp, 1);
9123 expand_eh_region_start ();
9125 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9127 expand_eh_region_end_cleanup (handler);
9129 return op0;
9132 case TRY_FINALLY_EXPR:
9134 tree try_block = TREE_OPERAND (exp, 0);
9135 tree finally_block = TREE_OPERAND (exp, 1);
9137 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9139 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9140 is not sufficient, so we cannot expand the block twice.
9141 So we play games with GOTO_SUBROUTINE_EXPR to let us
9142 expand the thing only once. */
9143 /* When not optimizing, we go ahead with this form since
9144 (1) user breakpoints operate more predictably without
9145 code duplication, and
9146 (2) we're not running any of the global optimizers
9147 that would explode in time/space with the highly
9148 connected CFG created by the indirect branching. */
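/* As a rough, illustrative sketch only (the exact sequence depends on
   how the binding-level cleanup is run), the code emitted below has
   the shape

       <try_block>
       return_link = &resume; goto finally; resume:   <- the cleanup
       goto done;
     finally:
       <finally_block>
       goto *return_link;
     done:

   so FINALLY_BLOCK is expanded exactly once and is entered like a
   subroutine, with RETURN_LINK holding the place to resume.  */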
9150 rtx finally_label = gen_label_rtx ();
9151 rtx done_label = gen_label_rtx ();
9152 rtx return_link = gen_reg_rtx (Pmode);
9153 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9154 (tree) finally_label, (tree) return_link);
9155 TREE_SIDE_EFFECTS (cleanup) = 1;
9157 /* Start a new binding layer that will keep track of all cleanup
9158 actions to be performed. */
9159 expand_start_bindings (2);
9160 target_temp_slot_level = temp_slot_level;
9162 expand_decl_cleanup (NULL_TREE, cleanup);
9163 op0 = expand_expr (try_block, target, tmode, modifier);
9165 preserve_temp_slots (op0);
9166 expand_end_bindings (NULL_TREE, 0, 0);
9167 emit_jump (done_label);
9168 emit_label (finally_label);
9169 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9170 emit_indirect_jump (return_link);
9171 emit_label (done_label);
9173 else
9175 expand_start_bindings (2);
9176 target_temp_slot_level = temp_slot_level;
9178 expand_decl_cleanup (NULL_TREE, finally_block);
9179 op0 = expand_expr (try_block, target, tmode, modifier);
9181 preserve_temp_slots (op0);
9182 expand_end_bindings (NULL_TREE, 0, 0);
9185 return op0;
9188 case GOTO_SUBROUTINE_EXPR:
9190 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9191 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9192 rtx return_address = gen_label_rtx ();
9193 emit_move_insn (return_link,
9194 gen_rtx_LABEL_REF (Pmode, return_address));
9195 emit_jump (subr);
9196 emit_label (return_address);
9197 return const0_rtx;
9200 case VA_ARG_EXPR:
9201 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9203 case EXC_PTR_EXPR:
9204 return get_exception_pointer (cfun);
9206 case FDESC_EXPR:
9207 /* Function descriptors are not valid except as initialization
9208 constants, and should not be expanded.  */
9209 abort ();
9211 default:
9212 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9215 /* Here to do an ordinary binary operator, generating an instruction
9216 from the optab already placed in `this_optab'. */
9217 binop:
9218 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9219 subtarget = 0;
9220 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9221 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9222 binop2:
9223 temp = expand_binop (mode, this_optab, op0, op1, target,
9224 unsignedp, OPTAB_LIB_WIDEN);
9225 if (temp == 0)
9226 abort ();
9227 return temp;
9230 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that,
9231 when applied to the address of EXP, produces an address known to be
9232 aligned to more than BIGGEST_ALIGNMENT.  */
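/* As an illustrative example, an OFFSET of the form

       (- (sizetype) &EXP) & (ALIGN - 1)

   with ALIGN a power of two larger than BIGGEST_ALIGNMENT matches:
   adding it to the address of EXP rounds that address up to the next
   ALIGN boundary, so the result is known to be ALIGN-aligned.  */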
9234 static int
9235 is_aligning_offset (offset, exp)
9236 tree offset;
9237 tree exp;
9239 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9240 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9241 || TREE_CODE (offset) == NOP_EXPR
9242 || TREE_CODE (offset) == CONVERT_EXPR
9243 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9244 offset = TREE_OPERAND (offset, 0);
9246 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9247 a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
9248 if (TREE_CODE (offset) != BIT_AND_EXPR
9249 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9250 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9251 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9252 return 0;
9254 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9255 It must be NEGATE_EXPR. Then strip any more conversions. */
9256 offset = TREE_OPERAND (offset, 0);
9257 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9258 || TREE_CODE (offset) == NOP_EXPR
9259 || TREE_CODE (offset) == CONVERT_EXPR)
9260 offset = TREE_OPERAND (offset, 0);
9262 if (TREE_CODE (offset) != NEGATE_EXPR)
9263 return 0;
9265 offset = TREE_OPERAND (offset, 0);
9266 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9267 || TREE_CODE (offset) == NOP_EXPR
9268 || TREE_CODE (offset) == CONVERT_EXPR)
9269 offset = TREE_OPERAND (offset, 0);
9271 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9272 whose type is the same as EXP. */
9273 return (TREE_CODE (offset) == ADDR_EXPR
9274 && (TREE_OPERAND (offset, 0) == exp
9275 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9276 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9277 == TREE_TYPE (exp)))));
9280 /* Return the tree node if ARG corresponds to a string constant, or zero
9281 if it doesn't.  If we return non-zero, set *PTR_OFFSET to the offset
9282 in bytes within the string that ARG is accessing. The type of the
9283 offset will be `sizetype'. */
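/* For example (illustrative), for ARG of the form &"hello"[0] this
   returns the STRING_CST "hello" with *PTR_OFFSET = 0, and for
   "hello" + i, a PLUS_EXPR of an ADDR_EXPR and an index, it returns
   the STRING_CST with *PTR_OFFSET set to the index in sizetype.  */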
9285 tree
9286 string_constant (arg, ptr_offset)
9287 tree arg;
9288 tree *ptr_offset;
9290 STRIP_NOPS (arg);
9292 if (TREE_CODE (arg) == ADDR_EXPR
9293 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9295 *ptr_offset = size_zero_node;
9296 return TREE_OPERAND (arg, 0);
9298 else if (TREE_CODE (arg) == PLUS_EXPR)
9300 tree arg0 = TREE_OPERAND (arg, 0);
9301 tree arg1 = TREE_OPERAND (arg, 1);
9303 STRIP_NOPS (arg0);
9304 STRIP_NOPS (arg1);
9306 if (TREE_CODE (arg0) == ADDR_EXPR
9307 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9309 *ptr_offset = convert (sizetype, arg1);
9310 return TREE_OPERAND (arg0, 0);
9312 else if (TREE_CODE (arg1) == ADDR_EXPR
9313 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9315 *ptr_offset = convert (sizetype, arg0);
9316 return TREE_OPERAND (arg1, 0);
9320 return 0;
9323 /* Expand code for a post- or pre- increment or decrement
9324 and return the RTX for the result.
9325 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
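/* As an illustration, for a postincrement such as i++ on a plain int
   the old value is returned (possibly as a queued copy) and the add
   is emitted or queued afterwards, while for a preincrement ++i the
   add happens first and the incremented value itself is returned.  */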
9327 static rtx
9328 expand_increment (exp, post, ignore)
9329 tree exp;
9330 int post, ignore;
9332 rtx op0, op1;
9333 rtx temp, value;
9334 tree incremented = TREE_OPERAND (exp, 0);
9335 optab this_optab = add_optab;
9336 int icode;
9337 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9338 int op0_is_copy = 0;
9339 int single_insn = 0;
9340 /* 1 means we can't store into OP0 directly,
9341 because it is a subreg narrower than a word,
9342 and we don't dare clobber the rest of the word. */
9343 int bad_subreg = 0;
9345 /* Stabilize any component ref that might need to be
9346 evaluated more than once below. */
9347 if (!post
9348 || TREE_CODE (incremented) == BIT_FIELD_REF
9349 || (TREE_CODE (incremented) == COMPONENT_REF
9350 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9351 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9352 incremented = stabilize_reference (incremented);
9353 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9354 ones into save exprs so that they don't accidentally get evaluated
9355 more than once by the code below. */
9356 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9357 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9358 incremented = save_expr (incremented);
9360 /* Compute the operands as RTX.
9361 Note whether OP0 is the actual lvalue or a copy of it:
9362 I believe it is a copy iff it is a register or subreg
9363 and insns were generated in computing it. */
9365 temp = get_last_insn ();
9366 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9368 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9369 in place but instead must do sign- or zero-extension during assignment,
9370 so we copy it into a new register and let the code below use it as
9371 a copy.
9373 Note that we can safely modify this SUBREG since it is known not to be
9374 shared (it was made by the expand_expr call above). */
9376 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9378 if (post)
9379 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9380 else
9381 bad_subreg = 1;
9383 else if (GET_CODE (op0) == SUBREG
9384 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9386 /* We cannot increment this SUBREG in place. If we are
9387 post-incrementing, get a copy of the old value. Otherwise,
9388 just mark that we cannot increment in place. */
9389 if (post)
9390 op0 = copy_to_reg (op0);
9391 else
9392 bad_subreg = 1;
9395 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9396 && temp != get_last_insn ());
9397 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9399 /* Decide whether incrementing or decrementing. */
9400 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9401 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9402 this_optab = sub_optab;
9404 /* Convert decrement by a constant into a negative increment. */
9405 if (this_optab == sub_optab
9406 && GET_CODE (op1) == CONST_INT)
9408 op1 = GEN_INT (-INTVAL (op1));
9409 this_optab = add_optab;
9412 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9413 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9415 /* For a preincrement, see if we can do this with a single instruction. */
9416 if (!post)
9418 icode = (int) this_optab->handlers[(int) mode].insn_code;
9419 if (icode != (int) CODE_FOR_nothing
9420 /* Make sure that OP0 is valid for operands 0 and 1
9421 of the insn we want to queue. */
9422 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9423 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9424 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9425 single_insn = 1;
9428 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9429 then we cannot just increment OP0. We must therefore contrive to
9430 increment the original value. Then, for postincrement, we can return
9431 OP0 since it is a copy of the old value. For preincrement, expand here
9432 unless we can do it with a single insn.
9434 Likewise if storing directly into OP0 would clobber high bits
9435 we need to preserve (bad_subreg). */
9436 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9438 /* This is the easiest way to increment the value wherever it is.
9439 Problems with multiple evaluation of INCREMENTED are prevented
9440 because either (1) it is a component_ref or preincrement,
9441 in which case it was stabilized above, or (2) it is an array_ref
9442 with constant index in an array in a register, which is
9443 safe to reevaluate. */
9444 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9445 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9446 ? MINUS_EXPR : PLUS_EXPR),
9447 TREE_TYPE (exp),
9448 incremented,
9449 TREE_OPERAND (exp, 1));
9451 while (TREE_CODE (incremented) == NOP_EXPR
9452 || TREE_CODE (incremented) == CONVERT_EXPR)
9454 newexp = convert (TREE_TYPE (incremented), newexp);
9455 incremented = TREE_OPERAND (incremented, 0);
9458 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9459 return post ? op0 : temp;
9462 if (post)
9464 /* We have a true reference to the value in OP0.
9465 If there is an insn to add or subtract in this mode, queue it.
9466 Queueing the increment insn avoids the register shuffling
9467 that often results if we must increment now and first save
9468 the old value for subsequent use. */
9470 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9471 op0 = stabilize (op0);
9472 #endif
9474 icode = (int) this_optab->handlers[(int) mode].insn_code;
9475 if (icode != (int) CODE_FOR_nothing
9476 /* Make sure that OP0 is valid for operands 0 and 1
9477 of the insn we want to queue. */
9478 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9479 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9481 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9482 op1 = force_reg (mode, op1);
9484 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9486 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9488 rtx addr = (general_operand (XEXP (op0, 0), mode)
9489 ? force_reg (Pmode, XEXP (op0, 0))
9490 : copy_to_reg (XEXP (op0, 0)));
9491 rtx temp, result;
9493 op0 = replace_equiv_address (op0, addr);
9494 temp = force_reg (GET_MODE (op0), op0);
9495 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9496 op1 = force_reg (mode, op1);
9498 /* The increment queue is LIFO, thus we have to `queue'
9499 the instructions in reverse order. */
9500 enqueue_insn (op0, gen_move_insn (op0, temp));
9501 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9502 return result;
9506 /* Preincrement, or we can't increment with one simple insn. */
9507 if (post)
9508 /* Save a copy of the value before inc or dec, to return it later. */
9509 temp = value = copy_to_reg (op0);
9510 else
9511 /* Arrange to return the incremented value. */
9512 /* Copy the rtx because expand_binop will protect from the queue,
9513 and the results of that would be invalid for us to return
9514 if our caller does emit_queue before using our result. */
9515 temp = copy_rtx (value = op0);
9517 /* Increment however we can. */
9518 op1 = expand_binop (mode, this_optab, value, op1, op0,
9519 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9521 /* Make sure the value is stored into OP0. */
9522 if (op1 != op0)
9523 emit_move_insn (op0, op1);
9525 return temp;
9528 /* At the start of a function, record that we have no previously-pushed
9529 arguments waiting to be popped. */
9531 void
9532 init_pending_stack_adjust ()
9534 pending_stack_adjust = 0;
9537 /* When exiting from a function, if safe, clear out any pending stack adjust
9538 so the adjustment won't get done.
9540 Note, if the current function calls alloca, then it must have a
9541 frame pointer regardless of the value of flag_omit_frame_pointer. */
9543 void
9544 clear_pending_stack_adjust ()
9546 #ifdef EXIT_IGNORE_STACK
9547 if (optimize > 0
9548 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9549 && EXIT_IGNORE_STACK
9550 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9551 && ! flag_inline_functions)
9553 stack_pointer_delta -= pending_stack_adjust,
9554 pending_stack_adjust = 0;
9556 #endif
9559 /* Pop any previously-pushed arguments that have not been popped yet. */
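/* For instance (illustrative), if several calls each pushed argument
   bytes that were never popped, pending_stack_adjust holds their sum
   and a single stack adjustment is emitted here rather than one pop
   per call.  */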
9561 void
9562 do_pending_stack_adjust ()
9564 if (inhibit_defer_pop == 0)
9566 if (pending_stack_adjust != 0)
9567 adjust_stack (GEN_INT (pending_stack_adjust));
9568 pending_stack_adjust = 0;
9572 /* Expand conditional expressions. */
9574 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9575 LABEL is an rtx of code CODE_LABEL, in this function and all the
9576 functions here. */
9578 void
9579 jumpifnot (exp, label)
9580 tree exp;
9581 rtx label;
9583 do_jump (exp, label, NULL_RTX);
9586 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9588 void
9589 jumpif (exp, label)
9590 tree exp;
9591 rtx label;
9593 do_jump (exp, NULL_RTX, label);
9596 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9597 the result is zero, or IF_TRUE_LABEL if the result is one.
9598 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9599 meaning fall through in that case.
9601 do_jump always does any pending stack adjust except when it does not
9602 actually perform a jump. An example where there is no jump
9603 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9605 This function is responsible for optimizing cases such as
9606 &&, || and comparison operators in EXP. */
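/* As an illustration, for `if (a && b)' the TRUTH_ANDIF_EXPR case
   below jumps to IF_FALSE_LABEL as soon as A evaluates to zero and
   only then evaluates B, so no boolean value is ever materialized.  */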
9608 void
9609 do_jump (exp, if_false_label, if_true_label)
9610 tree exp;
9611 rtx if_false_label, if_true_label;
9613 enum tree_code code = TREE_CODE (exp);
9614 /* Some cases need to create a label to jump to
9615 in order to properly fall through.
9616 These cases set DROP_THROUGH_LABEL nonzero. */
9617 rtx drop_through_label = 0;
9618 rtx temp;
9619 int i;
9620 tree type;
9621 enum machine_mode mode;
9623 #ifdef MAX_INTEGER_COMPUTATION_MODE
9624 check_max_integer_computation_mode (exp);
9625 #endif
9627 emit_queue ();
9629 switch (code)
9631 case ERROR_MARK:
9632 break;
9634 case INTEGER_CST:
9635 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9636 if (temp)
9637 emit_jump (temp);
9638 break;
9640 #if 0
9641 /* This is not true with #pragma weak */
9642 case ADDR_EXPR:
9643 /* The address of something can never be zero. */
9644 if (if_true_label)
9645 emit_jump (if_true_label);
9646 break;
9647 #endif
9649 case NOP_EXPR:
9650 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9651 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9652 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9653 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9654 goto normal;
9655 case CONVERT_EXPR:
9656 /* If we are narrowing the operand, we have to do the compare in the
9657 narrower mode. */
9658 if ((TYPE_PRECISION (TREE_TYPE (exp))
9659 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9660 goto normal;
9661 case NON_LVALUE_EXPR:
9662 case REFERENCE_EXPR:
9663 case ABS_EXPR:
9664 case NEGATE_EXPR:
9665 case LROTATE_EXPR:
9666 case RROTATE_EXPR:
9667 /* These cannot change zero->non-zero or vice versa. */
9668 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9669 break;
9671 case WITH_RECORD_EXPR:
9672 /* Put the object on the placeholder list, recurse through our first
9673 operand, and pop the list. */
9674 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9675 placeholder_list);
9676 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9677 placeholder_list = TREE_CHAIN (placeholder_list);
9678 break;
9680 #if 0
9681 /* This never takes fewer insns than evaluating the PLUS_EXPR followed by
9682 a test, and can take more if the test is eliminated.  */
9683 case PLUS_EXPR:
9684 /* Reduce to minus. */
9685 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9686 TREE_OPERAND (exp, 0),
9687 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9688 TREE_OPERAND (exp, 1))));
9689 /* Process as MINUS. */
9690 #endif
9692 case MINUS_EXPR:
9693 /* Non-zero iff operands of minus differ. */
9694 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9695 TREE_OPERAND (exp, 0),
9696 TREE_OPERAND (exp, 1)),
9697 NE, NE, if_false_label, if_true_label);
9698 break;
9700 case BIT_AND_EXPR:
9701 /* If we are AND'ing with a small constant, do this comparison in the
9702 smallest type that fits. If the machine doesn't have comparisons
9703 that small, it will be converted back to the wider comparison.
9704 This helps if we are testing the sign bit of a narrower object.
9705 combine can't do this for us because it can't know whether a
9706 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
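/* Illustrative example: for `x & 0x80' with X an int, I is 7, so the
   test can be done as a QImode comparison against zero when the
   target has a compare in that mode; otherwise it is simply converted
   back to the wider comparison, as noted above.  */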
9708 if (! SLOW_BYTE_ACCESS
9709 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9710 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9711 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9712 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9713 && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
9714 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9715 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9716 != CODE_FOR_nothing))
9718 do_jump (convert (type, exp), if_false_label, if_true_label);
9719 break;
9721 goto normal;
9723 case TRUTH_NOT_EXPR:
9724 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9725 break;
9727 case TRUTH_ANDIF_EXPR:
9728 if (if_false_label == 0)
9729 if_false_label = drop_through_label = gen_label_rtx ();
9730 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9731 start_cleanup_deferral ();
9732 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9733 end_cleanup_deferral ();
9734 break;
9736 case TRUTH_ORIF_EXPR:
9737 if (if_true_label == 0)
9738 if_true_label = drop_through_label = gen_label_rtx ();
9739 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9740 start_cleanup_deferral ();
9741 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9742 end_cleanup_deferral ();
9743 break;
9745 case COMPOUND_EXPR:
9746 push_temp_slots ();
9747 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9748 preserve_temp_slots (NULL_RTX);
9749 free_temp_slots ();
9750 pop_temp_slots ();
9751 emit_queue ();
9752 do_pending_stack_adjust ();
9753 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9754 break;
9756 case COMPONENT_REF:
9757 case BIT_FIELD_REF:
9758 case ARRAY_REF:
9759 case ARRAY_RANGE_REF:
9761 HOST_WIDE_INT bitsize, bitpos;
9762 int unsignedp;
9763 enum machine_mode mode;
9764 tree type;
9765 tree offset;
9766 int volatilep = 0;
9768 /* Get description of this reference. We don't actually care
9769 about the underlying object here. */
9770 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9771 &unsignedp, &volatilep);
9773 type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp);
9774 if (! SLOW_BYTE_ACCESS
9775 && type != 0 && bitsize >= 0
9776 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9777 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9778 != CODE_FOR_nothing))
9780 do_jump (convert (type, exp), if_false_label, if_true_label);
9781 break;
9783 goto normal;
9786 case COND_EXPR:
9787 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9788 if (integer_onep (TREE_OPERAND (exp, 1))
9789 && integer_zerop (TREE_OPERAND (exp, 2)))
9790 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9792 else if (integer_zerop (TREE_OPERAND (exp, 1))
9793 && integer_onep (TREE_OPERAND (exp, 2)))
9794 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9796 else
9798 rtx label1 = gen_label_rtx ();
9799 drop_through_label = gen_label_rtx ();
9801 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9803 start_cleanup_deferral ();
9804 /* Now the THEN-expression. */
9805 do_jump (TREE_OPERAND (exp, 1),
9806 if_false_label ? if_false_label : drop_through_label,
9807 if_true_label ? if_true_label : drop_through_label);
9808 /* In case the do_jump just above never jumps. */
9809 do_pending_stack_adjust ();
9810 emit_label (label1);
9812 /* Now the ELSE-expression. */
9813 do_jump (TREE_OPERAND (exp, 2),
9814 if_false_label ? if_false_label : drop_through_label,
9815 if_true_label ? if_true_label : drop_through_label);
9816 end_cleanup_deferral ();
9818 break;
9820 case EQ_EXPR:
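/* For complex operands, (a == b) is decomposed below into, roughly,
   REALPART (a) == REALPART (b) && IMAGPART (a) == IMAGPART (b),
   with the operands wrapped in SAVE_EXPRs so each is evaluated only
   once.  */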
9822 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9824 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9825 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9827 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9828 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9829 do_jump
9830 (fold
9831 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9832 fold (build (EQ_EXPR, TREE_TYPE (exp),
9833 fold (build1 (REALPART_EXPR,
9834 TREE_TYPE (inner_type),
9835 exp0)),
9836 fold (build1 (REALPART_EXPR,
9837 TREE_TYPE (inner_type),
9838 exp1)))),
9839 fold (build (EQ_EXPR, TREE_TYPE (exp),
9840 fold (build1 (IMAGPART_EXPR,
9841 TREE_TYPE (inner_type),
9842 exp0)),
9843 fold (build1 (IMAGPART_EXPR,
9844 TREE_TYPE (inner_type),
9845 exp1)))))),
9846 if_false_label, if_true_label);
9849 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9850 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9852 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9853 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9854 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9855 else
9856 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9857 break;
9860 case NE_EXPR:
9862 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9864 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9865 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9867 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9868 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9869 do_jump
9870 (fold
9871 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9872 fold (build (NE_EXPR, TREE_TYPE (exp),
9873 fold (build1 (REALPART_EXPR,
9874 TREE_TYPE (inner_type),
9875 exp0)),
9876 fold (build1 (REALPART_EXPR,
9877 TREE_TYPE (inner_type),
9878 exp1)))),
9879 fold (build (NE_EXPR, TREE_TYPE (exp),
9880 fold (build1 (IMAGPART_EXPR,
9881 TREE_TYPE (inner_type),
9882 exp0)),
9883 fold (build1 (IMAGPART_EXPR,
9884 TREE_TYPE (inner_type),
9885 exp1)))))),
9886 if_false_label, if_true_label);
9889 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9890 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9892 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9893 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9894 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9895 else
9896 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9897 break;
9900 case LT_EXPR:
9901 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9902 if (GET_MODE_CLASS (mode) == MODE_INT
9903 && ! can_compare_p (LT, mode, ccp_jump))
9904 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9905 else
9906 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9907 break;
9909 case LE_EXPR:
9910 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9911 if (GET_MODE_CLASS (mode) == MODE_INT
9912 && ! can_compare_p (LE, mode, ccp_jump))
9913 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9914 else
9915 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9916 break;
9918 case GT_EXPR:
9919 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9920 if (GET_MODE_CLASS (mode) == MODE_INT
9921 && ! can_compare_p (GT, mode, ccp_jump))
9922 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9923 else
9924 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9925 break;
9927 case GE_EXPR:
9928 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9929 if (GET_MODE_CLASS (mode) == MODE_INT
9930 && ! can_compare_p (GE, mode, ccp_jump))
9931 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9932 else
9933 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9934 break;
9936 case UNORDERED_EXPR:
9937 case ORDERED_EXPR:
9939 enum rtx_code cmp, rcmp;
9940 int do_rev;
9942 if (code == UNORDERED_EXPR)
9943 cmp = UNORDERED, rcmp = ORDERED;
9944 else
9945 cmp = ORDERED, rcmp = UNORDERED;
9946 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9948 do_rev = 0;
9949 if (! can_compare_p (cmp, mode, ccp_jump)
9950 && (can_compare_p (rcmp, mode, ccp_jump)
9951 /* If the target doesn't provide either UNORDERED or ORDERED
9952 comparisons, canonicalize on UNORDERED for the library. */
9953 || rcmp == UNORDERED))
9954 do_rev = 1;
9956 if (! do_rev)
9957 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9958 else
9959 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9961 break;
9964 enum rtx_code rcode1;
9965 enum tree_code tcode2;
9967 case UNLT_EXPR:
9968 rcode1 = UNLT;
9969 tcode2 = LT_EXPR;
9970 goto unordered_bcc;
9971 case UNLE_EXPR:
9972 rcode1 = UNLE;
9973 tcode2 = LE_EXPR;
9974 goto unordered_bcc;
9975 case UNGT_EXPR:
9976 rcode1 = UNGT;
9977 tcode2 = GT_EXPR;
9978 goto unordered_bcc;
9979 case UNGE_EXPR:
9980 rcode1 = UNGE;
9981 tcode2 = GE_EXPR;
9982 goto unordered_bcc;
9983 case UNEQ_EXPR:
9984 rcode1 = UNEQ;
9985 tcode2 = EQ_EXPR;
9986 goto unordered_bcc;
9988 unordered_bcc:
9989 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9990 if (can_compare_p (rcode1, mode, ccp_jump))
9991 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9992 if_true_label);
9993 else
9995 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9996 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9997 tree cmp0, cmp1;
9999 /* If the target doesn't support combined unordered
10000 compares, decompose into UNORDERED + comparison. */
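/* E.g. (illustrative) UNLT (a, b) becomes
   UNORDERED (a, b) || a < b, built as a TRUTH_ORIF_EXPR below.  */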
10001 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
10002 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
10003 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
10004 do_jump (exp, if_false_label, if_true_label);
10007 break;
10009 /* Special case:
10010 __builtin_expect (<test>, 0) and
10011 __builtin_expect (<test>, 1)
10013 We need to do this here, so that <test> is not converted to a SCC
10014 operation on machines that use condition code registers and COMPARE
10015 like the PowerPC, and then the jump is done based on whether the SCC
10016 operation produced a 1 or 0. */
10017 case CALL_EXPR:
10018 /* Check for a built-in function. */
10019 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
10021 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
10022 tree arglist = TREE_OPERAND (exp, 1);
10024 if (TREE_CODE (fndecl) == FUNCTION_DECL
10025 && DECL_BUILT_IN (fndecl)
10026 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
10027 && arglist != NULL_TREE
10028 && TREE_CHAIN (arglist) != NULL_TREE)
10030 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
10031 if_true_label);
10033 if (seq != NULL_RTX)
10035 emit_insn (seq);
10036 return;
10040 /* fall through and generate the normal code. */
10042 default:
10043 normal:
10044 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10045 #if 0
10046 /* This is not needed any more and causes poor code since it causes
10047 comparisons and tests from non-SI objects to have different code
10048 sequences. */
10049 /* Copy to register to avoid generating bad insns by cse
10050 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10051 if (!cse_not_expected && GET_CODE (temp) == MEM)
10052 temp = copy_to_reg (temp);
10053 #endif
10054 do_pending_stack_adjust ();
10055 /* Do any postincrements in the expression that was tested. */
10056 emit_queue ();
10058 if (GET_CODE (temp) == CONST_INT
10059 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
10060 || GET_CODE (temp) == LABEL_REF)
10062 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
10063 if (target)
10064 emit_jump (target);
10066 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10067 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
10068 /* Note swapping the labels gives us not-equal. */
10069 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10070 else if (GET_MODE (temp) != VOIDmode)
10071 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
10072 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10073 GET_MODE (temp), NULL_RTX,
10074 if_false_label, if_true_label);
10075 else
10076 abort ();
10079 if (drop_through_label)
10081 /* If do_jump produces code that might be jumped around,
10082 do any stack adjusts from that code, before the place
10083 where control merges in. */
10084 do_pending_stack_adjust ();
10085 emit_label (drop_through_label);
10089 /* Given a comparison expression EXP for values too wide to be compared
10090 with one insn, test the comparison and jump to the appropriate label.
10091 The code of EXP is ignored; we always test GT if SWAP is 0,
10092 and LT if SWAP is 1. */
10094 static void
10095 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10096 tree exp;
10097 int swap;
10098 rtx if_false_label, if_true_label;
10100 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10101 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10102 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10103 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10105 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
10108 /* Compare OP0 with OP1, word at a time, in mode MODE.
10109 UNSIGNEDP says to do unsigned comparison.
10110 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
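/* As an illustration, comparing two DImode values on a 32-bit target
   compares the high words first, jumping to IF_TRUE_LABEL if they
   compare greater and to IF_FALSE_LABEL if they otherwise differ
   (i.e. compare less), and goes on to the low words, compared
   unsigned, only when the high words are equal.  */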
10112 void
10113 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10114 enum machine_mode mode;
10115 int unsignedp;
10116 rtx op0, op1;
10117 rtx if_false_label, if_true_label;
10119 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10120 rtx drop_through_label = 0;
10121 int i;
10123 if (! if_true_label || ! if_false_label)
10124 drop_through_label = gen_label_rtx ();
10125 if (! if_true_label)
10126 if_true_label = drop_through_label;
10127 if (! if_false_label)
10128 if_false_label = drop_through_label;
10130 /* Compare a word at a time, high order first. */
10131 for (i = 0; i < nwords; i++)
10133 rtx op0_word, op1_word;
10135 if (WORDS_BIG_ENDIAN)
10137 op0_word = operand_subword_force (op0, i, mode);
10138 op1_word = operand_subword_force (op1, i, mode);
10140 else
10142 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10143 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10146 /* All but high-order word must be compared as unsigned. */
10147 do_compare_rtx_and_jump (op0_word, op1_word, GT,
10148 (unsignedp || i > 0), word_mode, NULL_RTX,
10149 NULL_RTX, if_true_label);
10151 /* Consider lower words only if these are equal. */
10152 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
10153 NULL_RTX, NULL_RTX, if_false_label);
10156 if (if_false_label)
10157 emit_jump (if_false_label);
10158 if (drop_through_label)
10159 emit_label (drop_through_label);
10162 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10163 with one insn, test the comparison and jump to the appropriate label. */
10165 static void
10166 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10167 tree exp;
10168 rtx if_false_label, if_true_label;
10170 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10171 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10172 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10173 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10174 int i;
10175 rtx drop_through_label = 0;
10177 if (! if_false_label)
10178 drop_through_label = if_false_label = gen_label_rtx ();
10180 for (i = 0; i < nwords; i++)
10181 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10182 operand_subword_force (op1, i, mode),
10183 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10184 word_mode, NULL_RTX, if_false_label, NULL_RTX);
10186 if (if_true_label)
10187 emit_jump (if_true_label);
10188 if (drop_through_label)
10189 emit_label (drop_through_label);
10192 /* Jump according to whether OP0 is 0.
10193 We assume that OP0 has an integer mode that is too wide
10194 for the available compare insns. */
10196 void
10197 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10198 rtx op0;
10199 rtx if_false_label, if_true_label;
10201 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10202 rtx part;
10203 int i;
10204 rtx drop_through_label = 0;
10206 /* The fastest way of doing this comparison on almost any machine is to
10207 "or" all the words and compare the result. If all have to be loaded
10208 from memory and this is a very wide item, it's possible this may
10209 be slower, but that's highly unlikely. */
10211 part = gen_reg_rtx (word_mode);
10212 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10213 for (i = 1; i < nwords && part != 0; i++)
10214 part = expand_binop (word_mode, ior_optab, part,
10215 operand_subword_force (op0, i, GET_MODE (op0)),
10216 part, 1, OPTAB_WIDEN);
10218 if (part != 0)
10220 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
10221 NULL_RTX, if_false_label, if_true_label);
10223 return;
10226 /* If we couldn't do the "or" simply, do this with a series of compares. */
10227 if (! if_false_label)
10228 drop_through_label = if_false_label = gen_label_rtx ();
10230 for (i = 0; i < nwords; i++)
10231 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10232 const0_rtx, EQ, 1, word_mode, NULL_RTX,
10233 if_false_label, NULL_RTX);
10235 if (if_true_label)
10236 emit_jump (if_true_label);
10238 if (drop_through_label)
10239 emit_label (drop_through_label);
10242 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
10243 (including code to compute the values to be compared)
10244 and set (CC0) according to the result.
10245 The decision as to signed or unsigned comparison must be made by the caller.
10247 We force a stack adjustment unless there are currently
10248 things pushed on the stack that aren't yet used.
10250 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10251 compared. */
10253 rtx
10254 compare_from_rtx (op0, op1, code, unsignedp, mode, size)
10255 rtx op0, op1;
10256 enum rtx_code code;
10257 int unsignedp;
10258 enum machine_mode mode;
10259 rtx size;
10261 enum rtx_code ucode;
10262 rtx tem;
10264 /* If one operand is constant, make it the second one. Only do this
10265 if the other operand is not constant as well. */
10267 if (swap_commutative_operands_p (op0, op1))
10269 tem = op0;
10270 op0 = op1;
10271 op1 = tem;
10272 code = swap_condition (code);
10275 if (flag_force_mem)
10277 op0 = force_not_mem (op0);
10278 op1 = force_not_mem (op1);
10281 do_pending_stack_adjust ();
10283 ucode = unsignedp ? unsigned_condition (code) : code;
10284 if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
10285 return tem;
10287 #if 0
10288 /* There's no need to do this now that combine.c can eliminate lots of
10289 sign extensions. This can be less efficient in certain cases on other
10290 machines. */
10292 /* If this is a signed equality comparison, we can do it as an
10293 unsigned comparison since zero-extension is cheaper than sign
10294 extension and comparisons with zero are done as unsigned. This is
10295 the case even on machines that can do fast sign extension, since
10296 zero-extension is easier to combine with other operations than
10297 sign-extension is. If we are comparing against a constant, we must
10298 convert it to what it would look like unsigned. */
10299 if ((code == EQ || code == NE) && ! unsignedp
10300 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10302 if (GET_CODE (op1) == CONST_INT
10303 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10304 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10305 unsignedp = 1;
10307 #endif
10309 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
10311 #if HAVE_cc0
10312 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10313 #else
10314 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
10315 #endif
10318 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10319 The decision as to signed or unsigned comparison must be made by the caller.
10321 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10322 compared. */
10324 void
10325 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
10326 if_false_label, if_true_label)
10327 rtx op0, op1;
10328 enum rtx_code code;
10329 int unsignedp;
10330 enum machine_mode mode;
10331 rtx size;
10332 rtx if_false_label, if_true_label;
10334 enum rtx_code ucode;
10335 rtx tem;
10336 int dummy_true_label = 0;
10338 /* Reverse the comparison if that is safe and we want to jump if it is
10339 false. */
10340 if (! if_true_label && ! FLOAT_MODE_P (mode))
10342 if_true_label = if_false_label;
10343 if_false_label = 0;
10344 code = reverse_condition (code);
10347 /* If one operand is constant, make it the second one. Only do this
10348 if the other operand is not constant as well. */
10350 if (swap_commutative_operands_p (op0, op1))
10352 tem = op0;
10353 op0 = op1;
10354 op1 = tem;
10355 code = swap_condition (code);
10358 if (flag_force_mem)
10360 op0 = force_not_mem (op0);
10361 op1 = force_not_mem (op1);
10364 do_pending_stack_adjust ();
10366 ucode = unsignedp ? unsigned_condition (code) : code;
10367 if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
10369 if (tem == const_true_rtx)
10371 if (if_true_label)
10372 emit_jump (if_true_label);
10374 else
10376 if (if_false_label)
10377 emit_jump (if_false_label);
10379 return;
10382 #if 0
10383 /* There's no need to do this now that combine.c can eliminate lots of
10384 sign extensions. This can be less efficient in certain cases on other
10385 machines. */
10387 /* If this is a signed equality comparison, we can do it as an
10388 unsigned comparison since zero-extension is cheaper than sign
10389 extension and comparisons with zero are done as unsigned. This is
10390 the case even on machines that can do fast sign extension, since
10391 zero-extension is easier to combine with other operations than
10392 sign-extension is. If we are comparing against a constant, we must
10393 convert it to what it would look like unsigned. */
10394 if ((code == EQ || code == NE) && ! unsignedp
10395 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10397 if (GET_CODE (op1) == CONST_INT
10398 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10399 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10400 unsignedp = 1;
10402 #endif
10404 if (! if_true_label)
10406 dummy_true_label = 1;
10407 if_true_label = gen_label_rtx ();
10410 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
10411 if_true_label);
10413 if (if_false_label)
10414 emit_jump (if_false_label);
10415 if (dummy_true_label)
10416 emit_label (if_true_label);
10419 /* Generate code for a comparison expression EXP (including code to compute
10420 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10421 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10422 generated code will drop through.
10423 SIGNED_CODE should be the rtx operation for this comparison for
10424 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10426 We force a stack adjustment unless there are currently
10427 things pushed on the stack that aren't yet used. */
10429 static void
10430 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10431 if_true_label)
10432 tree exp;
10433 enum rtx_code signed_code, unsigned_code;
10434 rtx if_false_label, if_true_label;
10436 rtx op0, op1;
10437 tree type;
10438 enum machine_mode mode;
10439 int unsignedp;
10440 enum rtx_code code;
10442 /* Don't crash if the comparison was erroneous. */
10443 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10444 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10445 return;
10447 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10448 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10449 return;
10451 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10452 mode = TYPE_MODE (type);
10453 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10454 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10455 || (GET_MODE_BITSIZE (mode)
10456 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10457 1)))))))
10459 /* op0 might have been replaced by a promoted constant, in which
10460 case the type of the second argument should be used.  */
10461 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10462 mode = TYPE_MODE (type);
10464 unsignedp = TREE_UNSIGNED (type);
10465 code = unsignedp ? unsigned_code : signed_code;
10467 #ifdef HAVE_canonicalize_funcptr_for_compare
10468 /* If function pointers need to be "canonicalized" before they can
10469 be reliably compared, then canonicalize them. */
10470 if (HAVE_canonicalize_funcptr_for_compare
10471 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10472 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10473 == FUNCTION_TYPE))
10475 rtx new_op0 = gen_reg_rtx (mode);
10477 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10478 op0 = new_op0;
10481 if (HAVE_canonicalize_funcptr_for_compare
10482 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10483 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10484 == FUNCTION_TYPE))
10486 rtx new_op1 = gen_reg_rtx (mode);
10488 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10489 op1 = new_op1;
10491 #endif
10493 /* Do any postincrements in the expression that was tested. */
10494 emit_queue ();
10496 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10497 ((mode == BLKmode)
10498 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10499 if_false_label, if_true_label);
10502 /* Generate code to calculate EXP using a store-flag instruction
10503 and return an rtx for the result. EXP is either a comparison
10504 or a TRUTH_NOT_EXPR whose operand is a comparison.
10506 If TARGET is nonzero, store the result there if convenient.
10508 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10509 cheap.
10511 Return zero if there is no suitable set-flag instruction
10512 available on this machine.
10514 Once expand_expr has been called on the arguments of the comparison,
10515 we are committed to doing the store flag, since it is not safe to
10516 re-evaluate the expression. We emit the store-flag insn by calling
10517 emit_store_flag, but only expand the arguments if we have a reason
10518 to believe that emit_store_flag will be successful. If we think that
10519 it will, but it isn't, we have to simulate the store-flag with a
10520 set/jump/set sequence. */
10522 static rtx
10523 do_store_flag (exp, target, mode, only_cheap)
10524 tree exp;
10525 rtx target;
10526 enum machine_mode mode;
10527 int only_cheap;
10529 enum rtx_code code;
10530 tree arg0, arg1, type;
10531 tree tem;
10532 enum machine_mode operand_mode;
10533 int invert = 0;
10534 int unsignedp;
10535 rtx op0, op1;
10536 enum insn_code icode;
10537 rtx subtarget = target;
10538 rtx result, label;
10540 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10541 result at the end. We can't simply invert the test since it would
10542 have already been inverted if it were valid. This case occurs for
10543 some floating-point comparisons. */
10545 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10546 invert = 1, exp = TREE_OPERAND (exp, 0);
10548 arg0 = TREE_OPERAND (exp, 0);
10549 arg1 = TREE_OPERAND (exp, 1);
10551 /* Don't crash if the comparison was erroneous. */
10552 if (arg0 == error_mark_node || arg1 == error_mark_node)
10553 return const0_rtx;
10555 type = TREE_TYPE (arg0);
10556 operand_mode = TYPE_MODE (type);
10557 unsignedp = TREE_UNSIGNED (type);
10559 /* We won't bother with BLKmode store-flag operations because it would mean
10560 passing a lot of information to emit_store_flag. */
10561 if (operand_mode == BLKmode)
10562 return 0;
10564 /* We won't bother with store-flag operations involving function pointers
10565 when function pointers must be canonicalized before comparisons. */
10566 #ifdef HAVE_canonicalize_funcptr_for_compare
10567 if (HAVE_canonicalize_funcptr_for_compare
10568 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10569 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10570 == FUNCTION_TYPE))
10571 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10572 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10573 == FUNCTION_TYPE))))
10574 return 0;
10575 #endif
10577 STRIP_NOPS (arg0);
10578 STRIP_NOPS (arg1);
10580 /* Get the rtx comparison code to use. We know that EXP is a comparison
10581 operation of some type. Some comparisons against 1 and -1 can be
10582 converted to comparisons with zero. Do so here so that the tests
10583 below will be aware that we have a comparison with zero. These
10584 tests will not catch constants in the first operand, but constants
10585 are rarely passed as the first operand. */
10587 switch (TREE_CODE (exp))
10589 case EQ_EXPR:
10590 code = EQ;
10591 break;
10592 case NE_EXPR:
10593 code = NE;
10594 break;
10595 case LT_EXPR:
10596 if (integer_onep (arg1))
10597 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10598 else
10599 code = unsignedp ? LTU : LT;
10600 break;
10601 case LE_EXPR:
10602 if (! unsignedp && integer_all_onesp (arg1))
10603 arg1 = integer_zero_node, code = LT;
10604 else
10605 code = unsignedp ? LEU : LE;
10606 break;
10607 case GT_EXPR:
10608 if (! unsignedp && integer_all_onesp (arg1))
10609 arg1 = integer_zero_node, code = GE;
10610 else
10611 code = unsignedp ? GTU : GT;
10612 break;
10613 case GE_EXPR:
10614 if (integer_onep (arg1))
10615 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10616 else
10617 code = unsignedp ? GEU : GE;
10618 break;
10620 case UNORDERED_EXPR:
10621 code = UNORDERED;
10622 break;
10623 case ORDERED_EXPR:
10624 code = ORDERED;
10625 break;
10626 case UNLT_EXPR:
10627 code = UNLT;
10628 break;
10629 case UNLE_EXPR:
10630 code = UNLE;
10631 break;
10632 case UNGT_EXPR:
10633 code = UNGT;
10634 break;
10635 case UNGE_EXPR:
10636 code = UNGE;
10637 break;
10638 case UNEQ_EXPR:
10639 code = UNEQ;
10640 break;
10642 default:
10643 abort ();
10646 /* Put a constant second. */
10647 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10649 tem = arg0; arg0 = arg1; arg1 = tem;
10650 code = swap_condition (code);
10653 /* If this is an equality or inequality test of a single bit, we can
10654 do this by shifting the bit being tested to the low-order bit and
10655 masking the result with the constant 1. If the condition was EQ,
10656 we xor it with 1. This does not require an scc insn and is faster
10657 than an scc insn even if we have it. */
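/* Illustrative example: `(x & 8) != 0' becomes (x >> 3) & 1, and
   `(x & 8) == 0' additionally XORs that low bit with 1; when the
   tested bit is the sign bit the final AND can be omitted.  */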
10659 if ((code == NE || code == EQ)
10660 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10661 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10663 tree inner = TREE_OPERAND (arg0, 0);
10664 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10665 int ops_unsignedp;
10667 /* If INNER is a right shift of a constant and it plus BITNUM does
10668 not overflow, adjust BITNUM and INNER. */
10670 if (TREE_CODE (inner) == RSHIFT_EXPR
10671 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10672 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10673 && bitnum < TYPE_PRECISION (type)
10674 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10675 bitnum - TYPE_PRECISION (type)))
10677 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10678 inner = TREE_OPERAND (inner, 0);
10681 /* If we are going to be able to omit the AND below, we must do our
10682 operations as unsigned. If we must use the AND, we have a choice.
10683 Normally unsigned is faster, but for some machines signed is. */
10684 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10685 #ifdef LOAD_EXTEND_OP
10686 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10687 #else
10688 : 1
10689 #endif
10690 );
10692 if (! get_subtarget (subtarget)
10693 || GET_MODE (subtarget) != operand_mode
10694 || ! safe_from_p (subtarget, inner, 1))
10695 subtarget = 0;
10697 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10699 if (bitnum != 0)
10700 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10701 size_int (bitnum), subtarget, ops_unsignedp);
10703 if (GET_MODE (op0) != mode)
10704 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10706 if ((code == EQ && ! invert) || (code == NE && invert))
10707 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10708 ops_unsignedp, OPTAB_LIB_WIDEN);
10710 /* Put the AND last so it can combine with more things. */
10711 if (bitnum != TYPE_PRECISION (type) - 1)
10712 op0 = expand_and (mode, op0, const1_rtx, subtarget);
10714 return op0;
10717 /* Now see if we are likely to be able to do this. Return if not. */
10718 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10719 return 0;
10721 icode = setcc_gen_code[(int) code];
10722 if (icode == CODE_FOR_nothing
10723 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10725 /* We can only do this if it is one of the special cases that
10726 can be handled without an scc insn. */
10727 if ((code == LT && integer_zerop (arg1))
10728 || (! only_cheap && code == GE && integer_zerop (arg1)))
10730 else if (BRANCH_COST >= 0
10731 && ! only_cheap && (code == NE || code == EQ)
10732 && TREE_CODE (type) != REAL_TYPE
10733 && ((abs_optab->handlers[(int) operand_mode].insn_code
10734 != CODE_FOR_nothing)
10735 || (ffs_optab->handlers[(int) operand_mode].insn_code
10736 != CODE_FOR_nothing)))
10738 else
10739 return 0;
10742 if (! get_subtarget (target)
10743 || GET_MODE (subtarget) != operand_mode
10744 || ! safe_from_p (subtarget, arg1, 1))
10745 subtarget = 0;
10747 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10748 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10750 if (target == 0)
10751 target = gen_reg_rtx (mode);
10753 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10754 because, if the emit_store_flag does anything it will succeed and
10755 OP0 and OP1 will not be used subsequently. */
10757 result = emit_store_flag (target, code,
10758 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10759 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10760 operand_mode, unsignedp, 1);
10762 if (result)
10764 if (invert)
10765 result = expand_binop (mode, xor_optab, result, const1_rtx,
10766 result, 0, OPTAB_LIB_WIDEN);
10767 return result;
10770 /* If this failed, we have to do this with set/compare/jump/set code. */
10771 if (GET_CODE (target) != REG
10772 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10773 target = gen_reg_rtx (GET_MODE (target));
10775 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10776 result = compare_from_rtx (op0, op1, code, unsignedp,
10777 operand_mode, NULL_RTX);
10778 if (GET_CODE (result) == CONST_INT)
10779 return (((result == const0_rtx && ! invert)
10780 || (result != const0_rtx && invert))
10781 ? const0_rtx : const1_rtx);
10783 /* The code of RESULT may not match CODE if compare_from_rtx
10784 decided to swap its operands and reverse the original code.
10786 We know that compare_from_rtx returns either a CONST_INT or
10787 a new comparison code, so it is safe to just extract the
10788 code from RESULT. */
10789 code = GET_CODE (result);
10791 label = gen_label_rtx ();
10792 if (bcc_gen_fctn[(int) code] == 0)
10793 abort ();
10795 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10796 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10797 emit_label (label);
10799 return target;
10803 /* Stubs in case we haven't got a casesi insn. */
10804 #ifndef HAVE_casesi
10805 # define HAVE_casesi 0
10806 # define gen_casesi(a, b, c, d, e) (0)
10807 # define CODE_FOR_casesi CODE_FOR_nothing
10808 #endif
10810 /* If the machine does not have a case insn that compares the bounds,
10811 this means extra overhead for dispatch tables, which raises the
10812 threshold for using them. */
10813 #ifndef CASE_VALUES_THRESHOLD
10814 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10815 #endif /* CASE_VALUES_THRESHOLD */
10817 unsigned int
10818 case_values_threshold ()
10820 return CASE_VALUES_THRESHOLD;
10823 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10824 0 otherwise (i.e. if there is no casesi instruction). */
10825 int
10826 try_casesi (index_type, index_expr, minval, range,
10827 table_label, default_label)
10828 tree index_type, index_expr, minval, range;
10829 rtx table_label ATTRIBUTE_UNUSED;
10830 rtx default_label;
10832 enum machine_mode index_mode = SImode;
10833 int index_bits = GET_MODE_BITSIZE (index_mode);
10834 rtx op1, op2, index;
10835 enum machine_mode op_mode;
10837 if (! HAVE_casesi)
10838 return 0;
10840 /* Convert the index to SImode. */
10841 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10843 enum machine_mode omode = TYPE_MODE (index_type);
10844 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10846 /* We must handle the endpoints in the original mode. */
10847 index_expr = build (MINUS_EXPR, index_type,
10848 index_expr, minval);
10849 minval = integer_zero_node;
10850 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10851 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10852 omode, 1, default_label);
10853 /* Now we can safely truncate. */
10854 index = convert_to_mode (index_mode, index, 0);
10855 }
10856 else
10857 {
10858 if (TYPE_MODE (index_type) != index_mode)
10859 {
10860 index_expr = convert ((*lang_hooks.types.type_for_size)
10861 (index_bits, 0), index_expr);
10862 index_type = TREE_TYPE (index_expr);
10863 }
10865 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10866 }
10867 emit_queue ();
10868 index = protect_from_queue (index, 0);
10869 do_pending_stack_adjust ();
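/* Force each operand into the mode and form demanded by the target's
   casesi pattern, copying it into a fresh register whenever the
   operand predicate rejects it.  */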
10871 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10872 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10873 (index, op_mode))
10874 index = copy_to_mode_reg (op_mode, index);
10876 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10878 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10879 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10880 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10881 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10882 (op1, op_mode))
10883 op1 = copy_to_mode_reg (op_mode, op1);
10885 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10887 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10888 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10889 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10890 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10891 (op2, op_mode))
10892 op2 = copy_to_mode_reg (op_mode, op2);
10894 emit_jump_insn (gen_casesi (index, op1, op2,
10895 table_label, default_label));
10896 return 1;
10897 }
10899 /* Attempt to generate a tablejump instruction; same concept. */
10900 #ifndef HAVE_tablejump
10901 #define HAVE_tablejump 0
10902 #define gen_tablejump(x, y) (0)
10903 #endif
10905 /* Subroutine of the next function.
10907 INDEX is the value being switched on, with the lowest value
10908 in the table already subtracted.
10909 MODE is its expected mode (needed if INDEX is constant).
10910 RANGE is the length of the jump table.
10911 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10913 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10914 index value is out of range. */
10916 static void
10917 do_tablejump (index, mode, range, table_label, default_label)
10918 rtx index, range, table_label, default_label;
10919 enum machine_mode mode;
10920 {
10921 rtx temp, vector;
10923 /* Do an unsigned comparison (in the proper mode) between the index
10924 expression and the value which represents the length of the range.
10925 Since we just finished subtracting the lower bound of the range
10926 from the index expression, this comparison allows us to simultaneously
10927 check that the original index expression value is both greater than
10928 or equal to the minimum value of the range and less than or equal to
10929 the maximum value of the range. */
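/* For instance (illustrative numbers): for case values 3 through 7 the
   lower bound 3 has already been subtracted and RANGE is 4, so valid
   indices are 0 ... 4.  An original value of 5 gives INDEX == 2, which is
   not above 4, so control falls through to the table jump; an original
   value of 1 gives INDEX == -2, a huge number when viewed as unsigned,
   so the GTU branch goes to DEFAULT_LABEL.  */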
10931 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10932 default_label);
10934 /* If index is in range, it must fit in Pmode.
10935 Convert to Pmode so we can index with it. */
10936 if (mode != Pmode)
10937 index = convert_to_mode (Pmode, index, 1);
10939 /* Don't let a MEM slip through, because then the INDEX that comes
10940 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10941 and break_out_memory_refs will go to work on it and mess it up. */
10942 #ifdef PIC_CASE_VECTOR_ADDRESS
10943 if (flag_pic && GET_CODE (index) != REG)
10944 index = copy_to_mode_reg (Pmode, index);
10945 #endif
10947 /* If flag_force_addr were to affect this address
10948 it could interfere with the tricky assumptions made
10949 about addresses that contain label-refs,
10950 which may be valid only very near the tablejump itself. */
10951 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10952 GET_MODE_SIZE, because this indicates how large insns are. The other
10953 uses should all be Pmode, because they are addresses. This code
10954 could fail if addresses and insns are not the same size. */
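/* The dispatch address is TABLE_LABEL + INDEX * (size of one table
   element); the element size is taken from CASE_VECTOR_MODE.  */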
10955 index = gen_rtx_PLUS (Pmode,
10956 gen_rtx_MULT (Pmode, index,
10957 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10958 gen_rtx_LABEL_REF (Pmode, table_label));
10959 #ifdef PIC_CASE_VECTOR_ADDRESS
10960 if (flag_pic)
10961 index = PIC_CASE_VECTOR_ADDRESS (index);
10962 else
10963 #endif
10964 index = memory_address_noforce (CASE_VECTOR_MODE, index);
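/* Load the selected table entry; the MEM is marked RTX_UNCHANGING_P
   because the jump table is read-only.  */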
10965 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10966 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10967 RTX_UNCHANGING_P (vector) = 1;
10968 convert_move (temp, vector, 0);
10970 emit_jump_insn (gen_tablejump (temp, table_label));
10972 /* If we are generating PIC code or if the table is PC-relative, the
10973 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10974 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10975 emit_barrier ();
10976 }
10978 int
10979 try_tablejump (index_type, index_expr, minval, range,
10980 table_label, default_label)
10981 tree index_type, index_expr, minval, range;
10982 rtx table_label, default_label;
10983 {
10984 rtx index;
10986 if (! HAVE_tablejump)
10987 return 0;
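/* Rebase the index so that the smallest case value maps to zero, since
   do_tablejump expects the lower bound to have been subtracted already.  */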
10989 index_expr = fold (build (MINUS_EXPR, index_type,
10990 convert (index_type, index_expr),
10991 convert (index_type, minval)));
10992 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10993 emit_queue ();
10994 index = protect_from_queue (index, 0);
10995 do_pending_stack_adjust ();
10997 do_tablejump (index, TYPE_MODE (index_type),
10998 convert_modes (TYPE_MODE (index_type),
10999 TYPE_MODE (TREE_TYPE (range)),
11000 expand_expr (range, NULL_RTX,
11001 VOIDmode, 0),
11002 TREE_UNSIGNED (TREE_TYPE (range))),
11003 table_label, default_label);
11004 return 1;
11005 }
11007 /* Nonzero if the mode is a valid vector mode for this architecture.
11008 This returns nonzero even if there is no hardware support for the
11009 vector mode, as long as we can emulate it with narrower modes. */
11011 int
11012 vector_mode_valid_p (mode)
11013 enum machine_mode mode;
11014 {
11015 enum mode_class class = GET_MODE_CLASS (mode);
11016 enum machine_mode innermode;
11018 /* Anything that is not a vector mode is rejected outright. */
11019 if (class != MODE_VECTOR_INT
11020 && class != MODE_VECTOR_FLOAT)
11021 return 0;
11023 /* The target supports this vector mode directly in hardware. */
11024 if (VECTOR_MODE_SUPPORTED_P (mode))
11025 return 1;
11027 innermode = GET_MODE_INNER (mode);
11029 /* We should probably return 1 if requesting V4DI and we have no DI,
11030 but do have V2DI; however, that case is probably very unlikely. */
11032 /* If we have support for the inner mode, we can safely emulate it.
11033 We may not have V2DI, but we can emulate it with a pair of DIs. */
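/* For instance, if DImode has a move pattern, a V2DI value can be
   moved as two DImode halves even without native V2DI support.  */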
11034 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
11035 }
11037 #include "gt-expr.h"