Fix for PR1654 - implement "movstrsi" pattern to copy simple blocks of memory.
gcc/expr.c
/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "defaults.h"
#include "toplev.h"
#define CEIL(x,y) (((x) + (y) - 1) / (y))

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* Nonzero if the machine description has been fixed to accept
   CONSTANT_P_RTX patterns.  We will emit a warning and continue
   if we find we must actually use such a beast.  */
static int can_handle_constant_p;

/* Don't check memory usage, since code is being emitted to check a memory
   usage.  Used when flag_check_memory_usage is true, to avoid infinite
   recursion.  */
static int in_check_memory_usage;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int len;
  int offset;
  int reverse;
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};

extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;

static rtx get_push_address	PROTO ((int));

static rtx enqueue_insn	PROTO((rtx, rtx));
static int queued_subexp_p	PROTO((rtx));
static void init_queue	PROTO((void));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1	PROTO((rtx (*) (rtx, ...), enum machine_mode,
				       struct move_by_pieces *));
static void clear_by_pieces	PROTO((rtx, int, int));
static void clear_by_pieces_1	PROTO((rtx (*) (rtx, ...), enum machine_mode,
				       struct clear_by_pieces *));
static int is_zeros_p		PROTO((tree));
static int mostly_zeros_p	PROTO((tree));
static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
					    tree, tree, int));
static void store_constructor	PROTO((tree, rtx, int));
static rtx store_field		PROTO((rtx, int, int, enum machine_mode, tree,
				       enum machine_mode, int, int, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PROTO((enum expand_modifier));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p		PROTO((rtx, tree, int));
static int fixed_type_p	PROTO((tree));
static rtx var_rtx		PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant	PROTO((tree, tree *));
static tree c_strlen		PROTO((tree));
static rtx get_memory_rtx	PROTO((tree));
static rtx expand_builtin	PROTO((tree, rtx, rtx,
				       enum machine_mode, int));
static int apply_args_size	PROTO((void));
static int apply_result_size	PROTO((void));
static rtx result_vector	PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply	PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment	PROTO((tree, int, int));
static void preexpand_calls	PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_for_compare	PROTO((rtx, rtx, rtx));
static rtx compare		PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag	PROTO((tree, rtx, enum machine_mode, int));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif
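
/* Worked example (editorial sketch, illustrative only): copying a 16-byte
   block on a 32-bit target takes four SImode moves.  When a movstr pattern
   exists, MOVE_RATIO defaults to 2, so the four-insn inline sequence loses
   and emit_block_move tries the movstr pattern instead.  Without such a
   pattern, the default ratio of 15 (3 under -Os) lets move_by_pieces emit
   the copy inline.  */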
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif

/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  char *free_point;

  start_sequence ();

  /* Since we are on the permanent obstack, we must be sure we save this
     spot AFTER we call start_sequence, since it will reuse the rtl it
     makes.  */
  free_point = (char *) oballoc (0);

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  /* Find out if CONSTANT_P_RTX is accepted.  */
  SET_DEST (pat) = gen_rtx_REG (TYPE_MODE (integer_type_node),
				FIRST_PSEUDO_REGISTER);
  SET_SRC (pat) = gen_rtx_CONSTANT_P_RTX (TYPE_MODE (integer_type_node),
					  SET_DEST (pat));
  if (recog (pat, insn, &num_clobbers) >= 0)
    can_handle_constant_p = 1;

  end_sequence ();
  obfree (free_point);
}
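
/* Illustrative note (editorial sketch, not authoritative): after this
   runs, direct_load[(int) SImode] is 1 on a target whose movsi pattern
   accepts a memory source directly, so such a mode can be used directly
   when accessing a field of that mode; the concrete values depend
   entirely on what the target's move patterns recognize.  */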
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var),
				  var, NULL_RTX, NULL_RTX, body,
				  pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
	  MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
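
/* Usage sketch (editorial addition, hypothetical and illustrative):
   a caller expanding a post-increment might do

     rtx q = enqueue_insn (var, gen_add2_insn (var, const1_rtx));
     ...
     rtx val = protect_from_queue (q, 0);

   VAL is VAR itself while the increment is still queued, or a copy of
   the old value once the queue has been flushed.  Pass MODIFY == 1 only
   when the caller intends to store into the result.  */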
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	{
	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));
	}
      else
	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */  /* SPEE 900220 */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
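
/* Usage sketch (editorial addition, illustrative only): widening a QImode
   pseudo into an SImode pseudo with sign extension:

     rtx narrow = gen_reg_rtx (QImode);
     rtx wide = gen_reg_rtx (SImode);
     convert_move (wide, narrow, 0);

   With UNSIGNEDP nonzero the same call zero-extends instead; which insns
   are emitted depends on the extend/truncate patterns the target
   provides.  */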
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
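
/* Worked example (editorial addition, illustrative only): narrowing the
   constant -1 from SImode to QImode,

     convert_modes (QImode, SImode, GEN_INT (-1), 1);

   takes the gen_lowpart path, since a CONST_INT fits in
   HOST_BITS_PER_WIDE_INT, and returns the constant reinterpreted in
   QImode without emitting any insns.  */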
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
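
/* Worked example (editorial addition, illustrative only): with MOVE_MAX
   == 4, word-aligned operands (ALIGN 4) and L == 11, the loop above counts

     11 / 4 = 2 SImode moves (3 bytes remain),
      3 / 2 = 1 HImode move  (1 byte remains),
      1 / 1 = 1 QImode move,

   for a total of 4 insns; this is the count emit_block_move compares
   against MOVE_RATIO.  */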
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx_MEM (mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

      from1
	= (data->autinc_from
	   ? gen_rtx_MEM (mode, data->from_addr)
	   : copy_rtx (change_address (data->from, mode,
				       plus_constant (data->from_addr,
						      data->offset))));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= (GET_MODE_MASK (mode) >> 1)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return 0;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
	 memcpy in this context.

	 This could be a user call to memcpy and the user may wish to
	 examine the return value from memcpy.

	 For targets where libcalls and normal calls have different conventions
	 for returning pointers, we could end up generating incorrect code.

	 So instead of using a libcall sequence we build up a suitable
	 CALL_EXPR and expand the call in the normal fashion.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;

	  /* This was copied from except.c, I don't know if all this is
	     necessary in this context or not.  */
	  fn = get_identifier ("memcpy");
	  push_obstacks_nochange ();
	  end_temporary_allocation ();
	  fntype = build_pointer_type (void_type_node);
	  fntype = build_function_type (fntype, NULL_TREE);
	  fn = build_decl (FUNCTION_DECL, fn, fntype);
	  DECL_EXTERNAL (fn) = 1;
	  TREE_PUBLIC (fn) = 1;
	  DECL_ARTIFICIAL (fn) = 1;
	  make_decl_rtl (fn, NULL_PTR, 1);
	  assemble_external (fn);
	  pop_obstacks ();
	}

      /* We need to make an argument list for the function call.

	 memcpy has three arguments, the first two are void * addresses and
	 the last is a size_t byte count for the copy.  */
      arg_list
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node),
				      XEXP (x, 0)));
      TREE_CHAIN (arg_list)
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node),
				      XEXP (y, 0)));
      TREE_CHAIN (TREE_CHAIN (arg_list))
	= build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
			 call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
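
/* Illustrative note (editorial addition, not authoritative): for SImode,
   the loop above calls the target's movstr pattern through GEN_FCN,
   roughly equivalent to

     rtx opalign = GEN_INT (align);
     rtx op2 = convert_to_mode (SImode, size, 1);
     pat = gen_movstrsi (x, y, op2, opalign);
     if (pat)
       emit_insn (pat);

   where gen_movstrsi comes from a "movstrsi" define_expand in the machine
   description.  Such an expander receives the destination BLKmode MEM as
   operand 0, the source MEM as operand 1, the byte count as operand 2 and
   the shared alignment as operand 3, and either emits a copy sequence or
   FAILs, in which case this function falls back on the library call.  */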
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (change_address (x, mode, NULL),
		      gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
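
/* Worked example (editorial addition, illustrative only): storing a
   3-byte BLKmode value on a big-endian target with UNITS_PER_WORD == 4:
   SIZE == 3 is not the size of any integer mode, so the register is
   shifted left by (4 - 3) * 8 = 8 bits and the whole word is stored,
   leaving the three significant bytes at the low-order memory
   addresses.  */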
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  ALIGN is the known alignment of
   SRC in bits.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */

void
emit_group_load (dst, orig_src, ssize, align)
     rtx dst, orig_src;
     int align, ssize;
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* If we won't be loading directly from memory, protect the real source
     from strange tricks we might play.  */
  src = orig_src;
  if (GET_CODE (src) != MEM)
    {
      src = gen_reg_rtx (GET_MODE (orig_src));
      emit_move_insn (src, orig_src);
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + bytelen > ssize)
	{
	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  if (bytelen <= 0)
	    abort ();
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
	  && align * BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i],
			  change_address (src, mode,
					  plus_constant (XEXP (src, 0),
							 bytepos)));
	}
      else
	{
	  tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				       bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				       mode, mode, align, ssize);
	}

      if (BYTES_BIG_ENDIAN && shift)
	{
	  expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
			tmps[i], 0, OPTAB_WIDEN);
	}
    }
  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
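
/* Illustrative example (editorial addition): a DST PARALLEL describing a
   value returned in two registers, say bytes 0-7 in (reg:DI 3) and bytes
   8-15 in (reg:DI 4), looks like

     (parallel [(expr_list (reg:DI 3) (const_int 0))
		(expr_list (reg:DI 4) (const_int 8))])

   Each element pairs a register with its byte position in the block;
   emit_group_load extracts each piece from SRC and moves it in.  */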
1972 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1973 registers represented by a PARALLEL. SSIZE represents the total size of
1974 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
1976 void
1977 emit_group_store (orig_dst, src, ssize, align)
1978 rtx orig_dst, src;
1979 int ssize, align;
1981 rtx *tmps, dst;
1982 int start, i;
1984 if (GET_CODE (src) != PARALLEL)
1985 abort ();
1987 /* Check for a NULL entry, used to indicate that the parameter goes
1988 both on the stack and in registers. */
1989 if (XEXP (XVECEXP (src, 0, 0), 0))
1990 start = 0;
1991 else
1992 start = 1;
1994 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
1996 /* Copy the (probable) hard regs into pseudos. */
1997 for (i = start; i < XVECLEN (src, 0); i++)
1999 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2000 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2001 emit_move_insn (tmps[i], reg);
2003 emit_queue();
2005 /* If we won't be storing directly into memory, protect the real destination
2006 from strange tricks we might play. */
2007 dst = orig_dst;
2008 if (GET_CODE (dst) != MEM)
2010 dst = gen_reg_rtx (GET_MODE (orig_dst));
2011 /* Make life a bit easier for combine. */
2012 emit_move_insn (dst, const0_rtx);
2014 else if (! MEM_IN_STRUCT_P (dst))
2016 /* store_bit_field requires that memory operations have
2017 mem_in_struct_p set; we might not. */
2019 dst = copy_rtx (orig_dst);
2020 MEM_IN_STRUCT_P (dst) = 1;
2023 /* Process the pieces. */
2024 for (i = start; i < XVECLEN (src, 0); i++)
2026 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2027 enum machine_mode mode = GET_MODE (tmps[i]);
2028 int bytelen = GET_MODE_SIZE (mode);
2030 /* Handle trailing fragments that run over the size of the struct. */
2031 if (ssize >= 0 && bytepos + bytelen > ssize)
2033 if (BYTES_BIG_ENDIAN)
2035 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2036 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2037 tmps[i], 0, OPTAB_WIDEN);
2039 bytelen = ssize - bytepos;
2042 /* Optimize the access just a bit. */
2043 if (GET_CODE (dst) == MEM
2044 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2045 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2046 && bytelen == GET_MODE_SIZE (mode))
2048 emit_move_insn (change_address (dst, mode,
2049 plus_constant (XEXP (dst, 0),
2050 bytepos)),
2051 tmps[i]);
2053 else
2055 store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT,
2056 mode, tmps[i], align, ssize);
2059 emit_queue();
2061 /* Copy from the pseudo into the (probable) hard reg. */
2062 if (GET_CODE (dst) == REG)
2063 emit_move_insn (orig_dst, dst);
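/* Illustration only (added sketch): the inverse of the emit_group_load
   example above.  With `regs' the same kind of PARALLEL and `mem' an
   8-byte, 4-byte-aligned BLKmode destination (both hypothetical), a caller
   would write:

     emit_group_store (mem, regs, 8, 4);  */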
2066 /* Add a USE expression for REG to the (possibly empty) list pointed
2067 to by CALL_FUSAGE. REG must denote a hard register. */
2069 void
2070 use_reg (call_fusage, reg)
2071 rtx *call_fusage, reg;
2073 if (GET_CODE (reg) != REG
2074 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2075 abort();
2077 *call_fusage
2078 = gen_rtx_EXPR_LIST (VOIDmode,
2079 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2082 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2083 starting at REGNO. All of these registers must be hard registers. */
2085 void
2086 use_regs (call_fusage, regno, nregs)
2087 rtx *call_fusage;
2088 int regno;
2089 int nregs;
2091 int i;
2093 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2094 abort ();
2096 for (i = 0; i < nregs; i++)
2097 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2100 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2101 PARALLEL REGS. This is for calls that pass values in multiple
2102 non-contiguous locations. The Irix 6 ABI has examples of this. */
2104 void
2105 use_group_regs (call_fusage, regs)
2106 rtx *call_fusage;
2107 rtx regs;
2109 int i;
2111 for (i = 0; i < XVECLEN (regs, 0); i++)
2113 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2115 /* A NULL entry means the parameter goes both on the stack and in
2116 registers. This can also be a MEM for targets that pass values
2117 partially on the stack and partially in registers. */
2118 if (reg != 0 && GET_CODE (reg) == REG)
2119 use_reg (call_fusage, reg);
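/* Illustration only (added sketch): the usual way these helpers are
   combined when emitting a call.  The SImode argument registers 0 and 1
   are hypothetical; `call_insn' stands for the CALL_INSN just emitted, and
   hanging the list off CALL_INSN_FUNCTION_USAGE is the usual convention.  */
#if 0
{
  rtx call_fusage = 0;

  use_reg (&call_fusage, gen_rtx_REG (SImode, 0));
  use_regs (&call_fusage, 1, 1);

  /* ... emit the call here, obtaining CALL_INSN ... */
  CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
}
#endif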
2123 /* Generate several move instructions to clear LEN bytes of block TO.
2124 (A MEM rtx with BLKmode). The caller must pass TO through
2125 protect_from_queue before calling. ALIGN (in bytes) is the maximum
2126 alignment we can assume. */
2128 static void
2129 clear_by_pieces (to, len, align)
2130 rtx to;
2131 int len, align;
2133 struct clear_by_pieces data;
2134 rtx to_addr = XEXP (to, 0);
2135 int max_size = MOVE_MAX + 1;
2137 data.offset = 0;
2138 data.to_addr = to_addr;
2139 data.to = to;
2140 data.autinc_to
2141 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2142 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2144 data.explicit_inc_to = 0;
2145 data.reverse
2146 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2147 if (data.reverse) data.offset = len;
2148 data.len = len;
2150 data.to_struct = MEM_IN_STRUCT_P (to);
2152 /* If clearing requires more than two move insns,
2153 copy the address to a register (to make displacements shorter)
2154 and use post-increment if available. */
2155 if (!data.autinc_to
2156 && move_by_pieces_ninsns (len, align) > 2)
2158 #ifdef HAVE_PRE_DECREMENT
2159 if (data.reverse && ! data.autinc_to)
2161 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2162 data.autinc_to = 1;
2163 data.explicit_inc_to = -1;
2165 #endif
2166 #ifdef HAVE_POST_INCREMENT
2167 if (! data.reverse && ! data.autinc_to)
2169 data.to_addr = copy_addr_to_reg (to_addr);
2170 data.autinc_to = 1;
2171 data.explicit_inc_to = 1;
2173 #endif
2174 if (!data.autinc_to && CONSTANT_P (to_addr))
2175 data.to_addr = copy_addr_to_reg (to_addr);
2178 if (! SLOW_UNALIGNED_ACCESS
2179 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2180 align = MOVE_MAX;
2182 /* First move what we can in the largest integer mode, then go to
2183 successively smaller modes. */
2185 while (max_size > 1)
2187 enum machine_mode mode = VOIDmode, tmode;
2188 enum insn_code icode;
2190 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2191 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2192 if (GET_MODE_SIZE (tmode) < max_size)
2193 mode = tmode;
2195 if (mode == VOIDmode)
2196 break;
2198 icode = mov_optab->handlers[(int) mode].insn_code;
2199 if (icode != CODE_FOR_nothing
2200 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2201 GET_MODE_SIZE (mode)))
2202 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2204 max_size = GET_MODE_SIZE (mode);
2207 /* The code above should have handled everything. */
2208 if (data.len != 0)
2209 abort ();
2212 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2213 with move instructions for mode MODE. GENFUN is the gen_... function
2214 to make a move insn for that mode. DATA has all the other info. */
2216 static void
2217 clear_by_pieces_1 (genfun, mode, data)
2218 rtx (*genfun) PROTO ((rtx, ...));
2219 enum machine_mode mode;
2220 struct clear_by_pieces *data;
2222 register int size = GET_MODE_SIZE (mode);
2223 register rtx to1;
2225 while (data->len >= size)
2227 if (data->reverse) data->offset -= size;
2229 to1 = (data->autinc_to
2230 ? gen_rtx_MEM (mode, data->to_addr)
2231 : copy_rtx (change_address (data->to, mode,
2232 plus_constant (data->to_addr,
2233 data->offset))));
2234 MEM_IN_STRUCT_P (to1) = data->to_struct;
2236 #ifdef HAVE_PRE_DECREMENT
2237 if (data->explicit_inc_to < 0)
2238 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2239 #endif
2241 emit_insn ((*genfun) (to1, const0_rtx));
2242 #ifdef HAVE_POST_INCREMENT
2243 if (data->explicit_inc_to > 0)
2244 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2245 #endif
2247 if (! data->reverse) data->offset += size;
2249 data->len -= size;
2253 /* Write zeros through the storage of OBJECT.
2254 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2255 the maximum alignment we can assume, measured in bytes.
2257 If we call a function that returns the length of the block, return it. */
2259 rtx
2260 clear_storage (object, size, align)
2261 rtx object;
2262 rtx size;
2263 int align;
2265 #ifdef TARGET_MEM_FUNCTIONS
2266 static tree fn;
2267 tree call_expr, arg_list;
2268 #endif
2269 rtx retval = 0;
2271 if (GET_MODE (object) == BLKmode)
2273 object = protect_from_queue (object, 1);
2274 size = protect_from_queue (size, 0);
2276 if (GET_CODE (size) == CONST_INT
2277 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2278 clear_by_pieces (object, INTVAL (size), align);
2280 else
2282 /* Try the most limited insn first, because there's no point
2283 including more than one in the machine description unless
2284 the more limited one has some advantage. */
2286 rtx opalign = GEN_INT (align);
2287 enum machine_mode mode;
2289 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2290 mode = GET_MODE_WIDER_MODE (mode))
2292 enum insn_code code = clrstr_optab[(int) mode];
2294 if (code != CODE_FOR_nothing
2295 /* We don't need MODE to be narrower than
2296 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2297 the mode mask, as it is returned by the macro, it will
2298 definitely be less than the actual mode mask. */
2299 && ((GET_CODE (size) == CONST_INT
2300 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2301 <= (GET_MODE_MASK (mode) >> 1)))
2302 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2303 && (insn_operand_predicate[(int) code][0] == 0
2304 || (*insn_operand_predicate[(int) code][0]) (object,
2305 BLKmode))
2306 && (insn_operand_predicate[(int) code][2] == 0
2307 || (*insn_operand_predicate[(int) code][2]) (opalign,
2308 VOIDmode)))
2310 rtx op1;
2311 rtx last = get_last_insn ();
2312 rtx pat;
2314 op1 = convert_to_mode (mode, size, 1);
2315 if (insn_operand_predicate[(int) code][1] != 0
2316 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2317 mode))
2318 op1 = copy_to_mode_reg (mode, op1);
2320 pat = GEN_FCN ((int) code) (object, op1, opalign);
2321 if (pat)
2323 emit_insn (pat);
2324 return 0;
2326 else
2327 delete_insns_since (last);
2332 #ifdef TARGET_MEM_FUNCTIONS
2333 /* It is incorrect to use the libcall calling conventions to call
2334 memset in this context.
2336 This could be a user call to memset and the user may wish to
2337 examine the return value from memset.
2339 For targets where libcalls and normal calls have different conventions
2340 for returning pointers, we could end up generating incorrect code.
2342 So instead of using a libcall sequence we build up a suitable
2343 CALL_EXPR and expand the call in the normal fashion. */
2344 if (fn == NULL_TREE)
2346 tree fntype;
2348 /* This was copied from except.c; I don't know whether all of this is
2349 necessary in this context. */
2350 fn = get_identifier ("memset");
2351 push_obstacks_nochange ();
2352 end_temporary_allocation ();
2353 fntype = build_pointer_type (void_type_node);
2354 fntype = build_function_type (fntype, NULL_TREE);
2355 fn = build_decl (FUNCTION_DECL, fn, fntype);
2356 DECL_EXTERNAL (fn) = 1;
2357 TREE_PUBLIC (fn) = 1;
2358 DECL_ARTIFICIAL (fn) = 1;
2359 make_decl_rtl (fn, NULL_PTR, 1);
2360 assemble_external (fn);
2361 pop_obstacks ();
2364 /* We need to make an argument list for the function call.
2366 memset has three arguments: the first is a void * address, the
2367 second an integer with the initialization value, and the last a size_t
2368 byte count for the copy. */
2369 arg_list
2370 = build_tree_list (NULL_TREE,
2371 make_tree (build_pointer_type (void_type_node),
2372 XEXP (object, 0)));
2373 TREE_CHAIN (arg_list)
2374 = build_tree_list (NULL_TREE,
2375 make_tree (integer_type_node, const0_rtx));
2376 TREE_CHAIN (TREE_CHAIN (arg_list))
2377 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2378 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2380 /* Now we have to build up the CALL_EXPR itself. */
2381 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2382 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2383 call_expr, arg_list, NULL_TREE);
2384 TREE_SIDE_EFFECTS (call_expr) = 1;
2386 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2387 #else
2388 emit_library_call (bzero_libfunc, 0,
2389 VOIDmode, 2,
2390 XEXP (object, 0), Pmode,
2391 convert_to_mode
2392 (TYPE_MODE (integer_type_node), size,
2393 TREE_UNSIGNED (integer_type_node)),
2394 TYPE_MODE (integer_type_node));
2395 #endif
2398 else
2399 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2401 return retval;
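/* Illustration only (added sketch): zeroing a 64-byte BLKmode temporary.
   The size and the 4-byte alignment are hypothetical.  A small constant
   size like this one is normally cleared inline by clear_by_pieces; larger
   or variable sizes fall through to a clrstr pattern or the memset/bzero
   call above.  */
#if 0
{
  rtx blk = assign_stack_temp (BLKmode, 64, 0);

  clear_storage (blk, GEN_INT (64), 4);
}
#endif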
2404 /* Generate code to copy Y into X.
2405 Both Y and X must have the same mode, except that
2406 Y can be a constant with VOIDmode.
2407 This mode cannot be BLKmode; use emit_block_move for that.
2409 Return the last instruction emitted. */
2411 rtx
2412 emit_move_insn (x, y)
2413 rtx x, y;
2415 enum machine_mode mode = GET_MODE (x);
2417 x = protect_from_queue (x, 1);
2418 y = protect_from_queue (y, 0);
2420 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2421 abort ();
2423 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2424 y = force_const_mem (mode, y);
2426 /* If X or Y are memory references, verify that their addresses are valid
2427 for the machine. */
2428 if (GET_CODE (x) == MEM
2429 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2430 && ! push_operand (x, GET_MODE (x)))
2431 || (flag_force_addr
2432 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2433 x = change_address (x, VOIDmode, XEXP (x, 0));
2435 if (GET_CODE (y) == MEM
2436 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2437 || (flag_force_addr
2438 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2439 y = change_address (y, VOIDmode, XEXP (y, 0));
2441 if (mode == BLKmode)
2442 abort ();
2444 return emit_move_insn_1 (x, y);
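/* Illustration only (added sketch): as documented above, Y may be a
   VOIDmode constant.  The SImode pseudo is hypothetical.  */
#if 0
{
  rtx reg = gen_reg_rtx (SImode);

  emit_move_insn (reg, GEN_INT (42));
}
#endif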
2447 /* Low level part of emit_move_insn.
2448 Called just like emit_move_insn, but assumes X and Y
2449 are basically valid. */
2451 rtx
2452 emit_move_insn_1 (x, y)
2453 rtx x, y;
2455 enum machine_mode mode = GET_MODE (x);
2456 enum machine_mode submode;
2457 enum mode_class class = GET_MODE_CLASS (mode);
2458 int i;
2460 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2461 return
2462 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2464 /* Expand complex moves by moving real part and imag part, if possible. */
2465 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2466 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2467 * BITS_PER_UNIT),
2468 (class == MODE_COMPLEX_INT
2469 ? MODE_INT : MODE_FLOAT),
2470 0))
2471 && (mov_optab->handlers[(int) submode].insn_code
2472 != CODE_FOR_nothing))
2474 /* Don't split destination if it is a stack push. */
2475 int stack = push_operand (x, GET_MODE (x));
2477 /* If this is a stack, push the highpart first, so it
2478 will be in the argument order.
2480 In that case, change_address is used only to convert
2481 the mode, not to change the address. */
2482 if (stack)
2484 /* Note that the real part always precedes the imag part in memory
2485 regardless of machine's endianness. */
2486 #ifdef STACK_GROWS_DOWNWARD
2487 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2488 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2489 gen_imagpart (submode, y)));
2490 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2491 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2492 gen_realpart (submode, y)));
2493 #else
2494 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2495 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2496 gen_realpart (submode, y)));
2497 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2498 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2499 gen_imagpart (submode, y)));
2500 #endif
2502 else
2504 /* Show the output dies here. */
2505 if (x != y)
2506 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2508 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2509 (gen_realpart (submode, x), gen_realpart (submode, y)));
2510 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2511 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2514 return get_last_insn ();
2517 /* This will handle any multi-word mode that lacks a move_insn pattern.
2518 However, you will get better code if you define such patterns,
2519 even if they must turn into multiple assembler instructions. */
2520 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2522 rtx last_insn = 0;
2524 #ifdef PUSH_ROUNDING
2526 /* If X is a push on the stack, do the push now and replace
2527 X with a reference to the stack pointer. */
2528 if (push_operand (x, GET_MODE (x)))
2530 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2531 x = change_address (x, VOIDmode, stack_pointer_rtx);
2533 #endif
2535 /* Show the output dies here. */
2536 if (x != y)
2537 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2539 for (i = 0;
2540 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2541 i++)
2543 rtx xpart = operand_subword (x, i, 1, mode);
2544 rtx ypart = operand_subword (y, i, 1, mode);
2546 /* If we can't get a part of Y, put Y into memory if it is a
2547 constant. Otherwise, force it into a register. If we still
2548 can't get a part of Y, abort. */
2549 if (ypart == 0 && CONSTANT_P (y))
2551 y = force_const_mem (mode, y);
2552 ypart = operand_subword (y, i, 1, mode);
2554 else if (ypart == 0)
2555 ypart = operand_subword_force (y, i, mode);
2557 if (xpart == 0 || ypart == 0)
2558 abort ();
2560 last_insn = emit_move_insn (xpart, ypart);
2563 return last_insn;
2565 else
2566 abort ();
2569 /* Pushing data onto the stack. */
2571 /* Push a block of length SIZE (perhaps variable)
2572 and return an rtx to address the beginning of the block.
2573 Note that it is not possible for the value returned to be a QUEUED.
2574 The value may be virtual_outgoing_args_rtx.
2576 EXTRA is the number of bytes of padding to push in addition to SIZE.
2577 BELOW nonzero means this padding comes at low addresses;
2578 otherwise, the padding comes at high addresses. */
2580 rtx
2581 push_block (size, extra, below)
2582 rtx size;
2583 int extra, below;
2585 register rtx temp;
2587 size = convert_modes (Pmode, ptr_mode, size, 1);
2588 if (CONSTANT_P (size))
2589 anti_adjust_stack (plus_constant (size, extra));
2590 else if (GET_CODE (size) == REG && extra == 0)
2591 anti_adjust_stack (size);
2592 else
2594 rtx temp = copy_to_mode_reg (Pmode, size);
2595 if (extra != 0)
2596 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2597 temp, 0, OPTAB_LIB_WIDEN);
2598 anti_adjust_stack (temp);
2601 #ifdef STACK_GROWS_DOWNWARD
2602 temp = virtual_outgoing_args_rtx;
2603 if (extra != 0 && below)
2604 temp = plus_constant (temp, extra);
2605 #else
2606 if (GET_CODE (size) == CONST_INT)
2607 temp = plus_constant (virtual_outgoing_args_rtx,
2608 - INTVAL (size) - (below ? 0 : extra));
2609 else if (extra != 0 && !below)
2610 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2611 negate_rtx (Pmode, plus_constant (size, extra)));
2612 else
2613 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2614 negate_rtx (Pmode, size));
2615 #endif
2617 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2620 rtx
2621 gen_push_operand ()
2623 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
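/* Illustration only (added note): on a STACK_GROWS_DOWNWARD target the rtx
   built above is (pre_dec:Pmode (reg sp)), so wrapping it in a MEM yields a
   push destination, as in the BLKmode push further below:

     emit_move_insn (gen_rtx_MEM (word_mode, gen_push_operand ()), x);

   where `x' is a hypothetical word-sized value.  */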
2626 /* Return an rtx for the address of the beginning of an as-if-it-were-pushed
2627 block of SIZE bytes. */
2629 static rtx
2630 get_push_address (size)
2631 int size;
2633 register rtx temp;
2635 if (STACK_PUSH_CODE == POST_DEC)
2636 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2637 else if (STACK_PUSH_CODE == POST_INC)
2638 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2639 else
2640 temp = stack_pointer_rtx;
2642 return copy_to_reg (temp);
2645 /* Generate code to push X onto the stack, assuming it has mode MODE and
2646 type TYPE.
2647 MODE is redundant except when X is a CONST_INT (since they don't
2648 carry mode info).
2649 SIZE is an rtx for the size of data to be copied (in bytes),
2650 needed only if X is BLKmode.
2652 ALIGN (in bytes) is the maximum alignment we can assume.
2654 If PARTIAL and REG are both nonzero, then copy that many of the first
2655 words of X into registers starting with REG, and push the rest of X.
2656 The amount of space pushed is decreased by PARTIAL words,
2657 rounded *down* to a multiple of PARM_BOUNDARY.
2658 REG must be a hard register in this case.
2659 If REG is zero but PARTIAL is not, take all other actions for an
2660 argument partially in registers, but do not actually load any
2661 registers.
2663 EXTRA is the amount in bytes of extra space to leave next to this arg.
2664 This is ignored if an argument block has already been allocated.
2666 On a machine that lacks real push insns, ARGS_ADDR is the address of
2667 the bottom of the argument block for this call. We use indexing off there
2668 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2669 argument block has not been preallocated.
2671 ARGS_SO_FAR is the size of args previously pushed for this call.
2673 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2674 for arguments passed in registers. If nonzero, it will be the number
2675 of bytes required. */
2677 void
2678 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2679 args_addr, args_so_far, reg_parm_stack_space)
2680 register rtx x;
2681 enum machine_mode mode;
2682 tree type;
2683 rtx size;
2684 int align;
2685 int partial;
2686 rtx reg;
2687 int extra;
2688 rtx args_addr;
2689 rtx args_so_far;
2690 int reg_parm_stack_space;
2692 rtx xinner;
2693 enum direction stack_direction
2694 #ifdef STACK_GROWS_DOWNWARD
2695 = downward;
2696 #else
2697 = upward;
2698 #endif
2700 /* Decide where to pad the argument: `downward' for below,
2701 `upward' for above, or `none' for don't pad it.
2702 Default is below for small data on big-endian machines; else above. */
2703 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2705 /* Invert direction if stack is post-update. */
2706 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2707 if (where_pad != none)
2708 where_pad = (where_pad == downward ? upward : downward);
2710 xinner = x = protect_from_queue (x, 0);
2712 if (mode == BLKmode)
2714 /* Copy a block into the stack, entirely or partially. */
2716 register rtx temp;
2717 int used = partial * UNITS_PER_WORD;
2718 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2719 int skip;
2721 if (size == 0)
2722 abort ();
2724 used -= offset;
2726 /* USED is now the # of bytes we need not copy to the stack
2727 because registers will take care of them. */
2729 if (partial != 0)
2730 xinner = change_address (xinner, BLKmode,
2731 plus_constant (XEXP (xinner, 0), used));
2733 /* If the partial register-part of the arg counts in its stack size,
2734 skip the part of stack space corresponding to the registers.
2735 Otherwise, start copying to the beginning of the stack space,
2736 by setting SKIP to 0. */
2737 skip = (reg_parm_stack_space == 0) ? 0 : used;
2739 #ifdef PUSH_ROUNDING
2740 /* Do it with several push insns if that doesn't take lots of insns
2741 and if there is no difficulty with push insns that skip bytes
2742 on the stack for alignment purposes. */
2743 if (args_addr == 0
2744 && GET_CODE (size) == CONST_INT
2745 && skip == 0
2746 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2747 < MOVE_RATIO)
2748 /* Here we avoid the case of a structure whose weak alignment
2749 forces many pushes of a small amount of data,
2750 and such small pushes do rounding that causes trouble. */
2751 && ((! SLOW_UNALIGNED_ACCESS)
2752 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2753 || PUSH_ROUNDING (align) == align)
2754 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2756 /* Push padding now if padding above and stack grows down,
2757 or if padding below and stack grows up.
2758 But if space already allocated, this has already been done. */
2759 if (extra && args_addr == 0
2760 && where_pad != none && where_pad != stack_direction)
2761 anti_adjust_stack (GEN_INT (extra));
2763 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2764 INTVAL (size) - used, align);
2766 if (flag_check_memory_usage && ! in_check_memory_usage)
2768 rtx temp;
2770 in_check_memory_usage = 1;
2771 temp = get_push_address (INTVAL(size) - used);
2772 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2773 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2774 temp, ptr_mode,
2775 XEXP (xinner, 0), ptr_mode,
2776 GEN_INT (INTVAL(size) - used),
2777 TYPE_MODE (sizetype));
2778 else
2779 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2780 temp, ptr_mode,
2781 GEN_INT (INTVAL(size) - used),
2782 TYPE_MODE (sizetype),
2783 GEN_INT (MEMORY_USE_RW),
2784 TYPE_MODE (integer_type_node));
2785 in_check_memory_usage = 0;
2788 else
2789 #endif /* PUSH_ROUNDING */
2791 /* Otherwise make space on the stack and copy the data
2792 to the address of that space. */
2794 /* Deduct words put into registers from the size we must copy. */
2795 if (partial != 0)
2797 if (GET_CODE (size) == CONST_INT)
2798 size = GEN_INT (INTVAL (size) - used);
2799 else
2800 size = expand_binop (GET_MODE (size), sub_optab, size,
2801 GEN_INT (used), NULL_RTX, 0,
2802 OPTAB_LIB_WIDEN);
2805 /* Get the address of the stack space.
2806 In this case, we do not deal with EXTRA separately.
2807 A single stack adjust will do. */
2808 if (! args_addr)
2810 temp = push_block (size, extra, where_pad == downward);
2811 extra = 0;
2813 else if (GET_CODE (args_so_far) == CONST_INT)
2814 temp = memory_address (BLKmode,
2815 plus_constant (args_addr,
2816 skip + INTVAL (args_so_far)));
2817 else
2818 temp = memory_address (BLKmode,
2819 plus_constant (gen_rtx_PLUS (Pmode,
2820 args_addr,
2821 args_so_far),
2822 skip));
2823 if (flag_check_memory_usage && ! in_check_memory_usage)
2825 rtx target;
2827 in_check_memory_usage = 1;
2828 target = copy_to_reg (temp);
2829 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2830 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2831 target, ptr_mode,
2832 XEXP (xinner, 0), ptr_mode,
2833 size, TYPE_MODE (sizetype));
2834 else
2835 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2836 target, ptr_mode,
2837 size, TYPE_MODE (sizetype),
2838 GEN_INT (MEMORY_USE_RW),
2839 TYPE_MODE (integer_type_node));
2840 in_check_memory_usage = 0;
2843 /* TEMP is the address of the block. Copy the data there. */
2844 if (GET_CODE (size) == CONST_INT
2845 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2846 < MOVE_RATIO))
2848 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
2849 INTVAL (size), align);
2850 goto ret;
2852 else
2854 rtx opalign = GEN_INT (align);
2855 enum machine_mode mode;
2856 rtx target = gen_rtx_MEM (BLKmode, temp);
2858 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2859 mode != VOIDmode;
2860 mode = GET_MODE_WIDER_MODE (mode))
2862 enum insn_code code = movstr_optab[(int) mode];
2864 if (code != CODE_FOR_nothing
2865 && ((GET_CODE (size) == CONST_INT
2866 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2867 <= (GET_MODE_MASK (mode) >> 1)))
2868 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2869 && (insn_operand_predicate[(int) code][0] == 0
2870 || ((*insn_operand_predicate[(int) code][0])
2871 (target, BLKmode)))
2872 && (insn_operand_predicate[(int) code][1] == 0
2873 || ((*insn_operand_predicate[(int) code][1])
2874 (xinner, BLKmode)))
2875 && (insn_operand_predicate[(int) code][3] == 0
2876 || ((*insn_operand_predicate[(int) code][3])
2877 (opalign, VOIDmode))))
2879 rtx op2 = convert_to_mode (mode, size, 1);
2880 rtx last = get_last_insn ();
2881 rtx pat;
2883 if (insn_operand_predicate[(int) code][2] != 0
2884 && ! ((*insn_operand_predicate[(int) code][2])
2885 (op2, mode)))
2886 op2 = copy_to_mode_reg (mode, op2);
2888 pat = GEN_FCN ((int) code) (target, xinner,
2889 op2, opalign);
2890 if (pat)
2892 emit_insn (pat);
2893 goto ret;
2895 else
2896 delete_insns_since (last);
2901 #ifndef ACCUMULATE_OUTGOING_ARGS
2902 /* If the source is referenced relative to the stack pointer,
2903 copy it to another register to stabilize it. We do not need
2904 to do this if we know that we won't be changing sp. */
2906 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2907 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2908 temp = copy_to_reg (temp);
2909 #endif
2911 /* Make inhibit_defer_pop nonzero around the library call
2912 to force it to pop the bcopy-arguments right away. */
2913 NO_DEFER_POP;
2914 #ifdef TARGET_MEM_FUNCTIONS
2915 emit_library_call (memcpy_libfunc, 0,
2916 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2917 convert_to_mode (TYPE_MODE (sizetype),
2918 size, TREE_UNSIGNED (sizetype)),
2919 TYPE_MODE (sizetype));
2920 #else
2921 emit_library_call (bcopy_libfunc, 0,
2922 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2923 convert_to_mode (TYPE_MODE (integer_type_node),
2924 size,
2925 TREE_UNSIGNED (integer_type_node)),
2926 TYPE_MODE (integer_type_node));
2927 #endif
2928 OK_DEFER_POP;
2931 else if (partial > 0)
2933 /* Scalar partly in registers. */
2935 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2936 int i;
2937 int not_stack;
2938 /* # words of start of argument
2939 that we must make space for but need not store. */
2940 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2941 int args_offset = INTVAL (args_so_far);
2942 int skip;
2944 /* Push padding now if padding above and stack grows down,
2945 or if padding below and stack grows up.
2946 But if space already allocated, this has already been done. */
2947 if (extra && args_addr == 0
2948 && where_pad != none && where_pad != stack_direction)
2949 anti_adjust_stack (GEN_INT (extra));
2951 /* If we make space by pushing it, we might as well push
2952 the real data. Otherwise, we can leave OFFSET nonzero
2953 and leave the space uninitialized. */
2954 if (args_addr == 0)
2955 offset = 0;
2957 /* Now NOT_STACK gets the number of words that we don't need to
2958 allocate on the stack. */
2959 not_stack = partial - offset;
2961 /* If the partial register-part of the arg counts in its stack size,
2962 skip the part of stack space corresponding to the registers.
2963 Otherwise, start copying to the beginning of the stack space,
2964 by setting SKIP to 0. */
2965 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
2967 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2968 x = validize_mem (force_const_mem (mode, x));
2970 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2971 SUBREGs of such registers are not allowed. */
2972 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2973 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2974 x = copy_to_reg (x);
2976 /* Loop over all the words allocated on the stack for this arg. */
2977 /* We can do it by words, because any scalar bigger than a word
2978 has a size that is a multiple of a word. */
2979 #ifndef PUSH_ARGS_REVERSED
2980 for (i = not_stack; i < size; i++)
2981 #else
2982 for (i = size - 1; i >= not_stack; i--)
2983 #endif
2984 if (i >= not_stack + offset)
2985 emit_push_insn (operand_subword_force (x, i, mode),
2986 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2987 0, args_addr,
2988 GEN_INT (args_offset + ((i - not_stack + skip)
2989 * UNITS_PER_WORD)),
2990 reg_parm_stack_space);
2992 else
2994 rtx addr;
2995 rtx target = NULL_RTX;
2997 /* Push padding now if padding above and stack grows down,
2998 or if padding below and stack grows up.
2999 But if space already allocated, this has already been done. */
3000 if (extra && args_addr == 0
3001 && where_pad != none && where_pad != stack_direction)
3002 anti_adjust_stack (GEN_INT (extra));
3004 #ifdef PUSH_ROUNDING
3005 if (args_addr == 0)
3006 addr = gen_push_operand ();
3007 else
3008 #endif
3010 if (GET_CODE (args_so_far) == CONST_INT)
3011 addr
3012 = memory_address (mode,
3013 plus_constant (args_addr,
3014 INTVAL (args_so_far)));
3015 else
3016 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3017 args_so_far));
3018 target = addr;
3021 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3023 if (flag_check_memory_usage && ! in_check_memory_usage)
3025 in_check_memory_usage = 1;
3026 if (target == 0)
3027 target = get_push_address (GET_MODE_SIZE (mode));
3029 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3030 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3031 target, ptr_mode,
3032 XEXP (x, 0), ptr_mode,
3033 GEN_INT (GET_MODE_SIZE (mode)),
3034 TYPE_MODE (sizetype));
3035 else
3036 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3037 target, ptr_mode,
3038 GEN_INT (GET_MODE_SIZE (mode)),
3039 TYPE_MODE (sizetype),
3040 GEN_INT (MEMORY_USE_RW),
3041 TYPE_MODE (integer_type_node));
3042 in_check_memory_usage = 0;
3046 ret:
3047 /* If part should go in registers, copy that part
3048 into the appropriate registers. Do this now, at the end,
3049 since mem-to-mem copies above may do function calls. */
3050 if (partial > 0 && reg != 0)
3052 /* Handle calls that pass values in multiple non-contiguous locations.
3053 The Irix 6 ABI has examples of this. */
3054 if (GET_CODE (reg) == PARALLEL)
3055 emit_group_load (reg, x, -1, align); /* ??? size? */
3056 else
3057 move_block_to_reg (REGNO (reg), x, partial, mode);
3060 if (extra && args_addr == 0 && where_pad == stack_direction)
3061 anti_adjust_stack (GEN_INT (extra));
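/* Illustration only (added sketch): pushing a single word-sized constant
   argument on a target with real push insns (ARGS_ADDR == 0).  Every value
   here is hypothetical: no partial-register handling, no padding, and no
   stack space reserved for register parms.  */
#if 0
emit_push_insn (GEN_INT (7), SImode, integer_type_node, NULL_RTX,
		GET_MODE_SIZE (SImode), 0, NULL_RTX, 0,
		NULL_RTX, const0_rtx, 0);
#endif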
3064 /* Expand an assignment that stores the value of FROM into TO.
3065 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3066 (This may contain a QUEUED rtx;
3067 if the value is constant, this rtx is a constant.)
3068 Otherwise, the returned value is NULL_RTX.
3070 SUGGEST_REG is no longer actually used.
3071 It used to mean, copy the value through a register
3072 and return that register, if that is possible.
3073 We now use WANT_VALUE to decide whether to do this. */
3075 rtx
3076 expand_assignment (to, from, want_value, suggest_reg)
3077 tree to, from;
3078 int want_value;
3079 int suggest_reg;
3081 register rtx to_rtx = 0;
3082 rtx result;
3084 /* Don't crash if the lhs of the assignment was erroneous. */
3086 if (TREE_CODE (to) == ERROR_MARK)
3088 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3089 return want_value ? result : NULL_RTX;
3092 /* Assignment of a structure component needs special treatment
3093 if the structure component's rtx is not simply a MEM.
3094 Assignment of an array element at a constant index, and assignment of
3095 an array element in an unaligned packed structure field, have the same
3096 problem. */
3098 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3099 || TREE_CODE (to) == ARRAY_REF)
3101 enum machine_mode mode1;
3102 int bitsize;
3103 int bitpos;
3104 tree offset;
3105 int unsignedp;
3106 int volatilep = 0;
3107 tree tem;
3108 int alignment;
3110 push_temp_slots ();
3111 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3112 &unsignedp, &volatilep, &alignment);
3114 /* If we are going to use store_bit_field and extract_bit_field,
3115 make sure to_rtx will be safe for multiple use. */
3117 if (mode1 == VOIDmode && want_value)
3118 tem = stabilize_reference (tem);
3120 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3121 if (offset != 0)
3123 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3125 if (GET_CODE (to_rtx) != MEM)
3126 abort ();
3128 if (GET_MODE (offset_rtx) != ptr_mode)
3130 #ifdef POINTERS_EXTEND_UNSIGNED
3131 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3132 #else
3133 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3134 #endif
3137 if (GET_CODE (to_rtx) == MEM
3138 && GET_MODE (to_rtx) == BLKmode
3139 && bitsize
3140 && (bitpos % bitsize) == 0
3141 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3142 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3144 rtx temp = change_address (to_rtx, mode1,
3145 plus_constant (XEXP (to_rtx, 0),
3146 (bitpos /
3147 BITS_PER_UNIT)));
3148 if (GET_CODE (XEXP (temp, 0)) == REG)
3149 to_rtx = temp;
3150 else
3151 to_rtx = change_address (to_rtx, mode1,
3152 force_reg (GET_MODE (XEXP (temp, 0)),
3153 XEXP (temp, 0)));
3154 bitpos = 0;
3157 to_rtx = change_address (to_rtx, VOIDmode,
3158 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3159 force_reg (ptr_mode, offset_rtx)));
3161 if (volatilep)
3163 if (GET_CODE (to_rtx) == MEM)
3165 /* When the offset is zero, to_rtx is the address of the
3166 structure we are storing into, and hence may be shared.
3167 We must make a new MEM before setting the volatile bit. */
3168 if (offset == 0)
3169 to_rtx = copy_rtx (to_rtx);
3171 MEM_VOLATILE_P (to_rtx) = 1;
3173 #if 0 /* This was turned off because, when a field is volatile
3174 in an object which is not volatile, the object may be in a register,
3175 and then we would abort over here. */
3176 else
3177 abort ();
3178 #endif
3181 if (TREE_CODE (to) == COMPONENT_REF
3182 && TREE_READONLY (TREE_OPERAND (to, 1)))
3184 if (offset == 0)
3185 to_rtx = copy_rtx (to_rtx);
3187 RTX_UNCHANGING_P (to_rtx) = 1;
3190 /* Check the access. */
3191 if (flag_check_memory_usage && GET_CODE (to_rtx) == MEM)
3193 rtx to_addr;
3194 int size;
3195 int best_mode_size;
3196 enum machine_mode best_mode;
3198 best_mode = get_best_mode (bitsize, bitpos,
3199 TYPE_ALIGN (TREE_TYPE (tem)),
3200 mode1, volatilep);
3201 if (best_mode == VOIDmode)
3202 best_mode = QImode;
3204 best_mode_size = GET_MODE_BITSIZE (best_mode);
3205 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3206 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3207 size *= GET_MODE_SIZE (best_mode);
3209 /* Check the access right of the pointer. */
3210 if (size)
3211 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3212 to_addr, ptr_mode,
3213 GEN_INT (size), TYPE_MODE (sizetype),
3214 GEN_INT (MEMORY_USE_WO),
3215 TYPE_MODE (integer_type_node));
3218 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3219 (want_value
3220 /* Spurious cast makes HPUX compiler happy. */
3221 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3222 : VOIDmode),
3223 unsignedp,
3224 /* Required alignment of containing datum. */
3225 alignment,
3226 int_size_in_bytes (TREE_TYPE (tem)));
3227 preserve_temp_slots (result);
3228 free_temp_slots ();
3229 pop_temp_slots ();
3231 /* If the value is meaningful, convert RESULT to the proper mode.
3232 Otherwise, return nothing. */
3233 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3234 TYPE_MODE (TREE_TYPE (from)),
3235 result,
3236 TREE_UNSIGNED (TREE_TYPE (to)))
3237 : NULL_RTX);
3240 /* If the rhs is a function call and its value is not an aggregate,
3241 call the function before we start to compute the lhs.
3242 This is needed for correct code for cases such as
3243 val = setjmp (buf) on machines where reference to val
3244 requires loading up part of an address in a separate insn.
3246 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3247 a promoted variable where the zero- or sign- extension needs to be done.
3248 Handling this in the normal way is safe because no computation is done
3249 before the call. */
3250 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3251 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3252 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3254 rtx value;
3256 push_temp_slots ();
3257 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3258 if (to_rtx == 0)
3259 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3261 /* Handle calls that return values in multiple non-contiguous locations.
3262 The Irix 6 ABI has examples of this. */
3263 if (GET_CODE (to_rtx) == PARALLEL)
3264 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3265 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3266 else if (GET_MODE (to_rtx) == BLKmode)
3267 emit_block_move (to_rtx, value, expr_size (from),
3268 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3269 else
3270 emit_move_insn (to_rtx, value);
3271 preserve_temp_slots (to_rtx);
3272 free_temp_slots ();
3273 pop_temp_slots ();
3274 return want_value ? to_rtx : NULL_RTX;
3277 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3278 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3280 if (to_rtx == 0)
3282 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3283 if (GET_CODE (to_rtx) == MEM)
3284 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3287 /* Don't move directly into a return register. */
3288 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3290 rtx temp;
3292 push_temp_slots ();
3293 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3294 emit_move_insn (to_rtx, temp);
3295 preserve_temp_slots (to_rtx);
3296 free_temp_slots ();
3297 pop_temp_slots ();
3298 return want_value ? to_rtx : NULL_RTX;
3301 /* In case we are returning the contents of an object which overlaps
3302 the place the value is being stored, use a safe function when copying
3303 a value through a pointer into a structure value return block. */
3304 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3305 && current_function_returns_struct
3306 && !current_function_returns_pcc_struct)
3308 rtx from_rtx, size;
3310 push_temp_slots ();
3311 size = expr_size (from);
3312 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3313 EXPAND_MEMORY_USE_DONT);
3315 /* Copy the rights of the bitmap. */
3316 if (flag_check_memory_usage)
3317 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3318 XEXP (to_rtx, 0), ptr_mode,
3319 XEXP (from_rtx, 0), ptr_mode,
3320 convert_to_mode (TYPE_MODE (sizetype),
3321 size, TREE_UNSIGNED (sizetype)),
3322 TYPE_MODE (sizetype));
3324 #ifdef TARGET_MEM_FUNCTIONS
3325 emit_library_call (memcpy_libfunc, 0,
3326 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3327 XEXP (from_rtx, 0), Pmode,
3328 convert_to_mode (TYPE_MODE (sizetype),
3329 size, TREE_UNSIGNED (sizetype)),
3330 TYPE_MODE (sizetype));
3331 #else
3332 emit_library_call (bcopy_libfunc, 0,
3333 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3334 XEXP (to_rtx, 0), Pmode,
3335 convert_to_mode (TYPE_MODE (integer_type_node),
3336 size, TREE_UNSIGNED (integer_type_node)),
3337 TYPE_MODE (integer_type_node));
3338 #endif
3340 preserve_temp_slots (to_rtx);
3341 free_temp_slots ();
3342 pop_temp_slots ();
3343 return want_value ? to_rtx : NULL_RTX;
3346 /* Compute FROM and store the value in the rtx we got. */
3348 push_temp_slots ();
3349 result = store_expr (from, to_rtx, want_value);
3350 preserve_temp_slots (result);
3351 free_temp_slots ();
3352 pop_temp_slots ();
3353 return want_value ? result : NULL_RTX;
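/* Illustration only (added sketch): the typical front-end use for a plain
   statement `lhs = rhs;' whose value is discarded.  `lhs' and `rhs' are
   hypothetical trees built elsewhere.  */
#if 0
expand_assignment (lhs, rhs, 0, 0);
#endif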
3356 /* Generate code for computing expression EXP,
3357 and storing the value into TARGET.
3358 TARGET may contain a QUEUED rtx.
3360 If WANT_VALUE is nonzero, return a copy of the value
3361 not in TARGET, so that we can be sure to use the proper
3362 value in a containing expression even if TARGET has something
3363 else stored in it. If possible, we copy the value through a pseudo
3364 and return that pseudo. Or, if the value is constant, we try to
3365 return the constant. In some cases, we return a pseudo
3366 copied *from* TARGET.
3368 If the mode is BLKmode then we may return TARGET itself.
3369 It turns out that in BLKmode it doesn't cause a problem,
3370 because C has no operators that could combine two different
3371 assignments into the same BLKmode object with different values
3372 with no sequence point. Will other languages need this to
3373 be more thorough?
3375 If WANT_VALUE is 0, we return NULL, to make sure
3376 to catch quickly any cases where the caller uses the value
3377 and fails to set WANT_VALUE. */
3379 rtx
3380 store_expr (exp, target, want_value)
3381 register tree exp;
3382 register rtx target;
3383 int want_value;
3385 register rtx temp;
3386 int dont_return_target = 0;
3388 if (TREE_CODE (exp) == COMPOUND_EXPR)
3390 /* Perform first part of compound expression, then assign from second
3391 part. */
3392 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3393 emit_queue ();
3394 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3396 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3398 /* For conditional expression, get safe form of the target. Then
3399 test the condition, doing the appropriate assignment on either
3400 side. This avoids the creation of unnecessary temporaries.
3401 For non-BLKmode, it is more efficient not to do this. */
3403 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3405 emit_queue ();
3406 target = protect_from_queue (target, 1);
3408 do_pending_stack_adjust ();
3409 NO_DEFER_POP;
3410 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3411 start_cleanup_deferral ();
3412 store_expr (TREE_OPERAND (exp, 1), target, 0);
3413 end_cleanup_deferral ();
3414 emit_queue ();
3415 emit_jump_insn (gen_jump (lab2));
3416 emit_barrier ();
3417 emit_label (lab1);
3418 start_cleanup_deferral ();
3419 store_expr (TREE_OPERAND (exp, 2), target, 0);
3420 end_cleanup_deferral ();
3421 emit_queue ();
3422 emit_label (lab2);
3423 OK_DEFER_POP;
3425 return want_value ? target : NULL_RTX;
3427 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3428 && GET_MODE (target) != BLKmode)
3429 /* If target is in memory and caller wants value in a register instead,
3430 arrange that. Pass TARGET as target for expand_expr so that,
3431 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3432 We know expand_expr will not use the target in that case.
3433 Don't do this if TARGET is volatile because we are supposed
3434 to write it and then read it. */
3436 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3437 GET_MODE (target), 0);
3438 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3439 temp = copy_to_reg (temp);
3440 dont_return_target = 1;
3442 else if (queued_subexp_p (target))
3443 /* If target contains a postincrement, let's not risk
3444 using it as the place to generate the rhs. */
3446 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3448 /* Expand EXP into a new pseudo. */
3449 temp = gen_reg_rtx (GET_MODE (target));
3450 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3452 else
3453 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3455 /* If target is volatile, ANSI requires accessing the value
3456 *from* the target, if it is accessed. So make that happen.
3457 In no case return the target itself. */
3458 if (! MEM_VOLATILE_P (target) && want_value)
3459 dont_return_target = 1;
3461 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3462 /* If this is a scalar in a register that is stored in a wider mode
3463 than the declared mode, compute the result into its declared mode
3464 and then convert to the wider mode. Our value is the computed
3465 expression. */
3467 /* If we don't want a value, we can do the conversion inside EXP,
3468 which will often result in some optimizations. Do the conversion
3469 in two steps: first change the signedness, if needed, then
3470 the extend. But don't do this if the type of EXP is a subtype
3471 of something else since then the conversion might involve
3472 more than just converting modes. */
3473 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3474 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3476 if (TREE_UNSIGNED (TREE_TYPE (exp))
3477 != SUBREG_PROMOTED_UNSIGNED_P (target))
3478 exp
3479 = convert
3480 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3481 TREE_TYPE (exp)),
3482 exp);
3484 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3485 SUBREG_PROMOTED_UNSIGNED_P (target)),
3486 exp);
3489 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3491 /* If TEMP is a volatile MEM and we want a result value, make
3492 the access now so it gets done only once. Likewise if
3493 it contains TARGET. */
3494 if (GET_CODE (temp) == MEM && want_value
3495 && (MEM_VOLATILE_P (temp)
3496 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3497 temp = copy_to_reg (temp);
3499 /* If TEMP is a VOIDmode constant, use convert_modes to make
3500 sure that we properly convert it. */
3501 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3502 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3503 TYPE_MODE (TREE_TYPE (exp)), temp,
3504 SUBREG_PROMOTED_UNSIGNED_P (target));
3506 convert_move (SUBREG_REG (target), temp,
3507 SUBREG_PROMOTED_UNSIGNED_P (target));
3508 return want_value ? temp : NULL_RTX;
3510 else
3512 temp = expand_expr (exp, target, GET_MODE (target), 0);
3513 /* Return TARGET if it's a specified hardware register.
3514 If TARGET is a volatile mem ref, either return TARGET
3515 or return a reg copied *from* TARGET; ANSI requires this.
3517 Otherwise, if TEMP is not TARGET, return TEMP
3518 if it is constant (for efficiency),
3519 or if we really want the correct value. */
3520 if (!(target && GET_CODE (target) == REG
3521 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3522 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3523 && ! rtx_equal_p (temp, target)
3524 && (CONSTANT_P (temp) || want_value))
3525 dont_return_target = 1;
3528 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3529 the same as that of TARGET, adjust the constant. This is needed, for
3530 example, in case it is a CONST_DOUBLE and we want only a word-sized
3531 value. */
3532 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3533 && TREE_CODE (exp) != ERROR_MARK
3534 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3535 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3536 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3538 if (flag_check_memory_usage
3539 && GET_CODE (target) == MEM
3540 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3542 if (GET_CODE (temp) == MEM)
3543 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3544 XEXP (target, 0), ptr_mode,
3545 XEXP (temp, 0), ptr_mode,
3546 expr_size (exp), TYPE_MODE (sizetype));
3547 else
3548 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3549 XEXP (target, 0), ptr_mode,
3550 expr_size (exp), TYPE_MODE (sizetype),
3551 GEN_INT (MEMORY_USE_WO),
3552 TYPE_MODE (integer_type_node));
3555 /* If the value was not generated in the target, store it there.
3556 Convert the value to TARGET's type first if necessary. */
3558 if ((! rtx_equal_p (temp, target)
3559 || side_effects_p (temp)
3560 || side_effects_p (target))
3561 && TREE_CODE (exp) != ERROR_MARK)
3563 target = protect_from_queue (target, 1);
3564 if (GET_MODE (temp) != GET_MODE (target)
3565 && GET_MODE (temp) != VOIDmode)
3567 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3568 if (dont_return_target)
3570 /* In this case, we will return TEMP,
3571 so make sure it has the proper mode.
3572 But don't forget to store the value into TARGET. */
3573 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3574 emit_move_insn (target, temp);
3576 else
3577 convert_move (target, temp, unsignedp);
3580 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3582 /* Handle copying a string constant into an array.
3583 The string constant may be shorter than the array.
3584 So copy just the string's actual length, and clear the rest. */
3585 rtx size;
3586 rtx addr;
3588 /* Get the size of the data type of the string,
3589 which is actually the size of the target. */
3590 size = expr_size (exp);
3591 if (GET_CODE (size) == CONST_INT
3592 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3593 emit_block_move (target, temp, size,
3594 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3595 else
3597 /* Compute the size of the data to copy from the string. */
3598 tree copy_size
3599 = size_binop (MIN_EXPR,
3600 make_tree (sizetype, size),
3601 convert (sizetype,
3602 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3603 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3604 VOIDmode, 0);
3605 rtx label = 0;
3607 /* Copy that much. */
3608 emit_block_move (target, temp, copy_size_rtx,
3609 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3611 /* Figure out how much is left in TARGET that we have to clear.
3612 Do all calculations in ptr_mode. */
3614 addr = XEXP (target, 0);
3615 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3617 if (GET_CODE (copy_size_rtx) == CONST_INT)
3619 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3620 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3622 else
3624 addr = force_reg (ptr_mode, addr);
3625 addr = expand_binop (ptr_mode, add_optab, addr,
3626 copy_size_rtx, NULL_RTX, 0,
3627 OPTAB_LIB_WIDEN);
3629 size = expand_binop (ptr_mode, sub_optab, size,
3630 copy_size_rtx, NULL_RTX, 0,
3631 OPTAB_LIB_WIDEN);
3633 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3634 GET_MODE (size), 0, 0);
3635 label = gen_label_rtx ();
3636 emit_jump_insn (gen_blt (label));
3639 if (size != const0_rtx)
3641 /* Be sure we can write on ADDR. */
3642 if (flag_check_memory_usage)
3643 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3644 addr, ptr_mode,
3645 size, TYPE_MODE (sizetype),
3646 GEN_INT (MEMORY_USE_WO),
3647 TYPE_MODE (integer_type_node));
3648 #ifdef TARGET_MEM_FUNCTIONS
3649 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3650 addr, ptr_mode,
3651 const0_rtx, TYPE_MODE (integer_type_node),
3652 convert_to_mode (TYPE_MODE (sizetype),
3653 size,
3654 TREE_UNSIGNED (sizetype)),
3655 TYPE_MODE (sizetype));
3656 #else
3657 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3658 addr, ptr_mode,
3659 convert_to_mode (TYPE_MODE (integer_type_node),
3660 size,
3661 TREE_UNSIGNED (integer_type_node)),
3662 TYPE_MODE (integer_type_node));
3663 #endif
3666 if (label)
3667 emit_label (label);
3670 /* Handle calls that return values in multiple non-contiguous locations.
3671 The Irix 6 ABI has examples of this. */
3672 else if (GET_CODE (target) == PARALLEL)
3673 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3674 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3675 else if (GET_MODE (temp) == BLKmode)
3676 emit_block_move (target, temp, expr_size (exp),
3677 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3678 else
3679 emit_move_insn (target, temp);
3682 /* If we don't want a value, return NULL_RTX. */
3683 if (! want_value)
3684 return NULL_RTX;
3686 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3687 ??? The latter test doesn't seem to make sense. */
3688 else if (dont_return_target && GET_CODE (temp) != MEM)
3689 return temp;
3691 /* Return TARGET itself if it is a hard register. */
3692 else if (want_value && GET_MODE (target) != BLKmode
3693 && ! (GET_CODE (target) == REG
3694 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3695 return copy_to_reg (target);
3697 else
3698 return target;
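/* Illustration only (added sketch): computing an expression directly into
   a known target and discarding the value.  `exp' is a hypothetical tree;
   the SImode pseudo stands in for any non-volatile target.  */
#if 0
{
  rtx target = gen_reg_rtx (SImode);

  store_expr (exp, target, 0);
}
#endif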
3701 /* Return 1 if EXP just contains zeros. */
3703 static int
3704 is_zeros_p (exp)
3705 tree exp;
3707 tree elt;
3709 switch (TREE_CODE (exp))
3711 case CONVERT_EXPR:
3712 case NOP_EXPR:
3713 case NON_LVALUE_EXPR:
3714 return is_zeros_p (TREE_OPERAND (exp, 0));
3716 case INTEGER_CST:
3717 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3719 case COMPLEX_CST:
3720 return
3721 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3723 case REAL_CST:
3724 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
3726 case CONSTRUCTOR:
3727 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3728 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3729 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3730 if (! is_zeros_p (TREE_VALUE (elt)))
3731 return 0;
3733 return 1;
3735 default:
3736 return 0;
3740 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
3742 static int
3743 mostly_zeros_p (exp)
3744 tree exp;
3746 if (TREE_CODE (exp) == CONSTRUCTOR)
3748 int elts = 0, zeros = 0;
3749 tree elt = CONSTRUCTOR_ELTS (exp);
3750 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3752 /* If there are no ranges of true bits, it is all zero. */
3753 return elt == NULL_TREE;
3755 for (; elt; elt = TREE_CHAIN (elt))
3757 /* We do not handle the case where the index is a RANGE_EXPR,
3758 so the statistic will be somewhat inaccurate.
3759 We do make a more accurate count in store_constructor itself,
3760 and since this function is used only for nested array elements,
3761 this should be close enough. */
3762 if (mostly_zeros_p (TREE_VALUE (elt)))
3763 zeros++;
3764 elts++;
3767 return 4 * zeros >= 3 * elts;
3770 return is_zeros_p (exp);
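/* Worked example for the 3/4 test above (added note): with 3 zero elements
   out of 4, 4*3 >= 3*4 holds and the constructor counts as mostly zero;
   with 2 out of 4, 4*2 < 3*4 and it does not.  */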
3773 /* Helper function for store_constructor.
3774 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3775 TYPE is the type of the CONSTRUCTOR, not the element type.
3776 CLEARED is as for store_constructor.
3778 This provides a recursive shortcut back to store_constructor when it isn't
3779 necessary to go through store_field. This is so that we can pass through
3780 the cleared field to let store_constructor know that we may not have to
3781 clear a substructure if the outer structure has already been cleared. */
3783 static void
3784 store_constructor_field (target, bitsize, bitpos,
3785 mode, exp, type, cleared)
3786 rtx target;
3787 int bitsize, bitpos;
3788 enum machine_mode mode;
3789 tree exp, type;
3790 int cleared;
3792 if (TREE_CODE (exp) == CONSTRUCTOR
3793 && bitpos % BITS_PER_UNIT == 0
3794 /* If we have a non-zero bitpos for a register target, then we just
3795 let store_field do the bitfield handling. This is unlikely to
3796 generate unnecessary clear instructions anyway. */
3797 && (bitpos == 0 || GET_CODE (target) == MEM))
3799 if (bitpos != 0)
3800 target = change_address (target, VOIDmode,
3801 plus_constant (XEXP (target, 0),
3802 bitpos / BITS_PER_UNIT));
3803 store_constructor (exp, target, cleared);
3805 else
3806 store_field (target, bitsize, bitpos, mode, exp,
3807 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3808 int_size_in_bytes (type));
3811 /* Store the value of constructor EXP into the rtx TARGET.
3812 TARGET is either a REG or a MEM.
3813 CLEARED is true if TARGET is known to have been zero'd. */
3815 static void
3816 store_constructor (exp, target, cleared)
3817 tree exp;
3818 rtx target;
3819 int cleared;
3821 tree type = TREE_TYPE (exp);
3823 /* We know our target cannot conflict, since safe_from_p has been called. */
3824 #if 0
3825 /* Don't try copying piece by piece into a hard register
3826 since that is vulnerable to being clobbered by EXP.
3827 Instead, construct in a pseudo register and then copy it all. */
3828 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3830 rtx temp = gen_reg_rtx (GET_MODE (target));
3831 store_constructor (exp, temp, 0);
3832 emit_move_insn (target, temp);
3833 return;
3835 #endif
3837 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3838 || TREE_CODE (type) == QUAL_UNION_TYPE)
3840 register tree elt;
3842 /* Inform later passes that the whole union value is dead. */
3843 if (TREE_CODE (type) == UNION_TYPE
3844 || TREE_CODE (type) == QUAL_UNION_TYPE)
3845 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3847 /* If we are building a static constructor into a register,
3848 set the initial value as zero so we can fold the value into
3849 a constant. But if more than one register is involved,
3850 this probably loses. */
3851 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3852 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3854 if (! cleared)
3855 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
3857 cleared = 1;
3860 /* If the constructor has fewer fields than the structure
3861 or if we are initializing the structure to mostly zeros,
3862 clear the whole structure first. */
3863 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3864 != list_length (TYPE_FIELDS (type)))
3865 || mostly_zeros_p (exp))
3867 if (! cleared)
3868 clear_storage (target, expr_size (exp),
3869 TYPE_ALIGN (type) / BITS_PER_UNIT);
3871 cleared = 1;
3873 else
3874 /* Inform later passes that the old value is dead. */
3875 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3877 /* Store each element of the constructor into
3878 the corresponding field of TARGET. */
3880 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3882 register tree field = TREE_PURPOSE (elt);
3883 register enum machine_mode mode;
3884 int bitsize;
3885 int bitpos = 0;
3886 int unsignedp;
3887 tree pos, constant = 0, offset = 0;
3888 rtx to_rtx = target;
3890 /* Just ignore missing fields.
3891 We cleared the whole structure, above,
3892 if any fields are missing. */
3893 if (field == 0)
3894 continue;
3896 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3897 continue;
3899 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3900 unsignedp = TREE_UNSIGNED (field);
3901 mode = DECL_MODE (field);
3902 if (DECL_BIT_FIELD (field))
3903 mode = VOIDmode;
3905 pos = DECL_FIELD_BITPOS (field);
3906 if (TREE_CODE (pos) == INTEGER_CST)
3907 constant = pos;
3908 else if (TREE_CODE (pos) == PLUS_EXPR
3909 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3910 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3911 else
3912 offset = pos;
3914 if (constant)
3915 bitpos = TREE_INT_CST_LOW (constant);
3917 if (offset)
3919 rtx offset_rtx;
3921 if (contains_placeholder_p (offset))
3922 offset = build (WITH_RECORD_EXPR, sizetype,
3923 offset, make_tree (TREE_TYPE (exp), target));
3925 offset = size_binop (FLOOR_DIV_EXPR, offset,
3926 size_int (BITS_PER_UNIT));
3928 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3929 if (GET_CODE (to_rtx) != MEM)
3930 abort ();
3932 if (GET_MODE (offset_rtx) != ptr_mode)
3934 #ifdef POINTERS_EXTEND_UNSIGNED
3935 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3936 #else
3937 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3938 #endif
3941 to_rtx
3942 = change_address (to_rtx, VOIDmode,
3943 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3944 force_reg (ptr_mode, offset_rtx)));
3946 if (TREE_READONLY (field))
3948 if (GET_CODE (to_rtx) == MEM)
3949 to_rtx = copy_rtx (to_rtx);
3951 RTX_UNCHANGING_P (to_rtx) = 1;
3954 store_constructor_field (to_rtx, bitsize, bitpos,
3955 mode, TREE_VALUE (elt), type, cleared);
3958 else if (TREE_CODE (type) == ARRAY_TYPE)
3960 register tree elt;
3961 register int i;
3962 int need_to_clear;
3963 tree domain = TYPE_DOMAIN (type);
3964 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3965 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3966 tree elttype = TREE_TYPE (type);
3968 /* If the constructor has fewer elements than the array,
3969 clear the whole array first. Similarly if this is
3970 a static constructor of a non-BLKmode object. */
3971 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3972 need_to_clear = 1;
3973 else
3975 HOST_WIDE_INT count = 0, zero_count = 0;
3976 need_to_clear = 0;
3977 /* This loop is a more accurate version of the loop in
3978 mostly_zeros_p (it handles RANGE_EXPR in an index).
3979 It is also needed to check for missing elements. */
3980 for (elt = CONSTRUCTOR_ELTS (exp);
3981 elt != NULL_TREE;
3982 elt = TREE_CHAIN (elt))
3984 tree index = TREE_PURPOSE (elt);
3985 HOST_WIDE_INT this_node_count;
3986 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3988 tree lo_index = TREE_OPERAND (index, 0);
3989 tree hi_index = TREE_OPERAND (index, 1);
3990 if (TREE_CODE (lo_index) != INTEGER_CST
3991 || TREE_CODE (hi_index) != INTEGER_CST)
3993 need_to_clear = 1;
3994 break;
3996 this_node_count = TREE_INT_CST_LOW (hi_index)
3997 - TREE_INT_CST_LOW (lo_index) + 1;
3999 else
4000 this_node_count = 1;
4001 count += this_node_count;
4002 if (mostly_zeros_p (TREE_VALUE (elt)))
4003 zero_count += this_node_count;
4005 /* Clear the entire array first if there are any missing elements,
4006 or if the incidence of zero elements is >= 75%. */
4007 if (count < maxelt - minelt + 1
4008 || 4 * zero_count >= 3 * count)
4009 need_to_clear = 1;
4011 if (need_to_clear)
4013 if (! cleared)
4014 clear_storage (target, expr_size (exp),
4015 TYPE_ALIGN (type) / BITS_PER_UNIT);
4016 cleared = 1;
4018 else
4019 /* Inform later passes that the old value is dead. */
4020 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4022 /* Store each element of the constructor into
4023 the corresponding element of TARGET, determined
4024 by counting the elements. */
4025 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4026 elt;
4027 elt = TREE_CHAIN (elt), i++)
4029 register enum machine_mode mode;
4030 int bitsize;
4031 int bitpos;
4032 int unsignedp;
4033 tree value = TREE_VALUE (elt);
4034 tree index = TREE_PURPOSE (elt);
4035 rtx xtarget = target;
4037 if (cleared && is_zeros_p (value))
4038 continue;
4040 mode = TYPE_MODE (elttype);
4041 bitsize = GET_MODE_BITSIZE (mode);
4042 unsignedp = TREE_UNSIGNED (elttype);
4044 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4046 tree lo_index = TREE_OPERAND (index, 0);
4047 tree hi_index = TREE_OPERAND (index, 1);
4048 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4049 struct nesting *loop;
4050 HOST_WIDE_INT lo, hi, count;
4051 tree position;
4053 /* If the range is constant and "small", unroll the loop. */
4054 if (TREE_CODE (lo_index) == INTEGER_CST
4055 && TREE_CODE (hi_index) == INTEGER_CST
4056 && (lo = TREE_INT_CST_LOW (lo_index),
4057 hi = TREE_INT_CST_LOW (hi_index),
4058 count = hi - lo + 1,
4059 (GET_CODE (target) != MEM
4060 || count <= 2
4061 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4062 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4063 <= 40 * 8))))
4065 lo -= minelt; hi -= minelt;
4066 for (; lo <= hi; lo++)
4068 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4069 store_constructor_field (target, bitsize, bitpos,
4070 mode, value, type, cleared);
4073 else
4075 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4076 loop_top = gen_label_rtx ();
4077 loop_end = gen_label_rtx ();
4079 unsignedp = TREE_UNSIGNED (domain);
4081 index = build_decl (VAR_DECL, NULL_TREE, domain);
4083 DECL_RTL (index) = index_r
4084 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4085 &unsignedp, 0));
4087 if (TREE_CODE (value) == SAVE_EXPR
4088 && SAVE_EXPR_RTL (value) == 0)
4090 /* Make sure value gets expanded once before the
4091 loop. */
4092 expand_expr (value, const0_rtx, VOIDmode, 0);
4093 emit_queue ();
4095 store_expr (lo_index, index_r, 0);
4096 loop = expand_start_loop (0);
4098 /* Assign value to element index. */
4099 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4100 size_int (BITS_PER_UNIT));
4101 position = size_binop (MULT_EXPR,
4102 size_binop (MINUS_EXPR, index,
4103 TYPE_MIN_VALUE (domain)),
4104 position);
4105 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4106 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4107 xtarget = change_address (target, mode, addr);
4108 if (TREE_CODE (value) == CONSTRUCTOR)
4109 store_constructor (value, xtarget, cleared);
4110 else
4111 store_expr (value, xtarget, 0);
4113 expand_exit_loop_if_false (loop,
4114 build (LT_EXPR, integer_type_node,
4115 index, hi_index));
4117 expand_increment (build (PREINCREMENT_EXPR,
4118 TREE_TYPE (index),
4119 index, integer_one_node), 0, 0);
4120 expand_end_loop ();
4121 emit_label (loop_end);
4123 /* Needed by stupid register allocation, to extend the
4124 lifetime of pseudo-regs used by target past the end
4125 of the loop. */
4126 emit_insn (gen_rtx_USE (GET_MODE (target), target));
4129 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
4130 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
4132 rtx pos_rtx, addr;
4133 tree position;
4135 if (index == 0)
4136 index = size_int (i);
4138 if (minelt)
4139 index = size_binop (MINUS_EXPR, index,
4140 TYPE_MIN_VALUE (domain));
4141 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4142 size_int (BITS_PER_UNIT));
4143 position = size_binop (MULT_EXPR, index, position);
4144 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4145 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4146 xtarget = change_address (target, mode, addr);
4147 store_expr (value, xtarget, 0);
4149 else
4151 if (index != 0)
4152 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4153 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4154 else
4155 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4156 store_constructor_field (target, bitsize, bitpos,
4157 mode, value, type, cleared);
4161 /* Handle SET_TYPE constructor assignments. */
4162 else if (TREE_CODE (type) == SET_TYPE)
4164 tree elt = CONSTRUCTOR_ELTS (exp);
4165 int nbytes = int_size_in_bytes (type), nbits;
4166 tree domain = TYPE_DOMAIN (type);
4167 tree domain_min, domain_max, bitlength;
4169 /* The default implementation strategy is to extract the constant
4170 parts of the constructor, use that to initialize the target,
4171 and then "or" in whatever non-constant ranges we need in addition.
4173 If a large set is all zero or all ones, it is
4174 probably better to set it using memset (if available) or bzero.
4175 Also, if a large set has just a single range, it may be
4176 better to first clear the whole set (using
4177 bzero/memset) and then set the bits we want. */
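/* Worked example (hypothetical set type): for a Pascal-style
   `set of 0..15' initialized to [1, 3..5], nbits is 16 and the
   constant part is one word with bits 1, 3, 4 and 5 set (value
   0x3A in little-endian bit numbering); the word-building loop
   below computes exactly that before any non-constant ranges are
   ORed in.  */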
4179 /* Check for all zeros. */
4180 if (elt == NULL_TREE)
4182 if (!cleared)
4183 clear_storage (target, expr_size (exp),
4184 TYPE_ALIGN (type) / BITS_PER_UNIT);
4185 return;
4188 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4189 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4190 bitlength = size_binop (PLUS_EXPR,
4191 size_binop (MINUS_EXPR, domain_max, domain_min),
4192 size_one_node);
4194 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4195 abort ();
4196 nbits = TREE_INT_CST_LOW (bitlength);
4198 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4199 are "complicated" (more than one range), initialize (the
4200 constant parts) by copying from a constant. */
4201 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4202 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4204 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4205 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4206 char *bit_buffer = (char *) alloca (nbits);
4207 HOST_WIDE_INT word = 0;
4208 int bit_pos = 0;
4209 int ibit = 0;
4210 int offset = 0; /* In bytes from beginning of set. */
4211 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4212 for (;;)
4214 if (bit_buffer[ibit])
4216 if (BYTES_BIG_ENDIAN)
4217 word |= (1 << (set_word_size - 1 - bit_pos));
4218 else
4219 word |= 1 << bit_pos;
4221 bit_pos++; ibit++;
4222 if (bit_pos >= set_word_size || ibit == nbits)
4224 if (word != 0 || ! cleared)
4226 rtx datum = GEN_INT (word);
4227 rtx to_rtx;
4228 /* The assumption here is that it is safe to use
4229 XEXP if the set is multi-word, but not if
4230 it's single-word. */
4231 if (GET_CODE (target) == MEM)
4233 to_rtx = plus_constant (XEXP (target, 0), offset);
4234 to_rtx = change_address (target, mode, to_rtx);
4236 else if (offset == 0)
4237 to_rtx = target;
4238 else
4239 abort ();
4240 emit_move_insn (to_rtx, datum);
4242 if (ibit == nbits)
4243 break;
4244 word = 0;
4245 bit_pos = 0;
4246 offset += set_word_size / BITS_PER_UNIT;
4250 else if (!cleared)
4252 /* Don't bother clearing storage if the set is all ones. */
4253 if (TREE_CHAIN (elt) != NULL_TREE
4254 || (TREE_PURPOSE (elt) == NULL_TREE
4255 ? nbits != 1
4256 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4257 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4258 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4259 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4260 != nbits))))
4261 clear_storage (target, expr_size (exp),
4262 TYPE_ALIGN (type) / BITS_PER_UNIT);
4265 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4268 /* Start of range of element, or NULL. */
4268 tree startbit = TREE_PURPOSE (elt);
4270 /* End of range of element, or the element value. */
4270 tree endbit = TREE_VALUE (elt);
4271 #ifdef TARGET_MEM_FUNCTIONS
4272 HOST_WIDE_INT startb, endb;
4273 #endif
4274 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4276 bitlength_rtx = expand_expr (bitlength,
4277 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4280 /* Handle a non-range tuple element like [ expr ]. */
4280 if (startbit == NULL_TREE)
4282 startbit = save_expr (endbit);
4283 endbit = startbit;
4285 startbit = convert (sizetype, startbit);
4286 endbit = convert (sizetype, endbit);
4287 if (! integer_zerop (domain_min))
4289 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4290 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4292 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4293 EXPAND_CONST_ADDRESS);
4294 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4295 EXPAND_CONST_ADDRESS);
4297 if (REG_P (target))
4299 targetx = assign_stack_temp (GET_MODE (target),
4300 GET_MODE_SIZE (GET_MODE (target)),
4301 0);
4302 emit_move_insn (targetx, target);
4304 else if (GET_CODE (target) == MEM)
4305 targetx = target;
4306 else
4307 abort ();
4309 #ifdef TARGET_MEM_FUNCTIONS
4310 /* Optimization: If startbit and endbit are constants divisible
4311 by BITS_PER_UNIT, call memset instead; e.g. a constant range
4312 [8..23] covers bytes 1 and 2 exactly, so those two bytes are set to -1. */
4313 if (TREE_CODE (startbit) == INTEGER_CST
4314 && TREE_CODE (endbit) == INTEGER_CST
4315 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4316 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4318 emit_library_call (memset_libfunc, 0,
4319 VOIDmode, 3,
4320 plus_constant (XEXP (targetx, 0),
4321 startb / BITS_PER_UNIT),
4322 Pmode,
4323 constm1_rtx, TYPE_MODE (integer_type_node),
4324 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4325 TYPE_MODE (sizetype));
4327 else
4328 #endif
4330 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4331 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4332 bitlength_rtx, TYPE_MODE (sizetype),
4333 startbit_rtx, TYPE_MODE (sizetype),
4334 endbit_rtx, TYPE_MODE (sizetype));
4336 if (REG_P (target))
4337 emit_move_insn (target, targetx);
4341 else
4342 abort ();
4345 /* Store the value of EXP (an expression tree)
4346 into a subfield of TARGET which has mode MODE and occupies
4347 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4348 If MODE is VOIDmode, it means that we are storing into a bit-field.
4350 If VALUE_MODE is VOIDmode, return nothing in particular.
4351 UNSIGNEDP is not used in this case.
4353 Otherwise, return an rtx for the value stored. This rtx
4354 has mode VALUE_MODE if that is convenient to do.
4355 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4357 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4358 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
4360 static rtx
4361 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4362 unsignedp, align, total_size)
4363 rtx target;
4364 int bitsize, bitpos;
4365 enum machine_mode mode;
4366 tree exp;
4367 enum machine_mode value_mode;
4368 int unsignedp;
4369 int align;
4370 int total_size;
4372 HOST_WIDE_INT width_mask = 0;
4374 if (TREE_CODE (exp) == ERROR_MARK)
4375 return const0_rtx;
4377 if (bitsize < HOST_BITS_PER_WIDE_INT)
4378 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4380 /* If we are storing into an unaligned field of an aligned union that is
4381 in a register, we may have the mode of TARGET being an integer mode but
4382 MODE == BLKmode. In that case, get an aligned object whose size and
4383 alignment are the same as TARGET and store TARGET into it (we can avoid
4384 the store if the field being stored is the entire width of TARGET). Then
4385 call ourselves recursively to store the field into a BLKmode version of
4386 that object. Finally, load from the object into TARGET. This is not
4387 very efficient in general, but should only be slightly more expensive
4388 than the otherwise-required unaligned accesses. Perhaps this can be
4389 cleaned up later. */
4391 if (mode == BLKmode
4392 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4394 rtx object = assign_stack_temp (GET_MODE (target),
4395 GET_MODE_SIZE (GET_MODE (target)), 0);
4396 rtx blk_object = copy_rtx (object);
4398 MEM_IN_STRUCT_P (object) = 1;
4399 MEM_IN_STRUCT_P (blk_object) = 1;
4400 PUT_MODE (blk_object, BLKmode);
4402 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4403 emit_move_insn (object, target);
4405 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4406 align, total_size);
4408 /* Even though we aren't returning target, we need to
4409 give it the updated value. */
4410 emit_move_insn (target, object);
4412 return blk_object;
4415 /* If the structure is in a register or if the component
4416 is a bit field, we cannot use addressing to access it.
4417 Use bit-field techniques or SUBREG to store in it. */
4419 if (mode == VOIDmode
4420 || (mode != BLKmode && ! direct_store[(int) mode])
4421 || GET_CODE (target) == REG
4422 || GET_CODE (target) == SUBREG
4423 /* If the field isn't aligned enough to store as an ordinary memref,
4424 store it as a bit field. */
4425 || (SLOW_UNALIGNED_ACCESS
4426 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4427 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4429 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4431 /* If BITSIZE is narrower than the size of the type of EXP
4432 we will be narrowing TEMP. Normally, what's wanted are the
4433 low-order bits. However, if EXP's type is a record and this is
4434 a big-endian machine, we want the upper BITSIZE bits. */
4435 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4436 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4437 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4438 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4439 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4440 - bitsize),
4441 temp, 1);
4443 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4444 MODE. */
4445 if (mode != VOIDmode && mode != BLKmode
4446 && mode != TYPE_MODE (TREE_TYPE (exp)))
4447 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4449 /* If the modes of TARGET and TEMP are both BLKmode, both
4450 must be in memory and BITPOS must be aligned on a byte
4451 boundary. If so, we simply do a block copy. */
4452 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4454 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4455 || bitpos % BITS_PER_UNIT != 0)
4456 abort ();
4458 target = change_address (target, VOIDmode,
4459 plus_constant (XEXP (target, 0),
4460 bitpos / BITS_PER_UNIT));
4462 emit_block_move (target, temp,
4463 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4464 / BITS_PER_UNIT),
4465 1);
4467 return value_mode == VOIDmode ? const0_rtx : target;
4470 /* Store the value in the bitfield. */
4471 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4472 if (value_mode != VOIDmode)
4474 /* The caller wants an rtx for the value. */
4475 /* If possible, avoid refetching from the bitfield itself. */
4476 if (width_mask != 0
4477 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4479 tree count;
4480 enum machine_mode tmode;
4482 if (unsignedp)
4483 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4484 tmode = GET_MODE (temp);
4485 if (tmode == VOIDmode)
4486 tmode = value_mode;
4487 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4488 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4489 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4491 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4492 NULL_RTX, value_mode, 0, align,
4493 total_size);
4495 return const0_rtx;
4497 else
4499 rtx addr = XEXP (target, 0);
4500 rtx to_rtx;
4502 /* If a value is wanted, it must be the lhs;
4503 so make the address stable for multiple use. */
4505 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4506 && ! CONSTANT_ADDRESS_P (addr)
4507 /* A frame-pointer reference is already stable. */
4508 && ! (GET_CODE (addr) == PLUS
4509 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4510 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4511 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4512 addr = copy_to_reg (addr);
4514 /* Now build a reference to just the desired component. */
4516 to_rtx = copy_rtx (change_address (target, mode,
4517 plus_constant (addr,
4518 (bitpos
4519 / BITS_PER_UNIT))));
4520 MEM_IN_STRUCT_P (to_rtx) = 1;
4522 return store_expr (exp, to_rtx, value_mode != VOIDmode);
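#if 0
/* Usage sketch (hypothetical values, not compiled): store EXP into the
   8-bit field starting 16 bits into a 4-byte-aligned, 8-byte structure
   in memory, with no value wanted back.  */
store_field (to_rtx, 8, 16, QImode, exp, VOIDmode, 0, 4, 8);
#endif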
4526 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4527 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4528 ARRAY_REFs and find the ultimate containing object, which we return.
4530 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4531 bit position, and *PUNSIGNEDP to the signedness of the field.
4532 If the position of the field is variable, we store a tree
4533 giving the variable offset (in units) in *POFFSET.
4534 This offset is in addition to the bit position.
4535 If the position is not variable, we store 0 in *POFFSET.
4536 We set *PALIGNMENT to the alignment in bytes of the address that will be
4537 computed. This is the alignment of the thing we return if *POFFSET
4538 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4540 If any of the extraction expressions is volatile,
4541 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4543 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4544 is a mode that can be used to access the field. In that case, *PBITSIZE
4545 is redundant.
4547 If the field describes a variable-sized object, *PMODE is set to
4548 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4549 this case, but the address of the object can be found. */
4551 tree
4552 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4553 punsignedp, pvolatilep, palignment)
4554 tree exp;
4555 int *pbitsize;
4556 int *pbitpos;
4557 tree *poffset;
4558 enum machine_mode *pmode;
4559 int *punsignedp;
4560 int *pvolatilep;
4561 int *palignment;
4563 tree orig_exp = exp;
4564 tree size_tree = 0;
4565 enum machine_mode mode = VOIDmode;
4566 tree offset = integer_zero_node;
4567 int alignment = BIGGEST_ALIGNMENT;
4569 if (TREE_CODE (exp) == COMPONENT_REF)
4571 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4572 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4573 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4574 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4576 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4578 size_tree = TREE_OPERAND (exp, 1);
4579 *punsignedp = TREE_UNSIGNED (exp);
4581 else
4583 mode = TYPE_MODE (TREE_TYPE (exp));
4584 *pbitsize = GET_MODE_BITSIZE (mode);
4585 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4588 if (size_tree)
4590 if (TREE_CODE (size_tree) != INTEGER_CST)
4591 mode = BLKmode, *pbitsize = -1;
4592 else
4593 *pbitsize = TREE_INT_CST_LOW (size_tree);
4596 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4597 and find the ultimate containing object. */
4599 *pbitpos = 0;
4601 while (1)
4603 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4605 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4606 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4607 : TREE_OPERAND (exp, 2));
4608 tree constant = integer_zero_node, var = pos;
4610 /* If this field hasn't been filled in yet, don't go
4611 past it. This should only happen when folding expressions
4612 made during type construction. */
4613 if (pos == 0)
4614 break;
4616 /* Assume here that the offset is a multiple of a unit.
4617 If not, there should be an explicitly added constant. */
4618 if (TREE_CODE (pos) == PLUS_EXPR
4619 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4620 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4621 else if (TREE_CODE (pos) == INTEGER_CST)
4622 constant = pos, var = integer_zero_node;
4624 *pbitpos += TREE_INT_CST_LOW (constant);
4625 offset = size_binop (PLUS_EXPR, offset,
4626 size_binop (EXACT_DIV_EXPR, var,
4627 size_int (BITS_PER_UNIT)));
4630 else if (TREE_CODE (exp) == ARRAY_REF)
4632 /* This code is based on the code in case ARRAY_REF in expand_expr
4633 below. We assume here that the size of an array element is
4634 always an integral multiple of BITS_PER_UNIT. */
4636 tree index = TREE_OPERAND (exp, 1);
4637 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4638 tree low_bound
4639 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4640 tree index_type = TREE_TYPE (index);
4641 tree xindex;
4643 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4645 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4646 index);
4647 index_type = TREE_TYPE (index);
4650 /* Optimize the special-case of a zero lower bound.
4652 We convert the low_bound to sizetype to avoid some problems
4653 with constant folding. (E.g. suppose the lower bound is 1,
4654 and its mode is QI. Without the conversion, (ARRAY
4655 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4656 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4658 But sizetype isn't quite right either (especially if
4659 the lowbound is negative). FIXME */
4661 if (! integer_zerop (low_bound))
4662 index = fold (build (MINUS_EXPR, index_type, index,
4663 convert (sizetype, low_bound)));
4665 if (TREE_CODE (index) == INTEGER_CST)
4667 index = convert (sbitsizetype, index);
4668 index_type = TREE_TYPE (index);
4671 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
4672 convert (sbitsizetype,
4673 TYPE_SIZE (TREE_TYPE (exp)))));
4675 if (TREE_CODE (xindex) == INTEGER_CST
4676 && TREE_INT_CST_HIGH (xindex) == 0)
4677 *pbitpos += TREE_INT_CST_LOW (xindex);
4678 else
4680 /* Either the bit offset calculated above is not constant, or
4681 it overflowed. In either case, redo the multiplication
4682 against the size in units. This is especially important
4683 in the non-constant case to avoid a division at runtime. */
4684 xindex = fold (build (MULT_EXPR, ssizetype, index,
4685 convert (ssizetype,
4686 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
4688 if (contains_placeholder_p (xindex))
4689 xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
4691 offset = size_binop (PLUS_EXPR, offset, xindex);
4694 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4695 && ! ((TREE_CODE (exp) == NOP_EXPR
4696 || TREE_CODE (exp) == CONVERT_EXPR)
4697 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4698 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4699 != UNION_TYPE))
4700 && (TYPE_MODE (TREE_TYPE (exp))
4701 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4702 break;
4704 /* If any reference in the chain is volatile, the effect is volatile. */
4705 if (TREE_THIS_VOLATILE (exp))
4706 *pvolatilep = 1;
4708 /* If the offset is non-constant already, then we can't assume any
4709 alignment more than the alignment here. */
4710 if (! integer_zerop (offset))
4711 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4713 exp = TREE_OPERAND (exp, 0);
4716 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4717 alignment = MIN (alignment, DECL_ALIGN (exp));
4718 else if (TREE_TYPE (exp) != 0)
4719 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4721 if (integer_zerop (offset))
4722 offset = 0;
4724 if (offset != 0 && contains_placeholder_p (offset))
4725 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4727 *pmode = mode;
4728 *poffset = offset;
4729 *palignment = alignment / BITS_PER_UNIT;
4730 return exp;
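#if 0
/* Usage sketch (hypothetical EXP such as `s.a.b[i]', not compiled):
   peel nested COMPONENT_REFs and ARRAY_REFs down to the containing
   object, collecting position and alignment information.  */
int bitsize, bitpos, unsignedp, volatilep = 0, alignment;
tree offset, inner;
enum machine_mode mode1;
inner = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
                             &unsignedp, &volatilep, &alignment);
#endif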
4733 /* Subroutine of expand_expr: compute memory_usage from modifier. */
4734 static enum memory_use_mode
4735 get_memory_usage_from_modifier (modifier)
4736 enum expand_modifier modifier;
4738 switch (modifier)
4740 case EXPAND_NORMAL:
4741 case EXPAND_SUM:
4742 return MEMORY_USE_RO;
4743 break;
4744 case EXPAND_MEMORY_USE_WO:
4745 return MEMORY_USE_WO;
4746 break;
4747 case EXPAND_MEMORY_USE_RW:
4748 return MEMORY_USE_RW;
4749 break;
4750 case EXPAND_MEMORY_USE_DONT:
4751 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
4752 MEMORY_USE_DONT, because they are modifiers to a call of
4753 expand_expr in the ADDR_EXPR case of expand_expr. */
4754 case EXPAND_CONST_ADDRESS:
4755 case EXPAND_INITIALIZER:
4756 return MEMORY_USE_DONT;
4757 case EXPAND_MEMORY_USE_BAD:
4758 default:
4759 abort ();
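#if 0
/* Usage sketch (not compiled): under -fcheck-memory-usage an ordinary
   read maps to a read-only access check.  */
enum memory_use_mode usage = get_memory_usage_from_modifier (EXPAND_NORMAL);
/* USAGE is MEMORY_USE_RO here.  */
#endif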
4763 /* Given an rtx VALUE that may contain additions and multiplications,
4764 return an equivalent value that just refers to a register or memory.
4765 This is done by generating instructions to perform the arithmetic
4766 and returning a pseudo-register containing the value.
4768 The returned value may be a REG, SUBREG, MEM or constant. */
4770 rtx
4771 force_operand (value, target)
4772 rtx value, target;
4774 register optab binoptab = 0;
4775 /* Use a temporary to force order of execution of calls to
4776 `force_operand'. */
4777 rtx tmp;
4778 register rtx op2;
4779 /* Use subtarget as the target for operand 0 of a binary operation. */
4780 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4782 /* Check for a PIC address load. */
4783 if (flag_pic
4784 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
4785 && XEXP (value, 0) == pic_offset_table_rtx
4786 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
4787 || GET_CODE (XEXP (value, 1)) == LABEL_REF
4788 || GET_CODE (XEXP (value, 1)) == CONST))
4790 if (!subtarget)
4791 subtarget = gen_reg_rtx (GET_MODE (value));
4792 emit_move_insn (subtarget, value);
4793 return subtarget;
4796 if (GET_CODE (value) == PLUS)
4797 binoptab = add_optab;
4798 else if (GET_CODE (value) == MINUS)
4799 binoptab = sub_optab;
4800 else if (GET_CODE (value) == MULT)
4802 op2 = XEXP (value, 1);
4803 if (!CONSTANT_P (op2)
4804 && !(GET_CODE (op2) == REG && op2 != subtarget))
4805 subtarget = 0;
4806 tmp = force_operand (XEXP (value, 0), subtarget);
4807 return expand_mult (GET_MODE (value), tmp,
4808 force_operand (op2, NULL_RTX),
4809 target, 0);
4812 if (binoptab)
4814 op2 = XEXP (value, 1);
4815 if (!CONSTANT_P (op2)
4816 && !(GET_CODE (op2) == REG && op2 != subtarget))
4817 subtarget = 0;
4818 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4820 binoptab = add_optab;
4821 op2 = negate_rtx (GET_MODE (value), op2);
4824 /* Check for an addition with OP2 a constant integer and our first
4825 operand a PLUS of a virtual register and something else. In that
4826 case, we want to emit the sum of the virtual register and the
4827 constant first and then add the other value. This allows virtual
4828 register instantiation to simply modify the constant rather than
4829 creating another one around this addition. */
4830 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4831 && GET_CODE (XEXP (value, 0)) == PLUS
4832 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4833 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4834 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4836 rtx temp = expand_binop (GET_MODE (value), binoptab,
4837 XEXP (XEXP (value, 0), 0), op2,
4838 subtarget, 0, OPTAB_LIB_WIDEN);
4839 return expand_binop (GET_MODE (value), binoptab, temp,
4840 force_operand (XEXP (XEXP (value, 0), 1), 0),
4841 target, 0, OPTAB_LIB_WIDEN);
4844 tmp = force_operand (XEXP (value, 0), subtarget);
4845 return expand_binop (GET_MODE (value), binoptab, tmp,
4846 force_operand (op2, NULL_RTX),
4847 target, 0, OPTAB_LIB_WIDEN);
4848 /* We give UNSIGNEDP = 0 to expand_binop
4849 because the only operations we are expanding here are signed ones. */
4851 return value;
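#if 0
/* Usage sketch (hypothetical pseudos X_REG and Y_REG, not compiled):
   reduce the address arithmetic `x + y*4' to a single operand.  */
rtx addr = gen_rtx_PLUS (Pmode, x_reg,
                         gen_rtx_MULT (Pmode, y_reg, GEN_INT (4)));
rtx operand = force_operand (addr, NULL_RTX);
/* OPERAND is now a REG, SUBREG, MEM or constant holding the sum.  */
#endif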
4854 /* Subroutine of expand_expr:
4855 save the non-copied parts (LIST) of an expr (LHS), and return a list
4856 which can restore these values to their previous values,
4857 should something modify their storage. */
4859 static tree
4860 save_noncopied_parts (lhs, list)
4861 tree lhs;
4862 tree list;
4864 tree tail;
4865 tree parts = 0;
4867 for (tail = list; tail; tail = TREE_CHAIN (tail))
4868 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4869 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4870 else
4872 tree part = TREE_VALUE (tail);
4873 tree part_type = TREE_TYPE (part);
4874 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
4875 rtx target = assign_temp (part_type, 0, 1, 1);
4876 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
4877 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
4878 parts = tree_cons (to_be_saved,
4879 build (RTL_EXPR, part_type, NULL_TREE,
4880 (tree) target),
4881 parts);
4882 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4884 return parts;
4887 /* Subroutine of expand_expr:
4888 record the non-copied parts (LIST) of an expr (LHS), and return a list
4889 which specifies the initial values of these parts. */
4891 static tree
4892 init_noncopied_parts (lhs, list)
4893 tree lhs;
4894 tree list;
4896 tree tail;
4897 tree parts = 0;
4899 for (tail = list; tail; tail = TREE_CHAIN (tail))
4900 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4901 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4902 else
4904 tree part = TREE_VALUE (tail);
4905 tree part_type = TREE_TYPE (part);
4906 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
4907 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4909 return parts;
4912 /* Subroutine of expand_expr: return nonzero iff there is no way that
4913 EXP can reference X, which is being modified. TOP_P is nonzero if this
4914 call is going to be used to determine whether we need a temporary
4915 for EXP, as opposed to a recursive call to this function.
4917 It is always safe for this routine to return zero since it merely
4918 searches for optimization opportunities. */
4920 static int
4921 safe_from_p (x, exp, top_p)
4922 rtx x;
4923 tree exp;
4924 int top_p;
4926 rtx exp_rtl = 0;
4927 int i, nops;
4928 static int save_expr_count;
4929 static int save_expr_size = 0;
4930 static tree *save_expr_rewritten;
4931 static tree save_expr_trees[256];
4933 if (x == 0
4934 /* If EXP has varying size, we MUST use a target since we currently
4935 have no way of allocating temporaries of variable size
4936 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4937 So we assume here that something at a higher level has prevented a
4938 clash. This is somewhat bogus, but the best we can do. Only
4939 do this when X is BLKmode and when we are at the top level. */
4940 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4941 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
4942 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
4943 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
4944 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
4945 != INTEGER_CST)
4946 && GET_MODE (x) == BLKmode))
4947 return 1;
4949 if (top_p && save_expr_size == 0)
4951 int rtn;
4953 save_expr_count = 0;
4954 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
4955 save_expr_rewritten = &save_expr_trees[0];
4957 rtn = safe_from_p (x, exp, 1);
4959 for (i = 0; i < save_expr_count; ++i)
4961 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
4962 abort ();
4963 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
4966 save_expr_size = 0;
4968 return rtn;
4971 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4972 find the underlying pseudo. */
4973 if (GET_CODE (x) == SUBREG)
4975 x = SUBREG_REG (x);
4976 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4977 return 0;
4980 /* If X is a location in the outgoing argument area, it is always safe. */
4981 if (GET_CODE (x) == MEM
4982 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4983 || (GET_CODE (XEXP (x, 0)) == PLUS
4984 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
4985 return 1;
4987 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4989 case 'd':
4990 exp_rtl = DECL_RTL (exp);
4991 break;
4993 case 'c':
4994 return 1;
4996 case 'x':
4997 if (TREE_CODE (exp) == TREE_LIST)
4998 return ((TREE_VALUE (exp) == 0
4999 || safe_from_p (x, TREE_VALUE (exp), 0))
5000 && (TREE_CHAIN (exp) == 0
5001 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5002 else if (TREE_CODE (exp) == ERROR_MARK)
5003 return 1; /* An already-visited SAVE_EXPR? */
5004 else
5005 return 0;
5007 case '1':
5008 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5010 case '2':
5011 case '<':
5012 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5013 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5015 case 'e':
5016 case 'r':
5017 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5018 the expression. If it is set, we conflict iff we are that rtx or
5019 both are in memory. Otherwise, we check all operands of the
5020 expression recursively. */
5022 switch (TREE_CODE (exp))
5024 case ADDR_EXPR:
5025 return (staticp (TREE_OPERAND (exp, 0))
5026 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5027 || TREE_STATIC (exp));
5029 case INDIRECT_REF:
5030 if (GET_CODE (x) == MEM)
5031 return 0;
5032 break;
5034 case CALL_EXPR:
5035 exp_rtl = CALL_EXPR_RTL (exp);
5036 if (exp_rtl == 0)
5038 /* Assume that the call will clobber all hard registers and
5039 all of memory. */
5040 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5041 || GET_CODE (x) == MEM)
5042 return 0;
5045 break;
5047 case RTL_EXPR:
5048 /* If a sequence exists, we would have to scan every instruction
5049 in the sequence to see if it was safe. This is probably not
5050 worthwhile. */
5051 if (RTL_EXPR_SEQUENCE (exp))
5052 return 0;
5054 exp_rtl = RTL_EXPR_RTL (exp);
5055 break;
5057 case WITH_CLEANUP_EXPR:
5058 exp_rtl = RTL_EXPR_RTL (exp);
5059 break;
5061 case CLEANUP_POINT_EXPR:
5062 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5064 case SAVE_EXPR:
5065 exp_rtl = SAVE_EXPR_RTL (exp);
5066 if (exp_rtl)
5067 break;
5069 /* This SAVE_EXPR might appear many times in the top-level
5070 safe_from_p() expression, and if it has a complex
5071 subexpression, examining it multiple times could result
5072 in a combinatorial explosion. E.g. on an Alpha
5073 running at least 200MHz, a Fortran test case compiled with
5074 optimization took about 28 minutes to compile -- even though
5075 it was only a few lines long, and the complicated line causing
5076 so much time to be spent in the earlier version of safe_from_p()
5077 had only 293 or so unique nodes.
5079 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5080 where it is so we can turn it back in the top-level safe_from_p()
5081 when we're done. */
5083 /* For now, don't bother re-sizing the array. */
5084 if (save_expr_count >= save_expr_size)
5085 return 0;
5086 save_expr_rewritten[save_expr_count++] = exp;
5088 nops = tree_code_length[(int) SAVE_EXPR];
5089 for (i = 0; i < nops; i++)
5091 tree operand = TREE_OPERAND (exp, i);
5092 if (operand == NULL_TREE)
5093 continue;
5094 TREE_SET_CODE (exp, ERROR_MARK);
5095 if (!safe_from_p (x, operand, 0))
5096 return 0;
5097 TREE_SET_CODE (exp, SAVE_EXPR);
5099 TREE_SET_CODE (exp, ERROR_MARK);
5100 return 1;
5102 case BIND_EXPR:
5103 /* The only operand we look at is operand 1. The rest aren't
5104 part of the expression. */
5105 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5107 case METHOD_CALL_EXPR:
5108 /* This takes an rtx argument, but shouldn't appear here. */
5109 abort ();
5111 default:
5112 break;
5115 /* If we have an rtx, we do not need to scan our operands. */
5116 if (exp_rtl)
5117 break;
5119 nops = tree_code_length[(int) TREE_CODE (exp)];
5120 for (i = 0; i < nops; i++)
5121 if (TREE_OPERAND (exp, i) != 0
5122 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5123 return 0;
5126 /* If we have an rtl, find any enclosed object. Then see if we conflict
5127 with it. */
5128 if (exp_rtl)
5130 if (GET_CODE (exp_rtl) == SUBREG)
5132 exp_rtl = SUBREG_REG (exp_rtl);
5133 if (GET_CODE (exp_rtl) == REG
5134 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5135 return 0;
5138 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5139 are memory and EXP is not readonly. */
5140 return ! (rtx_equal_p (x, exp_rtl)
5141 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5142 && ! TREE_READONLY (exp)));
5145 /* If we reach here, it is safe. */
5146 return 1;
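#if 0
/* Usage sketch (hypothetical RHS tree, not compiled): reuse TARGET while
   expanding RHS only if RHS cannot reference it.  */
if (safe_from_p (target, rhs, 1))
  temp = expand_expr (rhs, target, GET_MODE (target), 0);
else
  temp = expand_expr (rhs, NULL_RTX, GET_MODE (target), 0);
#endif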
5149 /* Subroutine of expand_expr: return nonzero iff EXP is an
5150 expression whose type is statically determinable. */
5152 static int
5153 fixed_type_p (exp)
5154 tree exp;
5156 if (TREE_CODE (exp) == PARM_DECL
5157 || TREE_CODE (exp) == VAR_DECL
5158 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5159 || TREE_CODE (exp) == COMPONENT_REF
5160 || TREE_CODE (exp) == ARRAY_REF)
5161 return 1;
5162 return 0;
5165 /* Subroutine of expand_expr: return rtx if EXP is a
5166 variable or parameter; else return 0. */
5168 static rtx
5169 var_rtx (exp)
5170 tree exp;
5172 STRIP_NOPS (exp);
5173 switch (TREE_CODE (exp))
5175 case PARM_DECL:
5176 case VAR_DECL:
5177 return DECL_RTL (exp);
5178 default:
5179 return 0;
5183 #ifdef MAX_INTEGER_COMPUTATION_MODE
5184 void
5185 check_max_integer_computation_mode (exp)
5186 tree exp;
5188 enum tree_code code = TREE_CODE (exp);
5189 enum machine_mode mode;
5191 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5192 if (code == NOP_EXPR
5193 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5194 return;
5196 /* First check the type of the overall operation. We need only look at
5197 unary, binary and relational operations. */
5198 if (TREE_CODE_CLASS (code) == '1'
5199 || TREE_CODE_CLASS (code) == '2'
5200 || TREE_CODE_CLASS (code) == '<')
5202 mode = TYPE_MODE (TREE_TYPE (exp));
5203 if (GET_MODE_CLASS (mode) == MODE_INT
5204 && mode > MAX_INTEGER_COMPUTATION_MODE)
5205 fatal ("unsupported wide integer operation");
5208 /* Check operand of a unary op. */
5209 if (TREE_CODE_CLASS (code) == '1')
5211 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5212 if (GET_MODE_CLASS (mode) == MODE_INT
5213 && mode > MAX_INTEGER_COMPUTATION_MODE)
5214 fatal ("unsupported wide integer operation");
5217 /* Check operands of a binary/comparison op. */
5218 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5220 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5221 if (GET_MODE_CLASS (mode) == MODE_INT
5222 && mode > MAX_INTEGER_COMPUTATION_MODE)
5223 fatal ("unsupported wide integer operation");
5225 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5226 if (GET_MODE_CLASS (mode) == MODE_INT
5227 && mode > MAX_INTEGER_COMPUTATION_MODE)
5228 fatal ("unsupported wide integer operation");
5231 #endif
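#if 0
/* Configuration sketch (an assumption, not any real target's setting):
   a target unable to do integer arithmetic wider than SImode would put
       #define MAX_INTEGER_COMPUTATION_MODE SImode
   in its target header, after which any wider MODE_INT operation that
   reaches the checks above is reported with fatal ().  */
#endif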
5234 /* expand_expr: generate code for computing expression EXP.
5235 An rtx for the computed value is returned. The value is never null.
5236 In the case of a void EXP, const0_rtx is returned.
5238 The value may be stored in TARGET if TARGET is nonzero.
5239 TARGET is just a suggestion; callers must assume that
5240 the rtx returned may not be the same as TARGET.
5242 If TARGET is CONST0_RTX, it means that the value will be ignored.
5244 If TMODE is not VOIDmode, it suggests generating the
5245 result in mode TMODE. But this is done only when convenient.
5246 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5247 TMODE is just a suggestion; callers must assume that
5248 the rtx returned may not have mode TMODE.
5250 Note that TARGET may have neither TMODE nor MODE. In that case, it
5251 probably will not be used.
5253 If MODIFIER is EXPAND_SUM then when EXP is an addition
5254 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5255 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5256 products as above, or REG or MEM, or constant.
5257 Ordinarily in such cases we would output mul or add instructions
5258 and then return a pseudo reg containing the sum.
5260 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5261 it also marks a label as absolutely required (it can't be dead).
5262 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5263 This is used for outputting expressions used in initializers.
5265 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5266 with a constant address even if that address is not normally legitimate.
5267 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
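#if 0
/* Illustrative sketch (not compiled): with EXPAND_SUM, expanding a C
   expression such as `x + i*4' may return the lazy form
       (plus:SI (reg:SI 58) (mult:SI (reg:SI 59) (const_int 4)))
   instead of emitting add/mult insns at once; the pseudo-register
   numbers are hypothetical.  */
rtx sum = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_SUM);
#endif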
5269 rtx
5270 expand_expr (exp, target, tmode, modifier)
5271 register tree exp;
5272 rtx target;
5273 enum machine_mode tmode;
5274 enum expand_modifier modifier;
5276 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
5277 This is static so it will be accessible to our recursive callees. */
5278 static tree placeholder_list = 0;
5279 register rtx op0, op1, temp;
5280 tree type = TREE_TYPE (exp);
5281 int unsignedp = TREE_UNSIGNED (type);
5282 register enum machine_mode mode = TYPE_MODE (type);
5283 register enum tree_code code = TREE_CODE (exp);
5284 optab this_optab;
5285 /* Use subtarget as the target for operand 0 of a binary operation. */
5286 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5287 rtx original_target = target;
5288 int ignore = (target == const0_rtx
5289 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5290 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5291 || code == COND_EXPR)
5292 && TREE_CODE (type) == VOID_TYPE));
5293 tree context;
5294 /* Used by check-memory-usage to make modifier read only. */
5295 enum expand_modifier ro_modifier;
5297 /* Make a read-only version of the modifier. */
5298 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5299 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5300 ro_modifier = modifier;
5301 else
5302 ro_modifier = EXPAND_NORMAL;
5304 /* Don't use hard regs as subtargets, because the combiner
5305 can only handle pseudo regs. */
5306 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5307 subtarget = 0;
5308 /* Avoid subtargets inside loops,
5309 since they hide some invariant expressions. */
5310 if (preserve_subexpressions_p ())
5311 subtarget = 0;
5313 /* If we are going to ignore this result, we need only do something
5314 if there is a side-effect somewhere in the expression. If there
5315 is, short-circuit the most common cases here. Note that we must
5316 not call expand_expr with anything but const0_rtx in case this
5317 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5319 if (ignore)
5321 if (! TREE_SIDE_EFFECTS (exp))
5322 return const0_rtx;
5324 /* Ensure we reference a volatile object even if value is ignored. */
5325 if (TREE_THIS_VOLATILE (exp)
5326 && TREE_CODE (exp) != FUNCTION_DECL
5327 && mode != VOIDmode && mode != BLKmode)
5329 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5330 if (GET_CODE (temp) == MEM)
5331 temp = copy_to_reg (temp);
5332 return const0_rtx;
5335 if (TREE_CODE_CLASS (code) == '1')
5336 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5337 VOIDmode, ro_modifier);
5338 else if (TREE_CODE_CLASS (code) == '2'
5339 || TREE_CODE_CLASS (code) == '<')
5341 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5342 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5343 return const0_rtx;
5345 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5346 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5347 /* If the second operand has no side effects, just evaluate
5348 the first. */
5349 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5350 VOIDmode, ro_modifier);
5352 target = 0;
5355 #ifdef MAX_INTEGER_COMPUTATION_MODE
5356 if (target
5357 && TREE_CODE (exp) != INTEGER_CST
5358 && TREE_CODE (exp) != PARM_DECL
5359 && TREE_CODE (exp) != VAR_DECL)
5361 enum machine_mode mode = GET_MODE (target);
5363 if (GET_MODE_CLASS (mode) == MODE_INT
5364 && mode > MAX_INTEGER_COMPUTATION_MODE)
5365 fatal ("unsupported wide integer operation");
5368 if (TREE_CODE (exp) != INTEGER_CST
5369 && TREE_CODE (exp) != PARM_DECL
5370 && TREE_CODE (exp) != VAR_DECL
5371 && GET_MODE_CLASS (tmode) == MODE_INT
5372 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5373 fatal ("unsupported wide integer operation");
5375 check_max_integer_computation_mode (exp);
5376 #endif
5378 /* If we will do cse, generate all results into pseudo registers
5379 since 1) that allows cse to find more things
5380 and 2) otherwise cse could produce an insn the machine
5381 cannot support. */
5383 if (! cse_not_expected && mode != BLKmode && target
5384 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5385 target = subtarget;
5387 switch (code)
5389 case LABEL_DECL:
5391 tree function = decl_function_context (exp);
5392 /* Handle using a label in a containing function. */
5393 if (function != current_function_decl
5394 && function != inline_function_decl && function != 0)
5396 struct function *p = find_function_data (function);
5397 /* Allocate in the memory associated with the function
5398 that the label is in. */
5399 push_obstacks (p->function_obstack,
5400 p->function_maybepermanent_obstack);
5402 p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5403 label_rtx (exp),
5404 p->forced_labels);
5405 pop_obstacks ();
5407 else if (modifier == EXPAND_INITIALIZER)
5408 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5409 label_rtx (exp), forced_labels);
5410 temp = gen_rtx_MEM (FUNCTION_MODE,
5411 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5412 if (function != current_function_decl
5413 && function != inline_function_decl && function != 0)
5414 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5415 return temp;
5418 case PARM_DECL:
5419 if (DECL_RTL (exp) == 0)
5421 error_with_decl (exp, "prior parameter's size depends on `%s'");
5422 return CONST0_RTX (mode);
5425 /* ... fall through ... */
5427 case VAR_DECL:
5428 /* If a static var's type was incomplete when the decl was written,
5429 but the type is complete now, lay out the decl now. */
5430 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5431 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5433 push_obstacks_nochange ();
5434 end_temporary_allocation ();
5435 layout_decl (exp, 0);
5436 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5437 pop_obstacks ();
5440 /* Only check automatic variables. Currently, function arguments are
5441 not checked (this can be done at compile-time with prototypes).
5442 Aggregates are not checked. */
5443 if (flag_check_memory_usage && code == VAR_DECL
5444 && GET_CODE (DECL_RTL (exp)) == MEM
5445 && DECL_CONTEXT (exp) != NULL_TREE
5446 && ! TREE_STATIC (exp)
5447 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5449 enum memory_use_mode memory_usage;
5450 memory_usage = get_memory_usage_from_modifier (modifier);
5452 if (memory_usage != MEMORY_USE_DONT)
5453 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5454 XEXP (DECL_RTL (exp), 0), ptr_mode,
5455 GEN_INT (int_size_in_bytes (type)),
5456 TYPE_MODE (sizetype),
5457 GEN_INT (memory_usage),
5458 TYPE_MODE (integer_type_node));
5461 /* ... fall through ... */
5463 case FUNCTION_DECL:
5464 case RESULT_DECL:
5465 if (DECL_RTL (exp) == 0)
5466 abort ();
5468 /* Ensure the variable is marked as used even if it doesn't go through
5469 a parser. If it hasn't been used yet, write out an external
5470 definition. */
5471 if (! TREE_USED (exp))
5473 assemble_external (exp);
5474 TREE_USED (exp) = 1;
5477 /* Show we haven't gotten RTL for this yet. */
5478 temp = 0;
5480 /* Handle variables inherited from containing functions. */
5481 context = decl_function_context (exp);
5483 /* We treat inline_function_decl as an alias for the current function
5484 because that is the inline function whose vars, types, etc.
5485 are being merged into the current function.
5486 See expand_inline_function. */
5488 if (context != 0 && context != current_function_decl
5489 && context != inline_function_decl
5490 /* If var is static, we don't need a static chain to access it. */
5491 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5492 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5494 rtx addr;
5496 /* Mark as non-local and addressable. */
5497 DECL_NONLOCAL (exp) = 1;
5498 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5499 abort ();
5500 mark_addressable (exp);
5501 if (GET_CODE (DECL_RTL (exp)) != MEM)
5502 abort ();
5503 addr = XEXP (DECL_RTL (exp), 0);
5504 if (GET_CODE (addr) == MEM)
5505 addr = gen_rtx_MEM (Pmode,
5506 fix_lexical_addr (XEXP (addr, 0), exp));
5507 else
5508 addr = fix_lexical_addr (addr, exp);
5509 temp = change_address (DECL_RTL (exp), mode, addr);
5512 /* This is the case of an array whose size is to be determined
5513 from its initializer, while the initializer is still being parsed.
5514 See expand_decl. */
5516 else if (GET_CODE (DECL_RTL (exp)) == MEM
5517 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5518 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5519 XEXP (DECL_RTL (exp), 0));
5521 /* If DECL_RTL is memory, we are in the normal case and either
5522 the address is not valid or it is not a register and -fforce-addr
5523 is specified, get the address into a register. */
5525 else if (GET_CODE (DECL_RTL (exp)) == MEM
5526 && modifier != EXPAND_CONST_ADDRESS
5527 && modifier != EXPAND_SUM
5528 && modifier != EXPAND_INITIALIZER
5529 && (! memory_address_p (DECL_MODE (exp),
5530 XEXP (DECL_RTL (exp), 0))
5531 || (flag_force_addr
5532 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5533 temp = change_address (DECL_RTL (exp), VOIDmode,
5534 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5536 /* If we got something, return it. But first, set the alignment
5537 if the address is a register. */
5538 if (temp != 0)
5540 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5541 mark_reg_pointer (XEXP (temp, 0),
5542 DECL_ALIGN (exp) / BITS_PER_UNIT);
5544 return temp;
5547 /* If the mode of DECL_RTL does not match that of the decl, it
5548 must be a promoted value. We return a SUBREG of the wanted mode,
5549 but mark it so that we know that it was already extended. */
5551 if (GET_CODE (DECL_RTL (exp)) == REG
5552 && GET_MODE (DECL_RTL (exp)) != mode)
5554 /* Get the signedness used for this variable. Ensure we get the
5555 same mode we got when the variable was declared. */
5556 if (GET_MODE (DECL_RTL (exp))
5557 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5558 abort ();
5560 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5561 SUBREG_PROMOTED_VAR_P (temp) = 1;
5562 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5563 return temp;
5566 return DECL_RTL (exp);
5568 case INTEGER_CST:
5569 return immed_double_const (TREE_INT_CST_LOW (exp),
5570 TREE_INT_CST_HIGH (exp),
5571 mode);
5573 case CONST_DECL:
5574 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5575 EXPAND_MEMORY_USE_BAD);
5577 case REAL_CST:
5578 /* If optimized, generate immediate CONST_DOUBLE
5579 which will be turned into memory by reload if necessary.
5581 We used to force a register so that loop.c could see it. But
5582 this does not allow gen_* patterns to perform optimizations with
5583 the constants. It also produces two insns in cases like "x = 1.0;".
5584 On most machines, floating-point constants are not permitted in
5585 many insns, so we'd end up copying it to a register in any case.
5587 Now, we do the copying in expand_binop, if appropriate. */
5588 return immed_real_const (exp);
5590 case COMPLEX_CST:
5591 case STRING_CST:
5592 if (! TREE_CST_RTL (exp))
5593 output_constant_def (exp);
5595 /* TREE_CST_RTL probably contains a constant address.
5596 On RISC machines where a constant address isn't valid,
5597 make some insns to get that address into a register. */
5598 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5599 && modifier != EXPAND_CONST_ADDRESS
5600 && modifier != EXPAND_INITIALIZER
5601 && modifier != EXPAND_SUM
5602 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5603 || (flag_force_addr
5604 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5605 return change_address (TREE_CST_RTL (exp), VOIDmode,
5606 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5607 return TREE_CST_RTL (exp);
5609 case EXPR_WITH_FILE_LOCATION:
5611 rtx to_return;
5612 char *saved_input_filename = input_filename;
5613 int saved_lineno = lineno;
5614 input_filename = EXPR_WFL_FILENAME (exp);
5615 lineno = EXPR_WFL_LINENO (exp);
5616 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
5617 emit_line_note (input_filename, lineno);
5618 /* Possibly avoid switching back and forth here. */
5619 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
5620 input_filename = saved_input_filename;
5621 lineno = saved_lineno;
5622 return to_return;
5625 case SAVE_EXPR:
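/* Editor's note, per tree.def: SAVE_EXPR <E> means E is evaluated
   the first time it is expanded and that value is reused on every
   later expansion.  Front ends build these for, e.g., the size
   expression of a variable-length array, which is referenced each
   time the array is indexed but must be computed only once.  */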
5626 context = decl_function_context (exp);
5628 /* If this SAVE_EXPR was at global context, assume we are expanding
5629 an initialization function and move it into our context. */
5630 if (context == 0)
5631 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5633 /* We treat inline_function_decl as an alias for the current function
5634 because that is the inline function whose vars, types, etc.
5635 are being merged into the current function.
5636 See expand_inline_function. */
5637 if (context == current_function_decl || context == inline_function_decl)
5638 context = 0;
5640 /* If this is non-local, handle it. */
5641 if (context)
5643 /* The following call just exists to abort if the context is
5644 not of a containing function. */
5645 find_function_data (context);
5647 temp = SAVE_EXPR_RTL (exp);
5648 if (temp && GET_CODE (temp) == REG)
5650 put_var_into_stack (exp);
5651 temp = SAVE_EXPR_RTL (exp);
5653 if (temp == 0 || GET_CODE (temp) != MEM)
5654 abort ();
5655 return change_address (temp, mode,
5656 fix_lexical_addr (XEXP (temp, 0), exp));
5658 if (SAVE_EXPR_RTL (exp) == 0)
5660 if (mode == VOIDmode)
5661 temp = const0_rtx;
5662 else
5663 temp = assign_temp (type, 3, 0, 0);
5665 SAVE_EXPR_RTL (exp) = temp;
5666 if (!optimize && GET_CODE (temp) == REG)
5667 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
5668 save_expr_regs);
5670 /* If the mode of TEMP does not match that of the expression, it
5671 must be a promoted value. We pass store_expr a SUBREG of the
5672 wanted mode but mark it so that we know that it was already
5673 extended. Note that `unsignedp' was modified above in
5674 this case. */
5676 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5678 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5679 SUBREG_PROMOTED_VAR_P (temp) = 1;
5680 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5683 if (temp == const0_rtx)
5684 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5685 EXPAND_MEMORY_USE_BAD);
5686 else
5687 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5689 TREE_USED (exp) = 1;
5692 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5693 must be a promoted value. We return a SUBREG of the wanted mode,
5694 but mark it so that we know that it was already extended. */
5696 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5697 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5699 /* Compute the signedness and make the proper SUBREG. */
5700 promote_mode (type, mode, &unsignedp, 0);
5701 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5702 SUBREG_PROMOTED_VAR_P (temp) = 1;
5703 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5704 return temp;
5707 return SAVE_EXPR_RTL (exp);
5709 case UNSAVE_EXPR:
5711 rtx temp;
5712 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5713 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5714 return temp;
5717 case PLACEHOLDER_EXPR:
5719 tree placeholder_expr;
5721 /* If there is an object on the head of the placeholder list,
5722 see if some object in it is of type TYPE or a pointer to it. For
5723 further information, see tree.def. */
5724 for (placeholder_expr = placeholder_list;
5725 placeholder_expr != 0;
5726 placeholder_expr = TREE_CHAIN (placeholder_expr))
5728 tree need_type = TYPE_MAIN_VARIANT (type);
5729 tree object = 0;
5730 tree old_list = placeholder_list;
5731 tree elt;
5733 /* Find the outermost reference that is of the type we want.
5734 If none, see if any object has a type that is a pointer to
5735 the type we want. */
5736 for (elt = TREE_PURPOSE (placeholder_expr);
5737 elt != 0 && object == 0;
5738 elt
5739 = ((TREE_CODE (elt) == COMPOUND_EXPR
5740 || TREE_CODE (elt) == COND_EXPR)
5741 ? TREE_OPERAND (elt, 1)
5742 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5743 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5744 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5745 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5746 ? TREE_OPERAND (elt, 0) : 0))
5747 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5748 object = elt;
5750 for (elt = TREE_PURPOSE (placeholder_expr);
5751 elt != 0 && object == 0;
5752 elt
5753 = ((TREE_CODE (elt) == COMPOUND_EXPR
5754 || TREE_CODE (elt) == COND_EXPR)
5755 ? TREE_OPERAND (elt, 1)
5756 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5757 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5758 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5759 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5760 ? TREE_OPERAND (elt, 0) : 0))
5761 if (POINTER_TYPE_P (TREE_TYPE (elt))
5762 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
5763 == need_type))
5764 object = build1 (INDIRECT_REF, need_type, elt);
5766 if (object != 0)
5768 /* Expand this object, skipping the list entries before the
5769 point where it was found, in case the object is itself a
5770 PLACEHOLDER_EXPR. In that case, we want to translate it
5771 using subsequent entries. */
5772 placeholder_list = TREE_CHAIN (placeholder_expr);
5773 temp = expand_expr (object, original_target, tmode,
5774 ro_modifier);
5775 placeholder_list = old_list;
5776 return temp;
5781 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5782 abort ();
5784 case WITH_RECORD_EXPR:
5785 /* Put the object on the placeholder list, expand our first operand,
5786 and pop the list. */
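/* Editor's sketch: WITH_RECORD_EXPR <E, Obj> arises for, e.g., an
   Ada record whose field bounds depend on a discriminant.  The size
   and offset expressions of such a type contain PLACEHOLDER_EXPRs;
   pushing Obj on PLACEHOLDER_LIST lets the PLACEHOLDER_EXPR case
   above resolve them against this particular record object while E
   is being expanded.  */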
5787 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5788 placeholder_list);
5789 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
5790 tmode, ro_modifier);
5791 placeholder_list = TREE_CHAIN (placeholder_list);
5792 return target;
5794 case EXIT_EXPR:
5795 expand_exit_loop_if_false (NULL_PTR,
5796 invert_truthvalue (TREE_OPERAND (exp, 0)));
5797 return const0_rtx;
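/* Editor's note: EXIT_EXPR <C> exits the innermost enclosing loop
   when C is true, so together with LOOP_EXPR (below) a front end can
   represent `while (p) body;' roughly as

       LOOP_EXPR { EXIT_EXPR <!p>; body }

   which is why the truth value was inverted before calling
   expand_exit_loop_if_false above.  */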
5799 case LABELED_BLOCK_EXPR:
5800 if (LABELED_BLOCK_BODY (exp))
5801 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
5802 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
5803 return const0_rtx;
5805 case EXIT_BLOCK_EXPR:
5806 if (EXIT_BLOCK_RETURN (exp))
5807 really_sorry ("returned value in block_exit_expr");
5808 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
5809 return const0_rtx;
5811 case LOOP_EXPR:
5812 push_temp_slots ();
5813 expand_start_loop (1);
5814 expand_expr_stmt (TREE_OPERAND (exp, 0));
5815 expand_end_loop ();
5816 pop_temp_slots ();
5818 return const0_rtx;
5820 case BIND_EXPR:
5822 tree vars = TREE_OPERAND (exp, 0);
5823 int vars_need_expansion = 0;
5825 /* Need to open a binding contour here because
5826 if there are any cleanups they must be contained here. */
5827 expand_start_bindings (0);
5829 /* Mark the corresponding BLOCK for output in its proper place. */
5830 if (TREE_OPERAND (exp, 2) != 0
5831 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5832 insert_block (TREE_OPERAND (exp, 2));
5834 /* If VARS have not yet been expanded, expand them now. */
5835 while (vars)
5837 if (DECL_RTL (vars) == 0)
5839 vars_need_expansion = 1;
5840 expand_decl (vars);
5842 expand_decl_init (vars);
5843 vars = TREE_CHAIN (vars);
5846 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
5848 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5850 return temp;
5853 case RTL_EXPR:
5854 if (RTL_EXPR_SEQUENCE (exp))
5856 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5857 abort ();
5858 emit_insns (RTL_EXPR_SEQUENCE (exp));
5859 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5861 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
5862 free_temps_for_rtl_expr (exp);
5863 return RTL_EXPR_RTL (exp);
5865 case CONSTRUCTOR:
5866 /* If we don't need the result, just ensure we evaluate any
5867 subexpressions. */
5868 if (ignore)
5870 tree elt;
5871 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
5872 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
5873 EXPAND_MEMORY_USE_BAD);
5874 return const0_rtx;
5877 /* All elts simple constants => refer to a constant in memory. But
5878 if this is a non-BLKmode mode, let it store a field at a time
5879 since that should make a CONST_INT or CONST_DOUBLE when we
5880 fold. Likewise, if we have a target we can use, it is best to
5881 store directly into the target unless the type is large enough
5882 that memcpy will be used. If we are making an initializer and
5883 all operands are constant, put it in memory as well. */
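/* Editor's illustration of the choice above: a static
   `int v[100] = {...};' is best emitted as initialized data in
   memory, while a 4-byte `struct {short a, b;} s = {1, 2};' whose
   mode is SImode is better built field by field, since the constant
   stores fold into a single CONST_INT.  Roughly, the
   move_by_pieces_ninsns test below estimates when copying the
   constant out of memory would beat member-wise stores.  */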
5884 else if ((TREE_STATIC (exp)
5885 && ((mode == BLKmode
5886 && ! (target != 0 && safe_from_p (target, exp, 1)))
5887 || TREE_ADDRESSABLE (exp)
5888 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5889 && (move_by_pieces_ninsns
5890 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5891 TYPE_ALIGN (type) / BITS_PER_UNIT)
5892 >= MOVE_RATIO)
5893 && ! mostly_zeros_p (exp))))
5894 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
5896 rtx constructor = output_constant_def (exp);
5897 if (modifier != EXPAND_CONST_ADDRESS
5898 && modifier != EXPAND_INITIALIZER
5899 && modifier != EXPAND_SUM
5900 && (! memory_address_p (GET_MODE (constructor),
5901 XEXP (constructor, 0))
5902 || (flag_force_addr
5903 && GET_CODE (XEXP (constructor, 0)) != REG)))
5904 constructor = change_address (constructor, VOIDmode,
5905 XEXP (constructor, 0));
5906 return constructor;
5909 else
5911 /* Handle calls that pass values in multiple non-contiguous
5912 locations. The Irix 6 ABI has examples of this. */
5913 if (target == 0 || ! safe_from_p (target, exp, 1)
5914 || GET_CODE (target) == PARALLEL)
5916 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5917 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5918 else
5919 target = assign_temp (type, 0, 1, 1);
5922 if (TREE_READONLY (exp))
5924 if (GET_CODE (target) == MEM)
5925 target = copy_rtx (target);
5927 RTX_UNCHANGING_P (target) = 1;
5930 store_constructor (exp, target, 0);
5931 return target;
5934 case INDIRECT_REF:
5936 tree exp1 = TREE_OPERAND (exp, 0);
5937 tree exp2;
5938 tree index;
5939 tree string = string_constant (exp1, &index);
5940 int i;
5942 /* Try to optimize reads from const strings. */
5943 if (string
5944 && TREE_CODE (string) == STRING_CST
5945 && TREE_CODE (index) == INTEGER_CST
5946 && !TREE_INT_CST_HIGH (index)
5947 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
5948 && GET_MODE_CLASS (mode) == MODE_INT
5949 && GET_MODE_SIZE (mode) == 1
5950 && modifier != EXPAND_MEMORY_USE_WO)
5951 return GEN_INT (TREE_STRING_POINTER (string)[i]);
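/* Editor's example of the fold above: `*("abc" + 1)' passes all the
   tests, so it expands directly to (const_int 98), the code for 'b',
   with no memory reference emitted.  */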
5953 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5954 op0 = memory_address (mode, op0);
5956 if (flag_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5958 enum memory_use_mode memory_usage;
5959 memory_usage = get_memory_usage_from_modifier (modifier);
5961 if (memory_usage != MEMORY_USE_DONT)
5963 in_check_memory_usage = 1;
5964 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5965 op0, ptr_mode,
5966 GEN_INT (int_size_in_bytes (type)),
5967 TYPE_MODE (sizetype),
5968 GEN_INT (memory_usage),
5969 TYPE_MODE (integer_type_node));
5970 in_check_memory_usage = 0;
5974 temp = gen_rtx_MEM (mode, op0);
5975 /* If address was computed by addition,
5976 mark this as an element of an aggregate. */
5977 if (TREE_CODE (exp1) == PLUS_EXPR
5978 || (TREE_CODE (exp1) == SAVE_EXPR
5979 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
5980 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
5981 || (TREE_CODE (exp1) == ADDR_EXPR
5982 && (exp2 = TREE_OPERAND (exp1, 0))
5983 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
5984 MEM_IN_STRUCT_P (temp) = 1;
5986 /* If the pointer is actually a REFERENCE_TYPE, this could be pointing
5987 into some aggregate too. In theory we could fold this into the
5988 previous check and use rtx_addr_varies_p there too.
5990 However, this seems safer. */
5991 if (!MEM_IN_STRUCT_P (temp)
5992 && (TREE_CODE (TREE_TYPE (exp1)) == REFERENCE_TYPE
5993 /* This may have been an array reference to the first element
5994 that was optimized away from being an addition. */
5995 || (TREE_CODE (exp1) == NOP_EXPR
5996 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp1, 0)))
5997 == REFERENCE_TYPE)
5998 || ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp1, 0)))
5999 == POINTER_TYPE)
6000 && (AGGREGATE_TYPE_P
6001 (TREE_TYPE (TREE_TYPE
6002 (TREE_OPERAND (exp1, 0))))))))))
6003 MEM_IN_STRUCT_P (temp) = ! rtx_addr_varies_p (temp);
6005 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6006 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6008 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6009 here, because, in C and C++, the fact that a location is accessed
6010 through a pointer to const does not mean that the value there can
6011 never change. Languages where it can never change should
6012 also set TREE_STATIC. */
6013 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6014 return temp;
6017 case ARRAY_REF:
6018 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6019 abort ();
6022 tree array = TREE_OPERAND (exp, 0);
6023 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6024 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6025 tree index = TREE_OPERAND (exp, 1);
6026 tree index_type = TREE_TYPE (index);
6027 HOST_WIDE_INT i;
6029 /* Optimize the special-case of a zero lower bound.
6031 We convert the low_bound to sizetype to avoid some problems
6032 with constant folding. (E.g. suppose the lower bound is 1,
6033 and its mode is QI. Without the conversion, (ARRAY
6034 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6035 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
6037 But sizetype isn't quite right either (especially if
6038 the lowbound is negative). FIXME */
6040 if (! integer_zerop (low_bound))
6041 index = fold (build (MINUS_EXPR, index_type, index,
6042 convert (sizetype, low_bound)));
6044 /* Fold an expression like: "foo"[2].
6045 This is not done in fold so it won't happen inside &.
6046 Don't fold if this is for wide characters since it's too
6047 difficult to do correctly and this is a very rare case. */
6049 if (TREE_CODE (array) == STRING_CST
6050 && TREE_CODE (index) == INTEGER_CST
6051 && !TREE_INT_CST_HIGH (index)
6052 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
6053 && GET_MODE_CLASS (mode) == MODE_INT
6054 && GET_MODE_SIZE (mode) == 1)
6055 return GEN_INT (TREE_STRING_POINTER (array)[i]);
6057 /* If this is a constant index into a constant array,
6058 just get the value from the array. Handle both the cases when
6059 we have an explicit constructor and when our operand is a variable
6060 that was declared const. */
6062 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
6064 if (TREE_CODE (index) == INTEGER_CST
6065 && TREE_INT_CST_HIGH (index) == 0)
6067 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
6069 i = TREE_INT_CST_LOW (index);
6070 while (elem && i--)
6071 elem = TREE_CHAIN (elem);
6072 if (elem)
6073 return expand_expr (fold (TREE_VALUE (elem)), target,
6074 tmode, ro_modifier);
6078 else if (optimize >= 1
6079 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6080 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6081 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6083 if (TREE_CODE (index) == INTEGER_CST)
6085 tree init = DECL_INITIAL (array);
6087 i = TREE_INT_CST_LOW (index);
6088 if (TREE_CODE (init) == CONSTRUCTOR)
6090 tree elem = CONSTRUCTOR_ELTS (init);
6092 while (elem
6093 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
6094 elem = TREE_CHAIN (elem);
6095 if (elem)
6096 return expand_expr (fold (TREE_VALUE (elem)), target,
6097 tmode, ro_modifier);
6099 else if (TREE_CODE (init) == STRING_CST
6100 && TREE_INT_CST_HIGH (index) == 0
6101 && (TREE_INT_CST_LOW (index)
6102 < TREE_STRING_LENGTH (init)))
6103 return (GEN_INT
6104 (TREE_STRING_POINTER
6105 (init)[TREE_INT_CST_LOW (index)]));
6110 /* ... fall through ... */
6112 case COMPONENT_REF:
6113 case BIT_FIELD_REF:
6114 /* If the operand is a CONSTRUCTOR, we can just extract the
6115 appropriate field if it is present. Don't do this if we have
6116 already written the data since we want to refer to that copy
6117 and varasm.c assumes that's what we'll do. */
6118 if (code != ARRAY_REF
6119 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6120 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6122 tree elt;
6124 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6125 elt = TREE_CHAIN (elt))
6126 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6127 /* We can normally use the value of the field in the
6128 CONSTRUCTOR. However, if this is a bitfield in
6129 an integral mode that we can fit in a HOST_WIDE_INT,
6130 we must mask only the number of bits in the bitfield,
6131 since this is done implicitly by the constructor. If
6132 the bitfield does not meet either of those conditions,
6133 we can't do this optimization. */
6134 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6135 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6136 == MODE_INT)
6137 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6138 <= HOST_BITS_PER_WIDE_INT))))
6140 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6141 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6143 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
6145 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6147 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6148 op0 = expand_and (op0, op1, target);
6150 else
6152 enum machine_mode imode
6153 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6154 tree count
6155 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6156 0);
6158 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6159 target, 0);
6160 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6161 target, 0);
6165 return op0;
6170 enum machine_mode mode1;
6171 int bitsize;
6172 int bitpos;
6173 tree offset;
6174 int volatilep = 0;
6175 int alignment;
6176 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6177 &mode1, &unsignedp, &volatilep,
6178 &alignment);
6180 /* If we got back the original object, something is wrong. Perhaps
6181 we are evaluating an expression too early. In any event, don't
6182 infinitely recurse. */
6183 if (tem == exp)
6184 abort ();
6186 /* If TEM's type is a union of variable size, pass TARGET to the inner
6187 computation, since it will need a temporary and TARGET is known
6188 to be usable for that. This occurs in unchecked conversion in Ada. */
6190 op0 = expand_expr (tem,
6191 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6192 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6193 != INTEGER_CST)
6194 ? target : NULL_RTX),
6195 VOIDmode,
6196 modifier == EXPAND_INITIALIZER
6197 ? modifier : EXPAND_NORMAL);
6199 /* If this is a constant, put it into a register if it is a
6200 legitimate constant and memory if it isn't. */
6201 if (CONSTANT_P (op0))
6203 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6204 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
6205 op0 = force_reg (mode, op0);
6206 else
6207 op0 = validize_mem (force_const_mem (mode, op0));
6210 if (offset != 0)
6212 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6214 if (GET_CODE (op0) != MEM)
6215 abort ();
6217 if (GET_MODE (offset_rtx) != ptr_mode)
6219 #ifdef POINTERS_EXTEND_UNSIGNED
6220 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6221 #else
6222 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6223 #endif
6226 if (GET_CODE (op0) == MEM
6227 && GET_MODE (op0) == BLKmode
6228 && bitsize
6229 && (bitpos % bitsize) == 0
6230 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6231 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6233 rtx temp = change_address (op0, mode1,
6234 plus_constant (XEXP (op0, 0),
6235 (bitpos /
6236 BITS_PER_UNIT)));
6237 if (GET_CODE (XEXP (temp, 0)) == REG)
6238 op0 = temp;
6239 else
6240 op0 = change_address (op0, mode1,
6241 force_reg (GET_MODE (XEXP (temp, 0)),
6242 XEXP (temp, 0)));
6243 bitpos = 0;
6247 op0 = change_address (op0, VOIDmode,
6248 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6249 force_reg (ptr_mode, offset_rtx)));
6252 /* Don't forget about volatility even if this is a bitfield. */
6253 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6255 op0 = copy_rtx (op0);
6256 MEM_VOLATILE_P (op0) = 1;
6259 /* Check the access. */
6260 if (flag_check_memory_usage && GET_CODE (op0) == MEM)
6262 enum memory_use_mode memory_usage;
6263 memory_usage = get_memory_usage_from_modifier (modifier);
6265 if (memory_usage != MEMORY_USE_DONT)
6267 rtx to;
6268 int size;
6270 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6271 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6273 /* Check the access right of the pointer. */
6274 if (size > BITS_PER_UNIT)
6275 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6276 to, ptr_mode,
6277 GEN_INT (size / BITS_PER_UNIT),
6278 TYPE_MODE (sizetype),
6279 GEN_INT (memory_usage),
6280 TYPE_MODE (integer_type_node));
6284 /* In cases where an aligned union has an unaligned object
6285 as a field, we might be extracting a BLKmode value from
6286 an integer-mode (e.g., SImode) object. Handle this case
6287 by doing the extract into an object as wide as the field
6288 (which we know to be the width of a basic mode), then
6289 storing into memory, and changing the mode to BLKmode.
6290 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6291 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6292 if (mode1 == VOIDmode
6293 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6294 || (modifier != EXPAND_CONST_ADDRESS
6295 && modifier != EXPAND_INITIALIZER
6296 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6297 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6298 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6299 /* If the field isn't aligned enough to fetch as a memref,
6300 fetch it as a bit field. */
6301 || (SLOW_UNALIGNED_ACCESS
6302 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
6303 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
6305 enum machine_mode ext_mode = mode;
6307 if (ext_mode == BLKmode)
6308 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6310 if (ext_mode == BLKmode)
6312 /* In this case, BITPOS must start at a byte boundary and
6313 TARGET, if specified, must be a MEM. */
6314 if (GET_CODE (op0) != MEM
6315 || (target != 0 && GET_CODE (target) != MEM)
6316 || bitpos % BITS_PER_UNIT != 0)
6317 abort ();
6319 op0 = change_address (op0, VOIDmode,
6320 plus_constant (XEXP (op0, 0),
6321 bitpos / BITS_PER_UNIT));
6322 if (target == 0)
6323 target = assign_temp (type, 0, 1, 1);
6325 emit_block_move (target, op0,
6326 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6327 / BITS_PER_UNIT),
6328 1);
6330 return target;
6333 op0 = validize_mem (op0);
6335 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6336 mark_reg_pointer (XEXP (op0, 0), alignment);
6338 op0 = extract_bit_field (op0, bitsize, bitpos,
6339 unsignedp, target, ext_mode, ext_mode,
6340 alignment,
6341 int_size_in_bytes (TREE_TYPE (tem)));
6343 /* If the result is a record type and BITSIZE is narrower than
6344 the mode of OP0, an integral mode, and this is a big endian
6345 machine, we must put the field into the high-order bits. */
6346 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6347 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6348 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6349 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6350 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6351 - bitsize),
6352 op0, 1);
6354 if (mode == BLKmode)
6356 rtx new = assign_stack_temp (ext_mode,
6357 bitsize / BITS_PER_UNIT, 0);
6359 emit_move_insn (new, op0);
6360 op0 = copy_rtx (new);
6361 PUT_MODE (op0, BLKmode);
6362 MEM_IN_STRUCT_P (op0) = 1;
6365 return op0;
6368 /* If the result is BLKmode, use that to access the object
6369 now as well. */
6370 if (mode == BLKmode)
6371 mode1 = BLKmode;
6373 /* Get a reference to just this component. */
6374 if (modifier == EXPAND_CONST_ADDRESS
6375 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6376 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6377 (bitpos / BITS_PER_UNIT)));
6378 else
6379 op0 = change_address (op0, mode1,
6380 plus_constant (XEXP (op0, 0),
6381 (bitpos / BITS_PER_UNIT)));
6383 if (GET_CODE (op0) == MEM)
6384 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6386 if (GET_CODE (XEXP (op0, 0)) == REG)
6387 mark_reg_pointer (XEXP (op0, 0), alignment);
6389 MEM_IN_STRUCT_P (op0) = 1;
6390 MEM_VOLATILE_P (op0) |= volatilep;
6391 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6392 || modifier == EXPAND_CONST_ADDRESS
6393 || modifier == EXPAND_INITIALIZER)
6394 return op0;
6395 else if (target == 0)
6396 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6398 convert_move (target, op0, unsignedp);
6399 return target;
6402 /* Intended for a reference to a buffer of a file-object in Pascal.
6403 But it's not certain that a special tree code will really be
6404 necessary for these. INDIRECT_REF might work for them. */
6405 case BUFFER_REF:
6406 abort ();
6408 case IN_EXPR:
6410 /* Pascal set IN expression.
6412 Algorithm:
6413 rlo = set_low - (set_low%bits_per_word);
6414 the_word = set [ (index - rlo)/bits_per_word ];
6415 bit_index = index % bits_per_word;
6416 bitmask = 1 << bit_index;
6417 return !!(the_word & bitmask); */
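/* Editor's worked example (note that the code below actually works
   in BITS_PER_UNIT-sized chunks, i.e. bytes, although the pseudo
   code above says bits_per_word): with 8-bit units, set_low = 0 and
   index = 10 give

       the_word  = set[(10 - 0) / 8] = set[1]
       bit_index = 10 % 8 = 2
       bitmask   = 1 << 2 = 4

   so the result is bit 2 of the second byte of the set.  */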
6419 tree set = TREE_OPERAND (exp, 0);
6420 tree index = TREE_OPERAND (exp, 1);
6421 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6422 tree set_type = TREE_TYPE (set);
6423 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6424 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6425 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6426 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6427 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6428 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6429 rtx setaddr = XEXP (setval, 0);
6430 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6431 rtx rlow;
6432 rtx diff, quo, rem, addr, bit, result;
6434 preexpand_calls (exp);
6436 /* If domain is empty, answer is no. Likewise if index is constant
6437 and out of bounds. */
6438 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6439 && TREE_CODE (set_low_bound) == INTEGER_CST
6440 && tree_int_cst_lt (set_high_bound, set_low_bound))
6441 || (TREE_CODE (index) == INTEGER_CST
6442 && TREE_CODE (set_low_bound) == INTEGER_CST
6443 && tree_int_cst_lt (index, set_low_bound))
6444 || (TREE_CODE (set_high_bound) == INTEGER_CST
6445 && TREE_CODE (index) == INTEGER_CST
6446 && tree_int_cst_lt (set_high_bound, index))))
6447 return const0_rtx;
6449 if (target == 0)
6450 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6452 /* If we get here, we have to generate the code for both cases
6453 (in range and out of range). */
6455 op0 = gen_label_rtx ();
6456 op1 = gen_label_rtx ();
6458 if (! (GET_CODE (index_val) == CONST_INT
6459 && GET_CODE (lo_r) == CONST_INT))
6461 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
6462 GET_MODE (index_val), iunsignedp, 0);
6463 emit_jump_insn (gen_blt (op1));
6466 if (! (GET_CODE (index_val) == CONST_INT
6467 && GET_CODE (hi_r) == CONST_INT))
6469 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
6470 GET_MODE (index_val), iunsignedp, 0);
6471 emit_jump_insn (gen_bgt (op1));
6474 /* Calculate the element number of bit zero in the first word
6475 of the set. */
6476 if (GET_CODE (lo_r) == CONST_INT)
6477 rlow = GEN_INT (INTVAL (lo_r)
6478 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6479 else
6480 rlow = expand_binop (index_mode, and_optab, lo_r,
6481 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6482 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6484 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6485 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6487 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6488 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6489 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6490 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6492 addr = memory_address (byte_mode,
6493 expand_binop (index_mode, add_optab, diff,
6494 setaddr, NULL_RTX, iunsignedp,
6495 OPTAB_LIB_WIDEN));
6497 /* Extract the bit we want to examine. */
6498 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6499 gen_rtx_MEM (byte_mode, addr),
6500 make_tree (TREE_TYPE (index), rem),
6501 NULL_RTX, 1);
6502 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6503 GET_MODE (target) == byte_mode ? target : 0,
6504 1, OPTAB_LIB_WIDEN);
6506 if (result != target)
6507 convert_move (target, result, 1);
6509 /* Output the code to handle the out-of-range case. */
6510 emit_jump (op0);
6511 emit_label (op1);
6512 emit_move_insn (target, const0_rtx);
6513 emit_label (op0);
6514 return target;
6517 case WITH_CLEANUP_EXPR:
6518 if (RTL_EXPR_RTL (exp) == 0)
6520 RTL_EXPR_RTL (exp)
6521 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6522 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6524 /* That's it for this cleanup. */
6525 TREE_OPERAND (exp, 2) = 0;
6527 return RTL_EXPR_RTL (exp);
6529 case CLEANUP_POINT_EXPR:
6531 extern int temp_slot_level;
6532 /* Start a new binding layer that will keep track of all cleanup
6533 actions to be performed. */
6534 expand_start_bindings (0);
6536 target_temp_slot_level = temp_slot_level;
6538 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6539 /* If we're going to use this value, load it up now. */
6540 if (! ignore)
6541 op0 = force_not_mem (op0);
6542 preserve_temp_slots (op0);
6543 expand_end_bindings (NULL_TREE, 0, 0);
6545 return op0;
6547 case CALL_EXPR:
6548 /* Check for a built-in function. */
6549 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6550 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6551 == FUNCTION_DECL)
6552 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6553 return expand_builtin (exp, target, subtarget, tmode, ignore);
6555 /* If this call was expanded already by preexpand_calls,
6556 just return the result we got. */
6557 if (CALL_EXPR_RTL (exp) != 0)
6558 return CALL_EXPR_RTL (exp);
6560 return expand_call (exp, target, ignore);
6562 case NON_LVALUE_EXPR:
6563 case NOP_EXPR:
6564 case CONVERT_EXPR:
6565 case REFERENCE_EXPR:
6566 if (TREE_CODE (type) == UNION_TYPE)
6568 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6569 if (target == 0)
6571 if (mode != BLKmode)
6572 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6573 else
6574 target = assign_temp (type, 0, 1, 1);
6577 if (GET_CODE (target) == MEM)
6578 /* Store data into beginning of memory target. */
6579 store_expr (TREE_OPERAND (exp, 0),
6580 change_address (target, TYPE_MODE (valtype), 0), 0);
6582 else if (GET_CODE (target) == REG)
6583 /* Store this field into a union of the proper type. */
6584 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6585 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6586 VOIDmode, 0, 1,
6587 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
6588 else
6589 abort ();
6591 /* Return the entire union. */
6592 return target;
6595 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6597 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
6598 ro_modifier);
6600 /* If the signedness of the conversion differs and OP0 is
6601 a promoted SUBREG, clear that indication since we now
6602 have to do the proper extension. */
6603 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6604 && GET_CODE (op0) == SUBREG)
6605 SUBREG_PROMOTED_VAR_P (op0) = 0;
6607 return op0;
6610 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6611 if (GET_MODE (op0) == mode)
6612 return op0;
6614 /* If OP0 is a constant, just convert it into the proper mode. */
6615 if (CONSTANT_P (op0))
6616 return
6617 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6618 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6620 if (modifier == EXPAND_INITIALIZER)
6621 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
6623 if (target == 0)
6624 return
6625 convert_to_mode (mode, op0,
6626 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6627 else
6628 convert_move (target, op0,
6629 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6630 return target;
6632 case PLUS_EXPR:
6633 /* We come here from MINUS_EXPR when the second operand is a
6634 constant. */
6635 plus_expr:
6636 this_optab = add_optab;
6638 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6639 something else, make sure we add the register to the constant and
6640 then to the other thing. This case can occur during strength
6641 reduction and doing it this way will produce better code if the
6642 frame pointer or argument pointer is eliminated.
6644 fold-const.c will ensure that the constant is always in the inner
6645 PLUS_EXPR, so the only case we need to do anything about is if
6646 sp, ap, or fp is our second argument, in which case we must swap
6647 the innermost first argument and our second argument. */
6649 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6650 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6651 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6652 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6653 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6654 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6656 tree t = TREE_OPERAND (exp, 1);
6658 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6659 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6662 /* If the result is to be ptr_mode and we are adding an integer to
6663 something, we might be forming a constant. So try to use
6664 plus_constant. If it produces a sum and we can't accept it,
6665 use force_operand. This allows P = &ARR[const] to generate
6666 efficient code on machines where a SYMBOL_REF is not a valid
6667 address.
6669 If this is an EXPAND_SUM call, always return the sum. */
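/* Editor's sketch, assuming 4-byte ints and a static array ARR: for
   `p = &ARR[3]' the operands reach here as a SYMBOL_REF plus the
   byte offset 12, and plus_constant folds them into one symbolic
   constant, roughly

       (const:SI (plus:SI (symbol_ref:SI "ARR") (const_int 12)))

   which force_operand then legitimizes only if the target cannot use
   such a sum as an address directly.  */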
6670 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
6671 || mode == ptr_mode)
6673 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6674 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6675 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6677 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6678 EXPAND_SUM);
6679 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
6680 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6681 op1 = force_operand (op1, target);
6682 return op1;
6685 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6686 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
6687 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6689 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6690 EXPAND_SUM);
6691 if (! CONSTANT_P (op0))
6693 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6694 VOIDmode, modifier);
6695 /* Don't go to both_summands if modifier
6696 says it's not right to return a PLUS. */
6697 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6698 goto binop2;
6699 goto both_summands;
6701 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
6702 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6703 op0 = force_operand (op0, target);
6704 return op0;
6708 /* No sense saving up arithmetic to be done
6709 if it's all in the wrong mode to form part of an address.
6710 And force_operand won't know whether to sign-extend or
6711 zero-extend. */
6712 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6713 || mode != ptr_mode)
6714 goto binop;
6716 preexpand_calls (exp);
6717 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6718 subtarget = 0;
6720 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
6721 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
6723 both_summands:
6724 /* Make sure any term that's a sum with a constant comes last. */
6725 if (GET_CODE (op0) == PLUS
6726 && CONSTANT_P (XEXP (op0, 1)))
6728 temp = op0;
6729 op0 = op1;
6730 op1 = temp;
6732 /* If adding to a sum including a constant,
6733 associate it to put the constant outside. */
6734 if (GET_CODE (op1) == PLUS
6735 && CONSTANT_P (XEXP (op1, 1)))
6737 rtx constant_term = const0_rtx;
6739 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6740 if (temp != 0)
6741 op0 = temp;
6742 /* Ensure that MULT comes first if there is one. */
6743 else if (GET_CODE (op0) == MULT)
6744 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
6745 else
6746 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
6748 /* Let's also eliminate constants from op0 if possible. */
6749 op0 = eliminate_constant_term (op0, &constant_term);
6751 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6752 their sum should be a constant. Form it into OP1, since the
6753 result we want will then be OP0 + OP1. */
6755 temp = simplify_binary_operation (PLUS, mode, constant_term,
6756 XEXP (op1, 1));
6757 if (temp != 0)
6758 op1 = temp;
6759 else
6760 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
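/* Editor's worked example of the reassociation above: expanding
   (A + 4) + (B + 8) arrives here with one (plus ... const) swapped
   into op1; the non-constant parts are recombined into (plus A B)
   and the constants summed, leaving roughly
   (plus (plus A B) (const_int 12)), the form that address
   legitimization later wants.  */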
6763 /* Put a constant term last and put a multiplication first. */
6764 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6765 temp = op1, op1 = op0, op0 = temp;
6767 temp = simplify_binary_operation (PLUS, mode, op0, op1);
6768 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
6770 case MINUS_EXPR:
6771 /* For initializers, we are allowed to return a MINUS of two
6772 symbolic constants. Here we handle all cases when both operands
6773 are constant. */
6774 /* Handle difference of two symbolic constants,
6775 for the sake of an initializer. */
6776 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6777 && really_constant_p (TREE_OPERAND (exp, 0))
6778 && really_constant_p (TREE_OPERAND (exp, 1)))
6780 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
6781 VOIDmode, ro_modifier);
6782 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6783 VOIDmode, ro_modifier);
6785 /* If the last operand is a CONST_INT, use plus_constant of
6786 the negated constant. Else make the MINUS. */
6787 if (GET_CODE (op1) == CONST_INT)
6788 return plus_constant (op0, - INTVAL (op1));
6789 else
6790 return gen_rtx_MINUS (mode, op0, op1);
6792 /* Convert A - const to A + (-const). */
6793 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6795 tree negated = fold (build1 (NEGATE_EXPR, type,
6796 TREE_OPERAND (exp, 1)));
6798 /* Deal with the case where we can't negate the constant
6799 in TYPE. */
6800 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6802 tree newtype = signed_type (type);
6803 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6804 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6805 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6807 if (! TREE_OVERFLOW (newneg))
6808 return expand_expr (convert (type,
6809 build (PLUS_EXPR, newtype,
6810 newop0, newneg)),
6811 target, tmode, ro_modifier);
6813 else
6815 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
6816 goto plus_expr;
6819 this_optab = sub_optab;
6820 goto binop;
6822 case MULT_EXPR:
6823 preexpand_calls (exp);
6824 /* If first operand is constant, swap them.
6825 Thus the following special case checks need only
6826 check the second operand. */
6827 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6829 register tree t1 = TREE_OPERAND (exp, 0);
6830 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6831 TREE_OPERAND (exp, 1) = t1;
6834 /* Attempt to return something suitable for generating an
6835 indexed address, for machines that support that. */
6837 if (modifier == EXPAND_SUM && mode == ptr_mode
6838 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6839 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6841 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6842 EXPAND_SUM);
6844 /* Apply distributive law if OP0 is x+c. */
6845 if (GET_CODE (op0) == PLUS
6846 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
6847 return gen_rtx_PLUS (mode,
6848 gen_rtx_MULT (mode, XEXP (op0, 0),
6849 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
6850 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6851 * INTVAL (XEXP (op0, 1))));
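/* Editor's example of the distributive law above: for indexing
   `ARR[i + 1]' with 4-byte elements, op0 arrives as (plus i 1) and
   the result is (plus (mult i 4) (const_int 4)), a shape that can
   feed an indexed addressing mode directly.  */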
6853 if (GET_CODE (op0) != REG)
6854 op0 = force_operand (op0, NULL_RTX);
6855 if (GET_CODE (op0) != REG)
6856 op0 = copy_to_mode_reg (mode, op0);
6858 return gen_rtx_MULT (mode, op0,
6859 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
6862 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6863 subtarget = 0;
6865 /* Check for multiplying things that have been extended
6866 from a narrower type. If this machine supports multiplying
6867 in that narrower type with a result in the desired type,
6868 do it that way, and avoid the explicit type-conversion. */
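/* Editor's sketch, assuming a target with a widening multiply
   pattern (e.g. an HImode x HImode -> SImode `mulhisi3'): for
   `(int) s1 * (int) s2' with s1, s2 of type short, the test below
   strips the NOP_EXPR conversions and emits the single widening
   multiply instead of two extensions plus a full SImode multiply.  */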
6869 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6870 && TREE_CODE (type) == INTEGER_TYPE
6871 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6872 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6873 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6874 && int_fits_type_p (TREE_OPERAND (exp, 1),
6875 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6876 /* Don't use a widening multiply if a shift will do. */
6877 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
6878 > HOST_BITS_PER_WIDE_INT)
6879 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6880 ||
6881 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6882 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6883 ==
6884 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6885 /* If both operands are extended, they must either both
6886 be zero-extended or both be sign-extended. */
6887 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6888 ==
6889 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6891 enum machine_mode innermode
6892 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
6893 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6894 ? smul_widen_optab : umul_widen_optab);
6895 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6896 ? umul_widen_optab : smul_widen_optab);
6897 if (mode == GET_MODE_WIDER_MODE (innermode))
6899 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6901 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6902 NULL_RTX, VOIDmode, 0);
6903 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6904 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6905 VOIDmode, 0);
6906 else
6907 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6908 NULL_RTX, VOIDmode, 0);
6909 goto binop2;
6911 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6912 && innermode == word_mode)
6914 rtx htem;
6915 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6916 NULL_RTX, VOIDmode, 0);
6917 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6918 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6919 VOIDmode, 0);
6920 else
6921 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6922 NULL_RTX, VOIDmode, 0);
6923 temp = expand_binop (mode, other_optab, op0, op1, target,
6924 unsignedp, OPTAB_LIB_WIDEN);
6925 htem = expand_mult_highpart_adjust (innermode,
6926 gen_highpart (innermode, temp),
6927 op0, op1,
6928 gen_highpart (innermode, temp),
6929 unsignedp);
6930 emit_move_insn (gen_highpart (innermode, temp), htem);
6931 return temp;
6935 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6936 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6937 return expand_mult (mode, op0, op1, target, unsignedp);
6939 case TRUNC_DIV_EXPR:
6940 case FLOOR_DIV_EXPR:
6941 case CEIL_DIV_EXPR:
6942 case ROUND_DIV_EXPR:
6943 case EXACT_DIV_EXPR:
6944 preexpand_calls (exp);
6945 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6946 subtarget = 0;
6947 /* Possible optimization: compute the dividend with EXPAND_SUM
6948 then if the divisor is constant can optimize the case
6949 where some terms of the dividend have coeffs divisible by it. */
6950 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6951 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6952 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6954 case RDIV_EXPR:
6955 this_optab = flodiv_optab;
6956 goto binop;
6958 case TRUNC_MOD_EXPR:
6959 case FLOOR_MOD_EXPR:
6960 case CEIL_MOD_EXPR:
6961 case ROUND_MOD_EXPR:
6962 preexpand_calls (exp);
6963 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6964 subtarget = 0;
6965 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6966 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6967 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6969 case FIX_ROUND_EXPR:
6970 case FIX_FLOOR_EXPR:
6971 case FIX_CEIL_EXPR:
6972 abort (); /* Not used for C. */
6974 case FIX_TRUNC_EXPR:
6975 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6976 if (target == 0)
6977 target = gen_reg_rtx (mode);
6978 expand_fix (target, op0, unsignedp);
6979 return target;
6981 case FLOAT_EXPR:
6982 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6983 if (target == 0)
6984 target = gen_reg_rtx (mode);
6985 /* expand_float can't figure out what to do if FROM has VOIDmode.
6986 So give it the correct mode. With -O, cse will optimize this. */
6987 if (GET_MODE (op0) == VOIDmode)
6988 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6989 op0);
6990 expand_float (target, op0,
6991 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6992 return target;
6994 case NEGATE_EXPR:
6995 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6996 temp = expand_unop (mode, neg_optab, op0, target, 0);
6997 if (temp == 0)
6998 abort ();
6999 return temp;
7001 case ABS_EXPR:
7002 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7004 /* Handle complex values specially. */
7005 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7006 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7007 return expand_complex_abs (mode, op0, target, unsignedp);
7009 /* Unsigned abs is simply the operand. Testing here means we don't
7010 risk generating incorrect code below. */
7011 if (TREE_UNSIGNED (type))
7012 return op0;
7014 return expand_abs (mode, op0, target, unsignedp,
7015 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7017 case MAX_EXPR:
7018 case MIN_EXPR:
7019 target = original_target;
7020 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7021 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7022 || GET_MODE (target) != mode
7023 || (GET_CODE (target) == REG
7024 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7025 target = gen_reg_rtx (mode);
7026 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7027 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7029 /* First try to do it with a special MIN or MAX instruction.
7030 If that does not win, use a conditional jump to select the proper
7031 value. */
7032 this_optab = (TREE_UNSIGNED (type)
7033 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7034 : (code == MIN_EXPR ? smin_optab : smax_optab));
7036 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7037 OPTAB_WIDEN);
7038 if (temp != 0)
7039 return temp;
7041 /* At this point, a MEM target is no longer useful; we will get better
7042 code without it. */
7044 if (GET_CODE (target) == MEM)
7045 target = gen_reg_rtx (mode);
7047 if (target != op0)
7048 emit_move_insn (target, op0);
7050 op0 = gen_label_rtx ();
7052 /* If this mode is an integer mode too wide to compare properly,
7053 compare word by word. Rely on cse to optimize constant cases. */
7054 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
7056 if (code == MAX_EXPR)
7057 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7058 target, op1, NULL_RTX, op0);
7059 else
7060 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7061 op1, target, NULL_RTX, op0);
7062 emit_move_insn (target, op1);
7064 else
7066 if (code == MAX_EXPR)
7067 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
7068 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
7069 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
7070 else
7071 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
7072 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
7073 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
7074 if (temp == const0_rtx)
7075 emit_move_insn (target, op1);
7076 else if (temp != const_true_rtx)
7078 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
7079 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
7080 else
7081 abort ();
7082 emit_move_insn (target, op1);
7085 emit_label (op0);
7086 return target;
7088 case BIT_NOT_EXPR:
7089 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7090 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7091 if (temp == 0)
7092 abort ();
7093 return temp;
7095 case FFS_EXPR:
7096 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7097 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7098 if (temp == 0)
7099 abort ();
7100 return temp;
7102 /* ??? Can optimize bitwise operations with one arg constant.
7103 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7104 and (a bitwise1 b) bitwise2 b (etc)
7105 but that is probably not worthwhile. */
7107 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7108 boolean values when we want in all cases to compute both of them. In
7109 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7110 as actual zero-or-1 values and then bitwise anding. In cases where
7111 there cannot be any side effects, better code would be made by
7112 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7113 how to recognize those cases. */
7115 case TRUTH_AND_EXPR:
7116 case BIT_AND_EXPR:
7117 this_optab = and_optab;
7118 goto binop;
7120 case TRUTH_OR_EXPR:
7121 case BIT_IOR_EXPR:
7122 this_optab = ior_optab;
7123 goto binop;
7125 case TRUTH_XOR_EXPR:
7126 case BIT_XOR_EXPR:
7127 this_optab = xor_optab;
7128 goto binop;
7130 case LSHIFT_EXPR:
7131 case RSHIFT_EXPR:
7132 case LROTATE_EXPR:
7133 case RROTATE_EXPR:
7134 preexpand_calls (exp);
7135 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7136 subtarget = 0;
7137 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7138 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7139 unsignedp);
7141 /* Could determine the answer when only additive constants differ. Also,
7142 the addition of one can be handled by changing the condition. */
7143 case LT_EXPR:
7144 case LE_EXPR:
7145 case GT_EXPR:
7146 case GE_EXPR:
7147 case EQ_EXPR:
7148 case NE_EXPR:
7149 preexpand_calls (exp);
7150 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7151 if (temp != 0)
7152 return temp;
7154 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7155 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7156 && original_target
7157 && GET_CODE (original_target) == REG
7158 && (GET_MODE (original_target)
7159 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7161 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7162 VOIDmode, 0);
7164 if (temp != original_target)
7165 temp = copy_to_reg (temp);
7167 op1 = gen_label_rtx ();
7168 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
7169 GET_MODE (temp), unsignedp, 0);
7170 emit_jump_insn (gen_beq (op1));
7171 emit_move_insn (temp, const1_rtx);
7172 emit_label (op1);
7173 return temp;
7176 /* If no set-flag instruction, must generate a conditional
7177 store into a temporary variable. Drop through
7178 and handle this like && and ||. */
7180 case TRUTH_ANDIF_EXPR:
7181 case TRUTH_ORIF_EXPR:
7182 if (! ignore
7183 && (target == 0 || ! safe_from_p (target, exp, 1)
7184 /* Make sure we don't have a hard reg (such as function's return
7185 value) live across basic blocks, if not optimizing. */
7186 || (!optimize && GET_CODE (target) == REG
7187 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7188 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7190 if (target)
7191 emit_clr_insn (target);
7193 op1 = gen_label_rtx ();
7194 jumpifnot (exp, op1);
7196 if (target)
7197 emit_0_to_1_insn (target);
7199 emit_label (op1);
7200 return ignore ? const0_rtx : target;
7202 case TRUTH_NOT_EXPR:
7203 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7204 /* The parser is careful to generate TRUTH_NOT_EXPR
7205 only with operands that are always zero or one. */
7206 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7207 target, 1, OPTAB_LIB_WIDEN);
7208 if (temp == 0)
7209 abort ();
7210 return temp;
7212 case COMPOUND_EXPR:
7213 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7214 emit_queue ();
7215 return expand_expr (TREE_OPERAND (exp, 1),
7216 (ignore ? const0_rtx : target),
7217 VOIDmode, 0);
7219 case COND_EXPR:
7220 /* If we would have a "singleton" (see below) were it not for a
7221 conversion in each arm, bring that conversion back out. */
7222 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7223 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7224 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7225 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7227 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7228 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7230 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7231 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7232 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7233 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7234 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7235 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7236 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7237 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7238 return expand_expr (build1 (NOP_EXPR, type,
7239 build (COND_EXPR, TREE_TYPE (true),
7240 TREE_OPERAND (exp, 0),
7241 true, false)),
7242 target, tmode, modifier);
7246 /* Note that COND_EXPRs whose type is a structure or union
7247 are required to be constructed to contain assignments of
7248 a temporary variable, so that we can evaluate them here
7249 for side effect only. If type is void, we must do likewise. */
7251 /* If an arm of the branch requires a cleanup,
7252 only that cleanup is performed. */
7254 tree singleton = 0;
7255 tree binary_op = 0, unary_op = 0;
7257 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7258 convert it to our mode, if necessary. */
7259 if (integer_onep (TREE_OPERAND (exp, 1))
7260 && integer_zerop (TREE_OPERAND (exp, 2))
7261 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7263 if (ignore)
7265 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7266 ro_modifier);
7267 return const0_rtx;
7270 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7271 if (GET_MODE (op0) == mode)
7272 return op0;
7274 if (target == 0)
7275 target = gen_reg_rtx (mode);
7276 convert_move (target, op0, unsignedp);
7277 return target;
7280 /* Check for X ? A + B : A. If we have this, we can copy A to the
7281 output and conditionally add B. Similarly for unary operations.
7282 Don't do this if X has side-effects because those side effects
7283 might affect A or B and the "?" operation is a sequence point in
7284 ANSI. (operand_equal_p tests for side effects.) */
7286 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7287 && operand_equal_p (TREE_OPERAND (exp, 2),
7288 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7289 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7290 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7291 && operand_equal_p (TREE_OPERAND (exp, 1),
7292 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7293 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7294 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7295 && operand_equal_p (TREE_OPERAND (exp, 2),
7296 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7297 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7298 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7299 && operand_equal_p (TREE_OPERAND (exp, 1),
7300 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7301 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7303 /* If we are not to produce a result, we have no target. Otherwise,
7304 if a target was specified use it; it will not be used as an
7305 intermediate target unless it is safe. If no target, use a
7306 temporary. */
7308 if (ignore)
7309 temp = 0;
7310 else if (original_target
7311 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7312 || (singleton && GET_CODE (original_target) == REG
7313 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7314 && original_target == var_rtx (singleton)))
7315 && GET_MODE (original_target) == mode
7316 #ifdef HAVE_conditional_move
7317 && (! can_conditionally_move_p (mode)
7318 || GET_CODE (original_target) == REG
7319 || TREE_ADDRESSABLE (type))
7320 #endif
7321 && ! (GET_CODE (original_target) == MEM
7322 && MEM_VOLATILE_P (original_target)))
7323 temp = original_target;
7324 else if (TREE_ADDRESSABLE (type))
7325 abort ();
7326 else
7327 temp = assign_temp (type, 0, 0, 1);
7329 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7330 do the test of X as a store-flag operation, do this as
7331 A + ((X != 0) << log C). Similarly for other simple binary
7332 operators.  Only do this for C == 1 if BRANCH_COST is low. */
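/* Worked example (when BRANCH_COST permits):

      r = x ? a + 4 : a;

   becomes

      r = a + ((x != 0) << 2);

   a store-flag, a shift and an add, with no branch at all.  */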
7333 if (temp && singleton && binary_op
7334 && (TREE_CODE (binary_op) == PLUS_EXPR
7335 || TREE_CODE (binary_op) == MINUS_EXPR
7336 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7337 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7338 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7339 : integer_onep (TREE_OPERAND (binary_op, 1)))
7340 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7342 rtx result;
7343 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7344 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7345 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7346 : xor_optab);
7348 /* If we had X ? A : A + 1, do this as A + (X == 0).
7350 We have to invert the truth value here and then put it
7351 back later if do_store_flag fails. We cannot simply copy
7352 TREE_OPERAND (exp, 0) to another variable and modify that
7353 because invert_truthvalue can modify the tree pointed to
7354 by its argument. */
7355 if (singleton == TREE_OPERAND (exp, 1))
7356 TREE_OPERAND (exp, 0)
7357 = invert_truthvalue (TREE_OPERAND (exp, 0));
7359 result = do_store_flag (TREE_OPERAND (exp, 0),
7360 (safe_from_p (temp, singleton, 1)
7361 ? temp : NULL_RTX),
7362 mode, BRANCH_COST <= 1);
7364 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7365 result = expand_shift (LSHIFT_EXPR, mode, result,
7366 build_int_2 (tree_log2
7367 (TREE_OPERAND
7368 (binary_op, 1)),
7370 (safe_from_p (temp, singleton, 1)
7371 ? temp : NULL_RTX), 0);
7373 if (result)
7375 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7376 return expand_binop (mode, boptab, op1, result, temp,
7377 unsignedp, OPTAB_LIB_WIDEN);
7379 else if (singleton == TREE_OPERAND (exp, 1))
7380 TREE_OPERAND (exp, 0)
7381 = invert_truthvalue (TREE_OPERAND (exp, 0));
7384 do_pending_stack_adjust ();
7385 NO_DEFER_POP;
7386 op0 = gen_label_rtx ();
7388 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7390 if (temp != 0)
7392 /* If the target conflicts with the other operand of the
7393 binary op, we can't use it. Also, we can't use the target
7394 if it is a hard register, because evaluating the condition
7395 might clobber it. */
7396 if ((binary_op
7397 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7398 || (GET_CODE (temp) == REG
7399 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7400 temp = gen_reg_rtx (mode);
7401 store_expr (singleton, temp, 0);
7403 else
7404 expand_expr (singleton,
7405 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7406 if (singleton == TREE_OPERAND (exp, 1))
7407 jumpif (TREE_OPERAND (exp, 0), op0);
7408 else
7409 jumpifnot (TREE_OPERAND (exp, 0), op0);
7411 start_cleanup_deferral ();
7412 if (binary_op && temp == 0)
7413 /* Just touch the other operand. */
7414 expand_expr (TREE_OPERAND (binary_op, 1),
7415 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7416 else if (binary_op)
7417 store_expr (build (TREE_CODE (binary_op), type,
7418 make_tree (type, temp),
7419 TREE_OPERAND (binary_op, 1)),
7420 temp, 0);
7421 else
7422 store_expr (build1 (TREE_CODE (unary_op), type,
7423 make_tree (type, temp)),
7424 temp, 0);
7425 op1 = op0;
7427 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7428 comparison operator. If we have one of these cases, set the
7429 output to A, branch on A (cse will merge these two references),
7430 then set the output to FOO. */
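/* For example, for

      r = (a != 0 ? a : foo);

   we emit "temp = A; if (A != 0) goto done; temp = FOO; done:",
   so A is referenced twice and cse can merge the two loads.  */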
7431 else if (temp
7432 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7433 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7434 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7435 TREE_OPERAND (exp, 1), 0)
7436 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7437 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7438 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7440 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7441 temp = gen_reg_rtx (mode);
7442 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7443 jumpif (TREE_OPERAND (exp, 0), op0);
7445 start_cleanup_deferral ();
7446 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7447 op1 = op0;
7449 else if (temp
7450 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7451 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7452 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7453 TREE_OPERAND (exp, 2), 0)
7454 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7455 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7456 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7458 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7459 temp = gen_reg_rtx (mode);
7460 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7461 jumpifnot (TREE_OPERAND (exp, 0), op0);
7463 start_cleanup_deferral ();
7464 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7465 op1 = op0;
7467 else
7469 op1 = gen_label_rtx ();
7470 jumpifnot (TREE_OPERAND (exp, 0), op0);
7472 start_cleanup_deferral ();
7473 if (temp != 0)
7474 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7475 else
7476 expand_expr (TREE_OPERAND (exp, 1),
7477 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7478 end_cleanup_deferral ();
7479 emit_queue ();
7480 emit_jump_insn (gen_jump (op1));
7481 emit_barrier ();
7482 emit_label (op0);
7483 start_cleanup_deferral ();
7484 if (temp != 0)
7485 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7486 else
7487 expand_expr (TREE_OPERAND (exp, 2),
7488 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7491 end_cleanup_deferral ();
7493 emit_queue ();
7494 emit_label (op1);
7495 OK_DEFER_POP;
7497 return temp;
7500 case TARGET_EXPR:
7502 /* Something needs to be initialized, but we didn't know
7503 where that thing was when building the tree. For example,
7504 it could be the return value of a function, or a parameter
7505 to a function which is laid out on the stack, or a temporary
7506 variable which must be passed by reference.
7508 We guarantee that the expression will either be constructed
7509 or copied into our original target. */
7511 tree slot = TREE_OPERAND (exp, 0);
7512 tree cleanups = NULL_TREE;
7513 tree exp1;
7515 if (TREE_CODE (slot) != VAR_DECL)
7516 abort ();
7518 if (! ignore)
7519 target = original_target;
7521 if (target == 0)
7523 if (DECL_RTL (slot) != 0)
7525 target = DECL_RTL (slot);
7526 /* If we have already expanded the slot, don't do
7527 it again. (mrs) */
7528 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7529 return target;
7531 else
7533 target = assign_temp (type, 2, 0, 1);
7534 /* All temp slots at this level must not conflict. */
7535 preserve_temp_slots (target);
7536 DECL_RTL (slot) = target;
7537 if (TREE_ADDRESSABLE (slot))
7539 TREE_ADDRESSABLE (slot) = 0;
7540 mark_addressable (slot);
7543 /* Since SLOT is not known to the called function
7544 to belong to its stack frame, we must build an explicit
7545 cleanup. This case occurs when we must build up a reference
7546 to pass the reference as an argument. In this case,
7547 it is very likely that such a reference need not be
7548 built here. */
7550 if (TREE_OPERAND (exp, 2) == 0)
7551 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
7552 cleanups = TREE_OPERAND (exp, 2);
7555 else
7557 /* This case does occur when expanding a parameter which
7558 needs to be constructed on the stack. The target
7559 is the actual stack address that we want to initialize.
7560 The function we call will perform the cleanup in this case. */
7562 /* If we have already assigned it space, use that space,
7563 not the target that we were passed in, as our target
7564 parameter is only a hint. */
7565 if (DECL_RTL (slot) != 0)
7567 target = DECL_RTL (slot);
7568 /* If we have already expanded the slot, don't do
7569 it again. (mrs) */
7570 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7571 return target;
7573 else
7575 DECL_RTL (slot) = target;
7576 /* If we must have an addressable slot, then make sure that
7577 the RTL that we just stored in slot is OK. */
7578 if (TREE_ADDRESSABLE (slot))
7580 TREE_ADDRESSABLE (slot) = 0;
7581 mark_addressable (slot);
7586 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
7587 /* Mark it as expanded. */
7588 TREE_OPERAND (exp, 1) = NULL_TREE;
7590 TREE_USED (slot) = 1;
7591 store_expr (exp1, target, 0);
7593 expand_decl_cleanup (NULL_TREE, cleanups);
7595 return target;
7598 case INIT_EXPR:
7600 tree lhs = TREE_OPERAND (exp, 0);
7601 tree rhs = TREE_OPERAND (exp, 1);
7602 tree noncopied_parts = 0;
7603 tree lhs_type = TREE_TYPE (lhs);
7605 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7606 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7607 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7608 TYPE_NONCOPIED_PARTS (lhs_type));
7609 while (noncopied_parts != 0)
7611 expand_assignment (TREE_VALUE (noncopied_parts),
7612 TREE_PURPOSE (noncopied_parts), 0, 0);
7613 noncopied_parts = TREE_CHAIN (noncopied_parts);
7615 return temp;
7618 case MODIFY_EXPR:
7620 /* If lhs is complex, expand calls in rhs before computing it.
7621 That's so we don't compute a pointer and save it over a call.
7622 If lhs is simple, compute it first so we can give it as a
7623 target if the rhs is just a call. This avoids an extra temp and copy
7624 and that prevents a partial-subsumption which makes bad code.
7625 Actually we could treat component_ref's of vars like vars. */
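/* For example, in "v = f (x);" with V a plain variable, V is simple
   enough that expand_assignment can hand its rtx straight to the
   call as the return target; whereas in "p->field = f (x);" the
   call is pre-expanded first, so the address of P->FIELD need not
   be kept live across it.  */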
7627 tree lhs = TREE_OPERAND (exp, 0);
7628 tree rhs = TREE_OPERAND (exp, 1);
7629 tree noncopied_parts = 0;
7630 tree lhs_type = TREE_TYPE (lhs);
7632 temp = 0;
7634 if (TREE_CODE (lhs) != VAR_DECL
7635 && TREE_CODE (lhs) != RESULT_DECL
7636 && TREE_CODE (lhs) != PARM_DECL
7637 && ! (TREE_CODE (lhs) == INDIRECT_REF
7638 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7639 preexpand_calls (exp);
7641 /* Check for |= or &= of a bitfield of size 1 into another bitfield
7642 of size 1. In this case, (unless we need the result of the
7643 assignment) we can do this more efficiently with a
7644 test followed by an assignment, if necessary.
7646 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7647 things change so we do, this code should be enhanced to
7648 support it. */
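/* For example, given one-bit fields

      struct { unsigned a : 1, b : 1; } s;
      s.a |= s.b;

   we emit the equivalent of "if (s.b) s.a = 1;" -- a test and a
   conditional store -- instead of reading, or'ing and rewriting
   the bitfield.  */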
7649 if (ignore
7650 && TREE_CODE (lhs) == COMPONENT_REF
7651 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7652 || TREE_CODE (rhs) == BIT_AND_EXPR)
7653 && TREE_OPERAND (rhs, 0) == lhs
7654 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7655 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7656 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7658 rtx label = gen_label_rtx ();
7660 do_jump (TREE_OPERAND (rhs, 1),
7661 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7662 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7663 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7664 (TREE_CODE (rhs) == BIT_IOR_EXPR
7665 ? integer_one_node
7666 : integer_zero_node)),
7667 0, 0);
7668 do_pending_stack_adjust ();
7669 emit_label (label);
7670 return const0_rtx;
7673 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7674 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7675 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7676 TYPE_NONCOPIED_PARTS (lhs_type));
7678 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7679 while (noncopied_parts != 0)
7681 expand_assignment (TREE_PURPOSE (noncopied_parts),
7682 TREE_VALUE (noncopied_parts), 0, 0);
7683 noncopied_parts = TREE_CHAIN (noncopied_parts);
7685 return temp;
7688 case RETURN_EXPR:
7689 if (!TREE_OPERAND (exp, 0))
7690 expand_null_return ();
7691 else
7692 expand_return (TREE_OPERAND (exp, 0));
7693 return const0_rtx;
7695 case PREINCREMENT_EXPR:
7696 case PREDECREMENT_EXPR:
7697 return expand_increment (exp, 0, ignore);
7699 case POSTINCREMENT_EXPR:
7700 case POSTDECREMENT_EXPR:
7701 /* Faster to treat as pre-increment if result is not used. */
7702 return expand_increment (exp, ! ignore, ignore);
7704 case ADDR_EXPR:
7705 /* If nonzero, TEMP will be set to the address of something that might
7706 be a MEM corresponding to a stack slot. */
7707 temp = 0;
7709 /* Are we taking the address of a nested function? */
7710 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7711 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7712 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
7713 && ! TREE_STATIC (exp))
7715 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7716 op0 = force_operand (op0, target);
7718 /* If we are taking the address of something erroneous, just
7719 return a zero. */
7720 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7721 return const0_rtx;
7722 else
7724 /* We make sure to pass const0_rtx down if we came in with
7725 ignore set, to avoid doing the cleanups twice for something. */
7726 op0 = expand_expr (TREE_OPERAND (exp, 0),
7727 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7728 (modifier == EXPAND_INITIALIZER
7729 ? modifier : EXPAND_CONST_ADDRESS));
7731 /* If we are going to ignore the result, OP0 will have been set
7732 to const0_rtx, so just return it. Don't get confused and
7733 think we are taking the address of the constant. */
7734 if (ignore)
7735 return op0;
7737 op0 = protect_from_queue (op0, 0);
7739 /* We would like the object in memory. If it is a constant,
7740 we can have it be statically allocated into memory. For
7741 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7742 memory and store the value into it. */
7744 if (CONSTANT_P (op0))
7745 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7746 op0);
7747 else if (GET_CODE (op0) == MEM)
7749 mark_temp_addr_taken (op0);
7750 temp = XEXP (op0, 0);
7753 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7754 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7756 /* If this object is in a register, it must not
7757 be BLKmode. */
7758 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7759 rtx memloc = assign_temp (inner_type, 1, 1, 1);
7761 mark_temp_addr_taken (memloc);
7762 emit_move_insn (memloc, op0);
7763 op0 = memloc;
7766 if (GET_CODE (op0) != MEM)
7767 abort ();
7769 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7771 temp = XEXP (op0, 0);
7772 #ifdef POINTERS_EXTEND_UNSIGNED
7773 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7774 && mode == ptr_mode)
7775 temp = convert_memory_address (ptr_mode, temp);
7776 #endif
7777 return temp;
7780 op0 = force_operand (XEXP (op0, 0), target);
7783 if (flag_force_addr && GET_CODE (op0) != REG)
7784 op0 = force_reg (Pmode, op0);
7786 if (GET_CODE (op0) == REG
7787 && ! REG_USERVAR_P (op0))
7788 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
7790 /* If we might have had a temp slot, add an equivalent address
7791 for it. */
7792 if (temp != 0)
7793 update_temp_slot_address (temp, op0);
7795 #ifdef POINTERS_EXTEND_UNSIGNED
7796 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7797 && mode == ptr_mode)
7798 op0 = convert_memory_address (ptr_mode, op0);
7799 #endif
7801 return op0;
7803 case ENTRY_VALUE_EXPR:
7804 abort ();
7806 /* COMPLEX type for Extended Pascal & Fortran */
7807 case COMPLEX_EXPR:
7809 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7810 rtx insns;
7812 /* Get the rtx code of the operands. */
7813 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7814 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7816 if (! target)
7817 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7819 start_sequence ();
7821 /* Move the real (op0) and imaginary (op1) parts to their location. */
7822 emit_move_insn (gen_realpart (mode, target), op0);
7823 emit_move_insn (gen_imagpart (mode, target), op1);
7825 insns = get_insns ();
7826 end_sequence ();
7828 /* Complex construction should appear as a single unit. */
7829 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7830 each with a separate pseudo as destination.
7831 It's not correct for flow to treat them as a unit. */
7832 if (GET_CODE (target) != CONCAT)
7833 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7834 else
7835 emit_insns (insns);
7837 return target;
7840 case REALPART_EXPR:
7841 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7842 return gen_realpart (mode, op0);
7844 case IMAGPART_EXPR:
7845 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7846 return gen_imagpart (mode, op0);
7848 case CONJ_EXPR:
7850 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7851 rtx imag_t;
7852 rtx insns;
7854 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7856 if (! target)
7857 target = gen_reg_rtx (mode);
7859 start_sequence ();
7861 /* Store the realpart and the negated imagpart to target. */
7862 emit_move_insn (gen_realpart (partmode, target),
7863 gen_realpart (partmode, op0));
7865 imag_t = gen_imagpart (partmode, target);
7866 temp = expand_unop (partmode, neg_optab,
7867 gen_imagpart (partmode, op0), imag_t, 0);
7868 if (temp != imag_t)
7869 emit_move_insn (imag_t, temp);
7871 insns = get_insns ();
7872 end_sequence ();
7874 /* Conjugate should appear as a single unit.
7875 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
7876 each with a separate pseudo as destination.
7877 It's not correct for flow to treat them as a unit. */
7878 if (GET_CODE (target) != CONCAT)
7879 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7880 else
7881 emit_insns (insns);
7883 return target;
7886 case TRY_CATCH_EXPR:
7888 tree handler = TREE_OPERAND (exp, 1);
7890 expand_eh_region_start ();
7892 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7894 expand_eh_region_end (handler);
7896 return op0;
7899 case POPDCC_EXPR:
7901 rtx dcc = get_dynamic_cleanup_chain ();
7902 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
7903 return const0_rtx;
7906 case POPDHC_EXPR:
7908 rtx dhc = get_dynamic_handler_chain ();
7909 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
7910 return const0_rtx;
7913 case ERROR_MARK:
7914 op0 = CONST0_RTX (tmode);
7915 if (op0 != 0)
7916 return op0;
7917 return const0_rtx;
7919 default:
7920 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7923 /* Here to do an ordinary binary operator, generating an instruction
7924 from the optab already placed in `this_optab'. */
7925 binop:
7926 preexpand_calls (exp);
7927 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7928 subtarget = 0;
7929 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7930 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7931 binop2:
7932 temp = expand_binop (mode, this_optab, op0, op1, target,
7933 unsignedp, OPTAB_LIB_WIDEN);
7934 if (temp == 0)
7935 abort ();
7936 return temp;
7941 /* Return the alignment in bits of EXP, a pointer valued expression.
7942 But don't return more than MAX_ALIGN no matter what.
7943 The alignment returned is, by default, the alignment of the thing that
7944 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7946 Otherwise, look at the expression to see if we can do better, i.e., if the
7947 expression is actually pointing at an object whose alignment is tighter. */
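/* For example, for

      int i;
      ... (char *) &i ...

   the cast's type only promises CHAR alignment, but the NOP_EXPR
   and ADDR_EXPR cases below look through the cast to DECL_ALIGN (i)
   and can report the alignment of I itself, typically 32 bits.  */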
7949 static int
7950 get_pointer_alignment (exp, max_align)
7951 tree exp;
7952 unsigned max_align;
7954 unsigned align, inner;
7956 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7957 return 0;
7959 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7960 align = MIN (align, max_align);
7962 while (1)
7964 switch (TREE_CODE (exp))
7966 case NOP_EXPR:
7967 case CONVERT_EXPR:
7968 case NON_LVALUE_EXPR:
7969 exp = TREE_OPERAND (exp, 0);
7970 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7971 return align;
7972 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7973 align = MIN (inner, max_align);
7974 break;
7976 case PLUS_EXPR:
7977 /* If sum of pointer + int, restrict our maximum alignment to that
7978 imposed by the integer. If not, we can't do any better than
7979 ALIGN. */
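/* Worked example: for P + 6 with MAX_ALIGN == 64, the byte offset
   is 48 bits; 48 & 63 and 48 & 31 are nonzero while 48 & 15 is
   zero, so MAX_ALIGN drops to 16 -- past the addition the pointer
   is only known to be 2-byte aligned.  */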
7980 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7981 return align;
7983 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7984 & (max_align - 1))
7985 != 0)
7986 max_align >>= 1;
7988 exp = TREE_OPERAND (exp, 0);
7989 break;
7991 case ADDR_EXPR:
7992 /* See what we are pointing at and look at its alignment. */
7993 exp = TREE_OPERAND (exp, 0);
7994 if (TREE_CODE (exp) == FUNCTION_DECL)
7995 align = FUNCTION_BOUNDARY;
7996 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7997 align = DECL_ALIGN (exp);
7998 #ifdef CONSTANT_ALIGNMENT
7999 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
8000 align = CONSTANT_ALIGNMENT (exp, align);
8001 #endif
8002 return MIN (align, max_align);
8004 default:
8005 return align;
8010 /* Return the tree node and offset if a given argument corresponds to
8011 a string constant. */
8013 static tree
8014 string_constant (arg, ptr_offset)
8015 tree arg;
8016 tree *ptr_offset;
8018 STRIP_NOPS (arg);
8020 if (TREE_CODE (arg) == ADDR_EXPR
8021 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8023 *ptr_offset = integer_zero_node;
8024 return TREE_OPERAND (arg, 0);
8026 else if (TREE_CODE (arg) == PLUS_EXPR)
8028 tree arg0 = TREE_OPERAND (arg, 0);
8029 tree arg1 = TREE_OPERAND (arg, 1);
8031 STRIP_NOPS (arg0);
8032 STRIP_NOPS (arg1);
8034 if (TREE_CODE (arg0) == ADDR_EXPR
8035 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8037 *ptr_offset = arg1;
8038 return TREE_OPERAND (arg0, 0);
8040 else if (TREE_CODE (arg1) == ADDR_EXPR
8041 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8043 *ptr_offset = arg0;
8044 return TREE_OPERAND (arg1, 0);
8048 return 0;
8051 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
8052 way, because it could contain a zero byte in the middle.
8053 TREE_STRING_LENGTH is the size of the character array, not the string.
8055 Unfortunately, string_constant can't access the values of const char
8056 arrays with initializers, so neither can we here. */
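/* For example, for the constant "foo\0bar", TREE_STRING_LENGTH is 8
   (the size of the array, counting the trailing null), while the
   length strlen would report is 3.  */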
8058 static tree
8059 c_strlen (src)
8060 tree src;
8062 tree offset_node;
8063 int offset, max;
8064 char *ptr;
8066 src = string_constant (src, &offset_node);
8067 if (src == 0)
8068 return 0;
8069 max = TREE_STRING_LENGTH (src);
8070 ptr = TREE_STRING_POINTER (src);
8071 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
8073 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
8074 compute the offset to the following null if we don't know where to
8075 start searching for it. */
8076 int i;
8077 for (i = 0; i < max; i++)
8078 if (ptr[i] == 0)
8079 return 0;
8080 /* We don't know the starting offset, but we do know that the string
8081 has no internal zero bytes. We can assume that the offset falls
8082 within the bounds of the string; otherwise, the programmer deserves
8083 what he gets. Subtract the offset from the length of the string,
8084 and return that. */
8085 /* This would perhaps not be valid if we were dealing with named
8086 arrays in addition to literal string constants. */
8087 return size_binop (MINUS_EXPR, size_int (max), offset_node);
8090 /* We have a known offset into the string. Start searching there for
8091 a null character. */
8092 if (offset_node == 0)
8093 offset = 0;
8094 else
8096 /* Did we get a long long offset? If so, punt. */
8097 if (TREE_INT_CST_HIGH (offset_node) != 0)
8098 return 0;
8099 offset = TREE_INT_CST_LOW (offset_node);
8101 /* If the offset is known to be out of bounds, warn, and call strlen at
8102 runtime. */
8103 if (offset < 0 || offset > max)
8105 warning ("offset outside bounds of constant string");
8106 return 0;
8108 /* Use strlen to search for the first zero byte. Since any strings
8109 constructed with build_string will have nulls appended, we win even
8110 if we get handed something like (char[4])"abcd".
8112 Since OFFSET is our starting index into the string, no further
8113 calculation is needed. */
8114 return size_int (strlen (ptr + offset));
8117 rtx
8118 expand_builtin_return_addr (fndecl_code, count, tem)
8119 enum built_in_function fndecl_code;
8120 int count;
8121 rtx tem;
8123 int i;
8125 /* Some machines need special handling before we can access
8126 arbitrary frames. For example, on the sparc, we must first flush
8127 all register windows to the stack. */
8128 #ifdef SETUP_FRAME_ADDRESSES
8129 if (count > 0)
8130 SETUP_FRAME_ADDRESSES ();
8131 #endif
8133 /* On the sparc, the return address is not in the frame, it is in a
8134 register. There is no way to access it off of the current frame
8135 pointer, but it can be accessed off the previous frame pointer by
8136 reading the value from the register window save area. */
8137 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
8138 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
8139 count--;
8140 #endif
8142 /* Scan back COUNT frames to the specified frame. */
8143 for (i = 0; i < count; i++)
8145 /* Assume the dynamic chain pointer is in the word that the
8146 frame address points to, unless otherwise specified. */
8147 #ifdef DYNAMIC_CHAIN_ADDRESS
8148 tem = DYNAMIC_CHAIN_ADDRESS (tem);
8149 #endif
8150 tem = memory_address (Pmode, tem);
8151 tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
8154 /* For __builtin_frame_address, return what we've got. */
8155 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
8156 return tem;
8158 /* For __builtin_return_address, Get the return address from that
8159 frame. */
8160 #ifdef RETURN_ADDR_RTX
8161 tem = RETURN_ADDR_RTX (count, tem);
8162 #else
8163 tem = memory_address (Pmode,
8164 plus_constant (tem, GET_MODE_SIZE (Pmode)));
8165 tem = gen_rtx_MEM (Pmode, tem);
8166 #endif
8167 return tem;
8170 /* __builtin_setjmp is passed a pointer to an array of five words (not
8171 all will be used on all machines). It operates similarly to the C
8172 library function of the same name, but is more efficient. Much of
8173 the code below (and for longjmp) is copied from the handling of
8174 non-local gotos.
8176 NOTE: This is intended for use by GNAT and the exception handling
8177 scheme in the compiler and will only work when used in the
8178 way they use it. */
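/* A sketch of the intended use:

      void *buf[5];
      if (__builtin_setjmp (buf) == 0)
        ... normal path; may eventually do __builtin_longjmp (buf, 1) ...
      else
        ... control resumes here, __builtin_setjmp returning 1 ...
*/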
8180 rtx
8181 expand_builtin_setjmp (buf_addr, target, first_label, next_label)
8182 rtx buf_addr;
8183 rtx target;
8184 rtx first_label, next_label;
8186 rtx lab1 = gen_label_rtx ();
8187 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
8188 enum machine_mode value_mode;
8189 rtx stack_save;
8191 value_mode = TYPE_MODE (integer_type_node);
8193 #ifdef POINTERS_EXTEND_UNSIGNED
8194 buf_addr = convert_memory_address (Pmode, buf_addr);
8195 #endif
8197 buf_addr = force_reg (Pmode, buf_addr);
8199 if (target == 0 || GET_CODE (target) != REG
8200 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8201 target = gen_reg_rtx (value_mode);
8203 emit_queue ();
8205 /* We store the frame pointer and the address of lab1 in the buffer
8206 and use the rest of it for the stack save area, which is
8207 machine-dependent. */
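/* Concretely, in units of GET_MODE_SIZE (Pmode), the buffer holds:

      word 0     the frame pointer (BUILTIN_SETJMP_FRAME_VALUE)
      word 1     the address of the receiver label LAB1
      word 2 on  the stack save area, in SA_MODE  */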
8209 #ifndef BUILTIN_SETJMP_FRAME_VALUE
8210 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
8211 #endif
8213 emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
8214 BUILTIN_SETJMP_FRAME_VALUE);
8215 emit_move_insn (validize_mem
8216 (gen_rtx_MEM (Pmode,
8217 plus_constant (buf_addr,
8218 GET_MODE_SIZE (Pmode)))),
8219 gen_rtx_LABEL_REF (Pmode, lab1));
8221 stack_save = gen_rtx_MEM (sa_mode,
8222 plus_constant (buf_addr,
8223 2 * GET_MODE_SIZE (Pmode)));
8224 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8226 /* If there is further processing to do, do it. */
8227 #ifdef HAVE_builtin_setjmp_setup
8228 if (HAVE_builtin_setjmp_setup)
8229 emit_insn (gen_builtin_setjmp_setup (buf_addr));
8230 #endif
8232 /* Set TARGET to zero and branch to the first-time-through label. */
8233 emit_move_insn (target, const0_rtx);
8234 emit_jump_insn (gen_jump (first_label));
8235 emit_barrier ();
8236 emit_label (lab1);
8238 /* Tell flow about the strange goings on. */
8239 current_function_has_nonlocal_label = 1;
8241 /* Clobber the FP when we get here, so we have to make sure it's
8242 marked as used by this function. */
8243 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8245 /* Mark the static chain as clobbered here so life information
8246 doesn't get messed up for it. */
8247 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
8249 /* Now put in the code to restore the frame pointer, and argument
8250 pointer, if needed. The code below is from expand_end_bindings
8251 in stmt.c; see detailed documentation there. */
8252 #ifdef HAVE_nonlocal_goto
8253 if (! HAVE_nonlocal_goto)
8254 #endif
8255 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8257 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8258 if (fixed_regs[ARG_POINTER_REGNUM])
8260 #ifdef ELIMINABLE_REGS
8261 int i;
8262 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8264 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8265 if (elim_regs[i].from == ARG_POINTER_REGNUM
8266 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8267 break;
8269 if (i == sizeof elim_regs / sizeof elim_regs [0])
8270 #endif
8272 /* Now restore our arg pointer from the address at which it
8273 was saved in our stack frame.
8274 If there hasn't been space allocated for it yet, make
8275 some now. */
8276 if (arg_pointer_save_area == 0)
8277 arg_pointer_save_area
8278 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8279 emit_move_insn (virtual_incoming_args_rtx,
8280 copy_to_reg (arg_pointer_save_area));
8283 #endif
8285 #ifdef HAVE_builtin_setjmp_receiver
8286 if (HAVE_builtin_setjmp_receiver)
8287 emit_insn (gen_builtin_setjmp_receiver (lab1));
8288 else
8289 #endif
8290 #ifdef HAVE_nonlocal_goto_receiver
8291 if (HAVE_nonlocal_goto_receiver)
8292 emit_insn (gen_nonlocal_goto_receiver ());
8293 else
8294 #endif
8296 ; /* Nothing */
8299 /* Set TARGET, and branch to the next-time-through label. */
8300 emit_move_insn (target, const1_rtx);
8301 emit_jump_insn (gen_jump (next_label));
8302 emit_barrier ();
8304 return target;
8307 void
8308 expand_builtin_longjmp (buf_addr, value)
8309 rtx buf_addr, value;
8311 rtx fp, lab, stack;
8312 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
8314 #ifdef POINTERS_EXTEND_UNSIGNED
8315 buf_addr = convert_memory_address (Pmode, buf_addr);
8316 #endif
8317 buf_addr = force_reg (Pmode, buf_addr);
8319 /* We used to store value in static_chain_rtx, but that fails if pointers
8320 are smaller than integers. We instead require that the user must pass
8321 a second argument of 1, because that is what builtin_setjmp will
8322 return. This also makes EH slightly more efficient, since we are no
8323 longer copying around a value that we don't care about. */
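/* So the only accepted form is

      __builtin_longjmp (buf, 1);

   anything else trips the abort just below.  */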
8324 if (value != const1_rtx)
8325 abort ();
8327 #ifdef HAVE_builtin_longjmp
8328 if (HAVE_builtin_longjmp)
8329 emit_insn (gen_builtin_longjmp (buf_addr));
8330 else
8331 #endif
8333 fp = gen_rtx_MEM (Pmode, buf_addr);
8334 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
8335 GET_MODE_SIZE (Pmode)));
8337 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
8338 2 * GET_MODE_SIZE (Pmode)));
8340 /* Pick up FP, label, and SP from the block and jump. This code is
8341 from expand_goto in stmt.c; see there for detailed comments. */
8342 #if HAVE_nonlocal_goto
8343 if (HAVE_nonlocal_goto)
8344 /* We have to pass a value to the nonlocal_goto pattern that will
8345 get copied into the static_chain pointer, but it does not matter
8346 what that value is, because builtin_setjmp does not use it. */
8347 emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
8348 else
8349 #endif
8351 lab = copy_to_reg (lab);
8353 emit_move_insn (hard_frame_pointer_rtx, fp);
8354 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
8356 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8357 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
8358 emit_indirect_jump (lab);
8363 static rtx
8364 get_memory_rtx (exp)
8365 tree exp;
8367 rtx mem;
8368 int is_aggregate;
8370 mem = gen_rtx_MEM (BLKmode,
8371 memory_address (BLKmode,
8372 expand_expr (exp, NULL_RTX,
8373 ptr_mode, EXPAND_SUM)));
8375 RTX_UNCHANGING_P (mem) = TREE_READONLY (exp);
8377 /* Figure out the type of the object pointed to. Set MEM_IN_STRUCT_P
8378 if the value is the address of a structure or if the expression is
8379 cast to a pointer to structure type. */
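/* For example, a destination argument written as

      (struct foo *) p

   or as the address of a structure or array object causes
   MEM_IN_STRUCT_P to be set on the returned MEM.  */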
8380 is_aggregate = 0;
8382 while (TREE_CODE (exp) == NOP_EXPR)
8384 tree cast_type = TREE_TYPE (exp);
8385 if (TREE_CODE (cast_type) == POINTER_TYPE
8386 && AGGREGATE_TYPE_P (TREE_TYPE (cast_type)))
8388 is_aggregate = 1;
8389 break;
8391 exp = TREE_OPERAND (exp, 0);
8394 if (is_aggregate == 0)
8396 tree type;
8398 if (TREE_CODE (exp) == ADDR_EXPR)
8399 /* If this is the address of an object, check whether the
8400 object is an array. */
8401 type = TREE_TYPE (TREE_OPERAND (exp, 0));
8402 else
8403 type = TREE_TYPE (TREE_TYPE (exp));
8404 is_aggregate = AGGREGATE_TYPE_P (type);
8407 MEM_IN_STRUCT_P (mem) = is_aggregate;
8408 return mem;
8412 /* Expand an expression EXP that calls a built-in function,
8413 with result going to TARGET if that's convenient
8414 (and in mode MODE if that's convenient).
8415 SUBTARGET may be used as the target for computing one of EXP's operands.
8416 IGNORE is nonzero if the value is to be ignored. */
8418 #define CALLED_AS_BUILT_IN(NODE) \
8419 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
8421 static rtx
8422 expand_builtin (exp, target, subtarget, mode, ignore)
8423 tree exp;
8424 rtx target;
8425 rtx subtarget;
8426 enum machine_mode mode;
8427 int ignore;
8429 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8430 tree arglist = TREE_OPERAND (exp, 1);
8431 rtx op0;
8432 rtx lab1, insns;
8433 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
8434 optab builtin_optab;
8436 switch (DECL_FUNCTION_CODE (fndecl))
8438 case BUILT_IN_ABS:
8439 case BUILT_IN_LABS:
8440 case BUILT_IN_FABS:
8441 /* build_function_call changes these into ABS_EXPR. */
8442 abort ();
8444 case BUILT_IN_SIN:
8445 case BUILT_IN_COS:
8446 /* Treat these like sqrt, but only if the user asks for them. */
8447 if (! flag_fast_math)
8448 break;
8449 case BUILT_IN_FSQRT:
8450 /* If not optimizing, call the library function. */
8451 if (! optimize)
8452 break;
8454 if (arglist == 0
8455 /* Arg could be wrong type if user redeclared this fcn wrong. */
8456 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
8457 break;
8459 /* Stabilize and compute the argument. */
8460 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8461 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8463 exp = copy_node (exp);
8464 arglist = copy_node (arglist);
8465 TREE_OPERAND (exp, 1) = arglist;
8466 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8468 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8470 /* Make a suitable register to place result in. */
8471 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8473 emit_queue ();
8474 start_sequence ();
8476 switch (DECL_FUNCTION_CODE (fndecl))
8478 case BUILT_IN_SIN:
8479 builtin_optab = sin_optab; break;
8480 case BUILT_IN_COS:
8481 builtin_optab = cos_optab; break;
8482 case BUILT_IN_FSQRT:
8483 builtin_optab = sqrt_optab; break;
8484 default:
8485 abort ();
8488 /* Compute into TARGET.
8489 Set TARGET to wherever the result comes back. */
8490 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8491 builtin_optab, op0, target, 0);
8493 /* If we were unable to expand via the builtin, stop the
8494 sequence (without outputting the insns) and break, causing
8495 a call to the library function. */
8496 if (target == 0)
8498 end_sequence ();
8499 break;
8502 /* Check the results by default. But if flag_fast_math is turned on,
8503 then assume sqrt will always be called with valid arguments. */
8505 if (! flag_fast_math)
8507 /* Don't define the builtin FP instructions
8508 if your machine is not IEEE. */
8509 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8510 abort ();
8512 lab1 = gen_label_rtx ();
8514 /* Test the result; if it is NaN, set errno=EDOM because
8515 the argument was not in the domain. */
8516 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
8517 emit_jump_insn (gen_beq (lab1));
8519 #ifdef TARGET_EDOM
8521 #ifdef GEN_ERRNO_RTX
8522 rtx errno_rtx = GEN_ERRNO_RTX;
8523 #else
8524 rtx errno_rtx
8525 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
8526 #endif
8528 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8530 #else
8531 /* We can't set errno=EDOM directly; let the library call do it.
8532 Pop the arguments right away in case the call gets deleted. */
8533 NO_DEFER_POP;
8534 expand_call (exp, target, 0);
8535 OK_DEFER_POP;
8536 #endif
8538 emit_label (lab1);
8541 /* Output the entire sequence. */
8542 insns = get_insns ();
8543 end_sequence ();
8544 emit_insns (insns);
8546 return target;
8548 case BUILT_IN_FMOD:
8549 break;
8551 /* __builtin_apply_args returns block of memory allocated on
8552 the stack into which is stored the arg pointer, structure
8553 value address, static chain, and all the registers that might
8554 possibly be used in performing a function call. The code is
8555 moved to the start of the function so the incoming values are
8556 saved. */
8557 case BUILT_IN_APPLY_ARGS:
8558 /* Don't do __builtin_apply_args more than once in a function.
8559 Save the result of the first call and reuse it. */
8560 if (apply_args_value != 0)
8561 return apply_args_value;
8563 /* When this function is called, it means that registers must be
8564 saved on entry to this function. So we migrate the
8565 call to the first insn of this function. */
8566 rtx temp;
8567 rtx seq;
8569 start_sequence ();
8570 temp = expand_builtin_apply_args ();
8571 seq = get_insns ();
8572 end_sequence ();
8574 apply_args_value = temp;
8576 /* Put the sequence after the NOTE that starts the function.
8577 If this is inside a SEQUENCE, make the outer-level insn
8578 chain current, so the code is placed at the start of the
8579 function. */
8580 push_topmost_sequence ();
8581 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8582 pop_topmost_sequence ();
8583 return temp;
8586 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8587 FUNCTION with a copy of the parameters described by
8588 ARGUMENTS, and ARGSIZE. It returns a block of memory
8589 allocated on the stack into which is stored all the registers
8590 that might possibly be used for returning the result of a
8591 function. ARGUMENTS is the value returned by
8592 __builtin_apply_args. ARGSIZE is the number of bytes of
8593 arguments that must be copied. ??? How should this value be
8594 computed? We'll also need a safe worst case value for varargs
8595 functions. */
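/* A typical forwarding stub, with the caveat above that the caller
   must pick a safe upper bound for ARGSIZE (64 here is purely
   illustrative):

      void *args = __builtin_apply_args ();
      void *result = __builtin_apply ((void (*) ()) target_fn, args, 64);
      __builtin_return (result);
*/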
8596 case BUILT_IN_APPLY:
8597 if (arglist == 0
8598 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8599 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
8600 || TREE_CHAIN (arglist) == 0
8601 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8602 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8603 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8604 return const0_rtx;
8605 else
8607 int i;
8608 tree t;
8609 rtx ops[3];
8611 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8612 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8614 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8617 /* __builtin_return (RESULT) causes the function to return the
8618 value described by RESULT. RESULT is address of the block of
8619 memory returned by __builtin_apply. */
8620 case BUILT_IN_RETURN:
8621 if (arglist
8622 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8623 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8624 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8625 NULL_RTX, VOIDmode, 0));
8626 return const0_rtx;
8628 case BUILT_IN_SAVEREGS:
8629 /* Don't do __builtin_saveregs more than once in a function.
8630 Save the result of the first call and reuse it. */
8631 if (saveregs_value != 0)
8632 return saveregs_value;
8634 /* When this function is called, it means that registers must be
8635 saved on entry to this function. So we migrate the
8636 call to the first insn of this function. */
8637 rtx temp;
8638 rtx seq;
8640 /* Now really call the function. `expand_call' does not call
8641 expand_builtin, so there is no danger of infinite recursion here. */
8642 start_sequence ();
8644 #ifdef EXPAND_BUILTIN_SAVEREGS
8645 /* Do whatever the machine needs done in this case. */
8646 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8647 #else
8648 /* The register where the function returns its value
8649 is likely to have something else in it, such as an argument.
8650 So preserve that register around the call. */
8652 if (value_mode != VOIDmode)
8654 rtx valreg = hard_libcall_value (value_mode);
8655 rtx saved_valreg = gen_reg_rtx (value_mode);
8657 emit_move_insn (saved_valreg, valreg);
8658 temp = expand_call (exp, target, ignore);
8659 emit_move_insn (valreg, saved_valreg);
8661 else
8662 /* Generate the call, putting the value in a pseudo. */
8663 temp = expand_call (exp, target, ignore);
8664 #endif
8666 seq = get_insns ();
8667 end_sequence ();
8669 saveregs_value = temp;
8671 /* Put the sequence after the NOTE that starts the function.
8672 If this is inside a SEQUENCE, make the outer-level insn
8673 chain current, so the code is placed at the start of the
8674 function. */
8675 push_topmost_sequence ();
8676 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8677 pop_topmost_sequence ();
8678 return temp;
8681 /* __builtin_args_info (N) returns word N of the arg space info
8682 for the current function. The number and meanings of words
8683 is controlled by the definition of CUMULATIVE_ARGS. */
8684 case BUILT_IN_ARGS_INFO:
8686 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8687 int *word_ptr = (int *) &current_function_args_info;
8688 #if 0
8689 /* These are used by the code below that is if 0'ed away */
8690 int i;
8691 tree type, elts, result;
8692 #endif
8694 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8695 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8696 __FILE__, __LINE__);
8698 if (arglist != 0)
8700 tree arg = TREE_VALUE (arglist);
8701 if (TREE_CODE (arg) != INTEGER_CST)
8702 error ("argument of `__builtin_args_info' must be constant");
8703 else
8705 int wordnum = TREE_INT_CST_LOW (arg);
8707 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8708 error ("argument of `__builtin_args_info' out of range");
8709 else
8710 return GEN_INT (word_ptr[wordnum]);
8713 else
8714 error ("missing argument in `__builtin_args_info'");
8716 return const0_rtx;
8718 #if 0
8719 for (i = 0; i < nwords; i++)
8720 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
8722 type = build_array_type (integer_type_node,
8723 build_index_type (build_int_2 (nwords, 0)));
8724 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8725 TREE_CONSTANT (result) = 1;
8726 TREE_STATIC (result) = 1;
8727 result = build (INDIRECT_REF, build_pointer_type (type), result);
8728 TREE_CONSTANT (result) = 1;
8729 return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
8730 #endif
8733 /* Return the address of the first anonymous stack arg. */
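/* This is the primitive behind va_start; a <stdarg.h> for this
   compiler expands it roughly as

      #define va_start(AP, LAST) ((AP) = (char *) __builtin_next_arg (LAST))

   though the exact definition is target- and header-specific.  */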
8734 case BUILT_IN_NEXT_ARG:
8736 tree fntype = TREE_TYPE (current_function_decl);
8738 if ((TYPE_ARG_TYPES (fntype) == 0
8739 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8740 == void_type_node))
8741 && ! current_function_varargs)
8743 error ("`va_start' used in function with fixed args");
8744 return const0_rtx;
8747 if (arglist)
8749 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8750 tree arg = TREE_VALUE (arglist);
8752 /* Strip off all nops for the sake of the comparison. This
8753 is not quite the same as STRIP_NOPS. It does more.
8754 We must also strip off INDIRECT_REF for C++ reference
8755 parameters. */
8756 while (TREE_CODE (arg) == NOP_EXPR
8757 || TREE_CODE (arg) == CONVERT_EXPR
8758 || TREE_CODE (arg) == NON_LVALUE_EXPR
8759 || TREE_CODE (arg) == INDIRECT_REF)
8760 arg = TREE_OPERAND (arg, 0);
8761 if (arg != last_parm)
8762 warning ("second parameter of `va_start' not last named argument");
8764 else if (! current_function_varargs)
8765 /* Evidently an out of date version of <stdarg.h>; can't validate
8766 va_start's second argument, but can still work as intended. */
8767 warning ("`__builtin_next_arg' called without an argument");
8770 return expand_binop (Pmode, add_optab,
8771 current_function_internal_arg_pointer,
8772 current_function_arg_offset_rtx,
8773 NULL_RTX, 0, OPTAB_LIB_WIDEN);
8775 case BUILT_IN_CLASSIFY_TYPE:
8776 if (arglist != 0)
8778 tree type = TREE_TYPE (TREE_VALUE (arglist));
8779 enum tree_code code = TREE_CODE (type);
8780 if (code == VOID_TYPE)
8781 return GEN_INT (void_type_class);
8782 if (code == INTEGER_TYPE)
8783 return GEN_INT (integer_type_class);
8784 if (code == CHAR_TYPE)
8785 return GEN_INT (char_type_class);
8786 if (code == ENUMERAL_TYPE)
8787 return GEN_INT (enumeral_type_class);
8788 if (code == BOOLEAN_TYPE)
8789 return GEN_INT (boolean_type_class);
8790 if (code == POINTER_TYPE)
8791 return GEN_INT (pointer_type_class);
8792 if (code == REFERENCE_TYPE)
8793 return GEN_INT (reference_type_class);
8794 if (code == OFFSET_TYPE)
8795 return GEN_INT (offset_type_class);
8796 if (code == REAL_TYPE)
8797 return GEN_INT (real_type_class);
8798 if (code == COMPLEX_TYPE)
8799 return GEN_INT (complex_type_class);
8800 if (code == FUNCTION_TYPE)
8801 return GEN_INT (function_type_class);
8802 if (code == METHOD_TYPE)
8803 return GEN_INT (method_type_class);
8804 if (code == RECORD_TYPE)
8805 return GEN_INT (record_type_class);
8806 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8807 return GEN_INT (union_type_class);
8808 if (code == ARRAY_TYPE)
8810 if (TYPE_STRING_FLAG (type))
8811 return GEN_INT (string_type_class);
8812 else
8813 return GEN_INT (array_type_class);
8815 if (code == SET_TYPE)
8816 return GEN_INT (set_type_class);
8817 if (code == FILE_TYPE)
8818 return GEN_INT (file_type_class);
8819 if (code == LANG_TYPE)
8820 return GEN_INT (lang_type_class);
8822 return GEN_INT (no_type_class);
8824 case BUILT_IN_CONSTANT_P:
8825 if (arglist == 0)
8826 return const0_rtx;
8827 else
8829 tree arg = TREE_VALUE (arglist);
8831 STRIP_NOPS (arg);
8832 if (really_constant_p (arg)
8833 || (TREE_CODE (arg) == ADDR_EXPR
8834 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST))
8835 return const1_rtx;
8837 /* Only emit CONSTANT_P_RTX if CSE will be run.
8838 Moreover, we don't want to expand trees that have side effects,
8839 as the original __builtin_constant_p did not evaluate its
8840 argument at all, and we would break existing usage by changing
8841 this. This quirk was generally useful, eliminating a bit of hair
8842 in the writing of the macros that use this function. Now the
8843 same thing can be better accomplished in an inline function. */
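/* For example,

      int i = 0;
      ... __builtin_constant_p (i++) ...

   folds to 0 and leaves I untouched; the TREE_SIDE_EFFECTS test
   below preserves that by refusing to emit CONSTANT_P_RTX for such
   arguments.  */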
8845 if (! cse_not_expected && ! TREE_SIDE_EFFECTS (arg))
8847 /* Lazy fixup of old code: issue a warning and fail the test. */
8848 if (! can_handle_constant_p)
8850 warning ("Delayed evaluation of __builtin_constant_p not supported on this target.");
8851 warning ("Please report this as a bug to egcs-bugs@cygnus.com.");
8852 return const0_rtx;
8854 return gen_rtx_CONSTANT_P_RTX (TYPE_MODE (integer_type_node),
8855 expand_expr (arg, NULL_RTX,
8856 VOIDmode, 0));
8859 return const0_rtx;
8862 case BUILT_IN_FRAME_ADDRESS:
8863 /* The argument must be a nonnegative integer constant.
8864 It counts the number of frames to scan up the stack.
8865 The value is the address of that frame. */
8866 case BUILT_IN_RETURN_ADDRESS:
8867 /* The argument must be a nonnegative integer constant.
8868 It counts the number of frames to scan up the stack.
8869 The value is the return address saved in that frame. */
8870 if (arglist == 0)
8871 /* Warning about missing arg was already issued. */
8872 return const0_rtx;
8873 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
8874 || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8876 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8877 error ("invalid arg to `__builtin_frame_address'");
8878 else
8879 error ("invalid arg to `__builtin_return_address'");
8880 return const0_rtx;
8882 else
8884 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8885 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8886 hard_frame_pointer_rtx);
8888 /* Some ports cannot access arbitrary stack frames. */
8889 if (tem == NULL)
8891 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8892 warning ("unsupported arg to `__builtin_frame_address'");
8893 else
8894 warning ("unsupported arg to `__builtin_return_address'");
8895 return const0_rtx;
8898 /* For __builtin_frame_address, return what we've got. */
8899 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8900 return tem;
8902 if (GET_CODE (tem) != REG)
8903 tem = copy_to_reg (tem);
8904 return tem;
8907 /* Returns the address of the area where the structure is returned.
8908 0 otherwise. */
8909 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
8910 if (arglist != 0
8911 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
8912 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
8913 return const0_rtx;
8914 else
8915 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
8917 case BUILT_IN_ALLOCA:
8918 if (arglist == 0
8919 /* Arg could be non-integer if user redeclared this fcn wrong. */
8920 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8921 break;
8923 /* Compute the argument. */
8924 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8926 /* Allocate the desired space. */
8927 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
8929 case BUILT_IN_FFS:
8930 /* If not optimizing, call the library function. */
8931 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8932 break;
8934 if (arglist == 0
8935 /* Arg could be non-integer if user redeclared this fcn wrong. */
8936 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8937 break;
8939 /* Compute the argument. */
8940 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8941 /* Compute ffs, into TARGET if possible.
8942 Set TARGET to wherever the result comes back. */
8943 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8944 ffs_optab, op0, target, 1);
8945 if (target == 0)
8946 abort ();
8947 return target;
8949 case BUILT_IN_STRLEN:
8950 /* If not optimizing, call the library function. */
8951 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8952 break;
8954 if (arglist == 0
8955 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8956 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8957 break;
8958 else
8960 tree src = TREE_VALUE (arglist);
8961 tree len = c_strlen (src);
8963 int align
8964 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8966 rtx result, src_rtx, char_rtx;
8967 enum machine_mode insn_mode = value_mode, char_mode;
8968 enum insn_code icode;
8970 /* If the length is known, just return it. */
8971 if (len != 0)
8972 return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
8974 /* If SRC is not a pointer type, don't do this operation inline. */
8975 if (align == 0)
8976 break;
8978 /* Call a function if we can't compute strlen in the right mode. */
8980 while (insn_mode != VOIDmode)
8982 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8983 if (icode != CODE_FOR_nothing)
8984 break;
8986 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8988 if (insn_mode == VOIDmode)
8989 break;
8991 /* Make a place to write the result of the instruction. */
8992 result = target;
8993 if (! (result != 0
8994 && GET_CODE (result) == REG
8995 && GET_MODE (result) == insn_mode
8996 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8997 result = gen_reg_rtx (insn_mode);
8999 /* Make sure the operands are acceptable to the predicates. */
9001 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
9002 result = gen_reg_rtx (insn_mode);
9003 src_rtx = memory_address (BLKmode,
9004 expand_expr (src, NULL_RTX, ptr_mode,
9005 EXPAND_NORMAL));
9007 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
9008 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
9010 /* Check the string is readable and has an end. */
9011 if (flag_check_memory_usage)
9012 emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
9013 src_rtx, ptr_mode,
9014 GEN_INT (MEMORY_USE_RO),
9015 TYPE_MODE (integer_type_node));
9017 char_rtx = const0_rtx;
9018 char_mode = insn_operand_mode[(int)icode][2];
9019 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
9020 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
9022 emit_insn (GEN_FCN (icode) (result,
9023 gen_rtx_MEM (BLKmode, src_rtx),
9024 char_rtx, GEN_INT (align)));
9026 /* Return the value in the proper mode for this function. */
9027 if (GET_MODE (result) == value_mode)
9028 return result;
9029 else if (target != 0)
9031 convert_move (target, result, 0);
9032 return target;
9034 else
9035 return convert_to_mode (value_mode, result, 0);
9038 case BUILT_IN_STRCPY:
9039 /* If not optimizing, call the library function. */
9040 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9041 break;
9043 if (arglist == 0
9044 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9045 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9046 || TREE_CHAIN (arglist) == 0
9047 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9048 break;
9049 else
9051 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
9053 if (len == 0)
9054 break;
9056 len = size_binop (PLUS_EXPR, len, integer_one_node);
9058 chainon (arglist, build_tree_list (NULL_TREE, len));
9061 /* Drops in. */
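/* E.g. strcpy (dst, "abc") arrives here carrying the extra length
   argument 4 (strlen + 1, appended above) and is expanded exactly
   as memcpy (dst, "abc", 4) would be.  */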
9062 case BUILT_IN_MEMCPY:
9063 /* If not optimizing, call the library function. */
9064 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9065 break;
9067 if (arglist == 0
9068 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9069 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9070 || TREE_CHAIN (arglist) == 0
9071 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9072 != POINTER_TYPE)
9073 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9074 || (TREE_CODE (TREE_TYPE (TREE_VALUE
9075 (TREE_CHAIN (TREE_CHAIN (arglist)))))
9076 != INTEGER_TYPE))
9077 break;
9078 else
9080 tree dest = TREE_VALUE (arglist);
9081 tree src = TREE_VALUE (TREE_CHAIN (arglist));
9082 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9084 int src_align
9085 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9086 int dest_align
9087 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9088 rtx dest_mem, src_mem, dest_addr, len_rtx;
9090 /* If either SRC or DEST is not a pointer type, don't do
9091 this operation in-line. */
9092 if (src_align == 0 || dest_align == 0)
9094 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
9095 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9096 break;
9099 dest_mem = get_memory_rtx (dest);
9100 src_mem = get_memory_rtx (src);
9101 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9103 /* Just copy the rights of SRC to the rights of DEST. */
9104 if (flag_check_memory_usage)
9105 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
9106 XEXP (dest_mem, 0), ptr_mode,
9107 XEXP (src_mem, 0), ptr_mode,
9108 len_rtx, TYPE_MODE (sizetype));
9110 /* Copy word part most expediently. */
9111 dest_addr
9112 = emit_block_move (dest_mem, src_mem, len_rtx,
9113 MIN (src_align, dest_align));
9115 if (dest_addr == 0)
9116 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
9118 return dest_addr;
9121 case BUILT_IN_MEMSET:
9122 /* If not optimizing, call the library function. */
9123 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9124 break;
9126 if (arglist == 0
9127 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9128 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9129 || TREE_CHAIN (arglist) == 0
9130 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9131 != INTEGER_TYPE)
9132 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9133 || (TREE_CODE (TREE_TYPE
9134 (TREE_VALUE
9135 (TREE_CHAIN (TREE_CHAIN (arglist)))))
9136 != INTEGER_TYPE))
9137 break;
9138 else
9140 tree dest = TREE_VALUE (arglist);
9141 tree val = TREE_VALUE (TREE_CHAIN (arglist));
9142 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9144 int dest_align
9145 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9146 rtx dest_mem, dest_addr, len_rtx;
9148 /* If DEST is not a pointer type, don't do this
9149 operation in-line. */
9150 if (dest_align == 0)
9151 break;
9153 /* If the arguments have side-effects, then we can only evaluate
9154 them at most once. The following code evaluates them twice if
9155 they are not constants because we break out to expand_call
9156 in that case. They can't be constants if they have side-effects
9157 so we can check for that first. Alternatively, we could call
9158 save_expr to make multiple evaluation safe. */
9159 if (TREE_SIDE_EFFECTS (val) || TREE_SIDE_EFFECTS (len))
9160 break;
9162 /* If VAL is not 0, don't do this operation in-line. */
9163 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
9164 break;
9166 /* If LEN does not expand to a constant, don't do this
9167 operation in-line. */
9168 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9169 if (GET_CODE (len_rtx) != CONST_INT)
9170 break;
9172 dest_mem = get_memory_rtx (dest);
9174 /* Just check that DEST is writable and mark it as readable. */
9175 if (flag_check_memory_usage)
9176 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
9177 XEXP (dest_mem, 0), ptr_mode,
9178 len_rtx, TYPE_MODE (sizetype),
9179 GEN_INT (MEMORY_USE_WO),
9180 TYPE_MODE (integer_type_node));
9183 dest_addr = clear_storage (dest_mem, len_rtx, dest_align);
9185 if (dest_addr == 0)
9186 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
9188 return dest_addr;
9191 /* These comparison functions need an instruction that returns an actual
9192 index. An ordinary compare that just sets the condition codes
9193 is not enough. */
9194 #ifdef HAVE_cmpstrsi
9195 case BUILT_IN_STRCMP:
9196 /* If not optimizing, call the library function. */
9197 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9198 break;
9200 /* If we need to check memory accesses, call the library function. */
9201 if (flag_check_memory_usage)
9202 break;
9204 if (arglist == 0
9205 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9206 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9207 || TREE_CHAIN (arglist) == 0
9208 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9209 break;
9210 else if (!HAVE_cmpstrsi)
9211 break;
9213 tree arg1 = TREE_VALUE (arglist);
9214 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9215 tree len, len2;
9217 len = c_strlen (arg1);
9218 if (len)
9219 len = size_binop (PLUS_EXPR, integer_one_node, len);
9220 len2 = c_strlen (arg2);
9221 if (len2)
9222 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
9224 /* If we don't have a constant length for the first, use the length
9225 of the second, if we know it. We don't require a constant for
9226 this case; some cost analysis could be done if both are available
9227 but neither is constant. For now, assume they're equally cheap.
9229 If both strings have constant lengths, use the smaller. This
9230 could arise if optimization results in strcpy being called with
9231 two fixed strings, or if the code was machine-generated. We should
9232 add some code to the `memcmp' handler below to deal with such
9233 situations, someday. */
9234 if (!len || TREE_CODE (len) != INTEGER_CST)
9236 if (len2)
9237 len = len2;
9238 else if (len == 0)
9239 break;
9241 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
9243 if (tree_int_cst_lt (len2, len))
9244 len = len2;
9247 chainon (arglist, build_tree_list (NULL_TREE, len));
9250 /* Falls through. */
9251 case BUILT_IN_MEMCMP:
9252 /* If not optimizing, call the library function. */
9253 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9254 break;
9256 /* If we need to check memory accesses, call the library function. */
9257 if (flag_check_memory_usage)
9258 break;
9260 if (arglist == 0
9261 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9262 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9263 || TREE_CHAIN (arglist) == 0
9264 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
9265 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9266 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
9267 break;
9268 else if (!HAVE_cmpstrsi)
9269 break;
9271 tree arg1 = TREE_VALUE (arglist);
9272 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9273 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9274 rtx result;
9276 int arg1_align
9277 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9278 int arg2_align
9279 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9280 enum machine_mode insn_mode
9281 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
9283 /* If either arg is not a pointer type, don't do this operation in-line. */
9284 if (arg1_align == 0 || arg2_align == 0)
9286 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
9287 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9288 break;
9291 /* Make a place to write the result of the instruction. */
9292 result = target;
9293 if (! (result != 0
9294 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
9295 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9296 result = gen_reg_rtx (insn_mode);
9298 emit_insn (gen_cmpstrsi (result, get_memory_rtx (arg1),
9299 get_memory_rtx (arg2),
9300 expand_expr (len, NULL_RTX, VOIDmode, 0),
9301 GEN_INT (MIN (arg1_align, arg2_align))));
9303 /* Return the value in the proper mode for this function. */
9304 mode = TYPE_MODE (TREE_TYPE (exp));
9305 if (GET_MODE (result) == mode)
9306 return result;
9307 else if (target != 0)
9309 convert_move (target, result, 0);
9310 return target;
9312 else
9313 return convert_to_mode (mode, result, 0);
9315 #else
9316 case BUILT_IN_STRCMP:
9317 case BUILT_IN_MEMCMP:
9318 break;
9319 #endif
9321 case BUILT_IN_SETJMP:
9322 if (arglist == 0
9323 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9324 break;
9325 else
9327 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9328 VOIDmode, 0);
9329 rtx lab = gen_label_rtx ();
9330 rtx ret = expand_builtin_setjmp (buf_addr, target, lab, lab);
9331 emit_label (lab);
9332 return ret;
9335 /* __builtin_longjmp is passed a pointer to an array of five words.
9336 It's similar to the C library longjmp function but works with
9337 __builtin_setjmp above. */
9338 case BUILT_IN_LONGJMP:
9339 if (arglist == 0 || TREE_CHAIN (arglist) == 0
9340 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9341 break;
9342 else
9344 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9345 VOIDmode, 0);
9346 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
9347 NULL_RTX, VOIDmode, 0);
9349 if (value != const1_rtx)
9351 error ("__builtin_longjmp second argument must be 1");
9352 return const0_rtx;
9355 expand_builtin_longjmp (buf_addr, value);
9356 return const0_rtx;
9359 case BUILT_IN_TRAP:
9360 #ifdef HAVE_trap
9361 if (HAVE_trap)
9362 emit_insn (gen_trap ());
9363 else
9364 #endif
9365 error ("__builtin_trap not supported by this target");
9366 emit_barrier ();
9367 return const0_rtx;
9369 /* Various hooks for the DWARF 2 __throw routine. */
9370 case BUILT_IN_UNWIND_INIT:
9371 expand_builtin_unwind_init ();
9372 return const0_rtx;
9373 case BUILT_IN_DWARF_CFA:
9374 return virtual_cfa_rtx;
9375 #ifdef DWARF2_UNWIND_INFO
9376 case BUILT_IN_DWARF_FP_REGNUM:
9377 return expand_builtin_dwarf_fp_regnum ();
9378 case BUILT_IN_DWARF_REG_SIZE:
9379 return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
9380 #endif
9381 case BUILT_IN_FROB_RETURN_ADDR:
9382 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
9383 case BUILT_IN_EXTRACT_RETURN_ADDR:
9384 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
9385 case BUILT_IN_EH_RETURN:
9386 expand_builtin_eh_return (TREE_VALUE (arglist),
9387 TREE_VALUE (TREE_CHAIN (arglist)),
9388 TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))));
9389 return const0_rtx;
9391 default: /* just do library call, if unknown builtin */
9392 error ("built-in function `%s' not currently supported",
9393 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9396 /* The switch statement above can drop through to cause the function
9397 to be called normally. */
9399 return expand_call (exp, target, ignore);
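/* An illustrative sketch, not part of this file, of the contract the
   BUILT_IN_SETJMP and BUILT_IN_LONGJMP cases above enforce: the buffer
   is an array of five words, and the second argument of
   __builtin_longjmp must be the constant 1.  The names below are
   hypothetical user code.  */
#if 0
static void *jmp_buffer[5];	/* five words, as required above */

extern void do_work ();

int
example ()
{
  if (__builtin_setjmp (jmp_buffer))
    return 1;			/* reached via __builtin_longjmp */
  do_work ();
  __builtin_longjmp (jmp_buffer, 1);	/* second argument must be 1 */
}
#endif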
9402 /* Built-in functions to perform an untyped call and return. */
9404 /* For each register that may be used for calling a function, this
9405 gives a mode used to copy the register's value. VOIDmode indicates
9406 the register is not used for calling a function. If the machine
9407 has register windows, this gives only the outbound registers.
9408 INCOMING_REGNO gives the corresponding inbound register. */
9409 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
9411 /* For each register that may be used for returning values, this gives
9412 a mode used to copy the register's value. VOIDmode indicates the
9413 register is not used for returning values. If the machine has
9414 register windows, this gives only the outbound registers.
9415 INCOMING_REGNO gives the corresponding inbound register. */
9416 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
9418 /* For each register that may be used for calling a function, this
9419 gives the offset of that register into the block returned by
9420 __builtin_apply_args. 0 indicates that the register is not
9421 used for calling a function. */
9422 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9424 /* Return the offset of register REGNO into the block returned by
9425 __builtin_apply_args. This is not declared static, since it is
9426 needed in objc-act.c. */
9428 int
9429 apply_args_register_offset (regno)
9430 int regno;
9432 apply_args_size ();
9434 /* Arguments are always put in outgoing registers (in the argument
9435 block) when that makes sense. */
9436 #ifdef OUTGOING_REGNO
9437 regno = OUTGOING_REGNO(regno);
9438 #endif
9439 return apply_args_reg_offset[regno];
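/* A minimal sketch, assuming a hypothetical caller: the offset returned
   above locates the save slot of register REGNO inside the block that
   __builtin_apply_args returns.  */
#if 0
/* Hypothetical helper; BLOCK must come from __builtin_apply_args in
   the function whose arguments were saved.  */
char *
saved_register_slot (block, regno)
     char *block;
     int regno;
{
  return block + apply_args_register_offset (regno);
}
#endif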
9442 /* Return the size required for the block returned by __builtin_apply_args,
9443 and initialize apply_args_mode. */
9445 static int
9446 apply_args_size ()
9448 static int size = -1;
9449 int align, regno;
9450 enum machine_mode mode;
9452 /* The values computed by this function never change. */
9453 if (size < 0)
9455 /* The first value is the incoming arg-pointer. */
9456 size = GET_MODE_SIZE (Pmode);
9458 /* The second value is the structure value address unless this is
9459 passed as an "invisible" first argument. */
9460 if (struct_value_rtx)
9461 size += GET_MODE_SIZE (Pmode);
9463 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9464 if (FUNCTION_ARG_REGNO_P (regno))
9466 /* Search for the proper mode for copying this register's
9467 value. I'm not sure this is right, but it works so far. */
9468 enum machine_mode best_mode = VOIDmode;
9470 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9471 mode != VOIDmode;
9472 mode = GET_MODE_WIDER_MODE (mode))
9473 if (HARD_REGNO_MODE_OK (regno, mode)
9474 && HARD_REGNO_NREGS (regno, mode) == 1)
9475 best_mode = mode;
9477 if (best_mode == VOIDmode)
9478 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9479 mode != VOIDmode;
9480 mode = GET_MODE_WIDER_MODE (mode))
9481 if (HARD_REGNO_MODE_OK (regno, mode)
9482 && (mov_optab->handlers[(int) mode].insn_code
9483 != CODE_FOR_nothing))
9484 best_mode = mode;
9486 mode = best_mode;
9487 if (mode == VOIDmode)
9488 abort ();
9490 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9491 if (size % align != 0)
9492 size = CEIL (size, align) * align;
9493 apply_args_reg_offset[regno] = size;
9494 size += GET_MODE_SIZE (mode);
9495 apply_args_mode[regno] = mode;
9497 else
9499 apply_args_mode[regno] = VOIDmode;
9500 apply_args_reg_offset[regno] = 0;
9503 return size;
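/* For example, if SIZE is 4 when a register whose mode requires 8-byte
   alignment is reached, CEIL (4, 8) * 8 rounds SIZE up to 8, that
   register's slot starts at offset 8, and SIZE then advances past the
   slot.  */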
9506 /* Return the size required for the block returned by __builtin_apply,
9507 and initialize apply_result_mode. */
9509 static int
9510 apply_result_size ()
9512 static int size = -1;
9513 int align, regno;
9514 enum machine_mode mode;
9516 /* The values computed by this function never change. */
9517 if (size < 0)
9519 size = 0;
9521 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9522 if (FUNCTION_VALUE_REGNO_P (regno))
9524 /* Search for the proper mode for copying this register's
9525 value. I'm not sure this is right, but it works so far. */
9526 enum machine_mode best_mode = VOIDmode;
9528 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9529 mode != TImode;
9530 mode = GET_MODE_WIDER_MODE (mode))
9531 if (HARD_REGNO_MODE_OK (regno, mode))
9532 best_mode = mode;
9534 if (best_mode == VOIDmode)
9535 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9536 mode != VOIDmode;
9537 mode = GET_MODE_WIDER_MODE (mode))
9538 if (HARD_REGNO_MODE_OK (regno, mode)
9539 && (mov_optab->handlers[(int) mode].insn_code
9540 != CODE_FOR_nothing))
9541 best_mode = mode;
9543 mode = best_mode;
9544 if (mode == VOIDmode)
9545 abort ();
9547 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9548 if (size % align != 0)
9549 size = CEIL (size, align) * align;
9550 size += GET_MODE_SIZE (mode);
9551 apply_result_mode[regno] = mode;
9553 else
9554 apply_result_mode[regno] = VOIDmode;
9556 /* Allow targets that use untyped_call and untyped_return to override
9557 the size so that machine-specific information can be stored here. */
9558 #ifdef APPLY_RESULT_SIZE
9559 size = APPLY_RESULT_SIZE;
9560 #endif
9562 return size;
9565 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9566 /* Create a vector describing the result block RESULT. If SAVEP is true,
9567 the result block is used to save the values; otherwise it is used to
9568 restore the values. */
9570 static rtx
9571 result_vector (savep, result)
9572 int savep;
9573 rtx result;
9575 int regno, size, align, nelts;
9576 enum machine_mode mode;
9577 rtx reg, mem;
9578 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9580 size = nelts = 0;
9581 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9582 if ((mode = apply_result_mode[regno]) != VOIDmode)
9584 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9585 if (size % align != 0)
9586 size = CEIL (size, align) * align;
9587 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
9588 mem = change_address (result, mode,
9589 plus_constant (XEXP (result, 0), size));
9590 savevec[nelts++] = (savep
9591 ? gen_rtx_SET (VOIDmode, mem, reg)
9592 : gen_rtx_SET (VOIDmode, reg, mem));
9593 size += GET_MODE_SIZE (mode);
9595 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
9597 #endif /* HAVE_untyped_call or HAVE_untyped_return */
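/* For instance, on a hypothetical machine that returns integers in
   register 0 and doubles in register 8, result_vector (1, result)
   would build a save vector along the lines of

     (parallel [(set (mem:SI (addr)) (reg:SI 0))
                (set (mem:DF (plus (addr) (const_int 8))) (reg:DF 8))])

   and with SAVEP zero each SET runs the other way, restoring the
   registers from the block.  */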
9599 /* Save the state required to perform an untyped call with the same
9600 arguments as were passed to the current function. */
9602 static rtx
9603 expand_builtin_apply_args ()
9605 rtx registers;
9606 int size, align, regno;
9607 enum machine_mode mode;
9609 /* Create a block where the arg-pointer, structure value address,
9610 and argument registers can be saved. */
9611 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9613 /* Walk past the arg-pointer and structure value address. */
9614 size = GET_MODE_SIZE (Pmode);
9615 if (struct_value_rtx)
9616 size += GET_MODE_SIZE (Pmode);
9618 /* Save each register used in calling a function to the block. */
9619 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9620 if ((mode = apply_args_mode[regno]) != VOIDmode)
9622 rtx tem;
9624 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9625 if (size % align != 0)
9626 size = CEIL (size, align) * align;
9628 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
9630 #ifdef STACK_REGS
9631 /* For reg-stack.c's stack register housekeeping.
9632 Compare with a similar piece of code in function.c. */
9634 emit_insn (gen_rtx_USE (mode, tem));
9635 #endif
9637 emit_move_insn (change_address (registers, mode,
9638 plus_constant (XEXP (registers, 0),
9639 size)),
9640 tem);
9641 size += GET_MODE_SIZE (mode);
9644 /* Save the arg pointer to the block. */
9645 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9646 copy_to_reg (virtual_incoming_args_rtx));
9647 size = GET_MODE_SIZE (Pmode);
9649 /* Save the structure value address unless this is passed as an
9650 "invisible" first argument. */
9651 if (struct_value_incoming_rtx)
9653 emit_move_insn (change_address (registers, Pmode,
9654 plus_constant (XEXP (registers, 0),
9655 size)),
9656 copy_to_reg (struct_value_incoming_rtx));
9657 size += GET_MODE_SIZE (Pmode);
9660 /* Return the address of the block. */
9661 return copy_addr_to_reg (XEXP (registers, 0));
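/* A sketch of the block laid out above, offsets growing upward:

     offset 0:		the incoming arg pointer
     next Pmode slot:	the structure value address, unless it is
			passed as an "invisible" first argument
     then:		one save slot per register used for argument
			passing, each aligned to its mode.  */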
9664 /* Perform an untyped call and save the state required to perform an
9665 untyped return of whatever value was returned by the given function. */
9667 static rtx
9668 expand_builtin_apply (function, arguments, argsize)
9669 rtx function, arguments, argsize;
9671 int size, align, regno;
9672 enum machine_mode mode;
9673 rtx incoming_args, result, reg, dest, call_insn;
9674 rtx old_stack_level = 0;
9675 rtx call_fusage = 0;
9677 /* Create a block where the return registers can be saved. */
9678 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9680 /* ??? The argsize value should be adjusted here. */
9682 /* Fetch the arg pointer from the ARGUMENTS block. */
9683 incoming_args = gen_reg_rtx (Pmode);
9684 emit_move_insn (incoming_args,
9685 gen_rtx_MEM (Pmode, arguments));
9686 #ifndef STACK_GROWS_DOWNWARD
9687 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9688 incoming_args, 0, OPTAB_LIB_WIDEN);
9689 #endif
9691 /* Perform postincrements before actually calling the function. */
9692 emit_queue ();
9694 /* Push a new argument block and copy the arguments. */
9695 do_pending_stack_adjust ();
9697 /* Save the stack with the nonlocal mechanism, if available. */
9698 #ifdef HAVE_save_stack_nonlocal
9699 if (HAVE_save_stack_nonlocal)
9700 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
9701 else
9702 #endif
9703 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9705 /* Push a block of memory onto the stack to store the memory arguments.
9706 Save the address in a register, and copy the memory arguments. ??? I
9707 haven't figured out how the calling convention macros affect this,
9708 but it's likely that the source and/or destination addresses in
9709 the block copy will need updating in machine specific ways. */
9710 dest = allocate_dynamic_stack_space (argsize, 0, 0);
9711 emit_block_move (gen_rtx_MEM (BLKmode, dest),
9712 gen_rtx_MEM (BLKmode, incoming_args),
9713 argsize,
9714 PARM_BOUNDARY / BITS_PER_UNIT);
9716 /* Refer to the argument block. */
9717 apply_args_size ();
9718 arguments = gen_rtx_MEM (BLKmode, arguments);
9720 /* Walk past the arg-pointer and structure value address. */
9721 size = GET_MODE_SIZE (Pmode);
9722 if (struct_value_rtx)
9723 size += GET_MODE_SIZE (Pmode);
9725 /* Restore each of the registers previously saved. Make USE insns
9726 for each of these registers for use in making the call. */
9727 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9728 if ((mode = apply_args_mode[regno]) != VOIDmode)
9730 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9731 if (size % align != 0)
9732 size = CEIL (size, align) * align;
9733 reg = gen_rtx_REG (mode, regno);
9734 emit_move_insn (reg,
9735 change_address (arguments, mode,
9736 plus_constant (XEXP (arguments, 0),
9737 size)));
9739 use_reg (&call_fusage, reg);
9740 size += GET_MODE_SIZE (mode);
9743 /* Restore the structure value address unless this is passed as an
9744 "invisible" first argument. */
9745 size = GET_MODE_SIZE (Pmode);
9746 if (struct_value_rtx)
9748 rtx value = gen_reg_rtx (Pmode);
9749 emit_move_insn (value,
9750 change_address (arguments, Pmode,
9751 plus_constant (XEXP (arguments, 0),
9752 size)));
9753 emit_move_insn (struct_value_rtx, value);
9754 if (GET_CODE (struct_value_rtx) == REG)
9755 use_reg (&call_fusage, struct_value_rtx);
9756 size += GET_MODE_SIZE (Pmode);
9759 /* All arguments and registers used for the call are set up by now! */
9760 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9762 /* Ensure the address is valid. A SYMBOL_REF is already valid, so nothing
9763 need be done; and we don't want to load it into a register as an
9764 optimization, because prepare_call_address already did that if needed. */
9765 if (GET_CODE (function) != SYMBOL_REF)
9766 function = memory_address (FUNCTION_MODE, function);
9768 /* Generate the actual call instruction and save the return value. */
9769 #ifdef HAVE_untyped_call
9770 if (HAVE_untyped_call)
9771 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
9772 result, result_vector (1, result)));
9773 else
9774 #endif
9775 #ifdef HAVE_call_value
9776 if (HAVE_call_value)
9778 rtx valreg = 0;
9780 /* Locate the unique return register. It is not possible to
9781 express a call that sets more than one return register using
9782 call_value; use untyped_call for that. In fact, untyped_call
9783 only needs to save the return registers in the given block. */
9784 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9785 if ((mode = apply_result_mode[regno]) != VOIDmode)
9787 if (valreg)
9788 abort (); /* HAVE_untyped_call required. */
9789 valreg = gen_rtx_REG (mode, regno);
9792 emit_call_insn (gen_call_value (valreg,
9793 gen_rtx_MEM (FUNCTION_MODE, function),
9794 const0_rtx, NULL_RTX, const0_rtx));
9796 emit_move_insn (change_address (result, GET_MODE (valreg),
9797 XEXP (result, 0)),
9798 valreg);
9800 else
9801 #endif
9802 abort ();
9804 /* Find the CALL insn we just emitted. */
9805 for (call_insn = get_last_insn ();
9806 call_insn && GET_CODE (call_insn) != CALL_INSN;
9807 call_insn = PREV_INSN (call_insn))
9810 if (! call_insn)
9811 abort ();
9813 /* Put the register usage information on the CALL. If there is already
9814 some usage information, put ours at the end. */
9815 if (CALL_INSN_FUNCTION_USAGE (call_insn))
9817 rtx link;
9819 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9820 link = XEXP (link, 1))
9823 XEXP (link, 1) = call_fusage;
9825 else
9826 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
9828 /* Restore the stack. */
9829 #ifdef HAVE_save_stack_nonlocal
9830 if (HAVE_save_stack_nonlocal)
9831 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
9832 else
9833 #endif
9834 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9836 /* Return the address of the result block. */
9837 return copy_addr_to_reg (XEXP (result, 0));
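/* An illustrative user-level sketch of the machinery above.  The names
   are hypothetical, and the 64 is an assumed upper bound on the size of
   the argument block, which __builtin_apply requires.  */
#if 0
extern void target_function ();

void
forwarder ()
{
  void *args = __builtin_apply_args ();
  void *result = __builtin_apply (target_function, args, 64);
  __builtin_return (result);
}
#endif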
9840 /* Perform an untyped return. */
9842 static void
9843 expand_builtin_return (result)
9844 rtx result;
9846 int size, align, regno;
9847 enum machine_mode mode;
9848 rtx reg;
9849 rtx call_fusage = 0;
9851 apply_result_size ();
9852 result = gen_rtx_MEM (BLKmode, result);
9854 #ifdef HAVE_untyped_return
9855 if (HAVE_untyped_return)
9857 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9858 emit_barrier ();
9859 return;
9861 #endif
9863 /* Restore the return value and note that each value is used. */
9864 size = 0;
9865 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9866 if ((mode = apply_result_mode[regno]) != VOIDmode)
9868 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9869 if (size % align != 0)
9870 size = CEIL (size, align) * align;
9871 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
9872 emit_move_insn (reg,
9873 change_address (result, mode,
9874 plus_constant (XEXP (result, 0),
9875 size)));
9877 push_to_sequence (call_fusage);
9878 emit_insn (gen_rtx_USE (VOIDmode, reg));
9879 call_fusage = get_insns ();
9880 end_sequence ();
9881 size += GET_MODE_SIZE (mode);
9884 /* Put the USE insns before the return. */
9885 emit_insns (call_fusage);
9887 /* Return whatever value was restored by jumping directly to the end
9888 of the function. */
9889 expand_null_return ();
9892 /* Expand code for a post- or pre- increment or decrement
9893 and return the RTX for the result.
9894 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9896 static rtx
9897 expand_increment (exp, post, ignore)
9898 register tree exp;
9899 int post, ignore;
9901 register rtx op0, op1;
9902 register rtx temp, value;
9903 register tree incremented = TREE_OPERAND (exp, 0);
9904 optab this_optab = add_optab;
9905 int icode;
9906 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9907 int op0_is_copy = 0;
9908 int single_insn = 0;
9909 /* 1 means we can't store into OP0 directly,
9910 because it is a subreg narrower than a word,
9911 and we don't dare clobber the rest of the word. */
9912 int bad_subreg = 0;
9914 /* Stabilize any component ref that might need to be
9915 evaluated more than once below. */
9916 if (!post
9917 || TREE_CODE (incremented) == BIT_FIELD_REF
9918 || (TREE_CODE (incremented) == COMPONENT_REF
9919 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9920 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9921 incremented = stabilize_reference (incremented);
9922 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9923 ones into save exprs so that they don't accidentally get evaluated
9924 more than once by the code below. */
9925 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9926 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9927 incremented = save_expr (incremented);
9929 /* Compute the operands as RTX.
9930 Note whether OP0 is the actual lvalue or a copy of it:
9931 I believe it is a copy iff it is a register or subreg
9932 and insns were generated in computing it. */
9934 temp = get_last_insn ();
9935 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9937 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9938 in place but instead must do sign- or zero-extension during assignment,
9939 so we copy it into a new register and let the code below use it as
9940 a copy.
9942 Note that we can safely modify this SUBREG since it is known not to be
9943 shared (it was made by the expand_expr call above). */
9945 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9947 if (post)
9948 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9949 else
9950 bad_subreg = 1;
9952 else if (GET_CODE (op0) == SUBREG
9953 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9955 /* We cannot increment this SUBREG in place. If we are
9956 post-incrementing, get a copy of the old value. Otherwise,
9957 just mark that we cannot increment in place. */
9958 if (post)
9959 op0 = copy_to_reg (op0);
9960 else
9961 bad_subreg = 1;
9964 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9965 && temp != get_last_insn ());
9966 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9967 EXPAND_MEMORY_USE_BAD);
9969 /* Decide whether incrementing or decrementing. */
9970 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9971 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9972 this_optab = sub_optab;
9974 /* Convert decrement by a constant into a negative increment. */
9975 if (this_optab == sub_optab
9976 && GET_CODE (op1) == CONST_INT)
9978 op1 = GEN_INT (- INTVAL (op1));
9979 this_optab = add_optab;
9982 /* For a preincrement, see if we can do this with a single instruction. */
9983 if (!post)
9985 icode = (int) this_optab->handlers[(int) mode].insn_code;
9986 if (icode != (int) CODE_FOR_nothing
9987 /* Make sure that OP0 is valid for operands 0 and 1
9988 of the insn we want to queue. */
9989 && (*insn_operand_predicate[icode][0]) (op0, mode)
9990 && (*insn_operand_predicate[icode][1]) (op0, mode)
9991 && (*insn_operand_predicate[icode][2]) (op1, mode))
9992 single_insn = 1;
9995 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9996 then we cannot just increment OP0. We must therefore contrive to
9997 increment the original value. Then, for postincrement, we can return
9998 OP0 since it is a copy of the old value. For preincrement, expand here
9999 unless we can do it with a single insn.
10001 Likewise if storing directly into OP0 would clobber high bits
10002 we need to preserve (bad_subreg). */
10003 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
10005 /* This is the easiest way to increment the value wherever it is.
10006 Problems with multiple evaluation of INCREMENTED are prevented
10007 because either (1) it is a component_ref or preincrement,
10008 in which case it was stabilized above, or (2) it is an array_ref
10009 with constant index in an array in a register, which is
10010 safe to reevaluate. */
10011 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
10012 || TREE_CODE (exp) == PREDECREMENT_EXPR)
10013 ? MINUS_EXPR : PLUS_EXPR),
10014 TREE_TYPE (exp),
10015 incremented,
10016 TREE_OPERAND (exp, 1));
10018 while (TREE_CODE (incremented) == NOP_EXPR
10019 || TREE_CODE (incremented) == CONVERT_EXPR)
10021 newexp = convert (TREE_TYPE (incremented), newexp);
10022 incremented = TREE_OPERAND (incremented, 0);
10025 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
10026 return post ? op0 : temp;
10029 if (post)
10031 /* We have a true reference to the value in OP0.
10032 If there is an insn to add or subtract in this mode, queue it.
10033 Queueing the increment insn avoids the register shuffling
10034 that often results if we must increment now and first save
10035 the old value for subsequent use. */
10037 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
10038 op0 = stabilize (op0);
10039 #endif
10041 icode = (int) this_optab->handlers[(int) mode].insn_code;
10042 if (icode != (int) CODE_FOR_nothing
10043 /* Make sure that OP0 is valid for operands 0 and 1
10044 of the insn we want to queue. */
10045 && (*insn_operand_predicate[icode][0]) (op0, mode)
10046 && (*insn_operand_predicate[icode][1]) (op0, mode))
10048 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10049 op1 = force_reg (mode, op1);
10051 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
10053 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
10055 rtx addr = (general_operand (XEXP (op0, 0), mode)
10056 ? force_reg (Pmode, XEXP (op0, 0))
10057 : copy_to_reg (XEXP (op0, 0)));
10058 rtx temp, result;
10060 op0 = change_address (op0, VOIDmode, addr);
10061 temp = force_reg (GET_MODE (op0), op0);
10062 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10063 op1 = force_reg (mode, op1);
10065 /* The increment queue is LIFO, thus we have to `queue'
10066 the instructions in reverse order. */
10067 enqueue_insn (op0, gen_move_insn (op0, temp));
10068 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
10069 return result;
10073 /* Preincrement, or we can't increment with one simple insn. */
10074 if (post)
10075 /* Save a copy of the value before inc or dec, to return it later. */
10076 temp = value = copy_to_reg (op0);
10077 else
10078 /* Arrange to return the incremented value. */
10079 /* Copy the rtx because expand_binop will protect from the queue,
10080 and the results of that would be invalid for us to return
10081 if our caller does emit_queue before using our result. */
10082 temp = copy_rtx (value = op0);
10084 /* Increment however we can. */
10085 op1 = expand_binop (mode, this_optab, value, op1,
10086 flag_check_memory_usage ? NULL_RTX : op0,
10087 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
10088 /* Make sure the value is stored into OP0. */
10089 if (op1 != op0)
10090 emit_move_insn (op0, op1);
10092 return temp;
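/* For example, for a post-increment of a value M in memory, the path
   above loads and queues roughly

       temp = M;		emitted now; TEMP holds the old value
       temp = temp + 1;		queued second, so it runs first
       M = temp;		queued first, so it runs last (LIFO)

   and the old value captured in TEMP is what the expression yields.  */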
10095 /* Expand all function calls contained within EXP, innermost ones first.
10096 But don't look within expressions that have sequence points.
10097 For each CALL_EXPR, record the rtx for its value
10098 in the CALL_EXPR_RTL field. */
10100 static void
10101 preexpand_calls (exp)
10102 tree exp;
10104 register int nops, i;
10105 int type = TREE_CODE_CLASS (TREE_CODE (exp));
10107 if (! do_preexpand_calls)
10108 return;
10110 /* Only expressions and references can contain calls. */
10112 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
10113 return;
10115 switch (TREE_CODE (exp))
10117 case CALL_EXPR:
10118 /* Do nothing if already expanded. */
10119 if (CALL_EXPR_RTL (exp) != 0
10120 /* Do nothing if the call returns a variable-sized object. */
10121 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
10122 /* Do nothing to built-in functions. */
10123 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
10124 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
10125 == FUNCTION_DECL)
10126 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
10127 return;
10129 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
10130 return;
10132 case COMPOUND_EXPR:
10133 case COND_EXPR:
10134 case TRUTH_ANDIF_EXPR:
10135 case TRUTH_ORIF_EXPR:
10136 /* If we find one of these, then we can be sure
10137 the adjust will be done for it (since it makes jumps).
10138 Do it now, so that if this is inside an argument
10139 of a function, we don't get the stack adjustment
10140 after some other args have already been pushed. */
10141 do_pending_stack_adjust ();
10142 return;
10144 case BLOCK:
10145 case RTL_EXPR:
10146 case WITH_CLEANUP_EXPR:
10147 case CLEANUP_POINT_EXPR:
10148 case TRY_CATCH_EXPR:
10149 return;
10151 case SAVE_EXPR:
10152 if (SAVE_EXPR_RTL (exp) != 0)
10153 return;
10155 default:
10156 break;
10159 nops = tree_code_length[(int) TREE_CODE (exp)];
10160 for (i = 0; i < nops; i++)
10161 if (TREE_OPERAND (exp, i) != 0)
10163 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
10164 if (type == 'e' || type == '<' || type == '1' || type == '2'
10165 || type == 'r')
10166 preexpand_calls (TREE_OPERAND (exp, i));
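/* For example, in

       f (g (x), h (y));

   the calls to g and h are expanded before any argument of f is
   pushed, so the deferred stack adjustment for their argument pops
   cannot fall between f's argument pushes.  */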
10170 /* At the start of a function, record that we have no previously-pushed
10171 arguments waiting to be popped. */
10173 void
10174 init_pending_stack_adjust ()
10176 pending_stack_adjust = 0;
10179 /* When exiting from function, if safe, clear out any pending stack adjust
10180 so the adjustment won't get done.
10182 Note, if the current function calls alloca, then it must have a
10183 frame pointer regardless of the value of flag_omit_frame_pointer. */
10185 void
10186 clear_pending_stack_adjust ()
10188 #ifdef EXIT_IGNORE_STACK
10189 if (optimize > 0
10190 && (! flag_omit_frame_pointer || current_function_calls_alloca)
10191 && EXIT_IGNORE_STACK
10192 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
10193 && ! flag_inline_functions)
10194 pending_stack_adjust = 0;
10195 #endif
10198 /* Pop any previously-pushed arguments that have not been popped yet. */
10200 void
10201 do_pending_stack_adjust ()
10203 if (inhibit_defer_pop == 0)
10205 if (pending_stack_adjust != 0)
10206 adjust_stack (GEN_INT (pending_stack_adjust));
10207 pending_stack_adjust = 0;
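/* For instance, two consecutive calls that each leave 4 bytes of
   arguments to pop accumulate a pending_stack_adjust of 8, and one
   adjust_stack of 8 bytes here replaces two separate pops of 4.  */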
10211 /* Expand conditional expressions. */
10213 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
10214 LABEL is an rtx of code CODE_LABEL, in this function and all the
10215 functions here. */
10217 void
10218 jumpifnot (exp, label)
10219 tree exp;
10220 rtx label;
10222 do_jump (exp, label, NULL_RTX);
10225 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
10227 void
10228 jumpif (exp, label)
10229 tree exp;
10230 rtx label;
10232 do_jump (exp, NULL_RTX, label);
10235 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
10236 the result is zero, or IF_TRUE_LABEL if the result is one.
10237 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
10238 meaning fall through in that case.
10240 do_jump always does any pending stack adjust except when it does not
10241 actually perform a jump. An example where there is no jump
10242 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
10244 This function is responsible for optimizing cases such as
10245 &&, || and comparison operators in EXP. */
10247 void
10248 do_jump (exp, if_false_label, if_true_label)
10249 tree exp;
10250 rtx if_false_label, if_true_label;
10252 register enum tree_code code = TREE_CODE (exp);
10253 /* Some cases need to create a label to jump to
10254 in order to properly fall through.
10255 These cases set DROP_THROUGH_LABEL nonzero. */
10256 rtx drop_through_label = 0;
10257 rtx temp;
10258 rtx comparison = 0;
10259 int i;
10260 tree type;
10261 enum machine_mode mode;
10263 #ifdef MAX_INTEGER_COMPUTATION_MODE
10264 check_max_integer_computation_mode (exp);
10265 #endif
10267 emit_queue ();
10269 switch (code)
10271 case ERROR_MARK:
10272 break;
10274 case INTEGER_CST:
10275 temp = integer_zerop (exp) ? if_false_label : if_true_label;
10276 if (temp)
10277 emit_jump (temp);
10278 break;
10280 #if 0
10281 /* This is not true with #pragma weak */
10282 case ADDR_EXPR:
10283 /* The address of something can never be zero. */
10284 if (if_true_label)
10285 emit_jump (if_true_label);
10286 break;
10287 #endif
10289 case NOP_EXPR:
10290 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
10291 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
10292 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
10293 goto normal;
10294 case CONVERT_EXPR:
10295 /* If we are narrowing the operand, we have to do the compare in the
10296 narrower mode. */
10297 if ((TYPE_PRECISION (TREE_TYPE (exp))
10298 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10299 goto normal;
10300 case NON_LVALUE_EXPR:
10301 case REFERENCE_EXPR:
10302 case ABS_EXPR:
10303 case NEGATE_EXPR:
10304 case LROTATE_EXPR:
10305 case RROTATE_EXPR:
10306 /* These cannot change zero->non-zero or vice versa. */
10307 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10308 break;
10310 #if 0
10311 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
10312 a test, and can be longer if the test is eliminated. */
10313 case PLUS_EXPR:
10314 /* Reduce to minus. */
10315 exp = build (MINUS_EXPR, TREE_TYPE (exp),
10316 TREE_OPERAND (exp, 0),
10317 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
10318 TREE_OPERAND (exp, 1))));
10319 /* Process as MINUS. */
10320 #endif
10322 case MINUS_EXPR:
10323 /* Non-zero iff operands of minus differ. */
10324 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
10325 TREE_OPERAND (exp, 0),
10326 TREE_OPERAND (exp, 1)),
10327 NE, NE);
10328 break;
10330 case BIT_AND_EXPR:
10331 /* If we are AND'ing with a small constant, do this comparison in the
10332 smallest type that fits. If the machine doesn't have comparisons
10333 that small, it will be converted back to the wider comparison.
10334 This helps if we are testing the sign bit of a narrower object.
10335 combine can't do this for us because it can't know whether a
10336 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
10338 if (! SLOW_BYTE_ACCESS
10339 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10340 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10341 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
10342 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10343 && (type = type_for_mode (mode, 1)) != 0
10344 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10345 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10346 != CODE_FOR_nothing))
10348 do_jump (convert (type, exp), if_false_label, if_true_label);
10349 break;
10351 goto normal;
10353 case TRUTH_NOT_EXPR:
10354 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10355 break;
10357 case TRUTH_ANDIF_EXPR:
10358 if (if_false_label == 0)
10359 if_false_label = drop_through_label = gen_label_rtx ();
10360 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10361 start_cleanup_deferral ();
10362 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10363 end_cleanup_deferral ();
10364 break;
10366 case TRUTH_ORIF_EXPR:
10367 if (if_true_label == 0)
10368 if_true_label = drop_through_label = gen_label_rtx ();
10369 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10370 start_cleanup_deferral ();
10371 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10372 end_cleanup_deferral ();
10373 break;
10375 case COMPOUND_EXPR:
10376 push_temp_slots ();
10377 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10378 preserve_temp_slots (NULL_RTX);
10379 free_temp_slots ();
10380 pop_temp_slots ();
10381 emit_queue ();
10382 do_pending_stack_adjust ();
10383 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10384 break;
10386 case COMPONENT_REF:
10387 case BIT_FIELD_REF:
10388 case ARRAY_REF:
10390 int bitsize, bitpos, unsignedp;
10391 enum machine_mode mode;
10392 tree type;
10393 tree offset;
10394 int volatilep = 0;
10395 int alignment;
10397 /* Get description of this reference. We don't actually care
10398 about the underlying object here. */
10399 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10400 &mode, &unsignedp, &volatilep,
10401 &alignment);
10403 type = type_for_size (bitsize, unsignedp);
10404 if (! SLOW_BYTE_ACCESS
10405 && type != 0 && bitsize >= 0
10406 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10407 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10408 != CODE_FOR_nothing))
10410 do_jump (convert (type, exp), if_false_label, if_true_label);
10411 break;
10413 goto normal;
10416 case COND_EXPR:
10417 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10418 if (integer_onep (TREE_OPERAND (exp, 1))
10419 && integer_zerop (TREE_OPERAND (exp, 2)))
10420 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10422 else if (integer_zerop (TREE_OPERAND (exp, 1))
10423 && integer_onep (TREE_OPERAND (exp, 2)))
10424 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10426 else
10428 register rtx label1 = gen_label_rtx ();
10429 drop_through_label = gen_label_rtx ();
10431 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10433 start_cleanup_deferral ();
10434 /* Now the THEN-expression. */
10435 do_jump (TREE_OPERAND (exp, 1),
10436 if_false_label ? if_false_label : drop_through_label,
10437 if_true_label ? if_true_label : drop_through_label);
10438 /* In case the do_jump just above never jumps. */
10439 do_pending_stack_adjust ();
10440 emit_label (label1);
10442 /* Now the ELSE-expression. */
10443 do_jump (TREE_OPERAND (exp, 2),
10444 if_false_label ? if_false_label : drop_through_label,
10445 if_true_label ? if_true_label : drop_through_label);
10446 end_cleanup_deferral ();
10448 break;
10450 case EQ_EXPR:
10452 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10454 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10455 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10457 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10458 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10459 do_jump
10460 (fold
10461 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10462 fold (build (EQ_EXPR, TREE_TYPE (exp),
10463 fold (build1 (REALPART_EXPR,
10464 TREE_TYPE (inner_type),
10465 exp0)),
10466 fold (build1 (REALPART_EXPR,
10467 TREE_TYPE (inner_type),
10468 exp1)))),
10469 fold (build (EQ_EXPR, TREE_TYPE (exp),
10470 fold (build1 (IMAGPART_EXPR,
10471 TREE_TYPE (inner_type),
10472 exp0)),
10473 fold (build1 (IMAGPART_EXPR,
10474 TREE_TYPE (inner_type),
10475 exp1)))))),
10476 if_false_label, if_true_label);
10479 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10480 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10482 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10483 && !can_compare_p (TYPE_MODE (inner_type)))
10484 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10485 else
10486 comparison = compare (exp, EQ, EQ);
10487 break;
10490 case NE_EXPR:
10492 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10494 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10495 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10497 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10498 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10499 do_jump
10500 (fold
10501 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10502 fold (build (NE_EXPR, TREE_TYPE (exp),
10503 fold (build1 (REALPART_EXPR,
10504 TREE_TYPE (inner_type),
10505 exp0)),
10506 fold (build1 (REALPART_EXPR,
10507 TREE_TYPE (inner_type),
10508 exp1)))),
10509 fold (build (NE_EXPR, TREE_TYPE (exp),
10510 fold (build1 (IMAGPART_EXPR,
10511 TREE_TYPE (inner_type),
10512 exp0)),
10513 fold (build1 (IMAGPART_EXPR,
10514 TREE_TYPE (inner_type),
10515 exp1)))))),
10516 if_false_label, if_true_label);
10519 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10520 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10522 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10523 && !can_compare_p (TYPE_MODE (inner_type)))
10524 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10525 else
10526 comparison = compare (exp, NE, NE);
10527 break;
10530 case LT_EXPR:
10531 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10532 == MODE_INT)
10533 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10534 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10535 else
10536 comparison = compare (exp, LT, LTU);
10537 break;
10539 case LE_EXPR:
10540 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10541 == MODE_INT)
10542 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10543 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10544 else
10545 comparison = compare (exp, LE, LEU);
10546 break;
10548 case GT_EXPR:
10549 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10550 == MODE_INT)
10551 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10552 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10553 else
10554 comparison = compare (exp, GT, GTU);
10555 break;
10557 case GE_EXPR:
10558 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10559 == MODE_INT)
10560 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10561 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10562 else
10563 comparison = compare (exp, GE, GEU);
10564 break;
10566 default:
10567 normal:
10568 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10569 #if 0
10570 /* This is not needed any more and causes poor code since it causes
10571 comparisons and tests from non-SI objects to have different code
10572 sequences. */
10573 /* Copy to register to avoid generating bad insns by cse
10574 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10575 if (!cse_not_expected && GET_CODE (temp) == MEM)
10576 temp = copy_to_reg (temp);
10577 #endif
10578 do_pending_stack_adjust ();
10579 if (GET_CODE (temp) == CONST_INT)
10580 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10581 else if (GET_CODE (temp) == LABEL_REF)
10582 comparison = const_true_rtx;
10583 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10584 && !can_compare_p (GET_MODE (temp)))
10585 /* Note swapping the labels gives us not-equal. */
10586 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10587 else if (GET_MODE (temp) != VOIDmode)
10588 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10589 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10590 GET_MODE (temp), NULL_RTX, 0);
10591 else
10592 abort ();
10595 /* Do any postincrements in the expression that was tested. */
10596 emit_queue ();
10598 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10599 straight into a conditional jump instruction as the jump condition.
10600 Otherwise, all the work has been done already. */
10602 if (comparison == const_true_rtx)
10604 if (if_true_label)
10605 emit_jump (if_true_label);
10607 else if (comparison == const0_rtx)
10609 if (if_false_label)
10610 emit_jump (if_false_label);
10612 else if (comparison)
10613 do_jump_for_compare (comparison, if_false_label, if_true_label);
10615 if (drop_through_label)
10617 /* If do_jump produces code that might be jumped around,
10618 do any stack adjusts from that code, before the place
10619 where control merges in. */
10620 do_pending_stack_adjust ();
10621 emit_label (drop_through_label);
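/* For example, for

       if (a && b) stmt;

   the TRUTH_ANDIF_EXPR case above emits roughly

       jump to L_false if a == 0
       jump to L_false if b == 0
       stmt
     L_false:

   without ever materializing the value of `a && b' in a register.  */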
10625 /* Given a comparison expression EXP for values too wide to be compared
10626 with one insn, test the comparison and jump to the appropriate label.
10627 The code of EXP is ignored; we always test GT if SWAP is 0,
10628 and LT if SWAP is 1. */
10630 static void
10631 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10632 tree exp;
10633 int swap;
10634 rtx if_false_label, if_true_label;
10636 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10637 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10638 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10639 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10640 rtx drop_through_label = 0;
10641 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10642 int i;
10644 if (! if_true_label || ! if_false_label)
10645 drop_through_label = gen_label_rtx ();
10646 if (! if_true_label)
10647 if_true_label = drop_through_label;
10648 if (! if_false_label)
10649 if_false_label = drop_through_label;
10651 /* Compare a word at a time, high order first. */
10652 for (i = 0; i < nwords; i++)
10654 rtx comp;
10655 rtx op0_word, op1_word;
10657 if (WORDS_BIG_ENDIAN)
10659 op0_word = operand_subword_force (op0, i, mode);
10660 op1_word = operand_subword_force (op1, i, mode);
10662 else
10664 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10665 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10668 /* All but the high-order word must be compared as unsigned. */
10669 comp = compare_from_rtx (op0_word, op1_word,
10670 (unsignedp || i > 0) ? GTU : GT,
10671 unsignedp, word_mode, NULL_RTX, 0);
10672 if (comp == const_true_rtx)
10673 emit_jump (if_true_label);
10674 else if (comp != const0_rtx)
10675 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10677 /* Consider lower words only if these are equal. */
10678 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10679 NULL_RTX, 0);
10680 if (comp == const_true_rtx)
10681 emit_jump (if_false_label);
10682 else if (comp != const0_rtx)
10683 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10686 if (if_false_label)
10687 emit_jump (if_false_label);
10688 if (drop_through_label)
10689 emit_label (drop_through_label);
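/* For example, comparing two signed double-word values OP0 > OP1 on a
   32-bit target, the loop above emits in effect

       if (op0.high >  op1.high) goto if_true;	[signed compare]
       if (op0.high != op1.high) goto if_false;
       if (op0.low  >  op1.low)  goto if_true;	[unsigned compare]
       goto if_false;

   since only the high-order word carries the sign.  */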
10692 /* Compare OP0 with OP1, word at a time, in mode MODE.
10693 UNSIGNEDP says to do unsigned comparison.
10694 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10696 void
10697 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10698 enum machine_mode mode;
10699 int unsignedp;
10700 rtx op0, op1;
10701 rtx if_false_label, if_true_label;
10703 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10704 rtx drop_through_label = 0;
10705 int i;
10707 if (! if_true_label || ! if_false_label)
10708 drop_through_label = gen_label_rtx ();
10709 if (! if_true_label)
10710 if_true_label = drop_through_label;
10711 if (! if_false_label)
10712 if_false_label = drop_through_label;
10714 /* Compare a word at a time, high order first. */
10715 for (i = 0; i < nwords; i++)
10717 rtx comp;
10718 rtx op0_word, op1_word;
10720 if (WORDS_BIG_ENDIAN)
10722 op0_word = operand_subword_force (op0, i, mode);
10723 op1_word = operand_subword_force (op1, i, mode);
10725 else
10727 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10728 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10731 /* All but the high-order word must be compared as unsigned. */
10732 comp = compare_from_rtx (op0_word, op1_word,
10733 (unsignedp || i > 0) ? GTU : GT,
10734 unsignedp, word_mode, NULL_RTX, 0);
10735 if (comp == const_true_rtx)
10736 emit_jump (if_true_label);
10737 else if (comp != const0_rtx)
10738 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10740 /* Consider lower words only if these are equal. */
10741 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10742 NULL_RTX, 0);
10743 if (comp == const_true_rtx)
10744 emit_jump (if_false_label);
10745 else if (comp != const0_rtx)
10746 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10749 if (if_false_label)
10750 emit_jump (if_false_label);
10751 if (drop_through_label)
10752 emit_label (drop_through_label);
10755 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10756 with one insn, test the comparison and jump to the appropriate label. */
10758 static void
10759 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10760 tree exp;
10761 rtx if_false_label, if_true_label;
10763 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10764 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10765 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10766 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10767 int i;
10768 rtx drop_through_label = 0;
10770 if (! if_false_label)
10771 drop_through_label = if_false_label = gen_label_rtx ();
10773 for (i = 0; i < nwords; i++)
10775 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10776 operand_subword_force (op1, i, mode),
10777 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10778 word_mode, NULL_RTX, 0);
10779 if (comp == const_true_rtx)
10780 emit_jump (if_false_label);
10781 else if (comp != const0_rtx)
10782 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10785 if (if_true_label)
10786 emit_jump (if_true_label);
10787 if (drop_through_label)
10788 emit_label (drop_through_label);
10791 /* Jump according to whether OP0 is 0.
10792 We assume that OP0 has an integer mode that is too wide
10793 for the available compare insns. */
10795 void
10796 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10797 rtx op0;
10798 rtx if_false_label, if_true_label;
10800 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10801 rtx part;
10802 int i;
10803 rtx drop_through_label = 0;
10805 /* The fastest way of doing this comparison on almost any machine is to
10806 "or" all the words and compare the result. If all have to be loaded
10807 from memory and this is a very wide item, this might be slower,
10808 but that's highly unlikely. */
10810 part = gen_reg_rtx (word_mode);
10811 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10812 for (i = 1; i < nwords && part != 0; i++)
10813 part = expand_binop (word_mode, ior_optab, part,
10814 operand_subword_force (op0, i, GET_MODE (op0)),
10815 part, 1, OPTAB_WIDEN);
10817 if (part != 0)
10819 rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
10820 NULL_RTX, 0);
10822 if (comp == const_true_rtx)
10823 emit_jump (if_false_label);
10824 else if (comp == const0_rtx)
10825 emit_jump (if_true_label);
10826 else
10827 do_jump_for_compare (comp, if_false_label, if_true_label);
10829 return;
10832 /* If we couldn't do the "or" simply, do this with a series of compares. */
10833 if (! if_false_label)
10834 drop_through_label = if_false_label = gen_label_rtx ();
10836 for (i = 0; i < nwords; i++)
10838 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
10839 GET_MODE (op0)),
10840 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10841 if (comp == const_true_rtx)
10842 emit_jump (if_false_label);
10843 else if (comp != const0_rtx)
10844 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10847 if (if_true_label)
10848 emit_jump (if_true_label);
10850 if (drop_through_label)
10851 emit_label (drop_through_label);
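/* For instance, a double-word value made of words HI and LO is zero
   exactly when (HI | LO) == 0, so one IOR and one compare replace a
   chain of per-word compares and conditional jumps.  */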
10854 /* Given a comparison expression in rtl form, output conditional branches to
10855 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
      else
        abort ();

      if (if_false_label)
        emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx insn;
      rtx prev = get_last_insn ();
      rtx branch = 0;
      /* Output the branch with the opposite condition.  Then try to invert
         what is generated.  If more than one insn is a branch, or if the
         branch is not the last insn written, abort.  If we can't invert
         the branch, make a true label, redirect this jump to that,
         emit a jump to the false label and define the true label.  */
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
      else
        abort ();

      /* Here we get the first insn that was just emitted.  It used to be the
         case that, on some machines, emitting the branch would discard
         the previous compare insn and emit a replacement.  This isn't
         done anymore, but abort if we see that PREV is deleted.  */

      if (prev == 0)
        insn = get_insns ();
      else if (INSN_DELETED_P (prev))
        abort ();
      else
        insn = NEXT_INSN (prev);

      for (; insn; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == JUMP_INSN)
          {
            if (branch)
              abort ();
            branch = insn;
          }

      if (branch != get_last_insn ())
        abort ();

      JUMP_LABEL (branch) = if_false_label;
      if (! invert_jump (branch, if_false_label))
        {
          if_true_label = gen_label_rtx ();
          redirect_jump (branch, if_true_label);
          emit_jump (if_false_label);
          emit_label (if_true_label);
        }
    }
}
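/* An illustrative sketch of the fallback above, assuming the target
   cannot invert a branch on COMPARISON (not part of the original
   comment): instead of the single inverted branch

        bCC-inverted  IF_FALSE_LABEL

   the emitted sequence becomes

        bCC    L_true
        jmp    IF_FALSE_LABEL
     L_true:

   where L_true is the freshly generated true label.  */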
/* Generate code for a comparison expression EXP
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */
static rtx
compare (exp, signed_code, unsigned_code)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
{
  register rtx op0
    = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  register rtx op1
    = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
  register enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);
  enum rtx_code code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  return compare_from_rtx (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}
/* Like compare but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */
rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;
#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
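/* An illustrative note, not part of the original comment: for a signed
   SImode "a < b" the call above emits the compare insn, and the value
   handed back to the caller is the bare condition rtx

        (lt (cc0) (const_int 0))

   which do_jump_for_compare later turns into an actual branch.  */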
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
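/* An illustrative sketch, assuming a machine with an scc-style
   instruction (not part of the original comment): for "r = (a == b)"
   the store-flag path emits roughly

        compare a, b
        scc-EQ  r          ; set R to 1 if equal, else to 0

   rather than the branching set/jump/set sequence described above.  */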
static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;
  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }
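  /* For instance (an illustrative note, not in the original sources):
     the test "x < 1" is rewritten by the switch above as "x <= 0", and
     "x >= 1" as "x > 0", so the zero-comparison special cases below can
     recognize them.  */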
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
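  /* An illustrative example, not part of the original comment: the
     test "(x & 8) != 0" is compiled by the code below as

        t = (x >> 3) & 1;

     and "(x & 8) == 0" gets an extra "t ^= 1", avoiding any scc or
     branch instruction.  */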
  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
              < TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (subtarget == 0 || GET_CODE (subtarget) != REG
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }
  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }
  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
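/* An illustrative expansion of the set/jump/set fallback above (not
   part of the original comment), for "target = (op0 < op1)" with
   INVERT clear:

        target = 1;
        if (op0 < op1) goto L;
        target = 0;
     L:

   When INVERT is set, the two constants are simply swapped.  */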
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */
void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
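  /* A worked example (an illustration, not part of the original
     comment): for case values 5..9, INDEX arrives here as "value - 5"
     and RANGE as 4, so the single unsigned test

        (unsigned) (value - 5) > 4

     rejects both value < 5 (which wrapped around to a huge unsigned
     number) and value > 9.  */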
  emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
  emit_jump_insn (gen_bgtu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
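/* An illustrative note, not part of the original comment: on a typical
   non-PIC 32-bit target with an SImode case vector, the address built
   above amounts to "table_label + index * 4"; the word fetched from
   the table is copied into TEMP and the tablejump insn branches
   through it.  */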
#endif /* HAVE_tablejump */