Do not do src->dest copy if register would not be allocated a normal register
[official-gcc.git] / gcc / expr.c
blob 657737f63e0dfa39dad5cc55eb37ed5083ea5e03
/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-flags.h"
34 #include "insn-codes.h"
35 #include "insn-config.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "recog.h"
39 #include "output.h"
40 #include "typeclass.h"
41 #include "defaults.h"
42 #include "toplev.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* Don't check memory usage, since code is being emitted to check memory
   usage.  Used when flag_check_memory_usage is true, to avoid infinite
   recursion.  */
static int in_check_memory_usage;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int len;
  int offset;
  int reverse;
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};

extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;

static rtx get_push_address PROTO ((int));

static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
				    struct move_by_pieces *));
static void clear_by_pieces PROTO((rtx, int, int));
static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
				     struct clear_by_pieces *));
static int is_zeros_p PROTO((tree));
static int mostly_zeros_p PROTO((tree));
static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
					    tree, tree, int));
static void store_constructor PROTO((tree, rtx, int));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
			      enum machine_mode, int, int, int));
static enum memory_use_mode
get_memory_usage_from_modifier PROTO((enum expand_modifier));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree, int));
static int fixed_type_p PROTO((tree));
static rtx var_rtx PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx,
				 enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int, int));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
extern tree truthvalue_conversion PROTO((tree));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif

/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}
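
/* Illustrative note (not part of the original file): DIRECT_LOAD and
   DIRECT_STORE are consulted later, e.g. by convert_move and
   convert_modes, before referring to a MEM in a narrower mode.  A
   hypothetical query would look like this.  */
#if 0
static int
example_simode_direct_load_p ()
{
  /* Nonzero if some hard register can be loaded from SImode memory
     with a single recognizable insn.  */
  return direct_load[(int) SImode];
}
#endif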

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var),
				  var, NULL_RTX, NULL_RTX, body,
				  pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while ((p = pending_chain))
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
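
/* Illustrative sketch (not in the original file): how the increment
   queue is typically used when expanding a postincrement such as `v++'.
   The function name is hypothetical; the real user of this machinery
   is expand_increment, further down in this file.  */
#if 0
static rtx
example_expand_postincrement (var)
     rtx var;		/* the lvalue being post-incremented */
{
  /* Queue "var = var + 1"; the QUEUED rtx stands for VAR's
     pre-increment value.  */
  rtx queued = enqueue_insn (var, gen_add2_insn (copy_rtx (var), const1_rtx));
  rtx temp = gen_reg_rtx (GET_MODE (var));

  /* protect_from_queue must be called just as the rtx is put into an
     insn; since the increment has not been emitted yet, it still
     yields VAR itself here.  */
  emit_move_insn (temp, protect_from_queue (queued, 0));

  /* Now emit the queued increment of VAR.  */
  emit_queue ();
  return temp;
}
#endif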

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220 */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
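
/* Illustrative sketch (not in the original file): a typical call to
   convert_move, zero-extending a QImode value into a fresh SImode
   pseudo.  The names are hypothetical.  */
#if 0
static rtx
example_zero_extend_qi_to_si (from)
     rtx from;		/* a QImode value */
{
  rtx to = gen_reg_rtx (SImode);

  convert_move (to, from, 1);	/* 1 = treat FROM as unsigned */
  return to;
}
#endif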

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
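
/* Illustrative sketch (not in the original file): convert_to_mode and
   convert_modes are the value-returning counterparts of convert_move.
   Passing OLDMODE explicitly matters for constants, which carry
   VOIDmode.  The function name is hypothetical.  */
#if 0
static rtx
example_narrow_constant ()
{
  /* Interpret the constant as an SImode value and take its HImode low
     part; for a CONST_INT this folds to an equivalent CONST_INT
     (0x5678 here) through gen_lowpart, without emitting any insns.  */
  return convert_modes (HImode, SImode, GEN_INT (0x12345678), 1);
}
#endif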

/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx_MEM (mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

      from1
	= (data->autinc_from
	   ? gen_rtx_MEM (mode, data->from_addr)
	   : copy_rtx (change_address (data->from, mode,
				       plus_constant (data->from_addr,
						      data->offset))));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}

/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  rtx retval = 0;

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= (GET_MODE_MASK (mode) >> 1)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return 0;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      retval
	= emit_library_call_value (memcpy_libfunc, NULL_RTX, 0,
				   ptr_mode, 3, XEXP (x, 0), Pmode,
				   XEXP (y, 0), Pmode,
				   convert_to_mode (TYPE_MODE (sizetype), size,
						    TREE_UNSIGNED (sizetype)),
				   TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
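
/* Illustrative sketch (not in the original file): copying a small,
   fixed-size block.  A CONST_INT size below the MOVE_RATIO threshold
   expands into a few scalar moves via move_by_pieces; otherwise the
   movstr patterns are tried, and finally the memcpy/bcopy libcall.
   The function name is hypothetical.  */
#if 0
static void
example_copy_16_bytes (dest, src)
     rtx dest, src;	/* BLKmode MEMs */
{
  emit_block_move (dest, src, GEN_INT (16),
		   UNITS_PER_WORD /* alignment in bytes */);
}
#endif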

/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
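
/* Illustrative sketch (not in the original file): loading a two-word
   value into consecutive hard registers, as when setting up a call.
   The starting register number is target-specific and hypothetical.  */
#if 0
static void
example_load_arg_regs (x)
     rtx x;		/* a DImode value */
{
  move_block_to_reg (4 /* hypothetical first argument register */,
		     x, 2 /* nregs */, DImode);
}
#endif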

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (change_address (x, mode, NULL),
		      gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
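
/* Illustrative sketch (not in the original file): spilling a BLKmode
   value that arrives in registers into a stack temporary.  The
   register number is target-specific and hypothetical.  */
#if 0
static void
example_store_regs_to_mem (mem)
     rtx mem;		/* a BLKmode MEM at least two words long */
{
  move_block_from_reg (2 /* hypothetical first value register */,
		       mem, 2 /* nregs */,
		       2 * UNITS_PER_WORD /* size in bytes */);
}
#endif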

/* Emit code to move a block Y to a block X, where X is non-consecutive
   registers represented by a PARALLEL.  */

void
emit_group_load (x, y)
     rtx x, y;
{
  rtx target_reg, source;
  int i;

  if (GET_CODE (x) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (x, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (x, 0); i++)
    {
      rtx element = XVECEXP (x, 0, i);

      target_reg = XEXP (element, 0);

      if (GET_CODE (y) == MEM)
	source = change_address (y, GET_MODE (target_reg),
				 plus_constant (XEXP (y, 0),
						INTVAL (XEXP (element, 1))));
      else if (XEXP (element, 1) == const0_rtx)
	{
	  if (GET_MODE (target_reg) == GET_MODE (y))
	    source = y;
	  /* Allow for the target_reg to be smaller than the input register
	     to allow for AIX with 4 DF arguments after a single SI arg.  The
	     last DF argument will only load 1 word into the integer registers,
	     but load a DF value into the float registers.  */
	  else if ((GET_MODE_SIZE (GET_MODE (target_reg))
		    <= GET_MODE_SIZE (GET_MODE (y)))
		   && GET_MODE (target_reg) == word_mode)
	    /* This might be a const_double, so we can't just use SUBREG.  */
	    source = operand_subword (y, 0, 0, VOIDmode);
	  else if (GET_MODE_SIZE (GET_MODE (target_reg))
		   == GET_MODE_SIZE (GET_MODE (y)))
	    source = gen_lowpart (GET_MODE (target_reg), y);
	  else
	    abort ();
	}
      else
	abort ();

      emit_move_insn (target_reg, source);
    }
}
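
/* Illustrative sketch (not in the original file): the PARALLEL that
   emit_group_load and emit_group_store expect.  Each element is an
   EXPR_LIST pairing a register with the byte offset it covers; such
   PARALLELs come from the target's calling-convention code (the Irix 6
   ABI is the canonical user).  The register numbers are hypothetical.  */
#if 0
static void
example_group_load (src)
     rtx src;		/* a 16-byte BLKmode MEM */
{
  rtx par = gen_rtx_PARALLEL
    (VOIDmode,
     gen_rtvec (2,
		gen_rtx_EXPR_LIST (VOIDmode,
				   gen_rtx_REG (DImode, 32), const0_rtx),
		gen_rtx_EXPR_LIST (VOIDmode,
				   gen_rtx_REG (DImode, 33), GEN_INT (8))));

  emit_group_load (par, src);
}
#endif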

/* Emit code to move a block Y to a block X, where Y is non-consecutive
   registers represented by a PARALLEL.  */

void
emit_group_store (x, y)
     rtx x, y;
{
  rtx source_reg, target;
  int i;

  if (GET_CODE (y) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (y, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (y, 0); i++)
    {
      rtx element = XVECEXP (y, 0, i);

      source_reg = XEXP (element, 0);

      if (GET_CODE (x) == MEM)
	target = change_address (x, GET_MODE (source_reg),
				 plus_constant (XEXP (x, 0),
						INTVAL (XEXP (element, 1))));
      else if (XEXP (element, 1) == const0_rtx)
	{
	  target = x;
	  if (GET_MODE (target) != GET_MODE (source_reg))
	    target = gen_lowpart (GET_MODE (source_reg), target);
	}
      else
	abort ();

      emit_move_insn (target, source_reg);
    }
}

/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
	use_reg (call_fusage, reg);
    }
}
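
/* Illustrative sketch (not in the original file): recording the hard
   registers a call reads, so that flow analysis keeps their setup
   alive.  The resulting list becomes the CALL_INSN_FUNCTION_USAGE of
   the call insn.  The register numbers are hypothetical.  */
#if 0
static void
example_record_call_usage ()
{
  rtx call_fusage = 0;

  use_reg (&call_fusage, gen_rtx_REG (SImode, 0));
  use_regs (&call_fusage, 4, 2);	/* registers 4 and 5 */
  /* ... emit the call insn, then attach CALL_FUSAGE to it ...  */
}
#endif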

/* Generate several move instructions to clear LEN bytes of block TO.
   (A MEM rtx with BLKmode).  The caller must pass TO through
   protect_from_queue before calling.  ALIGN (in bytes) is maximum alignment
   we can assume.  */

static void
clear_by_pieces (to, len, align)
     rtx to;
     int len, align;
{
  struct clear_by_pieces data;
  rtx to_addr = XEXP (to, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.to = to;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data.autinc_to
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	clear_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}

/* Subroutine of clear_by_pieces.  Clear as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
clear_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct clear_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx_MEM (mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, const0_rtx));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
2088 /* Write zeros through the storage of OBJECT.
2089 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2090 the maximum alignment we can assume it has, measured in bytes.
2092 If we call a function that returns the length of the block, return it. */
2094 rtx
2095 clear_storage (object, size, align)
2096 rtx object;
2097 rtx size;
2098 int align;
2100 rtx retval = 0;
2102 if (GET_MODE (object) == BLKmode)
2104 object = protect_from_queue (object, 1);
2105 size = protect_from_queue (size, 0);
2107 if (GET_CODE (size) == CONST_INT
2108 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2109 clear_by_pieces (object, INTVAL (size), align);
2111 else
2113 /* Try the most limited insn first, because there's no point
2114 including more than one in the machine description unless
2115 the more limited one has some advantage. */
2117 rtx opalign = GEN_INT (align);
2118 enum machine_mode mode;
2120 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2121 mode = GET_MODE_WIDER_MODE (mode))
2123 enum insn_code code = clrstr_optab[(int) mode];
2125 if (code != CODE_FOR_nothing
2126 /* We don't need MODE to be narrower than
2127 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2128 the mode mask, as it is returned by the macro, it will
2129 definitely be less than the actual mode mask. */
2130 && ((GET_CODE (size) == CONST_INT
2131 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2132 <= (GET_MODE_MASK (mode) >> 1)))
2133 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2134 && (insn_operand_predicate[(int) code][0] == 0
2135 || (*insn_operand_predicate[(int) code][0]) (object,
2136 BLKmode))
2137 && (insn_operand_predicate[(int) code][2] == 0
2138 || (*insn_operand_predicate[(int) code][2]) (opalign,
2139 VOIDmode)))
2141 rtx op1;
2142 rtx last = get_last_insn ();
2143 rtx pat;
2145 op1 = convert_to_mode (mode, size, 1);
2146 if (insn_operand_predicate[(int) code][1] != 0
2147 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2148 mode))
2149 op1 = copy_to_mode_reg (mode, op1);
2151 pat = GEN_FCN ((int) code) (object, op1, opalign);
2152 if (pat)
2154 emit_insn (pat);
2155 return 0;
2157 else
2158 delete_insns_since (last);
2163 #ifdef TARGET_MEM_FUNCTIONS
2164 retval
2165 = emit_library_call_value (memset_libfunc, NULL_RTX, 0,
2166 ptr_mode, 3,
2167 XEXP (object, 0), Pmode,
2168 const0_rtx,
2169 TYPE_MODE (integer_type_node),
2170 convert_to_mode
2171 (TYPE_MODE (sizetype), size,
2172 TREE_UNSIGNED (sizetype)),
2173 TYPE_MODE (sizetype));
2174 #else
2175 emit_library_call (bzero_libfunc, 0,
2176 VOIDmode, 2,
2177 XEXP (object, 0), Pmode,
2178 convert_to_mode
2179 (TYPE_MODE (integer_type_node), size,
2180 TREE_UNSIGNED (integer_type_node)),
2181 TYPE_MODE (integer_type_node));
2182 #endif
2185 else
2186 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2188 return retval;
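/* As a rough illustration, assuming the usual MOVE_RATIO of 15,

     clear_storage (object, GEN_INT (16), 4);

   on a BLKmode OBJECT would be handled by clear_by_pieces (a few
   word-sized stores are well under the ratio), whereas a large or
   non-constant SIZE first tries the clrstrM patterns and finally
   falls back to the memset/bzero library call above.  */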
2191 /* Generate code to copy Y into X.
2192 Both Y and X must have the same mode, except that
2193 Y can be a constant with VOIDmode.
2194 This mode cannot be BLKmode; use emit_block_move for that.
2196 Return the last instruction emitted. */
2198 rtx
2199 emit_move_insn (x, y)
2200 rtx x, y;
2202 enum machine_mode mode = GET_MODE (x);
2204 x = protect_from_queue (x, 1);
2205 y = protect_from_queue (y, 0);
2207 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2208 abort ();
2210 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2211 y = force_const_mem (mode, y);
2213 /* If X or Y are memory references, verify that their addresses are valid
2214 for the machine. */
2215 if (GET_CODE (x) == MEM
2216 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2217 && ! push_operand (x, GET_MODE (x)))
2218 || (flag_force_addr
2219 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2220 x = change_address (x, VOIDmode, XEXP (x, 0));
2222 if (GET_CODE (y) == MEM
2223 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2224 || (flag_force_addr
2225 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2226 y = change_address (y, VOIDmode, XEXP (y, 0));
2228 if (mode == BLKmode)
2229 abort ();
2231 return emit_move_insn_1 (x, y);
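/* A typical use, copying a constant into a fresh pseudo:

     rtx reg = gen_reg_rtx (SImode);
     emit_move_insn (reg, GEN_INT (42));

   If the constant were not LEGITIMATE_CONSTANT_P on the target,
   it would first be forced into the constant pool above.  */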
2234 /* Low level part of emit_move_insn.
2235 Called just like emit_move_insn, but assumes X and Y
2236 are basically valid. */
2238 rtx
2239 emit_move_insn_1 (x, y)
2240 rtx x, y;
2242 enum machine_mode mode = GET_MODE (x);
2243 enum machine_mode submode;
2244 enum mode_class class = GET_MODE_CLASS (mode);
2245 int i;
2247 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2248 return
2249 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2251 /* Expand complex moves by moving real part and imag part, if possible. */
2252 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2253 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2254 * BITS_PER_UNIT),
2255 (class == MODE_COMPLEX_INT
2256 ? MODE_INT : MODE_FLOAT),
2257 0))
2258 && (mov_optab->handlers[(int) submode].insn_code
2259 != CODE_FOR_nothing))
2261 /* Don't split destination if it is a stack push. */
2262 int stack = push_operand (x, GET_MODE (x));
2264 /* If this is a stack push, push the highpart first, so it
2265 will be in the argument order.
2267 In that case, change_address is used only to convert
2268 the mode, not to change the address. */
2269 if (stack)
2271 /* Note that the real part always precedes the imag part in memory
2272 regardless of machine's endianness. */
2273 #ifdef STACK_GROWS_DOWNWARD
2274 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2275 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2276 gen_imagpart (submode, y)));
2277 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2278 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2279 gen_realpart (submode, y)));
2280 #else
2281 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2282 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2283 gen_realpart (submode, y)));
2284 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2285 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2286 gen_imagpart (submode, y)));
2287 #endif
2289 else
2291 /* Show the output dies here. */
2292 if (x != y)
2293 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
2295 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2296 (gen_realpart (submode, x), gen_realpart (submode, y)));
2297 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2298 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2301 return get_last_insn ();
2304 /* This will handle any multi-word mode that lacks a move_insn pattern.
2305 However, you will get better code if you define such patterns,
2306 even if they must turn into multiple assembler instructions. */
2307 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2309 rtx last_insn = 0;
2311 #ifdef PUSH_ROUNDING
2313 /* If X is a push on the stack, do the push now and replace
2314 X with a reference to the stack pointer. */
2315 if (push_operand (x, GET_MODE (x)))
2317 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2318 x = change_address (x, VOIDmode, stack_pointer_rtx);
2320 #endif
2322 /* Show the output dies here. */
2323 if (x != y)
2324 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2326 for (i = 0;
2327 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2328 i++)
2330 rtx xpart = operand_subword (x, i, 1, mode);
2331 rtx ypart = operand_subword (y, i, 1, mode);
2333 /* If we can't get a part of Y, put Y into memory if it is a
2334 constant. Otherwise, force it into a register. If we still
2335 can't get a part of Y, abort. */
2336 if (ypart == 0 && CONSTANT_P (y))
2338 y = force_const_mem (mode, y);
2339 ypart = operand_subword (y, i, 1, mode);
2341 else if (ypart == 0)
2342 ypart = operand_subword_force (y, i, mode);
2344 if (xpart == 0 || ypart == 0)
2345 abort ();
2347 last_insn = emit_move_insn (xpart, ypart);
2350 return last_insn;
2352 else
2353 abort ();
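/* For a multi-word mode with no movMM pattern, say a hypothetical
   DImode move on a 32-bit target that only defines movsi, the loop
   above amounts to roughly:

     emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
     emit_move_insn (operand_subword (x, 0, 1, DImode),
                     operand_subword_force (y, 0, DImode));
     emit_move_insn (operand_subword (x, 1, 1, DImode),
                     operand_subword_force (y, 1, DImode));  */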
2356 /* Pushing data onto the stack. */
2358 /* Push a block of length SIZE (perhaps variable)
2359 and return an rtx to address the beginning of the block.
2360 Note that it is not possible for the value returned to be a QUEUED.
2361 The value may be virtual_outgoing_args_rtx.
2363 EXTRA is the number of bytes of padding to push in addition to SIZE.
2364 BELOW nonzero means this padding comes at low addresses;
2365 otherwise, the padding comes at high addresses. */
2367 rtx
2368 push_block (size, extra, below)
2369 rtx size;
2370 int extra, below;
2372 register rtx temp;
2374 size = convert_modes (Pmode, ptr_mode, size, 1);
2375 if (CONSTANT_P (size))
2376 anti_adjust_stack (plus_constant (size, extra));
2377 else if (GET_CODE (size) == REG && extra == 0)
2378 anti_adjust_stack (size);
2379 else
2381 rtx temp = copy_to_mode_reg (Pmode, size);
2382 if (extra != 0)
2383 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2384 temp, 0, OPTAB_LIB_WIDEN);
2385 anti_adjust_stack (temp);
2388 #ifdef STACK_GROWS_DOWNWARD
2389 temp = virtual_outgoing_args_rtx;
2390 if (extra != 0 && below)
2391 temp = plus_constant (temp, extra);
2392 #else
2393 if (GET_CODE (size) == CONST_INT)
2394 temp = plus_constant (virtual_outgoing_args_rtx,
2395 - INTVAL (size) - (below ? 0 : extra));
2396 else if (extra != 0 && !below)
2397 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2398 negate_rtx (Pmode, plus_constant (size, extra)));
2399 else
2400 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2401 negate_rtx (Pmode, size));
2402 #endif
2404 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
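/* For instance, pushing a 16-byte block with 4 bytes of padding
   below it might be requested as

     rtx addr = push_block (GEN_INT (16), 4, 1);

   which adjusts the stack by 20 bytes and, when the stack grows
   downward, returns virtual_outgoing_args_rtx plus 4 so that the
   padding sits below the returned block.  */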
2407 rtx
2408 gen_push_operand ()
2410 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
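/* On a STACK_GROWS_DOWNWARD target this evaluates to
   (pre_dec (reg sp)), so that, e.g.,

     gen_rtx_MEM (SImode, gen_push_operand ())

   is a memory reference that pushes a word onto the stack when
   stored into.  */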
2413 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2414 block of SIZE bytes. */
2416 static rtx
2417 get_push_address (size)
2418 int size;
2420 register rtx temp;
2422 if (STACK_PUSH_CODE == POST_DEC)
2423 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2424 else if (STACK_PUSH_CODE == POST_INC)
2425 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2426 else
2427 temp = stack_pointer_rtx;
2429 return copy_to_reg (temp);
2432 /* Generate code to push X onto the stack, assuming it has mode MODE and
2433 type TYPE.
2434 MODE is redundant except when X is a CONST_INT (since they don't
2435 carry mode info).
2436 SIZE is an rtx for the size of data to be copied (in bytes),
2437 needed only if X is BLKmode.
2439 ALIGN (in bytes) is the maximum alignment we can assume.
2441 If PARTIAL and REG are both nonzero, then copy that many of the first
2442 words of X into registers starting with REG, and push the rest of X.
2443 The amount of space pushed is decreased by PARTIAL words,
2444 rounded *down* to a multiple of PARM_BOUNDARY.
2445 REG must be a hard register in this case.
2446 If REG is zero but PARTIAL is not, take all other actions for an
2447 argument partially in registers, but do not actually load any
2448 registers.
2450 EXTRA is the amount in bytes of extra space to leave next to this arg.
2451 This is ignored if an argument block has already been allocated.
2453 On a machine that lacks real push insns, ARGS_ADDR is the address of
2454 the bottom of the argument block for this call. We use indexing off there
2455 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2456 argument block has not been preallocated.
2458 ARGS_SO_FAR is the size of args previously pushed for this call.
2460 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2461 for arguments passed in registers. If nonzero, it will be the number
2462 of bytes required. */
2464 void
2465 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2466 args_addr, args_so_far, reg_parm_stack_space)
2467 register rtx x;
2468 enum machine_mode mode;
2469 tree type;
2470 rtx size;
2471 int align;
2472 int partial;
2473 rtx reg;
2474 int extra;
2475 rtx args_addr;
2476 rtx args_so_far;
2477 int reg_parm_stack_space;
2479 rtx xinner;
2480 enum direction stack_direction
2481 #ifdef STACK_GROWS_DOWNWARD
2482 = downward;
2483 #else
2484 = upward;
2485 #endif
2487 /* Decide where to pad the argument: `downward' for below,
2488 `upward' for above, or `none' for don't pad it.
2489 Default is below for small data on big-endian machines; else above. */
2490 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2492 /* Invert direction if stack is post-update. */
2493 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2494 if (where_pad != none)
2495 where_pad = (where_pad == downward ? upward : downward);
2497 xinner = x = protect_from_queue (x, 0);
2499 if (mode == BLKmode)
2501 /* Copy a block into the stack, entirely or partially. */
2503 register rtx temp;
2504 int used = partial * UNITS_PER_WORD;
2505 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2506 int skip;
2508 if (size == 0)
2509 abort ();
2511 used -= offset;
2513 /* USED is now the # of bytes we need not copy to the stack
2514 because registers will take care of them. */
2516 if (partial != 0)
2517 xinner = change_address (xinner, BLKmode,
2518 plus_constant (XEXP (xinner, 0), used));
2520 /* If the partial register-part of the arg counts in its stack size,
2521 skip the part of stack space corresponding to the registers.
2522 Otherwise, start copying to the beginning of the stack space,
2523 by setting SKIP to 0. */
2524 skip = (reg_parm_stack_space == 0) ? 0 : used;
2526 #ifdef PUSH_ROUNDING
2527 /* Do it with several push insns if that doesn't take lots of insns
2528 and if there is no difficulty with push insns that skip bytes
2529 on the stack for alignment purposes. */
2530 if (args_addr == 0
2531 && GET_CODE (size) == CONST_INT
2532 && skip == 0
2533 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2534 < MOVE_RATIO)
2535 /* Here we avoid the case of a structure whose weak alignment
2536 forces many pushes of a small amount of data,
2537 and such small pushes do rounding that causes trouble. */
2538 && ((! SLOW_UNALIGNED_ACCESS)
2539 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2540 || PUSH_ROUNDING (align) == align)
2541 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2543 /* Push padding now if padding above and stack grows down,
2544 or if padding below and stack grows up.
2545 But if space already allocated, this has already been done. */
2546 if (extra && args_addr == 0
2547 && where_pad != none && where_pad != stack_direction)
2548 anti_adjust_stack (GEN_INT (extra));
2550 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2551 INTVAL (size) - used, align);
2553 if (flag_check_memory_usage && ! in_check_memory_usage)
2555 rtx temp;
2557 in_check_memory_usage = 1;
2558 temp = get_push_address (INTVAL(size) - used);
2559 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2560 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2561 temp, ptr_mode,
2562 XEXP (xinner, 0), ptr_mode,
2563 GEN_INT (INTVAL(size) - used),
2564 TYPE_MODE (sizetype));
2565 else
2566 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2567 temp, ptr_mode,
2568 GEN_INT (INTVAL(size) - used),
2569 TYPE_MODE (sizetype),
2570 GEN_INT (MEMORY_USE_RW),
2571 TYPE_MODE (integer_type_node));
2572 in_check_memory_usage = 0;
2575 else
2576 #endif /* PUSH_ROUNDING */
2578 /* Otherwise make space on the stack and copy the data
2579 to the address of that space. */
2581 /* Deduct words put into registers from the size we must copy. */
2582 if (partial != 0)
2584 if (GET_CODE (size) == CONST_INT)
2585 size = GEN_INT (INTVAL (size) - used);
2586 else
2587 size = expand_binop (GET_MODE (size), sub_optab, size,
2588 GEN_INT (used), NULL_RTX, 0,
2589 OPTAB_LIB_WIDEN);
2592 /* Get the address of the stack space.
2593 In this case, we do not deal with EXTRA separately.
2594 A single stack adjust will do. */
2595 if (! args_addr)
2597 temp = push_block (size, extra, where_pad == downward);
2598 extra = 0;
2600 else if (GET_CODE (args_so_far) == CONST_INT)
2601 temp = memory_address (BLKmode,
2602 plus_constant (args_addr,
2603 skip + INTVAL (args_so_far)));
2604 else
2605 temp = memory_address (BLKmode,
2606 plus_constant (gen_rtx_PLUS (Pmode,
2607 args_addr,
2608 args_so_far),
2609 skip));
2610 if (flag_check_memory_usage && ! in_check_memory_usage)
2612 rtx target;
2614 in_check_memory_usage = 1;
2615 target = copy_to_reg (temp);
2616 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2617 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2618 target, ptr_mode,
2619 XEXP (xinner, 0), ptr_mode,
2620 size, TYPE_MODE (sizetype));
2621 else
2622 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2623 target, ptr_mode,
2624 size, TYPE_MODE (sizetype),
2625 GEN_INT (MEMORY_USE_RW),
2626 TYPE_MODE (integer_type_node));
2627 in_check_memory_usage = 0;
2630 /* TEMP is the address of the block. Copy the data there. */
2631 if (GET_CODE (size) == CONST_INT
2632 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2633 < MOVE_RATIO))
2635 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
2636 INTVAL (size), align);
2637 goto ret;
2639 else
2641 rtx opalign = GEN_INT (align);
2642 enum machine_mode mode;
2643 rtx target = gen_rtx (MEM, BLKmode, temp);
2645 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2646 mode != VOIDmode;
2647 mode = GET_MODE_WIDER_MODE (mode))
2649 enum insn_code code = movstr_optab[(int) mode];
2651 if (code != CODE_FOR_nothing
2652 && ((GET_CODE (size) == CONST_INT
2653 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2654 <= (GET_MODE_MASK (mode) >> 1)))
2655 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2656 && (insn_operand_predicate[(int) code][0] == 0
2657 || ((*insn_operand_predicate[(int) code][0])
2658 (target, BLKmode)))
2659 && (insn_operand_predicate[(int) code][1] == 0
2660 || ((*insn_operand_predicate[(int) code][1])
2661 (xinner, BLKmode)))
2662 && (insn_operand_predicate[(int) code][3] == 0
2663 || ((*insn_operand_predicate[(int) code][3])
2664 (opalign, VOIDmode))))
2666 rtx op2 = convert_to_mode (mode, size, 1);
2667 rtx last = get_last_insn ();
2668 rtx pat;
2670 if (insn_operand_predicate[(int) code][2] != 0
2671 && ! ((*insn_operand_predicate[(int) code][2])
2672 (op2, mode)))
2673 op2 = copy_to_mode_reg (mode, op2);
2675 pat = GEN_FCN ((int) code) (target, xinner,
2676 op2, opalign);
2677 if (pat)
2679 emit_insn (pat);
2680 goto ret;
2682 else
2683 delete_insns_since (last);
2688 #ifndef ACCUMULATE_OUTGOING_ARGS
2689 /* If the source is referenced relative to the stack pointer,
2690 copy it to another register to stabilize it. We do not need
2691 to do this if we know that we won't be changing sp. */
2693 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2694 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2695 temp = copy_to_reg (temp);
2696 #endif
2698 /* Make inhibit_defer_pop nonzero around the library call
2699 to force it to pop the bcopy-arguments right away. */
2700 NO_DEFER_POP;
2701 #ifdef TARGET_MEM_FUNCTIONS
2702 emit_library_call (memcpy_libfunc, 0,
2703 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2704 convert_to_mode (TYPE_MODE (sizetype),
2705 size, TREE_UNSIGNED (sizetype)),
2706 TYPE_MODE (sizetype));
2707 #else
2708 emit_library_call (bcopy_libfunc, 0,
2709 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2710 convert_to_mode (TYPE_MODE (integer_type_node),
2711 size,
2712 TREE_UNSIGNED (integer_type_node)),
2713 TYPE_MODE (integer_type_node));
2714 #endif
2715 OK_DEFER_POP;
2718 else if (partial > 0)
2720 /* Scalar partly in registers. */
2722 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2723 int i;
2724 int not_stack;
2725 /* # words of start of argument
2726 that we must make space for but need not store. */
2727 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2728 int args_offset = INTVAL (args_so_far);
2729 int skip;
2731 /* Push padding now if padding above and stack grows down,
2732 or if padding below and stack grows up.
2733 But if space already allocated, this has already been done. */
2734 if (extra && args_addr == 0
2735 && where_pad != none && where_pad != stack_direction)
2736 anti_adjust_stack (GEN_INT (extra));
2738 /* If we make space by pushing it, we might as well push
2739 the real data. Otherwise, we can leave OFFSET nonzero
2740 and leave the space uninitialized. */
2741 if (args_addr == 0)
2742 offset = 0;
2744 /* Now NOT_STACK gets the number of words that we don't need to
2745 allocate on the stack. */
2746 not_stack = partial - offset;
2748 /* If the partial register-part of the arg counts in its stack size,
2749 skip the part of stack space corresponding to the registers.
2750 Otherwise, start copying to the beginning of the stack space,
2751 by setting SKIP to 0. */
2752 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
2754 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2755 x = validize_mem (force_const_mem (mode, x));
2757 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2758 SUBREGs of such registers are not allowed. */
2759 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2760 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2761 x = copy_to_reg (x);
2763 /* Loop over all the words allocated on the stack for this arg. */
2764 /* We can do it by words, because any scalar bigger than a word
2765 has a size a multiple of a word. */
2766 #ifndef PUSH_ARGS_REVERSED
2767 for (i = not_stack; i < size; i++)
2768 #else
2769 for (i = size - 1; i >= not_stack; i--)
2770 #endif
2771 if (i >= not_stack + offset)
2772 emit_push_insn (operand_subword_force (x, i, mode),
2773 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2774 0, args_addr,
2775 GEN_INT (args_offset + ((i - not_stack + skip)
2776 * UNITS_PER_WORD)),
2777 reg_parm_stack_space);
2779 else
2781 rtx addr;
2782 rtx target = NULL_RTX;
2784 /* Push padding now if padding above and stack grows down,
2785 or if padding below and stack grows up.
2786 But if space already allocated, this has already been done. */
2787 if (extra && args_addr == 0
2788 && where_pad != none && where_pad != stack_direction)
2789 anti_adjust_stack (GEN_INT (extra));
2791 #ifdef PUSH_ROUNDING
2792 if (args_addr == 0)
2793 addr = gen_push_operand ();
2794 else
2795 #endif
2797 if (GET_CODE (args_so_far) == CONST_INT)
2798 addr
2799 = memory_address (mode,
2800 plus_constant (args_addr,
2801 INTVAL (args_so_far)));
2802 else
2803 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
2804 args_so_far));
2805 target = addr;
2808 emit_move_insn (gen_rtx_MEM (mode, addr), x);
2810 if (flag_check_memory_usage && ! in_check_memory_usage)
2812 in_check_memory_usage = 1;
2813 if (target == 0)
2814 target = get_push_address (GET_MODE_SIZE (mode));
2816 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2817 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2818 target, ptr_mode,
2819 XEXP (x, 0), ptr_mode,
2820 GEN_INT (GET_MODE_SIZE (mode)),
2821 TYPE_MODE (sizetype));
2822 else
2823 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2824 target, ptr_mode,
2825 GEN_INT (GET_MODE_SIZE (mode)),
2826 TYPE_MODE (sizetype),
2827 GEN_INT (MEMORY_USE_RW),
2828 TYPE_MODE (integer_type_node));
2829 in_check_memory_usage = 0;
2833 ret:
2834 /* If part should go in registers, copy that part
2835 into the appropriate registers. Do this now, at the end,
2836 since mem-to-mem copies above may do function calls. */
2837 if (partial > 0 && reg != 0)
2839 /* Handle calls that pass values in multiple non-contiguous locations.
2840 The Irix 6 ABI has examples of this. */
2841 if (GET_CODE (reg) == PARALLEL)
2842 emit_group_load (reg, x);
2843 else
2844 move_block_to_reg (REGNO (reg), x, partial, mode);
2847 if (extra && args_addr == 0 && where_pad == stack_direction)
2848 anti_adjust_stack (GEN_INT (extra));
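/* A minimal sketch of a caller pushing one word-sized argument on a
   machine with push insns (ARGS_ADDR == 0), with no partial
   registers, no padding and no required stack space; VAL here
   stands for some hypothetical SImode rtx:

     emit_push_insn (val, SImode, NULL_TREE, NULL_RTX,
                     GET_MODE_SIZE (SImode), 0, NULL_RTX, 0,
                     NULL_RTX, const0_rtx, 0);  */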
2851 /* Expand an assignment that stores the value of FROM into TO.
2852 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2853 (This may contain a QUEUED rtx;
2854 if the value is constant, this rtx is a constant.)
2855 Otherwise, the returned value is NULL_RTX.
2857 SUGGEST_REG is no longer actually used.
2858 It used to mean, copy the value through a register
2859 and return that register, if that is possible.
2860 We now use WANT_VALUE to decide whether to do this. */
2862 rtx
2863 expand_assignment (to, from, want_value, suggest_reg)
2864 tree to, from;
2865 int want_value;
2866 int suggest_reg;
2868 register rtx to_rtx = 0;
2869 rtx result;
2871 /* Don't crash if the lhs of the assignment was erroneous. */
2873 if (TREE_CODE (to) == ERROR_MARK)
2875 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2876 return want_value ? result : NULL_RTX;
2879 /* Assignment of a structure component needs special treatment
2880 if the structure component's rtx is not simply a MEM.
2881 Assignment of an array element at a constant index, and assignment of
2882 an array element in an unaligned packed structure field, has the same
2883 problem. */
2885 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
2886 || TREE_CODE (to) == ARRAY_REF)
2888 enum machine_mode mode1;
2889 int bitsize;
2890 int bitpos;
2891 tree offset;
2892 int unsignedp;
2893 int volatilep = 0;
2894 tree tem;
2895 int alignment;
2897 push_temp_slots ();
2898 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
2899 &unsignedp, &volatilep, &alignment);
2901 /* If we are going to use store_bit_field and extract_bit_field,
2902 make sure to_rtx will be safe for multiple use. */
2904 if (mode1 == VOIDmode && want_value)
2905 tem = stabilize_reference (tem);
2907 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
2908 if (offset != 0)
2910 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2912 if (GET_CODE (to_rtx) != MEM)
2913 abort ();
2915 if (GET_MODE (offset_rtx) != ptr_mode)
2917 #ifdef POINTERS_EXTEND_UNSIGNED
2918 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 1);
2919 #else
2920 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
2921 #endif
2924 to_rtx = change_address (to_rtx, VOIDmode,
2925 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
2926 force_reg (ptr_mode, offset_rtx)));
2928 if (volatilep)
2930 if (GET_CODE (to_rtx) == MEM)
2932 /* When the offset is zero, to_rtx is the address of the
2933 structure we are storing into, and hence may be shared.
2934 We must make a new MEM before setting the volatile bit. */
2935 if (offset == 0)
2936 to_rtx = copy_rtx (to_rtx);
2938 MEM_VOLATILE_P (to_rtx) = 1;
2940 #if 0 /* This was turned off because, when a field is volatile
2941 in an object which is not volatile, the object may be in a register,
2942 and then we would abort over here. */
2943 else
2944 abort ();
2945 #endif
2948 if (TREE_CODE (to) == COMPONENT_REF
2949 && TREE_READONLY (TREE_OPERAND (to, 1)))
2951 if (offset == 0)
2952 to_rtx = copy_rtx (to_rtx);
2954 RTX_UNCHANGING_P (to_rtx) = 1;
2957 /* Check the access. */
2958 if (flag_check_memory_usage && GET_CODE (to_rtx) == MEM)
2960 rtx to_addr;
2961 int size;
2962 int best_mode_size;
2963 enum machine_mode best_mode;
2965 best_mode = get_best_mode (bitsize, bitpos,
2966 TYPE_ALIGN (TREE_TYPE (tem)),
2967 mode1, volatilep);
2968 if (best_mode == VOIDmode)
2969 best_mode = QImode;
2971 best_mode_size = GET_MODE_BITSIZE (best_mode);
2972 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
2973 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
2974 size *= GET_MODE_SIZE (best_mode);
2976 /* Check the access right of the pointer. */
2977 if (size)
2978 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
2979 to_addr, ptr_mode,
2980 GEN_INT (size), TYPE_MODE (sizetype),
2981 GEN_INT (MEMORY_USE_WO),
2982 TYPE_MODE (integer_type_node));
2985 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2986 (want_value
2987 /* Spurious cast makes HPUX compiler happy. */
2988 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2989 : VOIDmode),
2990 unsignedp,
2991 /* Required alignment of containing datum. */
2992 alignment,
2993 int_size_in_bytes (TREE_TYPE (tem)));
2994 preserve_temp_slots (result);
2995 free_temp_slots ();
2996 pop_temp_slots ();
2998 /* If the value is meaningful, convert RESULT to the proper mode.
2999 Otherwise, return nothing. */
3000 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3001 TYPE_MODE (TREE_TYPE (from)),
3002 result,
3003 TREE_UNSIGNED (TREE_TYPE (to)))
3004 : NULL_RTX);
3007 /* If the rhs is a function call and its value is not an aggregate,
3008 call the function before we start to compute the lhs.
3009 This is needed for correct code for cases such as
3010 val = setjmp (buf) on machines where reference to val
3011 requires loading up part of an address in a separate insn.
3013 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3014 a promoted variable where the zero- or sign- extension needs to be done.
3015 Handling this in the normal way is safe because no computation is done
3016 before the call. */
3017 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3018 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3019 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3021 rtx value;
3023 push_temp_slots ();
3024 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3025 if (to_rtx == 0)
3026 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3028 /* Handle calls that return values in multiple non-contiguous locations.
3029 The Irix 6 ABI has examples of this. */
3030 if (GET_CODE (to_rtx) == PARALLEL)
3031 emit_group_load (to_rtx, value);
3032 else if (GET_MODE (to_rtx) == BLKmode)
3033 emit_block_move (to_rtx, value, expr_size (from),
3034 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3035 else
3036 emit_move_insn (to_rtx, value);
3037 preserve_temp_slots (to_rtx);
3038 free_temp_slots ();
3039 pop_temp_slots ();
3040 return want_value ? to_rtx : NULL_RTX;
3043 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3044 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3046 if (to_rtx == 0)
3047 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3049 /* Don't move directly into a return register. */
3050 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3052 rtx temp;
3054 push_temp_slots ();
3055 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3056 emit_move_insn (to_rtx, temp);
3057 preserve_temp_slots (to_rtx);
3058 free_temp_slots ();
3059 pop_temp_slots ();
3060 return want_value ? to_rtx : NULL_RTX;
3063 /* In case we are returning the contents of an object which overlaps
3064 the place the value is being stored, use a safe function when copying
3065 a value through a pointer into a structure value return block. */
3066 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3067 && current_function_returns_struct
3068 && !current_function_returns_pcc_struct)
3070 rtx from_rtx, size;
3072 push_temp_slots ();
3073 size = expr_size (from);
3074 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3075 EXPAND_MEMORY_USE_DONT);
3077 /* Copy the rights of the bitmap. */
3078 if (flag_check_memory_usage)
3079 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3080 XEXP (to_rtx, 0), ptr_mode,
3081 XEXP (from_rtx, 0), ptr_mode,
3082 convert_to_mode (TYPE_MODE (sizetype),
3083 size, TREE_UNSIGNED (sizetype)),
3084 TYPE_MODE (sizetype));
3086 #ifdef TARGET_MEM_FUNCTIONS
3087 emit_library_call (memcpy_libfunc, 0,
3088 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3089 XEXP (from_rtx, 0), Pmode,
3090 convert_to_mode (TYPE_MODE (sizetype),
3091 size, TREE_UNSIGNED (sizetype)),
3092 TYPE_MODE (sizetype));
3093 #else
3094 emit_library_call (bcopy_libfunc, 0,
3095 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3096 XEXP (to_rtx, 0), Pmode,
3097 convert_to_mode (TYPE_MODE (integer_type_node),
3098 size, TREE_UNSIGNED (integer_type_node)),
3099 TYPE_MODE (integer_type_node));
3100 #endif
3102 preserve_temp_slots (to_rtx);
3103 free_temp_slots ();
3104 pop_temp_slots ();
3105 return want_value ? to_rtx : NULL_RTX;
3108 /* Compute FROM and store the value in the rtx we got. */
3110 push_temp_slots ();
3111 result = store_expr (from, to_rtx, want_value);
3112 preserve_temp_slots (result);
3113 free_temp_slots ();
3114 pop_temp_slots ();
3115 return want_value ? result : NULL_RTX;
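/* The CALL_EXPR special case above matters for code such as

     val = setjmp (buf);

   on machines where referencing VAL needs an address insn of its
   own: the call is expanded first and its value is then moved into
   VAL's rtx, so no lhs computation is live across the call.  */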
3118 /* Generate code for computing expression EXP,
3119 and storing the value into TARGET.
3120 TARGET may contain a QUEUED rtx.
3122 If WANT_VALUE is nonzero, return a copy of the value
3123 not in TARGET, so that we can be sure to use the proper
3124 value in a containing expression even if TARGET has something
3125 else stored in it. If possible, we copy the value through a pseudo
3126 and return that pseudo. Or, if the value is constant, we try to
3127 return the constant. In some cases, we return a pseudo
3128 copied *from* TARGET.
3130 If the mode is BLKmode then we may return TARGET itself.
3131 It turns out that in BLKmode it doesn't cause a problem,
3132 because C has no operators that could combine two different
3133 assignments into the same BLKmode object with different values
3134 with no sequence point. Will other languages need this to
3135 be more thorough?
3137 If WANT_VALUE is 0, we return NULL, to make sure
3138 to catch quickly any cases where the caller uses the value
3139 and fails to set WANT_VALUE. */
3141 rtx
3142 store_expr (exp, target, want_value)
3143 register tree exp;
3144 register rtx target;
3145 int want_value;
3147 register rtx temp;
3148 int dont_return_target = 0;
3150 if (TREE_CODE (exp) == COMPOUND_EXPR)
3152 /* Perform first part of compound expression, then assign from second
3153 part. */
3154 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3155 emit_queue ();
3156 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3158 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3160 /* For a conditional expression, get a safe form of the target. Then
3161 test the condition, doing the appropriate assignment on either
3162 side. This avoids the creation of unnecessary temporaries.
3163 For non-BLKmode, it is more efficient not to do this. */
3165 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3167 emit_queue ();
3168 target = protect_from_queue (target, 1);
3170 do_pending_stack_adjust ();
3171 NO_DEFER_POP;
3172 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3173 start_cleanup_deferral ();
3174 store_expr (TREE_OPERAND (exp, 1), target, 0);
3175 end_cleanup_deferral ();
3176 emit_queue ();
3177 emit_jump_insn (gen_jump (lab2));
3178 emit_barrier ();
3179 emit_label (lab1);
3180 start_cleanup_deferral ();
3181 store_expr (TREE_OPERAND (exp, 2), target, 0);
3182 end_cleanup_deferral ();
3183 emit_queue ();
3184 emit_label (lab2);
3185 OK_DEFER_POP;
3187 return want_value ? target : NULL_RTX;
3189 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3190 && GET_MODE (target) != BLKmode)
3191 /* If target is in memory and caller wants value in a register instead,
3192 arrange that. Pass TARGET as target for expand_expr so that,
3193 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3194 We know expand_expr will not use the target in that case.
3195 Don't do this if TARGET is volatile because we are supposed
3196 to write it and then read it. */
3198 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3199 GET_MODE (target), 0);
3200 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3201 temp = copy_to_reg (temp);
3202 dont_return_target = 1;
3204 else if (queued_subexp_p (target))
3205 /* If target contains a postincrement, let's not risk
3206 using it as the place to generate the rhs. */
3208 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3210 /* Expand EXP into a new pseudo. */
3211 temp = gen_reg_rtx (GET_MODE (target));
3212 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3214 else
3215 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3217 /* If target is volatile, ANSI requires accessing the value
3218 *from* the target, if it is accessed. So make that happen.
3219 In no case return the target itself. */
3220 if (! MEM_VOLATILE_P (target) && want_value)
3221 dont_return_target = 1;
3223 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3224 /* If this is a scalar in a register that is stored in a wider mode
3225 than the declared mode, compute the result into its declared mode
3226 and then convert to the wider mode. Our value is the computed
3227 expression. */
3229 /* If we don't want a value, we can do the conversion inside EXP,
3230 which will often result in some optimizations. Do the conversion
3231 in two steps: first change the signedness, if needed, then
3232 the extend. But don't do this if the type of EXP is a subtype
3233 of something else since then the conversion might involve
3234 more than just converting modes. */
3235 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3236 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3238 if (TREE_UNSIGNED (TREE_TYPE (exp))
3239 != SUBREG_PROMOTED_UNSIGNED_P (target))
3240 exp
3241 = convert
3242 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3243 TREE_TYPE (exp)),
3244 exp);
3246 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3247 SUBREG_PROMOTED_UNSIGNED_P (target)),
3248 exp);
3251 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3253 /* If TEMP is a volatile MEM and we want a result value, make
3254 the access now so it gets done only once. Likewise if
3255 it contains TARGET. */
3256 if (GET_CODE (temp) == MEM && want_value
3257 && (MEM_VOLATILE_P (temp)
3258 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3259 temp = copy_to_reg (temp);
3261 /* If TEMP is a VOIDmode constant, use convert_modes to make
3262 sure that we properly convert it. */
3263 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3264 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3265 TYPE_MODE (TREE_TYPE (exp)), temp,
3266 SUBREG_PROMOTED_UNSIGNED_P (target));
3268 convert_move (SUBREG_REG (target), temp,
3269 SUBREG_PROMOTED_UNSIGNED_P (target));
3270 return want_value ? temp : NULL_RTX;
3272 else
3274 temp = expand_expr (exp, target, GET_MODE (target), 0);
3275 /* Return TARGET if it's a specified hardware register.
3276 If TARGET is a volatile mem ref, either return TARGET
3277 or return a reg copied *from* TARGET; ANSI requires this.
3279 Otherwise, if TEMP is not TARGET, return TEMP
3280 if it is constant (for efficiency),
3281 or if we really want the correct value. */
3282 if (!(target && GET_CODE (target) == REG
3283 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3284 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3285 && ! rtx_equal_p (temp, target)
3286 && (CONSTANT_P (temp) || want_value))
3287 dont_return_target = 1;
3290 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3291 the same as that of TARGET, adjust the constant. This is needed, for
3292 example, in case it is a CONST_DOUBLE and we want only a word-sized
3293 value. */
3294 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3295 && TREE_CODE (exp) != ERROR_MARK
3296 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3297 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3298 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3300 if (flag_check_memory_usage
3301 && GET_CODE (target) == MEM
3302 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3304 if (GET_CODE (temp) == MEM)
3305 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3306 XEXP (target, 0), ptr_mode,
3307 XEXP (temp, 0), ptr_mode,
3308 expr_size (exp), TYPE_MODE (sizetype));
3309 else
3310 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3311 XEXP (target, 0), ptr_mode,
3312 expr_size (exp), TYPE_MODE (sizetype),
3313 GEN_INT (MEMORY_USE_WO),
3314 TYPE_MODE (integer_type_node));
3317 /* If value was not generated in the target, store it there.
3318 Convert the value to TARGET's type first if necessary. */
3320 if (! rtx_equal_p (temp, target) && TREE_CODE (exp) != ERROR_MARK)
3322 target = protect_from_queue (target, 1);
3323 if (GET_MODE (temp) != GET_MODE (target)
3324 && GET_MODE (temp) != VOIDmode)
3326 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3327 if (dont_return_target)
3329 /* In this case, we will return TEMP,
3330 so make sure it has the proper mode.
3331 But don't forget to store the value into TARGET. */
3332 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3333 emit_move_insn (target, temp);
3335 else
3336 convert_move (target, temp, unsignedp);
3339 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3341 /* Handle copying a string constant into an array.
3342 The string constant may be shorter than the array.
3343 So copy just the string's actual length, and clear the rest. */
3344 rtx size;
3345 rtx addr;
3347 /* Get the size of the data type of the string,
3348 which is actually the size of the target. */
3349 size = expr_size (exp);
3350 if (GET_CODE (size) == CONST_INT
3351 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3352 emit_block_move (target, temp, size,
3353 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3354 else
3356 /* Compute the size of the data to copy from the string. */
3357 tree copy_size
3358 = size_binop (MIN_EXPR,
3359 make_tree (sizetype, size),
3360 convert (sizetype,
3361 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3362 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3363 VOIDmode, 0);
3364 rtx label = 0;
3366 /* Copy that much. */
3367 emit_block_move (target, temp, copy_size_rtx,
3368 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3370 /* Figure out how much is left in TARGET that we have to clear.
3371 Do all calculations in ptr_mode. */
3373 addr = XEXP (target, 0);
3374 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3376 if (GET_CODE (copy_size_rtx) == CONST_INT)
3378 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3379 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3381 else
3383 addr = force_reg (ptr_mode, addr);
3384 addr = expand_binop (ptr_mode, add_optab, addr,
3385 copy_size_rtx, NULL_RTX, 0,
3386 OPTAB_LIB_WIDEN);
3388 size = expand_binop (ptr_mode, sub_optab, size,
3389 copy_size_rtx, NULL_RTX, 0,
3390 OPTAB_LIB_WIDEN);
3392 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3393 GET_MODE (size), 0, 0);
3394 label = gen_label_rtx ();
3395 emit_jump_insn (gen_blt (label));
3398 if (size != const0_rtx)
3400 /* Be sure we can write on ADDR. */
3401 if (flag_check_memory_usage)
3402 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3403 addr, ptr_mode,
3404 size, TYPE_MODE (sizetype),
3405 GEN_INT (MEMORY_USE_WO),
3406 TYPE_MODE (integer_type_node));
3407 #ifdef TARGET_MEM_FUNCTIONS
3408 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3409 addr, ptr_mode,
3410 const0_rtx, TYPE_MODE (integer_type_node),
3411 convert_to_mode (TYPE_MODE (sizetype),
3412 size,
3413 TREE_UNSIGNED (sizetype)),
3414 TYPE_MODE (sizetype));
3415 #else
3416 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3417 addr, ptr_mode,
3418 convert_to_mode (TYPE_MODE (integer_type_node),
3419 size,
3420 TREE_UNSIGNED (integer_type_node)),
3421 TYPE_MODE (integer_type_node));
3422 #endif
3425 if (label)
3426 emit_label (label);
3429 /* Handle calls that return values in multiple non-contiguous locations.
3430 The Irix 6 ABI has examples of this. */
3431 else if (GET_CODE (target) == PARALLEL)
3432 emit_group_load (target, temp);
3433 else if (GET_MODE (temp) == BLKmode)
3434 emit_block_move (target, temp, expr_size (exp),
3435 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3436 else
3437 emit_move_insn (target, temp);
3440 /* If we don't want a value, return NULL_RTX. */
3441 if (! want_value)
3442 return NULL_RTX;
3444 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3445 ??? The latter test doesn't seem to make sense. */
3446 else if (dont_return_target && GET_CODE (temp) != MEM)
3447 return temp;
3449 /* Return TARGET itself if it is a hard register. */
3450 else if (want_value && GET_MODE (target) != BLKmode
3451 && ! (GET_CODE (target) == REG
3452 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3453 return copy_to_reg (target);
3455 else
3456 return target;
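/* The STRING_CST branch above is what implements, e.g.,

     char buf[8] = "hi";

   emit_block_move copies the 3 string bytes (including the
   terminating null) and the memset/bzero call then clears the
   remaining 5 bytes of BUF.  */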
3459 /* Return 1 if EXP just contains zeros. */
3461 static int
3462 is_zeros_p (exp)
3463 tree exp;
3465 tree elt;
3467 switch (TREE_CODE (exp))
3469 case CONVERT_EXPR:
3470 case NOP_EXPR:
3471 case NON_LVALUE_EXPR:
3472 return is_zeros_p (TREE_OPERAND (exp, 0));
3474 case INTEGER_CST:
3475 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3477 case COMPLEX_CST:
3478 return
3479 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3481 case REAL_CST:
3482 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
3484 case CONSTRUCTOR:
3485 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3486 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3487 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3488 if (! is_zeros_p (TREE_VALUE (elt)))
3489 return 0;
3491 return 1;
3493 default:
3494 return 0;
3498 /* Return 1 if EXP contains mostly (3/4) zeros. */
3500 static int
3501 mostly_zeros_p (exp)
3502 tree exp;
3504 if (TREE_CODE (exp) == CONSTRUCTOR)
3506 int elts = 0, zeros = 0;
3507 tree elt = CONSTRUCTOR_ELTS (exp);
3508 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3510 /* If there are no ranges of true bits, it is all zero. */
3511 return elt == NULL_TREE;
3513 for (; elt; elt = TREE_CHAIN (elt))
3515 /* We do not handle the case where the index is a RANGE_EXPR,
3516 so the statistic will be somewhat inaccurate.
3517 We do make a more accurate count in store_constructor itself,
3518 and since this function is only used for nested array elements,
3519 this should be close enough. */
3520 if (mostly_zeros_p (TREE_VALUE (elt)))
3521 zeros++;
3522 elts++;
3525 return 4 * zeros >= 3 * elts;
3528 return is_zeros_p (exp);
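/* The test `4 * zeros >= 3 * elts' is just ZEROS / ELTS >= 3/4 kept
   in integer arithmetic: for instance, a constructor with 16
   elements qualifies once at least 12 of them are mostly zero,
   since 4 * 12 == 48 >= 3 * 16 == 48.  */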
3531 /* Helper function for store_constructor.
3532 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3533 TYPE is the type of the CONSTRUCTOR, not the element type.
3534 CLEARED is as for store_constructor.
3536 This provides a recursive shortcut back to store_constructor when it isn't
3537 necessary to go through store_field. This is so that we can pass through
3538 the cleared field to let store_constructor know that we may not have to
3539 clear a substructure if the outer structure has already been cleared. */
3541 static void
3542 store_constructor_field (target, bitsize, bitpos,
3543 mode, exp, type, cleared)
3544 rtx target;
3545 int bitsize, bitpos;
3546 enum machine_mode mode;
3547 tree exp, type;
3548 int cleared;
3550 if (TREE_CODE (exp) == CONSTRUCTOR
3551 && bitpos % BITS_PER_UNIT == 0
3552 /* If we have a non-zero bitpos for a register target, then we just
3553 let store_field do the bitfield handling. This is unlikely to
3554 generate unnecessary clear instructions anyways. */
3555 && (bitpos == 0 || GET_CODE (target) == MEM))
3557 if (bitpos != 0)
3558 target = change_address (target, VOIDmode,
3559 plus_constant (XEXP (target, 0),
3560 bitpos / BITS_PER_UNIT));
3561 store_constructor (exp, target, cleared);
3563 else
3564 store_field (target, bitsize, bitpos, mode, exp,
3565 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3566 int_size_in_bytes (type));
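/* For example, on a BITS_PER_UNIT == 8 target a nested CONSTRUCTOR
   at bit position 64 within a MEM target is handled by advancing
   the address 64 / 8 == 8 bytes and recursing via store_constructor,
   rather than going through store_field's bitfield machinery.  */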
3569 /* Store the value of constructor EXP into the rtx TARGET.
3570 TARGET is either a REG or a MEM.
3571 CLEARED is true if TARGET is known to have been zero'd. */
3573 static void
3574 store_constructor (exp, target, cleared)
3575 tree exp;
3576 rtx target;
3577 int cleared;
3579 tree type = TREE_TYPE (exp);
3581 /* We know our target cannot conflict, since safe_from_p has been called. */
3582 #if 0
3583 /* Don't try copying piece by piece into a hard register
3584 since that is vulnerable to being clobbered by EXP.
3585 Instead, construct in a pseudo register and then copy it all. */
3586 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3588 rtx temp = gen_reg_rtx (GET_MODE (target));
3589 store_constructor (exp, temp, 0);
3590 emit_move_insn (target, temp);
3591 return;
3593 #endif
3595 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3596 || TREE_CODE (type) == QUAL_UNION_TYPE)
3598 register tree elt;
3600 /* Inform later passes that the whole union value is dead. */
3601 if (TREE_CODE (type) == UNION_TYPE
3602 || TREE_CODE (type) == QUAL_UNION_TYPE)
3603 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3605 /* If we are building a static constructor into a register,
3606 set the initial value as zero so we can fold the value into
3607 a constant. But if more than one register is involved,
3608 this probably loses. */
3609 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3610 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3612 if (! cleared)
3613 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
3615 cleared = 1;
3618 /* If the constructor has fewer fields than the structure
3619 or if we are initializing the structure to mostly zeros,
3620 clear the whole structure first. */
3621 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3622 != list_length (TYPE_FIELDS (type)))
3623 || mostly_zeros_p (exp))
3625 if (! cleared)
3626 clear_storage (target, expr_size (exp),
3627 TYPE_ALIGN (type) / BITS_PER_UNIT);
3629 cleared = 1;
3631 else
3632 /* Inform later passes that the old value is dead. */
3633 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3635 /* Store each element of the constructor into
3636 the corresponding field of TARGET. */
3638 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3640 register tree field = TREE_PURPOSE (elt);
3641 register enum machine_mode mode;
3642 int bitsize;
3643 int bitpos = 0;
3644 int unsignedp;
3645 tree pos, constant = 0, offset = 0;
3646 rtx to_rtx = target;
3648 /* Just ignore missing fields.
3649 We cleared the whole structure, above,
3650 if any fields are missing. */
3651 if (field == 0)
3652 continue;
3654 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3655 continue;
3657 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3658 unsignedp = TREE_UNSIGNED (field);
3659 mode = DECL_MODE (field);
3660 if (DECL_BIT_FIELD (field))
3661 mode = VOIDmode;
3663 pos = DECL_FIELD_BITPOS (field);
3664 if (TREE_CODE (pos) == INTEGER_CST)
3665 constant = pos;
3666 else if (TREE_CODE (pos) == PLUS_EXPR
3667 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3668 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3669 else
3670 offset = pos;
3672 if (constant)
3673 bitpos = TREE_INT_CST_LOW (constant);
3675 if (offset)
3677 rtx offset_rtx;
3679 if (contains_placeholder_p (offset))
3680 offset = build (WITH_RECORD_EXPR, sizetype,
3681 offset, make_tree (TREE_TYPE (exp), target));
3683 offset = size_binop (FLOOR_DIV_EXPR, offset,
3684 size_int (BITS_PER_UNIT));
3686 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3687 if (GET_CODE (to_rtx) != MEM)
3688 abort ();
3690 if (GET_MODE (offset_rtx) != ptr_mode)
3692 #ifdef POINTERS_EXTEND_UNSIGNED
3693 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 1);
3694 #else
3695 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3696 #endif
3699 to_rtx
3700 = change_address (to_rtx, VOIDmode,
3701 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3702 force_reg (ptr_mode, offset_rtx)));
3704 if (TREE_READONLY (field))
3706 if (GET_CODE (to_rtx) == MEM)
3707 to_rtx = copy_rtx (to_rtx);
3709 RTX_UNCHANGING_P (to_rtx) = 1;
3712 store_constructor_field (to_rtx, bitsize, bitpos,
3713 mode, TREE_VALUE (elt), type, cleared);
3716 else if (TREE_CODE (type) == ARRAY_TYPE)
3718 register tree elt;
3719 register int i;
3720 int need_to_clear;
3721 tree domain = TYPE_DOMAIN (type);
3722 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3723 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3724 tree elttype = TREE_TYPE (type);
3726 /* If the constructor has fewer elements than the array,
3727 clear the whole array first. Similarly if this is
3728 a static constructor of a non-BLKmode object. */
3729 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3730 need_to_clear = 1;
3731 else
3733 HOST_WIDE_INT count = 0, zero_count = 0;
3734 need_to_clear = 0;
3735 /* This loop is a more accurate version of the loop in
3736 mostly_zeros_p (it handles RANGE_EXPR in an index).
3737 It is also needed to check for missing elements. */
3738 for (elt = CONSTRUCTOR_ELTS (exp);
3739 elt != NULL_TREE;
3740 elt = TREE_CHAIN (elt))
3742 tree index = TREE_PURPOSE (elt);
3743 HOST_WIDE_INT this_node_count;
3744 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3746 tree lo_index = TREE_OPERAND (index, 0);
3747 tree hi_index = TREE_OPERAND (index, 1);
3748 if (TREE_CODE (lo_index) != INTEGER_CST
3749 || TREE_CODE (hi_index) != INTEGER_CST)
3751 need_to_clear = 1;
3752 break;
3754 this_node_count = TREE_INT_CST_LOW (hi_index)
3755 - TREE_INT_CST_LOW (lo_index) + 1;
3757 else
3758 this_node_count = 1;
3759 count += this_node_count;
3760 if (mostly_zeros_p (TREE_VALUE (elt)))
3761 zero_count += this_node_count;
3763 /* Clear the entire array first if there are any missing elements,
3764 or if the incidence of zero elements is >= 75%. */
3765 if (count < maxelt - minelt + 1
3766 || 4 * zero_count >= 3 * count)
3767 need_to_clear = 1;
3769 if (need_to_clear)
3771 if (! cleared)
3772 clear_storage (target, expr_size (exp),
3773 TYPE_ALIGN (type) / BITS_PER_UNIT);
3774 cleared = 1;
3776 else
3777 /* Inform later passes that the old value is dead. */
3778 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3780 /* Store each element of the constructor into
3781 the corresponding element of TARGET, determined
3782 by counting the elements. */
3783 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3784 elt;
3785 elt = TREE_CHAIN (elt), i++)
3787 register enum machine_mode mode;
3788 int bitsize;
3789 int bitpos;
3790 int unsignedp;
3791 tree value = TREE_VALUE (elt);
3792 tree index = TREE_PURPOSE (elt);
3793 rtx xtarget = target;
3795 if (cleared && is_zeros_p (value))
3796 continue;
3798 mode = TYPE_MODE (elttype);
3799 bitsize = GET_MODE_BITSIZE (mode);
3800 unsignedp = TREE_UNSIGNED (elttype);
3802 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3804 tree lo_index = TREE_OPERAND (index, 0);
3805 tree hi_index = TREE_OPERAND (index, 1);
3806 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3807 struct nesting *loop;
3808 HOST_WIDE_INT lo, hi, count;
3809 tree position;
3811 /* If the range is constant and "small", unroll the loop. */
3812 if (TREE_CODE (lo_index) == INTEGER_CST
3813 && TREE_CODE (hi_index) == INTEGER_CST
3814 && (lo = TREE_INT_CST_LOW (lo_index),
3815 hi = TREE_INT_CST_LOW (hi_index),
3816 count = hi - lo + 1,
3817 (GET_CODE (target) != MEM
3818 || count <= 2
3819 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3820 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3821 <= 40 * 8))))
3823 lo -= minelt; hi -= minelt;
3824 for (; lo <= hi; lo++)
3826 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3827 store_constructor_field (target, bitsize, bitpos,
3828 mode, value, type, cleared);
3831 else
3833 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3834 loop_top = gen_label_rtx ();
3835 loop_end = gen_label_rtx ();
3837 unsignedp = TREE_UNSIGNED (domain);
3839 index = build_decl (VAR_DECL, NULL_TREE, domain);
3841 DECL_RTL (index) = index_r
3842 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3843 &unsignedp, 0));
3845 if (TREE_CODE (value) == SAVE_EXPR
3846 && SAVE_EXPR_RTL (value) == 0)
3848 /* Make sure value gets expanded once before the
3849 loop. */
3850 expand_expr (value, const0_rtx, VOIDmode, 0);
3851 emit_queue ();
3853 store_expr (lo_index, index_r, 0);
3854 loop = expand_start_loop (0);
3856 /* Assign value to element index. */
3857 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3858 size_int (BITS_PER_UNIT));
3859 position = size_binop (MULT_EXPR,
3860 size_binop (MINUS_EXPR, index,
3861 TYPE_MIN_VALUE (domain)),
3862 position);
3863 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3864 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
3865 xtarget = change_address (target, mode, addr);
3866 if (TREE_CODE (value) == CONSTRUCTOR)
3867 store_constructor (value, xtarget, cleared);
3868 else
3869 store_expr (value, xtarget, 0);
3871 expand_exit_loop_if_false (loop,
3872 build (LT_EXPR, integer_type_node,
3873 index, hi_index));
3875 expand_increment (build (PREINCREMENT_EXPR,
3876 TREE_TYPE (index),
3877 index, integer_one_node), 0, 0);
3878 expand_end_loop ();
3879 emit_label (loop_end);
3881 /* Needed by stupid register allocation, to extend the
3882 lifetime of pseudo-regs used by target past the end
3883 of the loop. */
3884 emit_insn (gen_rtx_USE (GET_MODE (target), target));
3887 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3888 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3890 rtx pos_rtx, addr;
3891 tree position;
3893 if (index == 0)
3894 index = size_int (i);
3896 if (minelt)
3897 index = size_binop (MINUS_EXPR, index,
3898 TYPE_MIN_VALUE (domain));
3899 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3900 size_int (BITS_PER_UNIT));
3901 position = size_binop (MULT_EXPR, index, position);
3902 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3903 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
3904 xtarget = change_address (target, mode, addr);
3905 store_expr (value, xtarget, 0);
3907 else
3909 if (index != 0)
3910 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3911 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3912 else
3913 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3914 store_constructor_field (target, bitsize, bitpos,
3915 mode, value, type, cleared);
3919 /* set constructor assignments */
3920 else if (TREE_CODE (type) == SET_TYPE)
3922 tree elt = CONSTRUCTOR_ELTS (exp);
3923 int nbytes = int_size_in_bytes (type), nbits;
3924 tree domain = TYPE_DOMAIN (type);
3925 tree domain_min, domain_max, bitlength;
3927 /* The default implementation strategy is to extract the constant
3928 parts of the constructor, use that to initialize the target,
3929 and then "or" in whatever non-constant ranges we need in addition.
3931 If a large set is all zero or all ones, it is
3932 probably better to set it using memset (if available) or bzero.
3933 Also, if a large set has just a single range, it may be
3934 better to first clear the whole set (using bzero/memset)
3935 and then set the bits we want. */
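/* SET_TYPE constructors do not arise from C.  A Pascal- or CHILL-like
   front end might, schematically, hand us one for source such as

     s := [1, 5..10];

   where the lone member and the 5..10 range become the
   startbit/endbit pairs processed below.  */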
3937 /* Check for all zeros. */
3938 if (elt == NULL_TREE)
3940 if (!cleared)
3941 clear_storage (target, expr_size (exp),
3942 TYPE_ALIGN (type) / BITS_PER_UNIT);
3943 return;
3946 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3947 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3948 bitlength = size_binop (PLUS_EXPR,
3949 size_binop (MINUS_EXPR, domain_max, domain_min),
3950 size_one_node);
3952 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
3953 abort ();
3954 nbits = TREE_INT_CST_LOW (bitlength);
3956 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3957 are "complicated" (more than one range), initialize (the
3958 constant parts) by copying from a constant. */
3959 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
3960 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
3962 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3963 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
3964 char *bit_buffer = (char *) alloca (nbits);
3965 HOST_WIDE_INT word = 0;
3966 int bit_pos = 0;
3967 int ibit = 0;
3968 int offset = 0; /* In bytes from beginning of set. */
3969 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
3970 for (;;)
3972 if (bit_buffer[ibit])
3974 if (BYTES_BIG_ENDIAN)
3975 word |= (1 << (set_word_size - 1 - bit_pos));
3976 else
3977 word |= 1 << bit_pos;
3979 bit_pos++; ibit++;
3980 if (bit_pos >= set_word_size || ibit == nbits)
3982 if (word != 0 || ! cleared)
3984 rtx datum = GEN_INT (word);
3985 rtx to_rtx;
3986 /* The assumption here is that it is safe to use
3987 XEXP if the set is multi-word, but not if
3988 it's single-word. */
3989 if (GET_CODE (target) == MEM)
3991 to_rtx = plus_constant (XEXP (target, 0), offset);
3992 to_rtx = change_address (target, mode, to_rtx);
3994 else if (offset == 0)
3995 to_rtx = target;
3996 else
3997 abort ();
3998 emit_move_insn (to_rtx, datum);
4000 if (ibit == nbits)
4001 break;
4002 word = 0;
4003 bit_pos = 0;
4004 offset += set_word_size / BITS_PER_UNIT;
4008 else if (!cleared)
4010 /* Don't bother clearing storage if the set is all ones. */
4011 if (TREE_CHAIN (elt) != NULL_TREE
4012 || (TREE_PURPOSE (elt) == NULL_TREE
4013 ? nbits != 1
4014 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4015 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4016 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4017 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4018 != nbits))))
4019 clear_storage (target, expr_size (exp),
4020 TYPE_ALIGN (type) / BITS_PER_UNIT);
4023 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4025 /* start of range of element or NULL */
4026 tree startbit = TREE_PURPOSE (elt);
4027 /* end of range of element, or element value */
4028 tree endbit = TREE_VALUE (elt);
4029 #ifdef TARGET_MEM_FUNCTIONS
4030 HOST_WIDE_INT startb, endb;
4031 #endif
4032 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4034 bitlength_rtx = expand_expr (bitlength,
4035 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4037 /* handle non-range tuple element like [ expr ] */
4038 if (startbit == NULL_TREE)
4040 startbit = save_expr (endbit);
4041 endbit = startbit;
4043 startbit = convert (sizetype, startbit);
4044 endbit = convert (sizetype, endbit);
4045 if (! integer_zerop (domain_min))
4047 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4048 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4050 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4051 EXPAND_CONST_ADDRESS);
4052 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4053 EXPAND_CONST_ADDRESS);
4055 if (REG_P (target))
4057 targetx = assign_stack_temp (GET_MODE (target),
4058 GET_MODE_SIZE (GET_MODE (target)),
4059 0);
4060 emit_move_insn (targetx, target);
4062 else if (GET_CODE (target) == MEM)
4063 targetx = target;
4064 else
4065 abort ();
4067 #ifdef TARGET_MEM_FUNCTIONS
4068 /* Optimization: If startbit and endbit are
4069 constants divisible by BITS_PER_UNIT,
4070 call memset instead. */
4071 if (TREE_CODE (startbit) == INTEGER_CST
4072 && TREE_CODE (endbit) == INTEGER_CST
4073 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4074 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4076 emit_library_call (memset_libfunc, 0,
4077 VOIDmode, 3,
4078 plus_constant (XEXP (targetx, 0),
4079 startb / BITS_PER_UNIT),
4080 Pmode,
4081 constm1_rtx, TYPE_MODE (integer_type_node),
4082 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4083 TYPE_MODE (sizetype));
4085 else
4086 #endif
4088 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4089 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4090 bitlength_rtx, TYPE_MODE (sizetype),
4091 startbit_rtx, TYPE_MODE (sizetype),
4092 endbit_rtx, TYPE_MODE (sizetype));
4094 if (REG_P (target))
4095 emit_move_insn (target, targetx);
4099 else
4100 abort ();
4103 /* Store the value of EXP (an expression tree)
4104 into a subfield of TARGET which has mode MODE and occupies
4105 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4106 If MODE is VOIDmode, it means that we are storing into a bit-field.
4108 If VALUE_MODE is VOIDmode, return nothing in particular.
4109 UNSIGNEDP is not used in this case.
4111 Otherwise, return an rtx for the value stored. This rtx
4112 has mode VALUE_MODE if that is convenient to do.
4113 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4115 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4116 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
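/* An illustrative C example (a sketch, not from the original sources)
   that ends up here:

     struct s { unsigned int f : 3; };
     void g (struct s *p) { p->f = 5; }

   The assignment reaches store_field with BITSIZE == 3, BITPOS the bit
   offset of F, and MODE == VOIDmode, so the store_bit_field path is
   used.  */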
4118 static rtx
4119 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4120 unsignedp, align, total_size)
4121 rtx target;
4122 int bitsize, bitpos;
4123 enum machine_mode mode;
4124 tree exp;
4125 enum machine_mode value_mode;
4126 int unsignedp;
4127 int align;
4128 int total_size;
4130 HOST_WIDE_INT width_mask = 0;
4132 if (TREE_CODE (exp) == ERROR_MARK)
4133 return const0_rtx;
4135 if (bitsize < HOST_BITS_PER_WIDE_INT)
4136 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4138 /* If we are storing into an unaligned field of an aligned union that is
4139 in a register, we may have the mode of TARGET being an integer mode but
4140 MODE == BLKmode. In that case, get an aligned object whose size and
4141 alignment are the same as TARGET and store TARGET into it (we can avoid
4142 the store if the field being stored is the entire width of TARGET). Then
4143 call ourselves recursively to store the field into a BLKmode version of
4144 that object. Finally, load from the object into TARGET. This is not
4145 very efficient in general, but should only be slightly more expensive
4146 than the otherwise-required unaligned accesses. Perhaps this can be
4147 cleaned up later. */
4149 if (mode == BLKmode
4150 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4152 rtx object = assign_stack_temp (GET_MODE (target),
4153 GET_MODE_SIZE (GET_MODE (target)), 0);
4154 rtx blk_object = copy_rtx (object);
4156 MEM_IN_STRUCT_P (object) = 1;
4157 MEM_IN_STRUCT_P (blk_object) = 1;
4158 PUT_MODE (blk_object, BLKmode);
4160 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4161 emit_move_insn (object, target);
4163 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4164 align, total_size);
4166 /* Even though we aren't returning target, we need to
4167 give it the updated value. */
4168 emit_move_insn (target, object);
4170 return blk_object;
4173 /* If the structure is in a register or if the component
4174 is a bit field, we cannot use addressing to access it.
4175 Use bit-field techniques or SUBREG to store in it. */
4177 if (mode == VOIDmode
4178 || (mode != BLKmode && ! direct_store[(int) mode])
4179 || GET_CODE (target) == REG
4180 || GET_CODE (target) == SUBREG
4181 /* If the field isn't aligned enough to store as an ordinary memref,
4182 store it as a bit field. */
4183 || (SLOW_UNALIGNED_ACCESS
4184 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4185 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4187 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4189 /* If BITSIZE is narrower than the size of the type of EXP
4190 we will be narrowing TEMP. Normally, what's wanted are the
4191 low-order bits. However, if EXP's type is a record and this is a
4192 big-endian machine, we want the upper BITSIZE bits. */
4193 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4194 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4195 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4196 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4197 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4198 - bitsize),
4199 temp, 1);
4201 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4202 MODE. */
4203 if (mode != VOIDmode && mode != BLKmode
4204 && mode != TYPE_MODE (TREE_TYPE (exp)))
4205 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4207 /* If the modes of TARGET and TEMP are both BLKmode, both
4208 must be in memory and BITPOS must be aligned on a byte
4209 boundary. If so, we simply do a block copy. */
4210 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4212 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4213 || bitpos % BITS_PER_UNIT != 0)
4214 abort ();
4216 target = change_address (target, VOIDmode,
4217 plus_constant (XEXP (target, 0),
4218 bitpos / BITS_PER_UNIT));
4220 emit_block_move (target, temp,
4221 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4222 / BITS_PER_UNIT),
4223 1);
4225 return value_mode == VOIDmode ? const0_rtx : target;
4228 /* Store the value in the bitfield. */
4229 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4230 if (value_mode != VOIDmode)
4232 /* The caller wants an rtx for the value. */
4233 /* If possible, avoid refetching from the bitfield itself. */
4234 if (width_mask != 0
4235 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4237 tree count;
4238 enum machine_mode tmode;
4240 if (unsignedp)
4241 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4242 tmode = GET_MODE (temp);
4243 if (tmode == VOIDmode)
4244 tmode = value_mode;
4245 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4246 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4247 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4249 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4250 NULL_RTX, value_mode, 0, align,
4251 total_size);
4253 return const0_rtx;
4255 else
4257 rtx addr = XEXP (target, 0);
4258 rtx to_rtx;
4260 /* If a value is wanted, it must be the lhs,
4261 so make the address stable for multiple use. */
4263 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4264 && ! CONSTANT_ADDRESS_P (addr)
4265 /* A frame-pointer reference is already stable. */
4266 && ! (GET_CODE (addr) == PLUS
4267 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4268 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4269 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4270 addr = copy_to_reg (addr);
4272 /* Now build a reference to just the desired component. */
4274 to_rtx = copy_rtx (change_address (target, mode,
4275 plus_constant (addr,
4276 (bitpos
4277 / BITS_PER_UNIT))));
4278 MEM_IN_STRUCT_P (to_rtx) = 1;
4280 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4284 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4285 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4286 ARRAY_REFs and find the ultimate containing object, which we return.
4288 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4289 bit position, and *PUNSIGNEDP to the signedness of the field.
4290 If the position of the field is variable, we store a tree
4291 giving the variable offset (in units) in *POFFSET.
4292 This offset is in addition to the bit position.
4293 If the position is not variable, we store 0 in *POFFSET.
4294 We set *PALIGNMENT to the alignment in bytes of the address that will be
4295 computed. This is the alignment of the thing we return if *POFFSET
4296 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4298 If any of the extraction expressions is volatile,
4299 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4301 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4302 is a mode that can be used to access the field. In that case, *PBITSIZE
4303 is redundant.
4305 If the field describes a variable-sized object, *PMODE is set to
4306 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4307 this case, but the address of the object can be found. */
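/* Illustrative example (not from the original sources): given

     struct inner { int x[4]; };
     struct outer { char c; struct inner in; } o;

   a reference to o.in.x[2] is peeled here through the ARRAY_REF and
   COMPONENT_REFs; the constant bit offset of x[2] accumulates in
   *PBITPOS, *POFFSET is left zero, and the decl for `o' is
   returned.  */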
4309 tree
4310 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4311 punsignedp, pvolatilep, palignment)
4312 tree exp;
4313 int *pbitsize;
4314 int *pbitpos;
4315 tree *poffset;
4316 enum machine_mode *pmode;
4317 int *punsignedp;
4318 int *pvolatilep;
4319 int *palignment;
4321 tree orig_exp = exp;
4322 tree size_tree = 0;
4323 enum machine_mode mode = VOIDmode;
4324 tree offset = integer_zero_node;
4325 int alignment = BIGGEST_ALIGNMENT;
4327 if (TREE_CODE (exp) == COMPONENT_REF)
4329 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4330 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4331 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4332 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4334 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4336 size_tree = TREE_OPERAND (exp, 1);
4337 *punsignedp = TREE_UNSIGNED (exp);
4339 else
4341 mode = TYPE_MODE (TREE_TYPE (exp));
4342 *pbitsize = GET_MODE_BITSIZE (mode);
4343 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4346 if (size_tree)
4348 if (TREE_CODE (size_tree) != INTEGER_CST)
4349 mode = BLKmode, *pbitsize = -1;
4350 else
4351 *pbitsize = TREE_INT_CST_LOW (size_tree);
4354 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4355 and find the ultimate containing object. */
4357 *pbitpos = 0;
4359 while (1)
4361 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4363 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4364 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4365 : TREE_OPERAND (exp, 2));
4366 tree constant = integer_zero_node, var = pos;
4368 /* If this field hasn't been filled in yet, don't go
4369 past it. This should only happen when folding expressions
4370 made during type construction. */
4371 if (pos == 0)
4372 break;
4374 /* Assume here that the offset is a multiple of a unit.
4375 If not, there should be an explicitly added constant. */
4376 if (TREE_CODE (pos) == PLUS_EXPR
4377 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4378 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4379 else if (TREE_CODE (pos) == INTEGER_CST)
4380 constant = pos, var = integer_zero_node;
4382 *pbitpos += TREE_INT_CST_LOW (constant);
4383 offset = size_binop (PLUS_EXPR, offset,
4384 size_binop (EXACT_DIV_EXPR, var,
4385 size_int (BITS_PER_UNIT)));
4388 else if (TREE_CODE (exp) == ARRAY_REF)
4390 /* This code is based on the code in case ARRAY_REF in expand_expr
4391 below. We assume here that the size of an array element is
4392 always an integral multiple of BITS_PER_UNIT. */
4394 tree index = TREE_OPERAND (exp, 1);
4395 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4396 tree low_bound
4397 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4398 tree index_type = TREE_TYPE (index);
4399 tree xindex;
4401 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4403 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4404 index);
4405 index_type = TREE_TYPE (index);
4408 if (! integer_zerop (low_bound))
4409 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4411 if (TREE_CODE (index) == INTEGER_CST)
4413 index = convert (sbitsizetype, index);
4414 index_type = TREE_TYPE (index);
4417 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
4418 convert (sbitsizetype,
4419 TYPE_SIZE (TREE_TYPE (exp)))));
4421 if (TREE_CODE (xindex) == INTEGER_CST
4422 && TREE_INT_CST_HIGH (xindex) == 0)
4423 *pbitpos += TREE_INT_CST_LOW (xindex);
4424 else
4426 /* Either the bit offset calculated above is not constant, or
4427 it overflowed. In either case, redo the multiplication
4428 against the size in units. This is especially important
4429 in the non-constant case to avoid a division at runtime. */
4430 xindex = fold (build (MULT_EXPR, ssizetype, index,
4431 convert (ssizetype,
4432 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
4434 if (contains_placeholder_p (xindex))
4435 xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
4437 offset = size_binop (PLUS_EXPR, offset, xindex);
4440 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4441 && ! ((TREE_CODE (exp) == NOP_EXPR
4442 || TREE_CODE (exp) == CONVERT_EXPR)
4443 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4444 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4445 != UNION_TYPE))
4446 && (TYPE_MODE (TREE_TYPE (exp))
4447 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4448 break;
4450 /* If any reference in the chain is volatile, the effect is volatile. */
4451 if (TREE_THIS_VOLATILE (exp))
4452 *pvolatilep = 1;
4454 /* If the offset is non-constant already, then we can't assume any
4455 alignment more than the alignment here. */
4456 if (! integer_zerop (offset))
4457 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4459 exp = TREE_OPERAND (exp, 0);
4462 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4463 alignment = MIN (alignment, DECL_ALIGN (exp));
4464 else if (TREE_TYPE (exp) != 0)
4465 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4467 if (integer_zerop (offset))
4468 offset = 0;
4470 if (offset != 0 && contains_placeholder_p (offset))
4471 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4473 *pmode = mode;
4474 *poffset = offset;
4475 *palignment = alignment / BITS_PER_UNIT;
4476 return exp;
4479 /* Subroutine of expand_expr: compute memory_usage from modifier. */
4480 static enum memory_use_mode
4481 get_memory_usage_from_modifier (modifier)
4482 enum expand_modifier modifier;
4484 switch (modifier)
4486 case EXPAND_NORMAL:
4487 case EXPAND_SUM:
4488 return MEMORY_USE_RO;
4489 break;
4490 case EXPAND_MEMORY_USE_WO:
4491 return MEMORY_USE_WO;
4492 break;
4493 case EXPAND_MEMORY_USE_RW:
4494 return MEMORY_USE_RW;
4495 break;
4496 case EXPAND_MEMORY_USE_DONT:
4497 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
4498 MEMORY_USE_DONT, because they are modifiers to a call of
4499 expand_expr in the ADDR_EXPR case of expand_expr. */
4500 case EXPAND_CONST_ADDRESS:
4501 case EXPAND_INITIALIZER:
4502 return MEMORY_USE_DONT;
4503 case EXPAND_MEMORY_USE_BAD:
4504 default:
4505 abort ();
4509 /* Given an rtx VALUE that may contain additions and multiplications,
4510 return an equivalent value that just refers to a register or memory.
4511 This is done by generating instructions to perform the arithmetic
4512 and returning a pseudo-register containing the value.
4514 The returned value may be a REG, SUBREG, MEM or constant. */
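/* Schematically (an illustrative sketch), given VALUE of the form

     (plus (reg 65) (mult (reg 66) (const_int 4)))

   force_operand emits the multiply and the add and returns a pseudo
   register holding the sum.  */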
4516 rtx
4517 force_operand (value, target)
4518 rtx value, target;
4520 register optab binoptab = 0;
4521 /* Use a temporary to force order of execution of calls to
4522 `force_operand'. */
4523 rtx tmp;
4524 register rtx op2;
4525 /* Use subtarget as the target for operand 0 of a binary operation. */
4526 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4528 /* Check for a PIC address load. */
4529 if (flag_pic
4530 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
4531 && XEXP (value, 0) == pic_offset_table_rtx
4532 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
4533 || GET_CODE (XEXP (value, 1)) == LABEL_REF
4534 || GET_CODE (XEXP (value, 1)) == CONST))
4536 if (!subtarget)
4537 subtarget = gen_reg_rtx (GET_MODE (value));
4538 emit_move_insn (subtarget, value);
4539 return subtarget;
4542 if (GET_CODE (value) == PLUS)
4543 binoptab = add_optab;
4544 else if (GET_CODE (value) == MINUS)
4545 binoptab = sub_optab;
4546 else if (GET_CODE (value) == MULT)
4548 op2 = XEXP (value, 1);
4549 if (!CONSTANT_P (op2)
4550 && !(GET_CODE (op2) == REG && op2 != subtarget))
4551 subtarget = 0;
4552 tmp = force_operand (XEXP (value, 0), subtarget);
4553 return expand_mult (GET_MODE (value), tmp,
4554 force_operand (op2, NULL_RTX),
4555 target, 0);
4558 if (binoptab)
4560 op2 = XEXP (value, 1);
4561 if (!CONSTANT_P (op2)
4562 && !(GET_CODE (op2) == REG && op2 != subtarget))
4563 subtarget = 0;
4564 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4566 binoptab = add_optab;
4567 op2 = negate_rtx (GET_MODE (value), op2);
4570 /* Check for an addition with OP2 a constant integer and our first
4571 operand a PLUS of a virtual register and something else. In that
4572 case, we want to emit the sum of the virtual register and the
4573 constant first and then add the other value. This allows virtual
4574 register instantiation to simply modify the constant rather than
4575 creating another one around this addition. */
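/* A sketch of the shape in question:

     (plus (plus (reg virtual-stack-vars) (reg 65)) (const_int 8))

   We first form the sum of the virtual register and (const_int 8),
   which instantiation can fold into a single frame offset, and only
   then add (reg 65).  */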
4576 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4577 && GET_CODE (XEXP (value, 0)) == PLUS
4578 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4579 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4580 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4582 rtx temp = expand_binop (GET_MODE (value), binoptab,
4583 XEXP (XEXP (value, 0), 0), op2,
4584 subtarget, 0, OPTAB_LIB_WIDEN);
4585 return expand_binop (GET_MODE (value), binoptab, temp,
4586 force_operand (XEXP (XEXP (value, 0), 1), 0),
4587 target, 0, OPTAB_LIB_WIDEN);
4590 tmp = force_operand (XEXP (value, 0), subtarget);
4591 return expand_binop (GET_MODE (value), binoptab, tmp,
4592 force_operand (op2, NULL_RTX),
4593 target, 0, OPTAB_LIB_WIDEN);
4594 /* We give UNSIGNEDP = 0 to expand_binop
4595 because the only operations we are expanding here are signed ones. */
4597 return value;
4600 /* Subroutine of expand_expr:
4601 save the non-copied parts (LIST) of an expr (LHS), and return a list
4602 which can restore these values to their previous values,
4603 should something modify their storage. */
4605 static tree
4606 save_noncopied_parts (lhs, list)
4607 tree lhs;
4608 tree list;
4610 tree tail;
4611 tree parts = 0;
4613 for (tail = list; tail; tail = TREE_CHAIN (tail))
4614 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4615 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4616 else
4618 tree part = TREE_VALUE (tail);
4619 tree part_type = TREE_TYPE (part);
4620 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
4621 rtx target = assign_temp (part_type, 0, 1, 1);
4622 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
4623 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
4624 parts = tree_cons (to_be_saved,
4625 build (RTL_EXPR, part_type, NULL_TREE,
4626 (tree) target),
4627 parts);
4628 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4630 return parts;
4633 /* Subroutine of expand_expr:
4634 record the non-copied parts (LIST) of an expr (LHS), and return a list
4635 which specifies the initial values of these parts. */
4637 static tree
4638 init_noncopied_parts (lhs, list)
4639 tree lhs;
4640 tree list;
4642 tree tail;
4643 tree parts = 0;
4645 for (tail = list; tail; tail = TREE_CHAIN (tail))
4646 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4647 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4648 else
4650 tree part = TREE_VALUE (tail);
4651 tree part_type = TREE_TYPE (part);
4652 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
4653 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4655 return parts;
4658 /* Subroutine of expand_expr: return nonzero iff there is no way that
4659 EXP can reference X, which is being modified. TOP_P is nonzero if this
4660 call is going to be used to determine whether we need a temporary
4661 for EXP, as opposed to a recursive call to this function.
4663 It is always safe for this routine to return zero since it merely
4664 searches for optimization opportunities. */
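/* For example (an illustrative sketch), when expanding `x = x + f ()'
   the caller can ask whether the rtx for `x' is safe from the
   expression `x + f ()'.  A CALL_EXPR with no rtl yet is assumed to
   clobber all of memory, so if `x' lives in memory we return zero and
   the caller evaluates into a temporary instead.  */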
4666 static int
4667 safe_from_p (x, exp, top_p)
4668 rtx x;
4669 tree exp;
4670 int top_p;
4672 rtx exp_rtl = 0;
4673 int i, nops;
4674 static int save_expr_count;
4675 static int save_expr_size = 0;
4676 static tree *save_expr_rewritten;
4677 static tree save_expr_trees[256];
4679 if (x == 0
4680 /* If EXP has varying size, we MUST use a target since we currently
4681 have no way of allocating temporaries of variable size
4682 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4683 So we assume here that something at a higher level has prevented a
4684 clash. This is somewhat bogus, but the best we can do. Only
4685 do this when X is BLKmode and when we are at the top level. */
4686 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4687 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
4688 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
4689 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
4690 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
4691 != INTEGER_CST)
4692 && GET_MODE (x) == BLKmode))
4693 return 1;
4695 if (top_p && save_expr_size == 0)
4697 int rtn;
4699 save_expr_count = 0;
4700 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
4701 save_expr_rewritten = &save_expr_trees[0];
4703 rtn = safe_from_p (x, exp, 1);
4705 for (i = 0; i < save_expr_count; ++i)
4707 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
4708 abort ();
4709 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
4712 save_expr_size = 0;
4714 return rtn;
4717 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
4718 find the underlying pseudo. */
4719 if (GET_CODE (x) == SUBREG)
4721 x = SUBREG_REG (x);
4722 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4723 return 0;
4726 /* If X is a location in the outgoing argument area, it is always safe. */
4727 if (GET_CODE (x) == MEM
4728 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4729 || (GET_CODE (XEXP (x, 0)) == PLUS
4730 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
4731 return 1;
4733 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4735 case 'd':
4736 exp_rtl = DECL_RTL (exp);
4737 break;
4739 case 'c':
4740 return 1;
4742 case 'x':
4743 if (TREE_CODE (exp) == TREE_LIST)
4744 return ((TREE_VALUE (exp) == 0
4745 || safe_from_p (x, TREE_VALUE (exp), 0))
4746 && (TREE_CHAIN (exp) == 0
4747 || safe_from_p (x, TREE_CHAIN (exp), 0)));
4748 else if (TREE_CODE (exp) == ERROR_MARK)
4749 return 1; /* An already-visited SAVE_EXPR? */
4750 else
4751 return 0;
4753 case '1':
4754 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
4756 case '2':
4757 case '<':
4758 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
4759 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
4761 case 'e':
4762 case 'r':
4763 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4764 the expression. If it is set, we conflict iff we are that rtx or
4765 both are in memory. Otherwise, we check all operands of the
4766 expression recursively. */
4768 switch (TREE_CODE (exp))
4770 case ADDR_EXPR:
4771 return (staticp (TREE_OPERAND (exp, 0))
4772 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
4773 || TREE_STATIC (exp));
4775 case INDIRECT_REF:
4776 if (GET_CODE (x) == MEM)
4777 return 0;
4778 break;
4780 case CALL_EXPR:
4781 exp_rtl = CALL_EXPR_RTL (exp);
4782 if (exp_rtl == 0)
4784 /* Assume that the call will clobber all hard registers and
4785 all of memory. */
4786 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4787 || GET_CODE (x) == MEM)
4788 return 0;
4791 break;
4793 case RTL_EXPR:
4794 /* If a sequence exists, we would have to scan every instruction
4795 in the sequence to see if it was safe. This is probably not
4796 worthwhile. */
4797 if (RTL_EXPR_SEQUENCE (exp))
4798 return 0;
4800 exp_rtl = RTL_EXPR_RTL (exp);
4801 break;
4803 case WITH_CLEANUP_EXPR:
4804 exp_rtl = RTL_EXPR_RTL (exp);
4805 break;
4807 case CLEANUP_POINT_EXPR:
4808 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
4810 case SAVE_EXPR:
4811 exp_rtl = SAVE_EXPR_RTL (exp);
4812 if (exp_rtl)
4813 break;
4815 /* This SAVE_EXPR might appear many times in the top-level
4816 safe_from_p() expression, and if it has a complex
4817 subexpression, examining it multiple times could result
4818 in a combinatorial explosion. E.g. on an Alpha
4819 running at least 200MHz, a Fortran test case compiled with
4820 optimization took about 28 minutes to compile -- even though
4821 it was only a few lines long, and the complicated line causing
4822 so much time to be spent in the earlier version of safe_from_p()
4823 had only 293 or so unique nodes.
4825 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
4826 where it is so we can turn it back in the top-level safe_from_p()
4827 when we're done. */
4829 /* For now, don't bother re-sizing the array. */
4830 if (save_expr_count >= save_expr_size)
4831 return 0;
4832 save_expr_rewritten[save_expr_count++] = exp;
4833 TREE_SET_CODE (exp, ERROR_MARK);
4835 nops = tree_code_length[(int) SAVE_EXPR];
4836 for (i = 0; i < nops; i++)
4837 if (TREE_OPERAND (exp, i) != 0
4838 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
4839 return 0;
4840 return 1;
4842 case BIND_EXPR:
4843 /* The only operand we look at is operand 1. The rest aren't
4844 part of the expression. */
4845 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
4847 case METHOD_CALL_EXPR:
4848 /* This takes a rtx argument, but shouldn't appear here. */
4849 abort ();
4851 default:
4852 break;
4855 /* If we have an rtx, we do not need to scan our operands. */
4856 if (exp_rtl)
4857 break;
4859 nops = tree_code_length[(int) TREE_CODE (exp)];
4860 for (i = 0; i < nops; i++)
4861 if (TREE_OPERAND (exp, i) != 0
4862 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
4863 return 0;
4866 /* If we have an rtl, find any enclosed object. Then see if we conflict
4867 with it. */
4868 if (exp_rtl)
4870 if (GET_CODE (exp_rtl) == SUBREG)
4872 exp_rtl = SUBREG_REG (exp_rtl);
4873 if (GET_CODE (exp_rtl) == REG
4874 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
4875 return 0;
4878 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4879 are memory and EXP is not readonly. */
4880 return ! (rtx_equal_p (x, exp_rtl)
4881 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
4882 && ! TREE_READONLY (exp)));
4885 /* If we reach here, it is safe. */
4886 return 1;
4889 /* Subroutine of expand_expr: return nonzero iff EXP is an
4890 expression whose type is statically determinable. */
4892 static int
4893 fixed_type_p (exp)
4894 tree exp;
4896 if (TREE_CODE (exp) == PARM_DECL
4897 || TREE_CODE (exp) == VAR_DECL
4898 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4899 || TREE_CODE (exp) == COMPONENT_REF
4900 || TREE_CODE (exp) == ARRAY_REF)
4901 return 1;
4902 return 0;
4905 /* Subroutine of expand_expr: return rtx if EXP is a
4906 variable or parameter; else return 0. */
4908 static rtx
4909 var_rtx (exp)
4910 tree exp;
4912 STRIP_NOPS (exp);
4913 switch (TREE_CODE (exp))
4915 case PARM_DECL:
4916 case VAR_DECL:
4917 return DECL_RTL (exp);
4918 default:
4919 return 0;
4923 /* expand_expr: generate code for computing expression EXP.
4924 An rtx for the computed value is returned. The value is never null.
4925 In the case of a void EXP, const0_rtx is returned.
4927 The value may be stored in TARGET if TARGET is nonzero.
4928 TARGET is just a suggestion; callers must assume that
4929 the rtx returned may not be the same as TARGET.
4931 If TARGET is CONST0_RTX, it means that the value will be ignored.
4933 If TMODE is not VOIDmode, it suggests generating the
4934 result in mode TMODE. But this is done only when convenient.
4935 Otherwise, TMODE is ignored and the value is generated in its natural mode.
4936 TMODE is just a suggestion; callers must assume that
4937 the rtx returned may not have mode TMODE.
4939 Note that TARGET may have neither TMODE nor MODE. In that case, it
4940 probably will not be used.
4942 If MODIFIER is EXPAND_SUM then when EXP is an addition
4943 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4944 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4945 products as above, or REG or MEM, or constant.
4946 Ordinarily in such cases we would output mul or add instructions
4947 and then return a pseudo reg containing the sum.
4949 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4950 it also marks a label as absolutely required (it can't be dead).
4951 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4952 This is used for outputting expressions used in initializers.
4954 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4955 with a constant address even if that address is not normally legitimate.
4956 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
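/* A sketch of EXPAND_SUM (illustrative): expanding `&a[i]' for
   `int a[]' may simply return

     (plus (reg for a) (mult (reg for i) (const_int 4)))

   with no add or mult insns emitted; the caller, e.g. via
   memory_address, then decides how to legitimize the sum.  */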
4958 rtx
4959 expand_expr (exp, target, tmode, modifier)
4960 register tree exp;
4961 rtx target;
4962 enum machine_mode tmode;
4963 enum expand_modifier modifier;
4965 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4966 This is static so it will be accessible to our recursive callees. */
4967 static tree placeholder_list = 0;
4968 register rtx op0, op1, temp;
4969 tree type = TREE_TYPE (exp);
4970 int unsignedp = TREE_UNSIGNED (type);
4971 register enum machine_mode mode = TYPE_MODE (type);
4972 register enum tree_code code = TREE_CODE (exp);
4973 optab this_optab;
4974 /* Use subtarget as the target for operand 0 of a binary operation. */
4975 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4976 rtx original_target = target;
4977 int ignore = (target == const0_rtx
4978 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4979 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4980 || code == COND_EXPR)
4981 && TREE_CODE (type) == VOID_TYPE));
4982 tree context;
4983 /* Used by check-memory-usage to make modifier read only. */
4984 enum expand_modifier ro_modifier;
4986 /* Make a read-only version of the modifier. */
4987 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
4988 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
4989 ro_modifier = modifier;
4990 else
4991 ro_modifier = EXPAND_NORMAL;
4993 /* Don't use hard regs as subtargets, because the combiner
4994 can only handle pseudo regs. */
4995 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4996 subtarget = 0;
4997 /* Avoid subtargets inside loops,
4998 since they hide some invariant expressions. */
4999 if (preserve_subexpressions_p ())
5000 subtarget = 0;
5002 /* If we are going to ignore this result, we need only do something
5003 if there is a side-effect somewhere in the expression. If there
5004 is, short-circuit the most common cases here. Note that we must
5005 not call expand_expr with anything but const0_rtx in case this
5006 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5008 if (ignore)
5010 if (! TREE_SIDE_EFFECTS (exp))
5011 return const0_rtx;
5013 /* Ensure we reference a volatile object even if value is ignored. */
5014 if (TREE_THIS_VOLATILE (exp)
5015 && TREE_CODE (exp) != FUNCTION_DECL
5016 && mode != VOIDmode && mode != BLKmode)
5018 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5019 if (GET_CODE (temp) == MEM)
5020 temp = copy_to_reg (temp);
5021 return const0_rtx;
5024 if (TREE_CODE_CLASS (code) == '1')
5025 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5026 VOIDmode, ro_modifier);
5027 else if (TREE_CODE_CLASS (code) == '2'
5028 || TREE_CODE_CLASS (code) == '<')
5030 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5031 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5032 return const0_rtx;
5034 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5035 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5036 /* If the second operand has no side effects, just evaluate
5037 the first. */
5038 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5039 VOIDmode, ro_modifier);
5041 target = 0;
5044 /* If we will do cse, generate all results into pseudo registers
5045 since 1) that allows cse to find more things
5046 and 2) otherwise cse could produce an insn the machine
5047 cannot support. */
5049 if (! cse_not_expected && mode != BLKmode && target
5050 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5051 target = subtarget;
5053 switch (code)
5055 case LABEL_DECL:
5057 tree function = decl_function_context (exp);
5058 /* Handle using a label in a containing function. */
5059 if (function != current_function_decl
5060 && function != inline_function_decl && function != 0)
5062 struct function *p = find_function_data (function);
5063 /* Allocate in the memory associated with the function
5064 that the label is in. */
5065 push_obstacks (p->function_obstack,
5066 p->function_maybepermanent_obstack);
5068 p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5069 label_rtx (exp),
5070 p->forced_labels);
5071 pop_obstacks ();
5073 else if (modifier == EXPAND_INITIALIZER)
5074 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5075 label_rtx (exp), forced_labels);
5076 temp = gen_rtx_MEM (FUNCTION_MODE,
5077 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5078 if (function != current_function_decl
5079 && function != inline_function_decl && function != 0)
5080 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5081 return temp;
5084 case PARM_DECL:
5085 if (DECL_RTL (exp) == 0)
5087 error_with_decl (exp, "prior parameter's size depends on `%s'");
5088 return CONST0_RTX (mode);
5091 /* ... fall through ... */
5093 case VAR_DECL:
5094 /* If a static var's type was incomplete when the decl was written,
5095 but the type is complete now, lay out the decl now. */
5096 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5097 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5099 push_obstacks_nochange ();
5100 end_temporary_allocation ();
5101 layout_decl (exp, 0);
5102 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5103 pop_obstacks ();
5106 /* Only check automatic variables. Currently, function arguments are
5107 not checked (this can be done at compile-time with prototypes).
5108 Aggregates are not checked. */
5109 if (flag_check_memory_usage && code == VAR_DECL
5110 && GET_CODE (DECL_RTL (exp)) == MEM
5111 && DECL_CONTEXT (exp) != NULL_TREE
5112 && ! TREE_STATIC (exp)
5113 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5115 enum memory_use_mode memory_usage;
5116 memory_usage = get_memory_usage_from_modifier (modifier);
5118 if (memory_usage != MEMORY_USE_DONT)
5119 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5120 XEXP (DECL_RTL (exp), 0), ptr_mode,
5121 GEN_INT (int_size_in_bytes (type)),
5122 TYPE_MODE (sizetype),
5123 GEN_INT (memory_usage),
5124 TYPE_MODE (integer_type_node));
5127 /* ... fall through ... */
5129 case FUNCTION_DECL:
5130 case RESULT_DECL:
5131 if (DECL_RTL (exp) == 0)
5132 abort ();
5134 /* Ensure variable marked as used even if it doesn't go through
5135 a parser. If it hasn't been used yet, write out an external
5136 definition. */
5137 if (! TREE_USED (exp))
5139 assemble_external (exp);
5140 TREE_USED (exp) = 1;
5143 /* Show we haven't gotten RTL for this yet. */
5144 temp = 0;
5146 /* Handle variables inherited from containing functions. */
5147 context = decl_function_context (exp);
5149 /* We treat inline_function_decl as an alias for the current function
5150 because that is the inline function whose vars, types, etc.
5151 are being merged into the current function.
5152 See expand_inline_function. */
5154 if (context != 0 && context != current_function_decl
5155 && context != inline_function_decl
5156 /* If var is static, we don't need a static chain to access it. */
5157 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5158 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5160 rtx addr;
5162 /* Mark as non-local and addressable. */
5163 DECL_NONLOCAL (exp) = 1;
5164 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5165 abort ();
5166 mark_addressable (exp);
5167 if (GET_CODE (DECL_RTL (exp)) != MEM)
5168 abort ();
5169 addr = XEXP (DECL_RTL (exp), 0);
5170 if (GET_CODE (addr) == MEM)
5171 addr = gen_rtx_MEM (Pmode,
5172 fix_lexical_addr (XEXP (addr, 0), exp));
5173 else
5174 addr = fix_lexical_addr (addr, exp);
5175 temp = change_address (DECL_RTL (exp), mode, addr);
5178 /* This is the case of an array whose size is to be determined
5179 from its initializer, while the initializer is still being parsed.
5180 See expand_decl. */
5182 else if (GET_CODE (DECL_RTL (exp)) == MEM
5183 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5184 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5185 XEXP (DECL_RTL (exp), 0));
5187 /* If DECL_RTL is memory, we are in the normal case and either
5188 the address is not valid or it is not a register and -fforce-addr
5189 is specified, get the address into a register. */
5191 else if (GET_CODE (DECL_RTL (exp)) == MEM
5192 && modifier != EXPAND_CONST_ADDRESS
5193 && modifier != EXPAND_SUM
5194 && modifier != EXPAND_INITIALIZER
5195 && (! memory_address_p (DECL_MODE (exp),
5196 XEXP (DECL_RTL (exp), 0))
5197 || (flag_force_addr
5198 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5199 temp = change_address (DECL_RTL (exp), VOIDmode,
5200 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5202 /* If we got something, return it. But first, set the alignment if
5203 the address is a register. */
5204 if (temp != 0)
5206 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5207 mark_reg_pointer (XEXP (temp, 0),
5208 DECL_ALIGN (exp) / BITS_PER_UNIT);
5210 return temp;
5213 /* If the mode of DECL_RTL does not match that of the decl, it
5214 must be a promoted value. We return a SUBREG of the wanted mode,
5215 but mark it so that we know that it was already extended. */
5217 if (GET_CODE (DECL_RTL (exp)) == REG
5218 && GET_MODE (DECL_RTL (exp)) != mode)
5220 /* Get the signedness used for this variable. Ensure we get the
5221 same mode we got when the variable was declared. */
5222 if (GET_MODE (DECL_RTL (exp))
5223 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5224 abort ();
5226 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5227 SUBREG_PROMOTED_VAR_P (temp) = 1;
5228 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5229 return temp;
5232 return DECL_RTL (exp);
5234 case INTEGER_CST:
5235 return immed_double_const (TREE_INT_CST_LOW (exp),
5236 TREE_INT_CST_HIGH (exp),
5237 mode);
5239 case CONST_DECL:
5240 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5241 EXPAND_MEMORY_USE_BAD);
5243 case REAL_CST:
5244 /* If optimized, generate immediate CONST_DOUBLE
5245 which will be turned into memory by reload if necessary.
5247 We used to force a register so that loop.c could see it. But
5248 this does not allow gen_* patterns to perform optimizations with
5249 the constants. It also produces two insns in cases like "x = 1.0;".
5250 On most machines, floating-point constants are not permitted in
5251 many insns, so we'd end up copying it to a register in any case.
5253 Now, we do the copying in expand_binop, if appropriate. */
5254 return immed_real_const (exp);
5256 case COMPLEX_CST:
5257 case STRING_CST:
5258 if (! TREE_CST_RTL (exp))
5259 output_constant_def (exp);
5261 /* TREE_CST_RTL probably contains a constant address.
5262 On RISC machines where a constant address isn't valid,
5263 make some insns to get that address into a register. */
5264 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5265 && modifier != EXPAND_CONST_ADDRESS
5266 && modifier != EXPAND_INITIALIZER
5267 && modifier != EXPAND_SUM
5268 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5269 || (flag_force_addr
5270 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5271 return change_address (TREE_CST_RTL (exp), VOIDmode,
5272 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5273 return TREE_CST_RTL (exp);
5275 case EXPR_WITH_FILE_LOCATION:
5277 rtx to_return;
5278 char *saved_input_filename = input_filename;
5279 int saved_lineno = lineno;
5280 input_filename = EXPR_WFL_FILENAME (exp);
5281 lineno = EXPR_WFL_LINENO (exp);
5282 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
5283 emit_line_note (input_filename, lineno);
5284 /* Possibly avoid switching back and forth here. */
5285 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
5286 input_filename = saved_input_filename;
5287 lineno = saved_lineno;
5288 return to_return;
5291 case SAVE_EXPR:
5292 context = decl_function_context (exp);
5294 /* If this SAVE_EXPR was at global context, assume we are an
5295 initialization function and move it into our context. */
5296 if (context == 0)
5297 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5299 /* We treat inline_function_decl as an alias for the current function
5300 because that is the inline function whose vars, types, etc.
5301 are being merged into the current function.
5302 See expand_inline_function. */
5303 if (context == current_function_decl || context == inline_function_decl)
5304 context = 0;
5306 /* If this is non-local, handle it. */
5307 if (context)
5309 /* The following call just exists to abort if the context is
5310 not of a containing function. */
5311 find_function_data (context);
5313 temp = SAVE_EXPR_RTL (exp);
5314 if (temp && GET_CODE (temp) == REG)
5316 put_var_into_stack (exp);
5317 temp = SAVE_EXPR_RTL (exp);
5319 if (temp == 0 || GET_CODE (temp) != MEM)
5320 abort ();
5321 return change_address (temp, mode,
5322 fix_lexical_addr (XEXP (temp, 0), exp));
5324 if (SAVE_EXPR_RTL (exp) == 0)
5326 if (mode == VOIDmode)
5327 temp = const0_rtx;
5328 else
5329 temp = assign_temp (type, 3, 0, 0);
5331 SAVE_EXPR_RTL (exp) = temp;
5332 if (!optimize && GET_CODE (temp) == REG)
5333 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
5334 save_expr_regs);
5336 /* If the mode of TEMP does not match that of the expression, it
5337 must be a promoted value. We pass store_expr a SUBREG of the
5338 wanted mode but mark it so that we know that it was already
5339 extended. Note that `unsignedp' was modified above in
5340 this case. */
5342 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5344 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5345 SUBREG_PROMOTED_VAR_P (temp) = 1;
5346 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5349 if (temp == const0_rtx)
5350 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5351 EXPAND_MEMORY_USE_BAD);
5352 else
5353 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5355 TREE_USED (exp) = 1;
5358 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5359 must be a promoted value. We return a SUBREG of the wanted mode,
5360 but mark it so that we know that it was already extended. */
5362 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5363 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5365 /* Compute the signedness and make the proper SUBREG. */
5366 promote_mode (type, mode, &unsignedp, 0);
5367 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5368 SUBREG_PROMOTED_VAR_P (temp) = 1;
5369 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5370 return temp;
5373 return SAVE_EXPR_RTL (exp);
5375 case UNSAVE_EXPR:
5377 rtx temp;
5378 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5379 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5380 return temp;
5383 case PLACEHOLDER_EXPR:
5385 tree placeholder_expr;
5387 /* If there is an object on the head of the placeholder list,
5388 see if some object in it is of type TYPE or a pointer to it. For
5389 further information, see tree.def. */
5390 for (placeholder_expr = placeholder_list;
5391 placeholder_expr != 0;
5392 placeholder_expr = TREE_CHAIN (placeholder_expr))
5394 tree need_type = TYPE_MAIN_VARIANT (type);
5395 tree object = 0;
5396 tree old_list = placeholder_list;
5397 tree elt;
5399 /* Find the outermost reference that is of the type we want.
5400 If none, see if any object has a type that is a pointer to
5401 the type we want. */
5402 for (elt = TREE_PURPOSE (placeholder_expr);
5403 elt != 0 && object == 0;
5404 elt
5405 = ((TREE_CODE (elt) == COMPOUND_EXPR
5406 || TREE_CODE (elt) == COND_EXPR)
5407 ? TREE_OPERAND (elt, 1)
5408 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5409 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5410 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5411 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5412 ? TREE_OPERAND (elt, 0) : 0))
5413 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5414 object = elt;
5416 for (elt = TREE_PURPOSE (placeholder_expr);
5417 elt != 0 && object == 0;
5418 elt
5419 = ((TREE_CODE (elt) == COMPOUND_EXPR
5420 || TREE_CODE (elt) == COND_EXPR)
5421 ? TREE_OPERAND (elt, 1)
5422 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5423 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5424 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5425 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5426 ? TREE_OPERAND (elt, 0) : 0))
5427 if (POINTER_TYPE_P (TREE_TYPE (elt))
5428 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
5429 == need_type))
5430 object = build1 (INDIRECT_REF, need_type, elt);
5432 if (object != 0)
5434 /* Expand this object skipping the list entries before
5435 it was found in case it is also a PLACEHOLDER_EXPR.
5436 In that case, we want to translate it using subsequent
5437 entries. */
5438 placeholder_list = TREE_CHAIN (placeholder_expr);
5439 temp = expand_expr (object, original_target, tmode,
5440 ro_modifier);
5441 placeholder_list = old_list;
5442 return temp;
5447 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5448 abort ();
5450 case WITH_RECORD_EXPR:
5451 /* Put the object on the placeholder list, expand our first operand,
5452 and pop the list. */
5453 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5454 placeholder_list);
5455 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
5456 tmode, ro_modifier);
5457 placeholder_list = TREE_CHAIN (placeholder_list);
5458 return target;
5460 case EXIT_EXPR:
5461 expand_exit_loop_if_false (NULL_PTR,
5462 invert_truthvalue (TREE_OPERAND (exp, 0)));
5463 return const0_rtx;
5465 case LOOP_EXPR:
5466 push_temp_slots ();
5467 expand_start_loop (1);
5468 expand_expr_stmt (TREE_OPERAND (exp, 0));
5469 expand_end_loop ();
5470 pop_temp_slots ();
5472 return const0_rtx;
5474 case BIND_EXPR:
5476 tree vars = TREE_OPERAND (exp, 0);
5477 int vars_need_expansion = 0;
5479 /* Need to open a binding contour here because
5480 if there are any cleanups they must be contained here. */
5481 expand_start_bindings (0);
5483 /* Mark the corresponding BLOCK for output in its proper place. */
5484 if (TREE_OPERAND (exp, 2) != 0
5485 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5486 insert_block (TREE_OPERAND (exp, 2));
5488 /* If VARS have not yet been expanded, expand them now. */
5489 while (vars)
5491 if (DECL_RTL (vars) == 0)
5493 vars_need_expansion = 1;
5494 expand_decl (vars);
5496 expand_decl_init (vars);
5497 vars = TREE_CHAIN (vars);
5500 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
5502 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5504 return temp;
5507 case RTL_EXPR:
5508 if (RTL_EXPR_SEQUENCE (exp))
5510 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5511 abort ();
5512 emit_insns (RTL_EXPR_SEQUENCE (exp));
5513 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5515 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
5516 free_temps_for_rtl_expr (exp);
5517 return RTL_EXPR_RTL (exp);
5519 case CONSTRUCTOR:
5520 /* If we don't need the result, just ensure we evaluate any
5521 subexpressions. */
5522 if (ignore)
5524 tree elt;
5525 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
5526 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
5527 EXPAND_MEMORY_USE_BAD);
5528 return const0_rtx;
5531 /* All elts simple constants => refer to a constant in memory. But
5532 if this is a non-BLKmode mode, let it store a field at a time
5533 since that should make a CONST_INT or CONST_DOUBLE when we
5534 fold. Likewise, if we have a target we can use, it is best to
5535 store directly into the target unless the type is large enough
5536 that memcpy will be used. If we are making an initializer and
5537 all operands are constant, put it in memory as well. */
5538 else if ((TREE_STATIC (exp)
5539 && ((mode == BLKmode
5540 && ! (target != 0 && safe_from_p (target, exp, 1)))
5541 || TREE_ADDRESSABLE (exp)
5542 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5543 && (move_by_pieces_ninsns
5544 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5545 TYPE_ALIGN (type) / BITS_PER_UNIT)
5546 > MOVE_RATIO)
5547 && ! mostly_zeros_p (exp))))
5548 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
5550 rtx constructor = output_constant_def (exp);
5551 if (modifier != EXPAND_CONST_ADDRESS
5552 && modifier != EXPAND_INITIALIZER
5553 && modifier != EXPAND_SUM
5554 && (! memory_address_p (GET_MODE (constructor),
5555 XEXP (constructor, 0))
5556 || (flag_force_addr
5557 && GET_CODE (XEXP (constructor, 0)) != REG)))
5558 constructor = change_address (constructor, VOIDmode,
5559 XEXP (constructor, 0));
5560 return constructor;
5563 else
5565 /* Handle calls that pass values in multiple non-contiguous
5566 locations. The Irix 6 ABI has examples of this. */
5567 if (target == 0 || ! safe_from_p (target, exp, 1)
5568 || GET_CODE (target) == PARALLEL)
5570 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5571 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5572 else
5573 target = assign_temp (type, 0, 1, 1);
5576 if (TREE_READONLY (exp))
5578 if (GET_CODE (target) == MEM)
5579 target = copy_rtx (target);
5581 RTX_UNCHANGING_P (target) = 1;
5584 store_constructor (exp, target, 0);
5585 return target;
5588 case INDIRECT_REF:
5590 tree exp1 = TREE_OPERAND (exp, 0);
5591 tree exp2;
5592 tree index;
5593 tree string = string_constant (exp1, &index);
5594 int i;
5596 /* Try to optimize reads from const strings. */
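/* An illustrative sketch: the read `"abc"[1]', i.e. *("abc" + 1),
   passes these tests and is returned directly as GEN_INT ('b'), with
   no memory reference emitted.  */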
5597 if (string
5598 && TREE_CODE (string) == STRING_CST
5599 && TREE_CODE (index) == INTEGER_CST
5600 && !TREE_INT_CST_HIGH (index)
5601 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
5602 && GET_MODE_CLASS (mode) == MODE_INT
5603 && GET_MODE_SIZE (mode) == 1
5604 && modifier != EXPAND_MEMORY_USE_WO)
5605 return GEN_INT (TREE_STRING_POINTER (string)[i]);
5607 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5608 op0 = memory_address (mode, op0);
5610 if (flag_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5612 enum memory_use_mode memory_usage;
5613 memory_usage = get_memory_usage_from_modifier (modifier);
5615 if (memory_usage != MEMORY_USE_DONT)
5617 in_check_memory_usage = 1;
5618 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5619 op0, ptr_mode,
5620 GEN_INT (int_size_in_bytes (type)),
5621 TYPE_MODE (sizetype),
5622 GEN_INT (memory_usage),
5623 TYPE_MODE (integer_type_node));
5624 in_check_memory_usage = 0;
5628 temp = gen_rtx_MEM (mode, op0);
5629 /* If address was computed by addition,
5630 mark this as an element of an aggregate. */
5631 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5632 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5633 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
5634 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
5635 || (TREE_CODE (exp1) == ADDR_EXPR
5636 && (exp2 = TREE_OPERAND (exp1, 0))
5637 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
5638 MEM_IN_STRUCT_P (temp) = 1;
5639 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
5641 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5642 here, because, in C and C++, the fact that a location is accessed
5643 through a pointer to const does not mean that the value there can
5644 never change. Languages where it can never change should
5645 also set TREE_STATIC. */
5646 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
5647 return temp;
5650 case ARRAY_REF:
5651 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5652 abort ();
5655 tree array = TREE_OPERAND (exp, 0);
5656 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5657 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5658 tree index = TREE_OPERAND (exp, 1);
5659 tree index_type = TREE_TYPE (index);
5660 HOST_WIDE_INT i;
5662 /* Optimize the special case of a zero lower bound.
5664 We convert the low_bound to sizetype to avoid some problems
5665 with constant folding. (E.g. suppose the lower bound is 1,
5666 and its mode is QI. Without the conversion, (ARRAY
5667 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5668 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5670 But sizetype isn't quite right either (especially if
5671 the low bound is negative). FIXME */
5673 if (! integer_zerop (low_bound))
5674 index = fold (build (MINUS_EXPR, index_type, index,
5675 convert (sizetype, low_bound)));
5677 /* Fold an expression like: "foo"[2].
5678 This is not done in fold so it won't happen inside &.
5679 Don't fold if this is for wide characters since it's too
5680 difficult to do correctly and this is a very rare case. */
5682 if (TREE_CODE (array) == STRING_CST
5683 && TREE_CODE (index) == INTEGER_CST
5684 && !TREE_INT_CST_HIGH (index)
5685 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
5686 && GET_MODE_CLASS (mode) == MODE_INT
5687 && GET_MODE_SIZE (mode) == 1)
5688 return GEN_INT (TREE_STRING_POINTER (array)[i]);
5690 /* If this is a constant index into a constant array,
5691 just get the value from the array. Handle both the cases when
5692 we have an explicit constructor and when our operand is a variable
5693 that was declared const. */
5695 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5697 if (TREE_CODE (index) == INTEGER_CST
5698 && TREE_INT_CST_HIGH (index) == 0)
5700 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5702 i = TREE_INT_CST_LOW (index);
5703 while (elem && i--)
5704 elem = TREE_CHAIN (elem);
5705 if (elem)
5706 return expand_expr (fold (TREE_VALUE (elem)), target,
5707 tmode, ro_modifier);
5711 else if (optimize >= 1
5712 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5713 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5714 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5716 if (TREE_CODE (index) == INTEGER_CST)
5718 tree init = DECL_INITIAL (array);
5720 i = TREE_INT_CST_LOW (index);
5721 if (TREE_CODE (init) == CONSTRUCTOR)
5723 tree elem = CONSTRUCTOR_ELTS (init);
5725 while (elem
5726 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
5727 elem = TREE_CHAIN (elem);
5728 if (elem)
5729 return expand_expr (fold (TREE_VALUE (elem)), target,
5730 tmode, ro_modifier);
5732 else if (TREE_CODE (init) == STRING_CST
5733 && TREE_INT_CST_HIGH (index) == 0
5734 && (TREE_INT_CST_LOW (index)
5735 < TREE_STRING_LENGTH (init)))
5736 return (GEN_INT
5737 (TREE_STRING_POINTER
5738 (init)[TREE_INT_CST_LOW (index)]));
5743 /* ... fall through ... */
5745 case COMPONENT_REF:
5746 case BIT_FIELD_REF:
5747 /* If the operand is a CONSTRUCTOR, we can just extract the
5748 appropriate field if it is present. Don't do this if we have
5749 already written the data since we want to refer to that copy
5750 and varasm.c assumes that's what we'll do. */
5751 if (code != ARRAY_REF
5752 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5753 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
5755 tree elt;
5757 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5758 elt = TREE_CHAIN (elt))
5759 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
5760 /* We can normally use the value of the field in the
5761 CONSTRUCTOR. However, if this is a bitfield in
5762 an integral mode that we can fit in a HOST_WIDE_INT,
5763 we must mask only the number of bits in the bitfield,
5764 since this is done implicitly by the constructor. If
5765 the bitfield does not meet either of those conditions,
5766 we can't do this optimization. */
5767 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
5768 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
5769 == MODE_INT)
5770 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
5771 <= HOST_BITS_PER_WIDE_INT))))
5773 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5774 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
5776 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
5778 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
5780 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
5781 op0 = expand_and (op0, op1, target);
5783 else
5785 enum machine_mode imode
5786 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
5787 tree count
5788 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
5789 0);
5791 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
5792 target, 0);
5793 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
5794 target, 0);
5798 return op0;
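/* Otherwise, handle the general case: find the containing object with get_inner_reference and extract the addressed piece, possibly as a bit-field.  */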
5803 enum machine_mode mode1;
5804 int bitsize;
5805 int bitpos;
5806 tree offset;
5807 int volatilep = 0;
5808 int alignment;
5809 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
5810 &mode1, &unsignedp, &volatilep,
5811 &alignment);
5813 /* If we got back the original object, something is wrong. Perhaps
5814 we are evaluating an expression too early. In any event, don't
5815 infinitely recurse. */
5816 if (tem == exp)
5817 abort ();
5819 /* If TEM's type is a union of variable size, pass TARGET to the inner
5820 computation, since it will need a temporary and TARGET is known
5821 to be safe to use. This occurs in unchecked conversion in Ada. */
5823 op0 = expand_expr (tem,
5824 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5825 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5826 != INTEGER_CST)
5827 ? target : NULL_RTX),
5828 VOIDmode,
5829 modifier == EXPAND_INITIALIZER
5830 ? modifier : EXPAND_NORMAL);
5832 /* If this is a constant, put it into a register if it is a
5833 legitimate constant and memory if it isn't. */
5834 if (CONSTANT_P (op0))
5836 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
5837 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
5838 op0 = force_reg (mode, op0);
5839 else
5840 op0 = validize_mem (force_const_mem (mode, op0));
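/* If the reference involves a variable offset within the containing object, fold that offset into the address now.  */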
5843 if (offset != 0)
5845 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5847 if (GET_CODE (op0) != MEM)
5848 abort ();
5850 if (GET_MODE (offset_rtx) != ptr_mode)
5852 #ifdef POINTERS_EXTEND_UNSIGNED
5853 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 1);
5854 #else
5855 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5856 #endif
5859 op0 = change_address (op0, VOIDmode,
5860 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
5861 force_reg (ptr_mode, offset_rtx)));
5864 /* Don't forget about volatility even if this is a bitfield. */
5865 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5867 op0 = copy_rtx (op0);
5868 MEM_VOLATILE_P (op0) = 1;
5871 /* Check the access. */
5872 if (flag_check_memory_usage && GET_CODE (op0) == MEM)
5874 enum memory_use_mode memory_usage;
5875 memory_usage = get_memory_usage_from_modifier (modifier);
5877 if (memory_usage != MEMORY_USE_DONT)
5879 rtx to;
5880 int size;
5882 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
5883 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
5885 /* Check the access right of the pointer. */
5886 if (size > BITS_PER_UNIT)
5887 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5888 to, ptr_mode,
5889 GEN_INT (size / BITS_PER_UNIT),
5890 TYPE_MODE (sizetype),
5891 GEN_INT (memory_usage),
5892 TYPE_MODE (integer_type_node));
5896 /* In cases where an aligned union has an unaligned object
5897 as a field, we might be extracting a BLKmode value from
5898 an integer-mode (e.g., SImode) object. Handle this case
5899 by doing the extract into an object as wide as the field
5900 (which we know to be the width of a basic mode), then
5901 storing into memory, and changing the mode to BLKmode.
5902 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5903 EXPAND_INITIALIZER), then we must not copy to a temporary. */
5904 if (mode1 == VOIDmode
5905 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5906 || (modifier != EXPAND_CONST_ADDRESS
5907 && modifier != EXPAND_INITIALIZER
5908 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
5909 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5910 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5911 /* If the field isn't aligned enough to fetch as a memref,
5912 fetch it as a bit field. */
5913 || (SLOW_UNALIGNED_ACCESS
5914 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5915 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
5917 enum machine_mode ext_mode = mode;
5919 if (ext_mode == BLKmode)
5920 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5922 if (ext_mode == BLKmode)
5924 /* In this case, BITPOS must start at a byte boundary and
5925 TARGET, if specified, must be a MEM. */
5926 if (GET_CODE (op0) != MEM
5927 || (target != 0 && GET_CODE (target) != MEM)
5928 || bitpos % BITS_PER_UNIT != 0)
5929 abort ();
5931 op0 = change_address (op0, VOIDmode,
5932 plus_constant (XEXP (op0, 0),
5933 bitpos / BITS_PER_UNIT));
5934 if (target == 0)
5935 target = assign_temp (type, 0, 1, 1);
5937 emit_block_move (target, op0,
5938 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5939 / BITS_PER_UNIT),
5940 1);
5942 return target;
5945 op0 = validize_mem (op0);
5947 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5948 mark_reg_pointer (XEXP (op0, 0), alignment);
5950 op0 = extract_bit_field (op0, bitsize, bitpos,
5951 unsignedp, target, ext_mode, ext_mode,
5952 alignment,
5953 int_size_in_bytes (TREE_TYPE (tem)));
5955 /* If the result is a record type and BITSIZE is narrower than
5956 the mode of OP0, an integral mode, and this is a big endian
5957 machine, we must put the field into the high-order bits. */
5958 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
5959 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
5960 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
5961 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
5962 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
5963 - bitsize),
5964 op0, 1);
5966 if (mode == BLKmode)
5968 rtx new = assign_stack_temp (ext_mode,
5969 bitsize / BITS_PER_UNIT, 0);
5971 emit_move_insn (new, op0);
5972 op0 = copy_rtx (new);
5973 PUT_MODE (op0, BLKmode);
5974 MEM_IN_STRUCT_P (op0) = 1;
5977 return op0;
5980 /* If the result is BLKmode, use that to access the object
5981 now as well. */
5982 if (mode == BLKmode)
5983 mode1 = BLKmode;
5985 /* Get a reference to just this component. */
5986 if (modifier == EXPAND_CONST_ADDRESS
5987 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5988 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
5989 (bitpos / BITS_PER_UNIT)));
5990 else
5991 op0 = change_address (op0, mode1,
5992 plus_constant (XEXP (op0, 0),
5993 (bitpos / BITS_PER_UNIT)));
5994 if (GET_CODE (XEXP (op0, 0)) == REG)
5995 mark_reg_pointer (XEXP (op0, 0), alignment);
5997 MEM_IN_STRUCT_P (op0) = 1;
5998 MEM_VOLATILE_P (op0) |= volatilep;
5999 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6000 || modifier == EXPAND_CONST_ADDRESS
6001 || modifier == EXPAND_INITIALIZER)
6002 return op0;
6003 else if (target == 0)
6004 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6006 convert_move (target, op0, unsignedp);
6007 return target;
6010 /* Intended for a reference to a buffer of a file-object in Pascal.
6011 But it's not certain that a special tree code will really be
6012 necessary for these. INDIRECT_REF might work for them. */
6013 case BUFFER_REF:
6014 abort ();
6016 case IN_EXPR:
6018 /* Pascal set IN expression.
6020 Algorithm:
6021 rlo = set_low - (set_low%bits_per_word);
6022 the_word = set [ (index - rlo)/bits_per_word ];
6023 bit_index = index % bits_per_word;
6024 bitmask = 1 << bit_index;
6025 return !!(the_word & bitmask); */
6027 tree set = TREE_OPERAND (exp, 0);
6028 tree index = TREE_OPERAND (exp, 1);
6029 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6030 tree set_type = TREE_TYPE (set);
6031 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6032 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6033 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6034 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6035 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6036 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6037 rtx setaddr = XEXP (setval, 0);
6038 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6039 rtx rlow;
6040 rtx diff, quo, rem, addr, bit, result;
6042 preexpand_calls (exp);
6044 /* If domain is empty, answer is no. Likewise if index is constant
6045 and out of bounds. */
6046 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6047 && TREE_CODE (set_low_bound) == INTEGER_CST
6048 && tree_int_cst_lt (set_high_bound, set_low_bound))
6049 || (TREE_CODE (index) == INTEGER_CST
6050 && TREE_CODE (set_low_bound) == INTEGER_CST
6051 && tree_int_cst_lt (index, set_low_bound))
6052 || (TREE_CODE (set_high_bound) == INTEGER_CST
6053 && TREE_CODE (index) == INTEGER_CST
6054 && tree_int_cst_lt (set_high_bound, index))))
6055 return const0_rtx;
6057 if (target == 0)
6058 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6060 /* If we get here, we have to generate the code for both cases
6061 (in range and out of range). */
6063 op0 = gen_label_rtx ();
6064 op1 = gen_label_rtx ();
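/* Emit run-time range checks that jump to the out-of-range label OP1; a check is omitted when both the index and the corresponding bound are compile-time constants.  */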
6066 if (! (GET_CODE (index_val) == CONST_INT
6067 && GET_CODE (lo_r) == CONST_INT))
6069 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
6070 GET_MODE (index_val), iunsignedp, 0);
6071 emit_jump_insn (gen_blt (op1));
6074 if (! (GET_CODE (index_val) == CONST_INT
6075 && GET_CODE (hi_r) == CONST_INT))
6077 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
6078 GET_MODE (index_val), iunsignedp, 0);
6079 emit_jump_insn (gen_bgt (op1));
6082 /* Calculate the element number of bit zero in the first word
6083 of the set. */
6084 if (GET_CODE (lo_r) == CONST_INT)
6085 rlow = GEN_INT (INTVAL (lo_r)
6086 & ~ ((HOST_WIDE_INT) BITS_PER_UNIT - 1));
6087 else
6088 rlow = expand_binop (index_mode, and_optab, lo_r,
6089 GEN_INT (~ ((HOST_WIDE_INT) BITS_PER_UNIT - 1)),
6090 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6092 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6093 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6095 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6096 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6097 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6098 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
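/* Form the address of the byte of the set that contains the bit we want.  */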
6100 addr = memory_address (byte_mode,
6101 expand_binop (index_mode, add_optab, diff,
6102 setaddr, NULL_RTX, iunsignedp,
6103 OPTAB_LIB_WIDEN));
6105 /* Extract the bit we want to examine.  */
6106 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6107 gen_rtx_MEM (byte_mode, addr),
6108 make_tree (TREE_TYPE (index), rem),
6109 NULL_RTX, 1);
6110 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6111 GET_MODE (target) == byte_mode ? target : 0,
6112 1, OPTAB_LIB_WIDEN);
6114 if (result != target)
6115 convert_move (target, result, 1);
6117 /* Output the code to handle the out-of-range case. */
6118 emit_jump (op0);
6119 emit_label (op1);
6120 emit_move_insn (target, const0_rtx);
6121 emit_label (op0);
6122 return target;
6125 case WITH_CLEANUP_EXPR:
6126 if (RTL_EXPR_RTL (exp) == 0)
6128 RTL_EXPR_RTL (exp)
6129 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6130 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6132 /* That's it for this cleanup. */
6133 TREE_OPERAND (exp, 2) = 0;
6135 return RTL_EXPR_RTL (exp);
6137 case CLEANUP_POINT_EXPR:
6139 extern int temp_slot_level;
6140 /* Start a new binding layer that will keep track of all cleanup
6141 actions to be performed. */
6142 expand_start_bindings (0);
6144 target_temp_slot_level = temp_slot_level;
6146 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6147 /* If we're going to use this value, load it up now. */
6148 if (! ignore)
6149 op0 = force_not_mem (op0);
6150 preserve_temp_slots (op0);
6151 expand_end_bindings (NULL_TREE, 0, 0);
6153 return op0;
6155 case CALL_EXPR:
6156 /* Check for a built-in function. */
6157 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6158 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6159 == FUNCTION_DECL)
6160 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6161 return expand_builtin (exp, target, subtarget, tmode, ignore);
6163 /* If this call was expanded already by preexpand_calls,
6164 just return the result we got. */
6165 if (CALL_EXPR_RTL (exp) != 0)
6166 return CALL_EXPR_RTL (exp);
6168 return expand_call (exp, target, ignore);
6170 case NON_LVALUE_EXPR:
6171 case NOP_EXPR:
6172 case CONVERT_EXPR:
6173 case REFERENCE_EXPR:
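/* A conversion to a union type just stores the operand into the appropriate member and yields the whole union.  */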
6174 if (TREE_CODE (type) == UNION_TYPE)
6176 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6177 if (target == 0)
6179 if (mode != BLKmode)
6180 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6181 else
6182 target = assign_temp (type, 0, 1, 1);
6185 if (GET_CODE (target) == MEM)
6186 /* Store data into beginning of memory target. */
6187 store_expr (TREE_OPERAND (exp, 0),
6188 change_address (target, TYPE_MODE (valtype), 0), 0);
6190 else if (GET_CODE (target) == REG)
6191 /* Store this field into a union of the proper type. */
6192 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6193 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6194 VOIDmode, 0, 1,
6195 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
6196 else
6197 abort ();
6199 /* Return the entire union. */
6200 return target;
6203 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6205 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
6206 ro_modifier);
6208 /* If the signedness of the conversion differs and OP0 is
6209 a promoted SUBREG, clear that indication since we now
6210 have to do the proper extension. */
6211 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6212 && GET_CODE (op0) == SUBREG)
6213 SUBREG_PROMOTED_VAR_P (op0) = 0;
6215 return op0;
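/* The modes differ, so a real conversion is needed: expand the operand and then convert it to MODE.  */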
6218 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6219 if (GET_MODE (op0) == mode)
6220 return op0;
6222 /* If OP0 is a constant, just convert it into the proper mode. */
6223 if (CONSTANT_P (op0))
6224 return
6225 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6226 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6228 if (modifier == EXPAND_INITIALIZER)
6229 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
6231 if (target == 0)
6232 return
6233 convert_to_mode (mode, op0,
6234 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6235 else
6236 convert_move (target, op0,
6237 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6238 return target;
6240 case PLUS_EXPR:
6241 /* We come here from MINUS_EXPR when the second operand is a
6242 constant. */
6243 plus_expr:
6244 this_optab = add_optab;
6246 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6247 something else, make sure we add the register to the constant and
6248 then to the other thing. This case can occur during strength
6249 reduction and doing it this way will produce better code if the
6250 frame pointer or argument pointer is eliminated.
6252 fold-const.c will ensure that the constant is always in the inner
6253 PLUS_EXPR, so the only case we need to do anything about is if
6254 sp, ap, or fp is our second argument, in which case we must swap
6255 the innermost first argument and our second argument. */
6257 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6258 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6259 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6260 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6261 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6262 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6264 tree t = TREE_OPERAND (exp, 1);
6266 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6267 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6270 /* If the result is to be ptr_mode and we are adding an integer to
6271 something, we might be forming a constant. So try to use
6272 plus_constant. If it produces a sum and we can't accept it,
6273 use force_operand. This allows P = &ARR[const] to generate
6274 efficient code on machines where a SYMBOL_REF is not a valid
6275 address.
6277 If this is an EXPAND_SUM call, always return the sum. */
6278 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
6279 || mode == ptr_mode)
6281 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6282 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6283 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6285 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6286 EXPAND_SUM);
6287 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
6288 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6289 op1 = force_operand (op1, target);
6290 return op1;
6293 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6294 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6295 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6297 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6298 EXPAND_SUM);
6299 if (! CONSTANT_P (op0))
6301 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6302 VOIDmode, modifier);
6303 /* Don't go to both_summands if modifier
6304 says it's not right to return a PLUS. */
6305 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6306 goto binop2;
6307 goto both_summands;
6309 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
6310 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6311 op0 = force_operand (op0, target);
6312 return op0;
6316 /* No sense saving up arithmetic to be done
6317 if it's all in the wrong mode to form part of an address.
6318 And force_operand won't know whether to sign-extend or
6319 zero-extend. */
6320 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6321 || mode != ptr_mode)
6322 goto binop;
6324 preexpand_calls (exp);
6325 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6326 subtarget = 0;
6328 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
6329 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
6331 both_summands:
6332 /* Make sure any term that's a sum with a constant comes last. */
6333 if (GET_CODE (op0) == PLUS
6334 && CONSTANT_P (XEXP (op0, 1)))
6336 temp = op0;
6337 op0 = op1;
6338 op1 = temp;
6340 /* If adding to a sum including a constant,
6341 associate it to put the constant outside. */
6342 if (GET_CODE (op1) == PLUS
6343 && CONSTANT_P (XEXP (op1, 1)))
6345 rtx constant_term = const0_rtx;
6347 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6348 if (temp != 0)
6349 op0 = temp;
6350 /* Ensure that MULT comes first if there is one. */
6351 else if (GET_CODE (op0) == MULT)
6352 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
6353 else
6354 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
6356 /* Let's also eliminate constants from op0 if possible. */
6357 op0 = eliminate_constant_term (op0, &constant_term);
6359 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6360 their sum should be a constant. Form it into OP1, since the
6361 result we want will then be OP0 + OP1. */
6363 temp = simplify_binary_operation (PLUS, mode, constant_term,
6364 XEXP (op1, 1));
6365 if (temp != 0)
6366 op1 = temp;
6367 else
6368 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
6371 /* Put a constant term last and put a multiplication first. */
6372 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6373 temp = op1, op1 = op0, op0 = temp;
6375 temp = simplify_binary_operation (PLUS, mode, op0, op1);
6376 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
6378 case MINUS_EXPR:
6379 /* For initializers, we are allowed to return a MINUS of two
6380 symbolic constants. Here we handle all cases when both operands
6381 are constant. */
6382 /* Handle difference of two symbolic constants,
6383 for the sake of an initializer. */
6384 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6385 && really_constant_p (TREE_OPERAND (exp, 0))
6386 && really_constant_p (TREE_OPERAND (exp, 1)))
6388 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
6389 VOIDmode, ro_modifier);
6390 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6391 VOIDmode, ro_modifier);
6393 /* If the last operand is a CONST_INT, use plus_constant of
6394 the negated constant. Else make the MINUS. */
6395 if (GET_CODE (op1) == CONST_INT)
6396 return plus_constant (op0, - INTVAL (op1));
6397 else
6398 return gen_rtx_MINUS (mode, op0, op1);
6400 /* Convert A - const to A + (-const). */
6401 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6403 tree negated = fold (build1 (NEGATE_EXPR, type,
6404 TREE_OPERAND (exp, 1)));
6406 /* Deal with the case where we can't negate the constant
6407 in TYPE. */
6408 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6410 tree newtype = signed_type (type);
6411 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6412 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6413 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6415 if (! TREE_OVERFLOW (newneg))
6416 return expand_expr (convert (type,
6417 build (PLUS_EXPR, newtype,
6418 newop0, newneg)),
6419 target, tmode, ro_modifier);
6421 else
6423 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
6424 goto plus_expr;
6427 this_optab = sub_optab;
6428 goto binop;
6430 case MULT_EXPR:
6431 preexpand_calls (exp);
6432 /* If first operand is constant, swap them.
6433 Thus the following special case checks need only
6434 check the second operand. */
6435 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6437 register tree t1 = TREE_OPERAND (exp, 0);
6438 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6439 TREE_OPERAND (exp, 1) = t1;
6442 /* Attempt to return something suitable for generating an
6443 indexed address, for machines that support that. */
6445 if (modifier == EXPAND_SUM && mode == ptr_mode
6446 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6447 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6449 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6450 EXPAND_SUM);
6452 /* Apply distributive law if OP0 is x+c. */
6453 if (GET_CODE (op0) == PLUS
6454 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
6455 return gen_rtx_PLUS (mode,
6456 gen_rtx_MULT (mode, XEXP (op0, 0),
6457 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
6458 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6459 * INTVAL (XEXP (op0, 1))));
6461 if (GET_CODE (op0) != REG)
6462 op0 = force_operand (op0, NULL_RTX);
6463 if (GET_CODE (op0) != REG)
6464 op0 = copy_to_mode_reg (mode, op0);
6466 return gen_rtx_MULT (mode, op0,
6467 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
6470 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6471 subtarget = 0;
6473 /* Check for multiplying things that have been extended
6474 from a narrower type. If this machine supports multiplying
6475 in that narrower type with a result in the desired type,
6476 do it that way, and avoid the explicit type-conversion. */
6477 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6478 && TREE_CODE (type) == INTEGER_TYPE
6479 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6480 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6481 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6482 && int_fits_type_p (TREE_OPERAND (exp, 1),
6483 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6484 /* Don't use a widening multiply if a shift will do. */
6485 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
6486 > HOST_BITS_PER_WIDE_INT)
6487 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6488 ||
6489 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6490 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6491 ==
6492 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6493 /* If both operands are extended, they must either both
6494 be zero-extended or both be sign-extended. */
6495 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6496 ==
6497 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6499 enum machine_mode innermode
6500 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
6501 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6502 ? smul_widen_optab : umul_widen_optab);
6503 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6504 ? umul_widen_optab : smul_widen_optab);
6505 if (mode == GET_MODE_WIDER_MODE (innermode))
6507 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6509 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6510 NULL_RTX, VOIDmode, 0);
6511 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6512 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6513 VOIDmode, 0);
6514 else
6515 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6516 NULL_RTX, VOIDmode, 0);
6517 goto binop2;
6519 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6520 && innermode == word_mode)
6522 rtx htem;
6523 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6524 NULL_RTX, VOIDmode, 0);
6525 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6526 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6527 VOIDmode, 0);
6528 else
6529 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6530 NULL_RTX, VOIDmode, 0);
6531 temp = expand_binop (mode, other_optab, op0, op1, target,
6532 unsignedp, OPTAB_LIB_WIDEN);
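/* We multiplied with the widening optab of the opposite signedness, so the low part is correct; fix up the high part of the product.  */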
6533 htem = expand_mult_highpart_adjust (innermode,
6534 gen_highpart (innermode, temp),
6535 op0, op1,
6536 gen_highpart (innermode, temp),
6537 unsignedp);
6538 emit_move_insn (gen_highpart (innermode, temp), htem);
6539 return temp;
6543 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6544 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6545 return expand_mult (mode, op0, op1, target, unsignedp);
6547 case TRUNC_DIV_EXPR:
6548 case FLOOR_DIV_EXPR:
6549 case CEIL_DIV_EXPR:
6550 case ROUND_DIV_EXPR:
6551 case EXACT_DIV_EXPR:
6552 preexpand_calls (exp);
6553 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6554 subtarget = 0;
6555 /* Possible optimization: compute the dividend with EXPAND_SUM
6556 then if the divisor is constant can optimize the case
6557 where some terms of the dividend have coeffs divisible by it. */
6558 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6559 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6560 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6562 case RDIV_EXPR:
6563 this_optab = flodiv_optab;
6564 goto binop;
6566 case TRUNC_MOD_EXPR:
6567 case FLOOR_MOD_EXPR:
6568 case CEIL_MOD_EXPR:
6569 case ROUND_MOD_EXPR:
6570 preexpand_calls (exp);
6571 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6572 subtarget = 0;
6573 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6574 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6575 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6577 case FIX_ROUND_EXPR:
6578 case FIX_FLOOR_EXPR:
6579 case FIX_CEIL_EXPR:
6580 abort (); /* Not used for C. */
6582 case FIX_TRUNC_EXPR:
6583 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6584 if (target == 0)
6585 target = gen_reg_rtx (mode);
6586 expand_fix (target, op0, unsignedp);
6587 return target;
6589 case FLOAT_EXPR:
6590 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6591 if (target == 0)
6592 target = gen_reg_rtx (mode);
6593 /* expand_float can't figure out what to do if FROM has VOIDmode.
6594 So give it the correct mode. With -O, cse will optimize this. */
6595 if (GET_MODE (op0) == VOIDmode)
6596 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6597 op0);
6598 expand_float (target, op0,
6599 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6600 return target;
6602 case NEGATE_EXPR:
6603 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6604 temp = expand_unop (mode, neg_optab, op0, target, 0);
6605 if (temp == 0)
6606 abort ();
6607 return temp;
6609 case ABS_EXPR:
6610 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6612 /* Handle complex values specially. */
6613 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6614 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6615 return expand_complex_abs (mode, op0, target, unsignedp);
6617 /* Unsigned abs is simply the operand. Testing here means we don't
6618 risk generating incorrect code below. */
6619 if (TREE_UNSIGNED (type))
6620 return op0;
6622 return expand_abs (mode, op0, target, unsignedp,
6623 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
6625 case MAX_EXPR:
6626 case MIN_EXPR:
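/* We need a target we are free to clobber: not volatile memory, not a hard register, safe from operand 1, and of the right mode.  */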
6627 target = original_target;
6628 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
6629 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
6630 || GET_MODE (target) != mode
6631 || (GET_CODE (target) == REG
6632 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6633 target = gen_reg_rtx (mode);
6634 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6635 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6637 /* First try to do it with a special MIN or MAX instruction.
6638 If that does not win, use a conditional jump to select the proper
6639 value. */
6640 this_optab = (TREE_UNSIGNED (type)
6641 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6642 : (code == MIN_EXPR ? smin_optab : smax_optab));
6644 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6645 OPTAB_WIDEN);
6646 if (temp != 0)
6647 return temp;
6649 /* At this point, a MEM target is no longer useful; we will get better
6650 code without it. */
6652 if (GET_CODE (target) == MEM)
6653 target = gen_reg_rtx (mode);
6655 if (target != op0)
6656 emit_move_insn (target, op0);
6658 op0 = gen_label_rtx ();
6660 /* If this mode is an integer too wide to compare properly,
6661 compare word by word. Rely on cse to optimize constant cases. */
6662 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
6664 if (code == MAX_EXPR)
6665 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6666 target, op1, NULL_RTX, op0);
6667 else
6668 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6669 op1, target, NULL_RTX, op0);
6670 emit_move_insn (target, op1);
6672 else
6674 if (code == MAX_EXPR)
6675 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6676 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6677 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
6678 else
6679 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6680 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6681 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
6682 if (temp == const0_rtx)
6683 emit_move_insn (target, op1);
6684 else if (temp != const_true_rtx)
6686 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6687 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6688 else
6689 abort ();
6690 emit_move_insn (target, op1);
6693 emit_label (op0);
6694 return target;
6696 case BIT_NOT_EXPR:
6697 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6698 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6699 if (temp == 0)
6700 abort ();
6701 return temp;
6703 case FFS_EXPR:
6704 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6705 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6706 if (temp == 0)
6707 abort ();
6708 return temp;
6710 /* ??? Can optimize bitwise operations with one arg constant.
6711 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6712 and (a bitwise1 b) bitwise2 b (etc)
6713 but that is probably not worthwhile. */
6715 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6716 boolean values when we want in all cases to compute both of them. In
6717 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6718 as actual zero-or-1 values and then bitwise anding. In cases where
6719 there cannot be any side effects, better code would be made by
6720 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6721 how to recognize those cases. */
6723 case TRUTH_AND_EXPR:
6724 case BIT_AND_EXPR:
6725 this_optab = and_optab;
6726 goto binop;
6728 case TRUTH_OR_EXPR:
6729 case BIT_IOR_EXPR:
6730 this_optab = ior_optab;
6731 goto binop;
6733 case TRUTH_XOR_EXPR:
6734 case BIT_XOR_EXPR:
6735 this_optab = xor_optab;
6736 goto binop;
6738 case LSHIFT_EXPR:
6739 case RSHIFT_EXPR:
6740 case LROTATE_EXPR:
6741 case RROTATE_EXPR:
6742 preexpand_calls (exp);
6743 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6744 subtarget = 0;
6745 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6746 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6747 unsignedp);
6749 /* Could determine the answer when only additive constants differ. Also,
6750 the addition of one can be handled by changing the condition. */
6751 case LT_EXPR:
6752 case LE_EXPR:
6753 case GT_EXPR:
6754 case GE_EXPR:
6755 case EQ_EXPR:
6756 case NE_EXPR:
6757 preexpand_calls (exp);
6758 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6759 if (temp != 0)
6760 return temp;
6762 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
6763 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6764 && original_target
6765 && GET_CODE (original_target) == REG
6766 && (GET_MODE (original_target)
6767 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6769 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6770 VOIDmode, 0);
6772 if (temp != original_target)
6773 temp = copy_to_reg (temp);
6775 op1 = gen_label_rtx ();
6776 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
6777 GET_MODE (temp), unsignedp, 0);
6778 emit_jump_insn (gen_beq (op1));
6779 emit_move_insn (temp, const1_rtx);
6780 emit_label (op1);
6781 return temp;
6784 /* If no set-flag instruction, must generate a conditional
6785 store into a temporary variable. Drop through
6786 and handle this like && and ||. */
6788 case TRUTH_ANDIF_EXPR:
6789 case TRUTH_ORIF_EXPR:
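/* Compute the truth value with jumps: clear the target, let jumpifnot short-circuit the condition, and set the target to 1 only when the condition holds.  */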
6790 if (! ignore
6791 && (target == 0 || ! safe_from_p (target, exp, 1)
6792 /* Make sure we don't have a hard reg (such as function's return
6793 value) live across basic blocks, if not optimizing. */
6794 || (!optimize && GET_CODE (target) == REG
6795 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
6796 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6798 if (target)
6799 emit_clr_insn (target);
6801 op1 = gen_label_rtx ();
6802 jumpifnot (exp, op1);
6804 if (target)
6805 emit_0_to_1_insn (target);
6807 emit_label (op1);
6808 return ignore ? const0_rtx : target;
6810 case TRUTH_NOT_EXPR:
6811 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6812 /* The parser is careful to generate TRUTH_NOT_EXPR
6813 only with operands that are always zero or one. */
6814 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
6815 target, 1, OPTAB_LIB_WIDEN);
6816 if (temp == 0)
6817 abort ();
6818 return temp;
6820 case COMPOUND_EXPR:
6821 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6822 emit_queue ();
6823 return expand_expr (TREE_OPERAND (exp, 1),
6824 (ignore ? const0_rtx : target),
6825 VOIDmode, 0);
6827 case COND_EXPR:
6828 /* If we would have a "singleton" (see below) were it not for a
6829 conversion in each arm, bring that conversion back out. */
6830 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6831 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
6832 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
6833 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
6835 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
6836 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
6838 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
6839 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6840 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
6841 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
6842 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
6843 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6844 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
6845 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
6846 return expand_expr (build1 (NOP_EXPR, type,
6847 build (COND_EXPR, TREE_TYPE (true),
6848 TREE_OPERAND (exp, 0),
6849 true, false)),
6850 target, tmode, modifier);
6854 /* Note that COND_EXPRs whose type is a structure or union
6855 are required to be constructed to contain assignments of
6856 a temporary variable, so that we can evaluate them here
6857 for side effect only. If type is void, we must do likewise. */
6859 /* If an arm of the branch requires a cleanup,
6860 only that cleanup is performed. */
6862 tree singleton = 0;
6863 tree binary_op = 0, unary_op = 0;
6865 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6866 convert it to our mode, if necessary. */
6867 if (integer_onep (TREE_OPERAND (exp, 1))
6868 && integer_zerop (TREE_OPERAND (exp, 2))
6869 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6871 if (ignore)
6873 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6874 ro_modifier);
6875 return const0_rtx;
6878 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
6879 if (GET_MODE (op0) == mode)
6880 return op0;
6882 if (target == 0)
6883 target = gen_reg_rtx (mode);
6884 convert_move (target, op0, unsignedp);
6885 return target;
6888 /* Check for X ? A + B : A. If we have this, we can copy A to the
6889 output and conditionally add B. Similarly for unary operations.
6890 Don't do this if X has side-effects because those side effects
6891 might affect A or B and the "?" operation is a sequence point in
6892 ANSI. (operand_equal_p tests for side effects.) */
6894 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6895 && operand_equal_p (TREE_OPERAND (exp, 2),
6896 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6897 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6898 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6899 && operand_equal_p (TREE_OPERAND (exp, 1),
6900 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6901 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6902 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6903 && operand_equal_p (TREE_OPERAND (exp, 2),
6904 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6905 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6906 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6907 && operand_equal_p (TREE_OPERAND (exp, 1),
6908 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6909 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6911 /* If we are not to produce a result, we have no target. Otherwise,
6912 if a target was specified use it; it will not be used as an
6913 intermediate target unless it is safe. If no target, use a
6914 temporary. */
6916 if (ignore)
6917 temp = 0;
6918 else if (original_target
6919 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
6920 || (singleton && GET_CODE (original_target) == REG
6921 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
6922 && original_target == var_rtx (singleton)))
6923 && GET_MODE (original_target) == mode
6924 #ifdef HAVE_conditional_move
6925 && (! can_conditionally_move_p (mode)
6926 || GET_CODE (original_target) == REG
6927 || TREE_ADDRESSABLE (type))
6928 #endif
6929 && ! (GET_CODE (original_target) == MEM
6930 && MEM_VOLATILE_P (original_target)))
6931 temp = original_target;
6932 else if (TREE_ADDRESSABLE (type))
6933 abort ();
6934 else
6935 temp = assign_temp (type, 0, 0, 1);
6937 /* If we had X ? A + C : A, with C a constant power of 2, and we can
6938 do the test of X as a store-flag operation, do this as
6939 A + ((X != 0) << log C). Similarly for other simple binary
6940 operators. Only do for C == 1 if BRANCH_COST is low. */
6941 if (temp && singleton && binary_op
6942 && (TREE_CODE (binary_op) == PLUS_EXPR
6943 || TREE_CODE (binary_op) == MINUS_EXPR
6944 || TREE_CODE (binary_op) == BIT_IOR_EXPR
6945 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
6946 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
6947 : integer_onep (TREE_OPERAND (binary_op, 1)))
6948 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6950 rtx result;
6951 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6952 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6953 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
6954 : xor_optab);
6956 /* If we had X ? A : A + 1, do this as A + (X == 0).
6958 We have to invert the truth value here and then put it
6959 back later if do_store_flag fails. We cannot simply copy
6960 TREE_OPERAND (exp, 0) to another variable and modify that
6961 because invert_truthvalue can modify the tree pointed to
6962 by its argument. */
6963 if (singleton == TREE_OPERAND (exp, 1))
6964 TREE_OPERAND (exp, 0)
6965 = invert_truthvalue (TREE_OPERAND (exp, 0));
6967 result = do_store_flag (TREE_OPERAND (exp, 0),
6968 (safe_from_p (temp, singleton, 1)
6969 ? temp : NULL_RTX),
6970 mode, BRANCH_COST <= 1);
6972 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
6973 result = expand_shift (LSHIFT_EXPR, mode, result,
6974 build_int_2 (tree_log2
6975 (TREE_OPERAND
6976 (binary_op, 1)),
6977 0),
6978 (safe_from_p (temp, singleton, 1)
6979 ? temp : NULL_RTX), 0);
6981 if (result)
6983 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
6984 return expand_binop (mode, boptab, op1, result, temp,
6985 unsignedp, OPTAB_LIB_WIDEN);
6987 else if (singleton == TREE_OPERAND (exp, 1))
6988 TREE_OPERAND (exp, 0)
6989 = invert_truthvalue (TREE_OPERAND (exp, 0));
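/* The store-flag shortcut did not apply or did not succeed; generate the general code using explicit jumps between the arms.  */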
6992 do_pending_stack_adjust ();
6993 NO_DEFER_POP;
6994 op0 = gen_label_rtx ();
6996 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6998 if (temp != 0)
7000 /* If the target conflicts with the other operand of the
7001 binary op, we can't use it. Also, we can't use the target
7002 if it is a hard register, because evaluating the condition
7003 might clobber it. */
7004 if ((binary_op
7005 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7006 || (GET_CODE (temp) == REG
7007 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7008 temp = gen_reg_rtx (mode);
7009 store_expr (singleton, temp, 0);
7011 else
7012 expand_expr (singleton,
7013 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7014 if (singleton == TREE_OPERAND (exp, 1))
7015 jumpif (TREE_OPERAND (exp, 0), op0);
7016 else
7017 jumpifnot (TREE_OPERAND (exp, 0), op0);
7019 start_cleanup_deferral ();
7020 if (binary_op && temp == 0)
7021 /* Just touch the other operand. */
7022 expand_expr (TREE_OPERAND (binary_op, 1),
7023 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7024 else if (binary_op)
7025 store_expr (build (TREE_CODE (binary_op), type,
7026 make_tree (type, temp),
7027 TREE_OPERAND (binary_op, 1)),
7028 temp, 0);
7029 else
7030 store_expr (build1 (TREE_CODE (unary_op), type,
7031 make_tree (type, temp)),
7032 temp, 0);
7033 op1 = op0;
7035 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7036 comparison operator. If we have one of these cases, set the
7037 output to A, branch on A (cse will merge these two references),
7038 then set the output to FOO. */
7039 else if (temp
7040 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7041 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7042 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7043 TREE_OPERAND (exp, 1), 0)
7044 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7045 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7046 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7048 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7049 temp = gen_reg_rtx (mode);
7050 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7051 jumpif (TREE_OPERAND (exp, 0), op0);
7053 start_cleanup_deferral ();
7054 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7055 op1 = op0;
7057 else if (temp
7058 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7059 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7060 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7061 TREE_OPERAND (exp, 2), 0)
7062 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7063 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7064 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7066 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7067 temp = gen_reg_rtx (mode);
7068 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7069 jumpifnot (TREE_OPERAND (exp, 0), op0);
7071 start_cleanup_deferral ();
7072 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7073 op1 = op0;
7075 else
7077 op1 = gen_label_rtx ();
7078 jumpifnot (TREE_OPERAND (exp, 0), op0);
7080 start_cleanup_deferral ();
7081 if (temp != 0)
7082 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7083 else
7084 expand_expr (TREE_OPERAND (exp, 1),
7085 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7086 end_cleanup_deferral ();
7087 emit_queue ();
7088 emit_jump_insn (gen_jump (op1));
7089 emit_barrier ();
7090 emit_label (op0);
7091 start_cleanup_deferral ();
7092 if (temp != 0)
7093 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7094 else
7095 expand_expr (TREE_OPERAND (exp, 2),
7096 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7099 end_cleanup_deferral ();
7101 emit_queue ();
7102 emit_label (op1);
7103 OK_DEFER_POP;
7105 return temp;
7108 case TARGET_EXPR:
7110 /* Something needs to be initialized, but we didn't know
7111 where that thing was when building the tree. For example,
7112 it could be the return value of a function, or a parameter
7113 to a function that is laid out on the stack, or a temporary
7114 variable that must be passed by reference.
7116 We guarantee that the expression will either be constructed
7117 or copied into our original target. */
7119 tree slot = TREE_OPERAND (exp, 0);
7120 tree cleanups = NULL_TREE;
7121 tree exp1;
7123 if (TREE_CODE (slot) != VAR_DECL)
7124 abort ();
7126 if (! ignore)
7127 target = original_target;
7129 if (target == 0)
7131 if (DECL_RTL (slot) != 0)
7133 target = DECL_RTL (slot);
7134 /* We have already expanded the slot, so don't do
7135 it again. (mrs) */
7136 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7137 return target;
7139 else
7141 target = assign_temp (type, 2, 0, 1);
7142 /* All temp slots at this level must not conflict. */
7143 preserve_temp_slots (target);
7144 DECL_RTL (slot) = target;
7145 if (TREE_ADDRESSABLE (slot))
7147 TREE_ADDRESSABLE (slot) = 0;
7148 mark_addressable (slot);
7151 /* Since SLOT is not known to the called function
7152 to belong to its stack frame, we must build an explicit
7153 cleanup. This case occurs when we must build up a reference
7154 to pass as an argument. In this case, it is very
7155 likely that such a reference need not be
7156 built here. */
7158 if (TREE_OPERAND (exp, 2) == 0)
7159 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
7160 cleanups = TREE_OPERAND (exp, 2);
7163 else
7165 /* This case does occur when expanding a parameter which
7166 needs to be constructed on the stack. The target
7167 is the actual stack address that we want to initialize.
7168 The function we call will perform the cleanup in this case. */
7170 /* If we have already assigned it space, use that space,
7171 not the target that we were passed in, as our target
7172 parameter is only a hint. */
7173 if (DECL_RTL (slot) != 0)
7175 target = DECL_RTL (slot);
7176 /* We have already expanded the slot, so don't do
7177 it again. (mrs) */
7178 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7179 return target;
7181 else
7183 DECL_RTL (slot) = target;
7184 /* If we must have an addressable slot, then make sure that
7185 the RTL that we just stored in slot is OK. */
7186 if (TREE_ADDRESSABLE (slot))
7188 TREE_ADDRESSABLE (slot) = 0;
7189 mark_addressable (slot);
7194 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
7195 /* Mark it as expanded. */
7196 TREE_OPERAND (exp, 1) = NULL_TREE;
7198 TREE_USED (slot) = 1;
7199 store_expr (exp1, target, 0);
7201 expand_decl_cleanup (NULL_TREE, cleanups);
7203 return target;
7206 case INIT_EXPR:
7208 tree lhs = TREE_OPERAND (exp, 0);
7209 tree rhs = TREE_OPERAND (exp, 1);
7210 tree noncopied_parts = 0;
7211 tree lhs_type = TREE_TYPE (lhs);
7213 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7214 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7215 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7216 TYPE_NONCOPIED_PARTS (lhs_type));
7217 while (noncopied_parts != 0)
7219 expand_assignment (TREE_VALUE (noncopied_parts),
7220 TREE_PURPOSE (noncopied_parts), 0, 0);
7221 noncopied_parts = TREE_CHAIN (noncopied_parts);
7223 return temp;
7226 case MODIFY_EXPR:
7228 /* If lhs is complex, expand calls in rhs before computing it.
7229 That's so we don't compute a pointer and save it over a call.
7230 If lhs is simple, compute it first so we can give it as a
7231 target if the rhs is just a call. This avoids an extra temp and copy,
7232 and prevents a partial subsumption that makes bad code.
7233 Actually we could treat component_ref's of vars like vars. */
7235 tree lhs = TREE_OPERAND (exp, 0);
7236 tree rhs = TREE_OPERAND (exp, 1);
7237 tree noncopied_parts = 0;
7238 tree lhs_type = TREE_TYPE (lhs);
7240 temp = 0;
7242 if (TREE_CODE (lhs) != VAR_DECL
7243 && TREE_CODE (lhs) != RESULT_DECL
7244 && TREE_CODE (lhs) != PARM_DECL
7245 && ! (TREE_CODE (lhs) == INDIRECT_REF
7246 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7247 preexpand_calls (exp);
7249 /* Check for |= or &= of a bitfield of size 1 into another bitfield
7250 of size 1. In this case, (unless we need the result of the
7251 assignment) we can do this more efficiently with a
7252 test followed by an assignment, if necessary.
7254 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7255 things change so we do, this code should be enhanced to
7256 support it. */
7257 if (ignore
7258 && TREE_CODE (lhs) == COMPONENT_REF
7259 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7260 || TREE_CODE (rhs) == BIT_AND_EXPR)
7261 && TREE_OPERAND (rhs, 0) == lhs
7262 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7263 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7264 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7266 rtx label = gen_label_rtx ();
7268 do_jump (TREE_OPERAND (rhs, 1),
7269 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7270 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7271 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7272 (TREE_CODE (rhs) == BIT_IOR_EXPR
7273 ? integer_one_node
7274 : integer_zero_node)),
7275 0, 0);
7276 do_pending_stack_adjust ();
7277 emit_label (label);
7278 return const0_rtx;
7281 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7282 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7283 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7284 TYPE_NONCOPIED_PARTS (lhs_type));
7286 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7287 while (noncopied_parts != 0)
7289 expand_assignment (TREE_PURPOSE (noncopied_parts),
7290 TREE_VALUE (noncopied_parts), 0, 0);
7291 noncopied_parts = TREE_CHAIN (noncopied_parts);
7293 return temp;
7296 case PREINCREMENT_EXPR:
7297 case PREDECREMENT_EXPR:
7298 return expand_increment (exp, 0, ignore);
7300 case POSTINCREMENT_EXPR:
7301 case POSTDECREMENT_EXPR:
7302 /* Faster to treat as pre-increment if result is not used. */
7303 return expand_increment (exp, ! ignore, ignore);
7305 case ADDR_EXPR:
7306 /* If nonzero, TEMP will be set to the address of something that might
7307 be a MEM corresponding to a stack slot. */
7308 temp = 0;
7310 /* Are we taking the address of a nested function? */
7311 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7312 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7313 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
7314 && ! TREE_STATIC (exp))
7316 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7317 op0 = force_operand (op0, target);
7319 /* If we are taking the address of something erroneous, just
7320 return a zero. */
7321 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7322 return const0_rtx;
7323 else
7325 /* We make sure to pass const0_rtx down if we came in with
7326 ignore set, to avoid running the cleanups twice for the same thing. */
7327 op0 = expand_expr (TREE_OPERAND (exp, 0),
7328 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7329 (modifier == EXPAND_INITIALIZER
7330 ? modifier : EXPAND_CONST_ADDRESS));
7332 /* If we are going to ignore the result, OP0 will have been set
7333 to const0_rtx, so just return it. Don't get confused and
7334 think we are taking the address of the constant. */
7335 if (ignore)
7336 return op0;
7338 op0 = protect_from_queue (op0, 0);
7340 /* We would like the object in memory. If it is a constant,
7341 we can have it be statically allocated into memory. For
7342 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7343 memory and store the value into it. */
7345 if (CONSTANT_P (op0))
7346 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7347 op0);
7348 else if (GET_CODE (op0) == MEM)
7350 mark_temp_addr_taken (op0);
7351 temp = XEXP (op0, 0);
7354 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7355 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7357 /* If this object is in a register, it must not
7358 be BLKmode. */
7359 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7360 rtx memloc = assign_temp (inner_type, 1, 1, 1);
7362 mark_temp_addr_taken (memloc);
7363 emit_move_insn (memloc, op0);
7364 op0 = memloc;
7367 if (GET_CODE (op0) != MEM)
7368 abort ();
7370 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7372 temp = XEXP (op0, 0);
7373 #ifdef POINTERS_EXTEND_UNSIGNED
7374 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7375 && mode == ptr_mode)
7376 temp = convert_memory_address (ptr_mode, temp);
7377 #endif
7378 return temp;
7381 op0 = force_operand (XEXP (op0, 0), target);
7384 if (flag_force_addr && GET_CODE (op0) != REG)
7385 op0 = force_reg (Pmode, op0);
7387 if (GET_CODE (op0) == REG
7388 && ! REG_USERVAR_P (op0))
7389 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
7391 /* If we might have had a temp slot, add an equivalent address
7392 for it. */
7393 if (temp != 0)
7394 update_temp_slot_address (temp, op0);
7396 #ifdef POINTERS_EXTEND_UNSIGNED
7397 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7398 && mode == ptr_mode)
7399 op0 = convert_memory_address (ptr_mode, op0);
7400 #endif
7402 return op0;
7404 case ENTRY_VALUE_EXPR:
7405 abort ();
7407 /* COMPLEX type for Extended Pascal & Fortran. */
7408 case COMPLEX_EXPR:
7410 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7411 rtx insns;
7413 /* Get the rtx for the operands. */
7414 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7415 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7417 if (! target)
7418 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7420 start_sequence ();
7422 /* Move the real (op0) and imaginary (op1) parts to their location. */
7423 emit_move_insn (gen_realpart (mode, target), op0);
7424 emit_move_insn (gen_imagpart (mode, target), op1);
7426 insns = get_insns ();
7427 end_sequence ();
7429 /* Complex construction should appear as a single unit. */
7430 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7431 each with a separate pseudo as destination.
7432 It's not correct for flow to treat them as a unit. */
7433 if (GET_CODE (target) != CONCAT)
7434 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7435 else
7436 emit_insns (insns);
7438 return target;
7441 case REALPART_EXPR:
7442 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7443 return gen_realpart (mode, op0);
7445 case IMAGPART_EXPR:
7446 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7447 return gen_imagpart (mode, op0);
7449 case CONJ_EXPR:
7451 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7452 rtx imag_t;
7453 rtx insns;
7455 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7457 if (! target)
7458 target = gen_reg_rtx (mode);
7460 start_sequence ();
7462 /* Store the realpart and the negated imagpart to target. */
7463 emit_move_insn (gen_realpart (partmode, target),
7464 gen_realpart (partmode, op0));
7466 imag_t = gen_imagpart (partmode, target);
7467 temp = expand_unop (partmode, neg_optab,
7468 gen_imagpart (partmode, op0), imag_t, 0);
7469 if (temp != imag_t)
7470 emit_move_insn (imag_t, temp);
7472 insns = get_insns ();
7473 end_sequence ();
7475 /* Conjugate should appear as a single unit.
7476 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
7477 each with a separate pseudo as destination.
7478 It's not correct for flow to treat them as a unit. */
7479 if (GET_CODE (target) != CONCAT)
7480 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7481 else
7482 emit_insns (insns);
7484 return target;
7487 case TRY_CATCH_EXPR:
7489 tree handler = TREE_OPERAND (exp, 1);
7491 expand_eh_region_start ();
7493 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7495 expand_eh_region_end (handler);
7497 return op0;
7500 case POPDCC_EXPR:
7502 rtx dcc = get_dynamic_cleanup_chain ();
7503 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
7504 return const0_rtx;
7507 case POPDHC_EXPR:
7509 rtx dhc = get_dynamic_handler_chain ();
7510 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
7511 return const0_rtx;
7514 case ERROR_MARK:
7515 op0 = CONST0_RTX (tmode);
7516 if (op0 != 0)
7517 return op0;
7518 return const0_rtx;
7520 default:
7521 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7524 /* Here to do an ordinary binary operator, generating an instruction
7525 from the optab already placed in `this_optab'. */
7526 binop:
7527 preexpand_calls (exp);
7528 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7529 subtarget = 0;
7530 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7531 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7532 binop2:
7533 temp = expand_binop (mode, this_optab, op0, op1, target,
7534 unsignedp, OPTAB_LIB_WIDEN);
7535 if (temp == 0)
7536 abort ();
7537 return temp;
7542 /* Return the alignment in bits of EXP, a pointer valued expression.
7543 But don't return more than MAX_ALIGN no matter what.
7544 The alignment returned is, by default, the alignment of the thing that
7545 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7547 Otherwise, look at the expression to see if we can do better, i.e., if the
7548 expression is actually pointing at an object whose alignment is tighter. */
7550 static int
7551 get_pointer_alignment (exp, max_align)
7552 tree exp;
7553 unsigned max_align;
7555 unsigned align, inner;
7557 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7558 return 0;
7560 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7561 align = MIN (align, max_align);
7563 while (1)
7565 switch (TREE_CODE (exp))
7567 case NOP_EXPR:
7568 case CONVERT_EXPR:
7569 case NON_LVALUE_EXPR:
7570 exp = TREE_OPERAND (exp, 0);
7571 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7572 return align;
7573 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7574 align = MIN (inner, max_align);
7575 break;
7577 case PLUS_EXPR:
7578 /* If sum of pointer + int, restrict our maximum alignment to that
7579 imposed by the integer. If not, we can't do any better than
7580 ALIGN. */
7581 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7582 return align;
7584 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7585 & (max_align - 1))
7586 != 0)
7587 max_align >>= 1;
7589 exp = TREE_OPERAND (exp, 0);
7590 break;
7592 case ADDR_EXPR:
7593 /* See what we are pointing at and look at its alignment. */
7594 exp = TREE_OPERAND (exp, 0);
7595 if (TREE_CODE (exp) == FUNCTION_DECL)
7596 align = FUNCTION_BOUNDARY;
7597 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7598 align = DECL_ALIGN (exp);
7599 #ifdef CONSTANT_ALIGNMENT
7600 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7601 align = CONSTANT_ALIGNMENT (exp, align);
7602 #endif
7603 return MIN (align, max_align);
7605 default:
7606 return align;
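/* Worked example (an illustration, not part of the original source):
   for

       double d;
       char *p = (char *) &d + 1;

   the PLUS_EXPR case above halves MAX_ALIGN until the 1-byte offset
   is a multiple of it, so the result is BITS_PER_UNIT even though
   DECL_ALIGN (d) is larger. */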
7611 /* Return the tree node and offset if a given argument corresponds to
7612 a string constant. */
7614 static tree
7615 string_constant (arg, ptr_offset)
7616 tree arg;
7617 tree *ptr_offset;
7619 STRIP_NOPS (arg);
7621 if (TREE_CODE (arg) == ADDR_EXPR
7622 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7624 *ptr_offset = integer_zero_node;
7625 return TREE_OPERAND (arg, 0);
7627 else if (TREE_CODE (arg) == PLUS_EXPR)
7629 tree arg0 = TREE_OPERAND (arg, 0);
7630 tree arg1 = TREE_OPERAND (arg, 1);
7632 STRIP_NOPS (arg0);
7633 STRIP_NOPS (arg1);
7635 if (TREE_CODE (arg0) == ADDR_EXPR
7636 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7638 *ptr_offset = arg1;
7639 return TREE_OPERAND (arg0, 0);
7641 else if (TREE_CODE (arg1) == ADDR_EXPR
7642 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7644 *ptr_offset = arg0;
7645 return TREE_OPERAND (arg1, 0);
7649 return 0;
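/* Example (illustrative, assuming the usual C front-end trees): for
   an argument of the form "abcd" + 2, i.e. a PLUS_EXPR whose first
   operand is the ADDR_EXPR of a STRING_CST, this returns the
   STRING_CST and sets *PTR_OFFSET to 2; a plain "abcd" argument
   returns it with a zero offset. */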
7652 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7653 way, because it could contain a zero byte in the middle.
7654 TREE_STRING_LENGTH is the size of the character array, not the string.
7656 Unfortunately, string_constant can't access the values of const char
7657 arrays with initializers, so neither can we do so here. */
7659 static tree
7660 c_strlen (src)
7661 tree src;
7663 tree offset_node;
7664 int offset, max;
7665 char *ptr;
7667 src = string_constant (src, &offset_node);
7668 if (src == 0)
7669 return 0;
7670 max = TREE_STRING_LENGTH (src);
7671 ptr = TREE_STRING_POINTER (src);
7672 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7674 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7675 compute the offset to the following null if we don't know where to
7676 start searching for it. */
7677 int i;
7678 for (i = 0; i < max; i++)
7679 if (ptr[i] == 0)
7680 return 0;
7681 /* We don't know the starting offset, but we do know that the string
7682 has no internal zero bytes. We can assume that the offset falls
7683 within the bounds of the string; otherwise, the programmer deserves
7684 what he gets. Subtract the offset from the length of the string,
7685 and return that. */
7686 /* This would perhaps not be valid if we were dealing with named
7687 arrays in addition to literal string constants. */
7688 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7691 /* We have a known offset into the string. Start searching there for
7692 a null character. */
7693 if (offset_node == 0)
7694 offset = 0;
7695 else
7697 /* Did we get a long long offset? If so, punt. */
7698 if (TREE_INT_CST_HIGH (offset_node) != 0)
7699 return 0;
7700 offset = TREE_INT_CST_LOW (offset_node);
7702 /* If the offset is known to be out of bounds, warn, and call strlen at
7703 runtime. */
7704 if (offset < 0 || offset > max)
7706 warning ("offset outside bounds of constant string");
7707 return 0;
7709 /* Use strlen to search for the first zero byte. Since any strings
7710 constructed with build_string will have nulls appended, we win even
7711 if we get handed something like (char[4])"abcd".
7713 Since OFFSET is our starting index into the string, no further
7714 calculation is needed. */
7715 return size_int (strlen (ptr + offset));
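/* Examples (illustrative): c_strlen of "hello" yields size_int (5);
   of "hello" + 2 it yields 3; for "foo\0bar" with a non-constant
   offset it returns 0, since the embedded null makes the length
   unknowable, and the caller falls back to a runtime strlen. */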
7718 rtx
7719 expand_builtin_return_addr (fndecl_code, count, tem)
7720 enum built_in_function fndecl_code;
7721 int count;
7722 rtx tem;
7724 int i;
7726 /* Some machines need special handling before we can access
7727 arbitrary frames. For example, on the sparc, we must first flush
7728 all register windows to the stack. */
7729 #ifdef SETUP_FRAME_ADDRESSES
7730 if (count > 0)
7731 SETUP_FRAME_ADDRESSES ();
7732 #endif
7734 /* On the sparc, the return address is not in the frame, it is in a
7735 register. There is no way to access it off of the current frame
7736 pointer, but it can be accessed off the previous frame pointer by
7737 reading the value from the register window save area. */
7738 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7739 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
7740 count--;
7741 #endif
7743 /* Scan back COUNT frames to the specified frame. */
7744 for (i = 0; i < count; i++)
7746 /* Assume the dynamic chain pointer is in the word that the
7747 frame address points to, unless otherwise specified. */
7748 #ifdef DYNAMIC_CHAIN_ADDRESS
7749 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7750 #endif
7751 tem = memory_address (Pmode, tem);
7752 tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
7755 /* For __builtin_frame_address, return what we've got. */
7756 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
7757 return tem;
7759 /* For __builtin_return_address, get the return address from that
7760 frame. */
7761 #ifdef RETURN_ADDR_RTX
7762 tem = RETURN_ADDR_RTX (count, tem);
7763 #else
7764 tem = memory_address (Pmode,
7765 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7766 tem = gen_rtx_MEM (Pmode, tem);
7767 #endif
7768 return tem;
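/* Illustrative uses (a sketch, not part of the original source):

       void *ra = __builtin_return_address (0);
       void *fp = __builtin_frame_address (1);

   the first returns the current function's return address without
   entering the scan loop above; the second scans one frame up the
   dynamic chain, where the target supports that. */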
7771 /* __builtin_setjmp is passed a pointer to an array of five words (not
7772 all will be used on all machines). It operates similarly to the C
7773 library function of the same name, but is more efficient. Much of
7774 the code below (and for longjmp) is copied from the handling of
7775 non-local gotos.
7777 NOTE: This is intended for use by GNAT and the exception handling
7778 scheme in the compiler and will only work in the method used by
7779 them. */
7781 rtx
7782 expand_builtin_setjmp (buf_addr, target, first_label, next_label)
7783 rtx buf_addr;
7784 rtx target;
7785 rtx first_label, next_label;
7787 rtx lab1 = gen_label_rtx ();
7788 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
7789 enum machine_mode value_mode;
7790 rtx stack_save;
7792 value_mode = TYPE_MODE (integer_type_node);
7794 #ifdef POINTERS_EXTEND_UNSIGNED
7795 buf_addr = convert_memory_address (Pmode, buf_addr);
7796 #endif
7798 buf_addr = force_reg (Pmode, buf_addr);
7800 if (target == 0 || GET_CODE (target) != REG
7801 || REGNO (target) < FIRST_PSEUDO_REGISTER)
7802 target = gen_reg_rtx (value_mode);
7804 emit_queue ();
7806 #ifndef BUILTIN_SETJMP_FRAME_VALUE
7807 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
7808 #endif
7810 /* We store the frame pointer and the address of lab1 in the buffer
7811 and use the rest of it for the stack save area, which is
7812 machine-dependent. */
7813 emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
7814 BUILTIN_SETJMP_FRAME_VALUE);
7815 emit_move_insn (validize_mem
7816 (gen_rtx_MEM (Pmode,
7817 plus_constant (buf_addr,
7818 GET_MODE_SIZE (Pmode)))),
7819 gen_rtx_LABEL_REF (Pmode, lab1));
7821 stack_save = gen_rtx_MEM (sa_mode,
7822 plus_constant (buf_addr,
7823 2 * GET_MODE_SIZE (Pmode)));
7824 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
7826 /* If there is further processing to do, do it. */
7827 #ifdef HAVE_builtin_setjmp_setup
7828 if (HAVE_builtin_setjmp_setup)
7829 emit_insn (gen_builtin_setjmp_setup (buf_addr));
7830 #endif
7832 /* Set TARGET to zero and branch to the first-time-through label. */
7833 emit_move_insn (target, const0_rtx);
7834 emit_jump_insn (gen_jump (first_label));
7835 emit_barrier ();
7836 emit_label (lab1);
7838 /* Tell flow about the strange goings on. */
7839 current_function_has_nonlocal_label = 1;
7841 /* The frame pointer is clobbered when we get here, so we have to
7842 make sure it's marked as used by this function. */
7843 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
7845 /* Mark the static chain as clobbered here so life information
7846 doesn't get messed up for it. */
7847 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
7849 /* Now put in the code to restore the frame pointer, and the
7850 argument pointer if needed. The code below is from expand_end_bindings
7851 in stmt.c; see detailed documentation there. */
7852 #ifdef HAVE_nonlocal_goto
7853 if (! HAVE_nonlocal_goto)
7854 #endif
7855 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
7857 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
7858 if (fixed_regs[ARG_POINTER_REGNUM])
7860 #ifdef ELIMINABLE_REGS
7861 int i;
7862 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
7864 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
7865 if (elim_regs[i].from == ARG_POINTER_REGNUM
7866 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
7867 break;
7869 if (i == sizeof elim_regs / sizeof elim_regs [0])
7870 #endif
7872 /* Now restore our arg pointer from the address at which it
7873 was saved in our stack frame.
7874 If there hasn't been space allocated for it yet, make
7875 some now. */
7876 if (arg_pointer_save_area == 0)
7877 arg_pointer_save_area
7878 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
7879 emit_move_insn (virtual_incoming_args_rtx,
7880 copy_to_reg (arg_pointer_save_area));
7883 #endif
7885 #ifdef HAVE_builtin_setjmp_receiver
7886 if (HAVE_builtin_setjmp_receiver)
7887 emit_insn (gen_builtin_setjmp_receiver (lab1));
7888 else
7889 #endif
7890 #ifdef HAVE_nonlocal_goto_receiver
7891 if (HAVE_nonlocal_goto_receiver)
7892 emit_insn (gen_nonlocal_goto_receiver ());
7893 else
7894 #endif
7896 ; /* Nothing */
7899 /* Set TARGET, and branch to the next-time-through label. */
7900 emit_move_insn (target, const1_rtx);
7901 emit_jump_insn (gen_jump (next_label));
7902 emit_barrier ();
7904 return target;
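/* For reference, the five-word buffer layout established above is
   (a sketch): word 0 holds BUILTIN_SETJMP_FRAME_VALUE, word 1 the
   address of the receiver label LAB1, and the remaining words the
   stack save area in STACK_SAVEAREA_MODE (SAVE_NONLOCAL). */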
7907 void
7908 expand_builtin_longjmp (buf_addr, value)
7909 rtx buf_addr, value;
7911 rtx fp, lab, stack;
7912 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
7914 #ifdef POINTERS_EXTEND_UNSIGNED
7915 buf_addr = convert_memory_address (Pmode, buf_addr);
7916 #endif
7917 buf_addr = force_reg (Pmode, buf_addr);
7919 /* We used to store value in static_chain_rtx, but that fails if pointers
7920 are smaller than integers. We instead require that the user pass
7921 a second argument of 1, because that is what builtin_setjmp will
7922 return. This also makes EH slightly more efficient, since we are no
7923 longer copying around a value that we don't care about. */
7924 if (value != const1_rtx)
7925 abort ();
7927 #ifdef HAVE_builtin_longjmp
7928 if (HAVE_builtin_longjmp)
7929 emit_insn (gen_builtin_longjmp (buf_addr));
7930 else
7931 #endif
7933 fp = gen_rtx_MEM (Pmode, buf_addr);
7934 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
7935 GET_MODE_SIZE (Pmode)));
7937 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
7938 2 * GET_MODE_SIZE (Pmode)));
7940 /* Pick up FP, label, and SP from the block and jump. This code is
7941 from expand_goto in stmt.c; see there for detailed comments. */
7942 #if HAVE_nonlocal_goto
7943 if (HAVE_nonlocal_goto)
7944 /* We have to pass a value to the nonlocal_goto pattern that will
7945 get copied into the static_chain pointer, but it does not matter
7946 what that value is, because builtin_setjmp does not use it. */
7947 emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
7948 else
7949 #endif
7951 lab = copy_to_reg (lab);
7953 emit_move_insn (hard_frame_pointer_rtx, fp);
7954 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
7956 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
7957 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
7958 emit_indirect_jump (lab);
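/* Caller contract, for illustration: the only supported form is

       __builtin_longjmp (buf, 1);

   any second argument other than the literal 1 trips the abort
   above, matching the value __builtin_setjmp yields on the resumed
   path. */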
7964 /* Expand an expression EXP that calls a built-in function,
7965 with result going to TARGET if that's convenient
7966 (and in mode MODE if that's convenient).
7967 SUBTARGET may be used as the target for computing one of EXP's operands.
7968 IGNORE is nonzero if the value is to be ignored. */
7970 #define CALLED_AS_BUILT_IN(NODE) \
7971 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
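/* E.g. a call spelled __builtin_strlen (s) satisfies this test and
   so is expanded inline below even at -O0, whereas a plain
   strlen (s) is only open-coded when optimizing. */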
7973 static rtx
7974 expand_builtin (exp, target, subtarget, mode, ignore)
7975 tree exp;
7976 rtx target;
7977 rtx subtarget;
7978 enum machine_mode mode;
7979 int ignore;
7981 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7982 tree arglist = TREE_OPERAND (exp, 1);
7983 rtx op0;
7984 rtx lab1, insns;
7985 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
7986 optab builtin_optab;
7988 switch (DECL_FUNCTION_CODE (fndecl))
7990 case BUILT_IN_ABS:
7991 case BUILT_IN_LABS:
7992 case BUILT_IN_FABS:
7993 /* build_function_call changes these into ABS_EXPR. */
7994 abort ();
7996 case BUILT_IN_SIN:
7997 case BUILT_IN_COS:
7998 /* Treat these like sqrt, but only if the user asks for them. */
7999 if (! flag_fast_math)
8000 break;
8001 case BUILT_IN_FSQRT:
8002 /* If not optimizing, call the library function. */
8003 if (! optimize)
8004 break;
8006 if (arglist == 0
8007 /* Arg could be wrong type if user redeclared this fcn wrong. */
8008 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
8009 break;
8011 /* Stabilize and compute the argument. */
8012 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8013 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8015 exp = copy_node (exp);
8016 arglist = copy_node (arglist);
8017 TREE_OPERAND (exp, 1) = arglist;
8018 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8020 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8022 /* Make a suitable register to place result in. */
8023 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8025 emit_queue ();
8026 start_sequence ();
8028 switch (DECL_FUNCTION_CODE (fndecl))
8030 case BUILT_IN_SIN:
8031 builtin_optab = sin_optab; break;
8032 case BUILT_IN_COS:
8033 builtin_optab = cos_optab; break;
8034 case BUILT_IN_FSQRT:
8035 builtin_optab = sqrt_optab; break;
8036 default:
8037 abort ();
8040 /* Compute into TARGET.
8041 Set TARGET to wherever the result comes back. */
8042 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8043 builtin_optab, op0, target, 0);
8045 /* If we were unable to expand via the builtin, stop the
8046 sequence (without outputting the insns) and break, causing
8047 a call to the library function. */
8048 if (target == 0)
8050 end_sequence ();
8051 break;
8054 /* Check the results by default. But if flag_fast_math is turned on,
8055 then assume sqrt will always be called with valid arguments. */
8057 if (! flag_fast_math)
8059 /* Don't define the builtin FP instructions
8060 if your machine is not IEEE. */
8061 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8062 abort ();
8064 lab1 = gen_label_rtx ();
8066 /* Test the result; if it is NaN, set errno=EDOM because
8067 the argument was not in the domain. */
8068 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
8069 emit_jump_insn (gen_beq (lab1));
8071 #ifdef TARGET_EDOM
8073 #ifdef GEN_ERRNO_RTX
8074 rtx errno_rtx = GEN_ERRNO_RTX;
8075 #else
8076 rtx errno_rtx
8077 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
8078 #endif
8080 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8082 #else
8083 /* We can't set errno=EDOM directly; let the library call do it.
8084 Pop the arguments right away in case the call gets deleted. */
8085 NO_DEFER_POP;
8086 expand_call (exp, target, 0);
8087 OK_DEFER_POP;
8088 #endif
8090 emit_label (lab1);
8093 /* Output the entire sequence. */
8094 insns = get_insns ();
8095 end_sequence ();
8096 emit_insns (insns);
8098 return target;
8100 case BUILT_IN_FMOD:
8101 break;
8103 /* __builtin_apply_args returns a block of memory allocated on
8104 the stack into which is stored the arg pointer, structure
8105 value address, static chain, and all the registers that might
8106 possibly be used in performing a function call. The code is
8107 moved to the start of the function so the incoming values are
8108 saved. */
8109 case BUILT_IN_APPLY_ARGS:
8110 /* Don't do __builtin_apply_args more than once in a function.
8111 Save the result of the first call and reuse it. */
8112 if (apply_args_value != 0)
8113 return apply_args_value;
8115 /* When this function is called, it means that registers must be
8116 saved on entry to this function. So we migrate the
8117 call to the first insn of this function. */
8118 rtx temp;
8119 rtx seq;
8121 start_sequence ();
8122 temp = expand_builtin_apply_args ();
8123 seq = get_insns ();
8124 end_sequence ();
8126 apply_args_value = temp;
8128 /* Put the sequence after the NOTE that starts the function.
8129 If this is inside a SEQUENCE, make the outer-level insn
8130 chain current, so the code is placed at the start of the
8131 function. */
8132 push_topmost_sequence ();
8133 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8134 pop_topmost_sequence ();
8135 return temp;
8138 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8139 FUNCTION with a copy of the parameters described by
8140 ARGUMENTS, and ARGSIZE. It returns a block of memory
8141 allocated on the stack into which are stored all the registers
8142 that might possibly be used for returning the result of a
8143 function. ARGUMENTS is the value returned by
8144 __builtin_apply_args. ARGSIZE is the number of bytes of
8145 arguments that must be copied. ??? How should this value be
8146 computed? We'll also need a safe worst case value for varargs
8147 functions. */
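/* A hypothetical use, for illustration only; FN and the 64-byte
   argument size are assumptions supplied by the caller, per the ???
   note above:

       void *args = __builtin_apply_args ();
       void *res = __builtin_apply ((void (*) ()) fn, args, 64);
       __builtin_return (res);
*/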
8148 case BUILT_IN_APPLY:
8149 if (arglist == 0
8150 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8151 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
8152 || TREE_CHAIN (arglist) == 0
8153 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8154 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8155 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8156 return const0_rtx;
8157 else
8159 int i;
8160 tree t;
8161 rtx ops[3];
8163 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8164 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8166 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8169 /* __builtin_return (RESULT) causes the function to return the
8170 value described by RESULT. RESULT is address of the block of
8171 memory returned by __builtin_apply. */
8172 case BUILT_IN_RETURN:
8173 if (arglist
8174 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8175 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8176 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8177 NULL_RTX, VOIDmode, 0));
8178 return const0_rtx;
8180 case BUILT_IN_SAVEREGS:
8181 /* Don't do __builtin_saveregs more than once in a function.
8182 Save the result of the first call and reuse it. */
8183 if (saveregs_value != 0)
8184 return saveregs_value;
8186 /* When this function is called, it means that registers must be
8187 saved on entry to this function. So we migrate the
8188 call to the first insn of this function. */
8189 rtx temp;
8190 rtx seq;
8192 /* Now really call the function. `expand_call' does not call
8193 expand_builtin, so there is no danger of infinite recursion here. */
8194 start_sequence ();
8196 #ifdef EXPAND_BUILTIN_SAVEREGS
8197 /* Do whatever the machine needs done in this case. */
8198 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8199 #else
8200 /* The register where the function returns its value
8201 is likely to have something else in it, such as an argument.
8202 So preserve that register around the call. */
8204 if (value_mode != VOIDmode)
8206 rtx valreg = hard_libcall_value (value_mode);
8207 rtx saved_valreg = gen_reg_rtx (value_mode);
8209 emit_move_insn (saved_valreg, valreg);
8210 temp = expand_call (exp, target, ignore);
8211 emit_move_insn (valreg, saved_valreg);
8213 else
8214 /* Generate the call, putting the value in a pseudo. */
8215 temp = expand_call (exp, target, ignore);
8216 #endif
8218 seq = get_insns ();
8219 end_sequence ();
8221 saveregs_value = temp;
8223 /* Put the sequence after the NOTE that starts the function.
8224 If this is inside a SEQUENCE, make the outer-level insn
8225 chain current, so the code is placed at the start of the
8226 function. */
8227 push_topmost_sequence ();
8228 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8229 pop_topmost_sequence ();
8230 return temp;
8233 /* __builtin_args_info (N) returns word N of the arg space info
8234 for the current function. The number and meanings of words
8235 are controlled by the definition of CUMULATIVE_ARGS.
8236 case BUILT_IN_ARGS_INFO:
8238 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8239 int *word_ptr = (int *) &current_function_args_info;
8240 #if 0
8241 /* These are used by the code below that is #if 0'ed away. */
8242 int i;
8243 tree type, elts, result;
8244 #endif
8246 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8247 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8248 __FILE__, __LINE__);
8250 if (arglist != 0)
8252 tree arg = TREE_VALUE (arglist);
8253 if (TREE_CODE (arg) != INTEGER_CST)
8254 error ("argument of `__builtin_args_info' must be constant");
8255 else
8257 int wordnum = TREE_INT_CST_LOW (arg);
8259 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8260 error ("argument of `__builtin_args_info' out of range");
8261 else
8262 return GEN_INT (word_ptr[wordnum]);
8265 else
8266 error ("missing argument in `__builtin_args_info'");
8268 return const0_rtx;
8270 #if 0
8271 for (i = 0; i < nwords; i++)
8272 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
8274 type = build_array_type (integer_type_node,
8275 build_index_type (build_int_2 (nwords, 0)));
8276 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8277 TREE_CONSTANT (result) = 1;
8278 TREE_STATIC (result) = 1;
8279 result = build (INDIRECT_REF, build_pointer_type (type), result);
8280 TREE_CONSTANT (result) = 1;
8281 return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
8282 #endif
8285 /* Return the address of the first anonymous stack arg. */
8286 case BUILT_IN_NEXT_ARG:
8288 tree fntype = TREE_TYPE (current_function_decl);
8290 if ((TYPE_ARG_TYPES (fntype) == 0
8291 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8292 == void_type_node))
8293 && ! current_function_varargs)
8295 error ("`va_start' used in function with fixed args");
8296 return const0_rtx;
8299 if (arglist)
8301 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8302 tree arg = TREE_VALUE (arglist);
8304 /* Strip off all nops for the sake of the comparison. This
8305 is not quite the same as STRIP_NOPS. It does more.
8306 We must also strip off INDIRECT_REF for C++ reference
8307 parameters. */
8308 while (TREE_CODE (arg) == NOP_EXPR
8309 || TREE_CODE (arg) == CONVERT_EXPR
8310 || TREE_CODE (arg) == NON_LVALUE_EXPR
8311 || TREE_CODE (arg) == INDIRECT_REF)
8312 arg = TREE_OPERAND (arg, 0);
8313 if (arg != last_parm)
8314 warning ("second parameter of `va_start' not last named argument");
8316 else if (! current_function_varargs)
8317 /* Evidently an out of date version of <stdarg.h>; can't validate
8318 va_start's second argument, but can still work as intended. */
8319 warning ("`__builtin_next_arg' called without an argument");
8322 return expand_binop (Pmode, add_optab,
8323 current_function_internal_arg_pointer,
8324 current_function_arg_offset_rtx,
8325 NULL_RTX, 0, OPTAB_LIB_WIDEN);
8327 case BUILT_IN_CLASSIFY_TYPE:
8328 if (arglist != 0)
8330 tree type = TREE_TYPE (TREE_VALUE (arglist));
8331 enum tree_code code = TREE_CODE (type);
8332 if (code == VOID_TYPE)
8333 return GEN_INT (void_type_class);
8334 if (code == INTEGER_TYPE)
8335 return GEN_INT (integer_type_class);
8336 if (code == CHAR_TYPE)
8337 return GEN_INT (char_type_class);
8338 if (code == ENUMERAL_TYPE)
8339 return GEN_INT (enumeral_type_class);
8340 if (code == BOOLEAN_TYPE)
8341 return GEN_INT (boolean_type_class);
8342 if (code == POINTER_TYPE)
8343 return GEN_INT (pointer_type_class);
8344 if (code == REFERENCE_TYPE)
8345 return GEN_INT (reference_type_class);
8346 if (code == OFFSET_TYPE)
8347 return GEN_INT (offset_type_class);
8348 if (code == REAL_TYPE)
8349 return GEN_INT (real_type_class);
8350 if (code == COMPLEX_TYPE)
8351 return GEN_INT (complex_type_class);
8352 if (code == FUNCTION_TYPE)
8353 return GEN_INT (function_type_class);
8354 if (code == METHOD_TYPE)
8355 return GEN_INT (method_type_class);
8356 if (code == RECORD_TYPE)
8357 return GEN_INT (record_type_class);
8358 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8359 return GEN_INT (union_type_class);
8360 if (code == ARRAY_TYPE)
8362 if (TYPE_STRING_FLAG (type))
8363 return GEN_INT (string_type_class);
8364 else
8365 return GEN_INT (array_type_class);
8367 if (code == SET_TYPE)
8368 return GEN_INT (set_type_class);
8369 if (code == FILE_TYPE)
8370 return GEN_INT (file_type_class);
8371 if (code == LANG_TYPE)
8372 return GEN_INT (lang_type_class);
8374 return GEN_INT (no_type_class);
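/* For instance, __builtin_classify_type (1) expands to
   integer_type_class and __builtin_classify_type (1.0) to
   real_type_class; the class values are defined in typeclass.h. */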
8376 case BUILT_IN_CONSTANT_P:
8377 if (arglist == 0)
8378 return const0_rtx;
8379 else
8381 tree arg = TREE_VALUE (arglist);
8383 STRIP_NOPS (arg);
8384 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
8385 || (TREE_CODE (arg) == ADDR_EXPR
8386 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8387 ? const1_rtx : const0_rtx);
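/* So __builtin_constant_p (3) and __builtin_constant_p ("x") expand
   to 1 here, while any argument not already folded to a constant or
   to the address of a string literal expands to 0. */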
8390 case BUILT_IN_FRAME_ADDRESS:
8391 /* The argument must be a nonnegative integer constant.
8392 It counts the number of frames to scan up the stack.
8393 The value is the address of that frame. */
8394 case BUILT_IN_RETURN_ADDRESS:
8395 /* The argument must be a nonnegative integer constant.
8396 It counts the number of frames to scan up the stack.
8397 The value is the return address saved in that frame. */
8398 if (arglist == 0)
8399 /* Warning about missing arg was already issued. */
8400 return const0_rtx;
8401 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
8402 || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8404 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8405 error ("invalid arg to `__builtin_frame_address'");
8406 else
8407 error ("invalid arg to `__builtin_return_address'");
8408 return const0_rtx;
8410 else
8412 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8413 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8414 hard_frame_pointer_rtx);
8416 /* Some ports cannot access arbitrary stack frames. */
8417 if (tem == NULL)
8419 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8420 warning ("unsupported arg to `__builtin_frame_address'");
8421 else
8422 warning ("unsupported arg to `__builtin_return_address'");
8423 return const0_rtx;
8426 /* For __builtin_frame_address, return what we've got. */
8427 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8428 return tem;
8430 if (GET_CODE (tem) != REG)
8431 tem = copy_to_reg (tem);
8432 return tem;
8435 /* Return the address of the area where the structure value is
8436 returned, or 0 otherwise. */
8437 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
8438 if (arglist != 0
8439 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
8440 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
8441 return const0_rtx;
8442 else
8443 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
8445 case BUILT_IN_ALLOCA:
8446 if (arglist == 0
8447 /* Arg could be non-integer if user redeclared this fcn wrong. */
8448 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8449 break;
8451 /* Compute the argument. */
8452 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8454 /* Allocate the desired space. */
8455 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
8457 case BUILT_IN_FFS:
8458 /* If not optimizing, call the library function. */
8459 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8460 break;
8462 if (arglist == 0
8463 /* Arg could be non-integer if user redeclared this fcn wrong. */
8464 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8465 break;
8467 /* Compute the argument. */
8468 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8469 /* Compute ffs, into TARGET if possible.
8470 Set TARGET to wherever the result comes back. */
8471 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8472 ffs_optab, op0, target, 1);
8473 if (target == 0)
8474 abort ();
8475 return target;
8477 case BUILT_IN_STRLEN:
8478 /* If not optimizing, call the library function. */
8479 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8480 break;
8482 if (arglist == 0
8483 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8484 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8485 break;
8486 else
8488 tree src = TREE_VALUE (arglist);
8489 tree len = c_strlen (src);
8491 int align
8492 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8494 rtx result, src_rtx, char_rtx;
8495 enum machine_mode insn_mode = value_mode, char_mode;
8496 enum insn_code icode;
8498 /* If the length is known, just return it. */
8499 if (len != 0)
8500 return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
8502 /* If SRC is not a pointer type, don't do this operation inline. */
8503 if (align == 0)
8504 break;
8506 /* Call a function if we can't compute strlen in the right mode. */
8508 while (insn_mode != VOIDmode)
8510 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8511 if (icode != CODE_FOR_nothing)
8512 break;
8514 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8516 if (insn_mode == VOIDmode)
8517 break;
8519 /* Make a place to write the result of the instruction. */
8520 result = target;
8521 if (! (result != 0
8522 && GET_CODE (result) == REG
8523 && GET_MODE (result) == insn_mode
8524 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8525 result = gen_reg_rtx (insn_mode);
8527 /* Make sure the operands are acceptable to the predicates. */
8529 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8530 result = gen_reg_rtx (insn_mode);
8531 src_rtx = memory_address (BLKmode,
8532 expand_expr (src, NULL_RTX, ptr_mode,
8533 EXPAND_NORMAL));
8535 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8536 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
8538 /* Check that the string is readable and has an end. */
8539 if (flag_check_memory_usage)
8540 emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
8541 src_rtx, ptr_mode,
8542 GEN_INT (MEMORY_USE_RO),
8543 TYPE_MODE (integer_type_node));
8545 char_rtx = const0_rtx;
8546 char_mode = insn_operand_mode[(int)icode][2];
8547 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8548 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
8550 emit_insn (GEN_FCN (icode) (result,
8551 gen_rtx_MEM (BLKmode, src_rtx),
8552 char_rtx, GEN_INT (align)));
8554 /* Return the value in the proper mode for this function. */
8555 if (GET_MODE (result) == value_mode)
8556 return result;
8557 else if (target != 0)
8559 convert_move (target, result, 0);
8560 return target;
8562 else
8563 return convert_to_mode (value_mode, result, 0);
8566 case BUILT_IN_STRCPY:
8567 /* If not optimizing, call the library function. */
8568 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8569 break;
8571 if (arglist == 0
8572 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8573 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8574 || TREE_CHAIN (arglist) == 0
8575 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8576 break;
8577 else
8579 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
8581 if (len == 0)
8582 break;
8584 len = size_binop (PLUS_EXPR, len, integer_one_node);
8586 chainon (arglist, build_tree_list (NULL_TREE, len));
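/* E.g. strcpy (d, "abc") acquires a third argument of 4 here, the
   string length plus its terminating null, so the block copy below
   has a known length to work with. */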
8589 /* Falls through into the memcpy case below. */
8590 case BUILT_IN_MEMCPY:
8591 /* If not optimizing, call the library function. */
8592 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8593 break;
8595 if (arglist == 0
8596 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8597 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8598 || TREE_CHAIN (arglist) == 0
8599 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8600 != POINTER_TYPE)
8601 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8602 || (TREE_CODE (TREE_TYPE (TREE_VALUE
8603 (TREE_CHAIN (TREE_CHAIN (arglist)))))
8604 != INTEGER_TYPE))
8605 break;
8606 else
8608 tree dest = TREE_VALUE (arglist);
8609 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8610 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8611 tree type;
8613 int src_align
8614 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8615 int dest_align
8616 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8617 rtx dest_rtx, dest_mem, src_mem, src_rtx, dest_addr, len_rtx;
8619 /* If either SRC or DEST is not a pointer type, don't do
8620 this operation in-line. */
8621 if (src_align == 0 || dest_align == 0)
8623 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8624 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8625 break;
8628 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8629 dest_mem = gen_rtx_MEM (BLKmode,
8630 memory_address (BLKmode, dest_rtx));
8631 /* There could be a void* cast on top of the object. */
8632 while (TREE_CODE (dest) == NOP_EXPR)
8633 dest = TREE_OPERAND (dest, 0);
8634 type = TREE_TYPE (TREE_TYPE (dest));
8635 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8636 src_rtx = expand_expr (src, NULL_RTX, ptr_mode, EXPAND_SUM);
8637 src_mem = gen_rtx_MEM (BLKmode,
8638 memory_address (BLKmode, src_rtx));
8639 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
8641 /* Just copy the access rights of SRC to those of DEST. */
8642 if (flag_check_memory_usage)
8643 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
8644 dest_rtx, ptr_mode,
8645 src_rtx, ptr_mode,
8646 len_rtx, TYPE_MODE (sizetype));
8648 /* There could be a void* cast on top of the object. */
8649 while (TREE_CODE (src) == NOP_EXPR)
8650 src = TREE_OPERAND (src, 0);
8651 type = TREE_TYPE (TREE_TYPE (src));
8652 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
8654 /* Copy word part most expediently. */
8655 dest_addr
8656 = emit_block_move (dest_mem, src_mem, len_rtx,
8657 MIN (src_align, dest_align));
8659 if (dest_addr == 0)
8660 dest_addr = force_operand (dest_rtx, NULL_RTX);
8662 return dest_addr;
8665 case BUILT_IN_MEMSET:
8666 /* If not optimizing, call the library function. */
8667 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8668 break;
8670 if (arglist == 0
8671 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8672 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8673 || TREE_CHAIN (arglist) == 0
8674 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8675 != INTEGER_TYPE)
8676 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8677 || (INTEGER_TYPE
8678 != (TREE_CODE (TREE_TYPE
8679 (TREE_VALUE
8680 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
8681 break;
8682 else
8684 tree dest = TREE_VALUE (arglist);
8685 tree val = TREE_VALUE (TREE_CHAIN (arglist));
8686 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8687 tree type;
8689 int dest_align
8690 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8691 rtx dest_rtx, dest_mem, dest_addr, len_rtx;
8693 /* If DEST is not a pointer type, don't do this
8694 operation in-line. */
8695 if (dest_align == 0)
8696 break;
8698 /* If the arguments have side-effects, then we can only evaluate
8699 them at most once. The following code evaluates them twice if
8700 they are not constants because we break out to expand_call
8701 in that case. They can't be constants if they have side-effects
8702 so we can check for that first. Alternatively, we could call
8703 save_expr to make multiple evaluation safe. */
8704 if (TREE_SIDE_EFFECTS (val) || TREE_SIDE_EFFECTS (len))
8705 break;
8707 /* If VAL is not 0, don't do this operation in-line. */
8708 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
8709 break;
8711 /* If LEN does not expand to a constant, don't do this
8712 operation in-line. */
8713 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
8714 if (GET_CODE (len_rtx) != CONST_INT)
8715 break;
8717 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8718 dest_mem = gen_rtx_MEM (BLKmode,
8719 memory_address (BLKmode, dest_rtx));
8721 /* Just check that DEST is writable, and mark it as readable. */
8722 if (flag_check_memory_usage)
8723 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8724 dest_rtx, ptr_mode,
8725 len_rtx, TYPE_MODE (sizetype),
8726 GEN_INT (MEMORY_USE_WO),
8727 TYPE_MODE (integer_type_node));
8729 /* There could be a void* cast on top of the object. */
8730 while (TREE_CODE (dest) == NOP_EXPR)
8731 dest = TREE_OPERAND (dest, 0);
8733 if (TREE_CODE (dest) == ADDR_EXPR)
8734 /* If this is the address of an object, check whether the
8735 object is an array. */
8736 type = TREE_TYPE (TREE_OPERAND (dest, 0));
8737 else
8738 type = TREE_TYPE (TREE_TYPE (dest));
8739 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8741 dest_addr = clear_storage (dest_mem, len_rtx, dest_align);
8743 if (dest_addr == 0)
8744 dest_addr = force_operand (dest_rtx, NULL_RTX);
8746 return dest_addr;
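/* Net effect: only calls of the shape memset (p, 0, CONST), with a
   zero fill value, a compile-time-constant length, and
   side-effect-free arguments, are expanded inline via clear_storage;
   everything else falls through to the library call. */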
8749 /* These comparison functions need an instruction that returns an actual
8750 index. An ordinary compare that just sets the condition codes
8751 is not enough. */
8752 #ifdef HAVE_cmpstrsi
8753 case BUILT_IN_STRCMP:
8754 /* If not optimizing, call the library function. */
8755 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8756 break;
8758 /* If we need to check memory accesses, call the library function. */
8759 if (flag_check_memory_usage)
8760 break;
8762 if (arglist == 0
8763 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8764 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8765 || TREE_CHAIN (arglist) == 0
8766 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8767 break;
8768 else if (!HAVE_cmpstrsi)
8769 break;
8771 tree arg1 = TREE_VALUE (arglist);
8772 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8773 tree len, len2;
8775 len = c_strlen (arg1);
8776 if (len)
8777 len = size_binop (PLUS_EXPR, integer_one_node, len);
8778 len2 = c_strlen (arg2);
8779 if (len2)
8780 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
8782 /* If we don't have a constant length for the first, use the length
8783 of the second, if we know it. We don't require a constant for
8784 this case; some cost analysis could be done if both are available
8785 but neither is constant. For now, assume they're equally cheap.
8787 If both strings have constant lengths, use the smaller. This
8788 could arise if optimization results in strcpy being called with
8789 two fixed strings, or if the code was machine-generated. We should
8790 add some code to the `memcmp' handler below to deal with such
8791 situations, someday. */
8792 if (!len || TREE_CODE (len) != INTEGER_CST)
8794 if (len2)
8795 len = len2;
8796 else if (len == 0)
8797 break;
8799 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
8801 if (tree_int_cst_lt (len2, len))
8802 len = len2;
8805 chainon (arglist, build_tree_list (NULL_TREE, len));
8808 /* Falls through into the memcmp case below. */
8809 case BUILT_IN_MEMCMP:
8810 /* If not optimizing, call the library function. */
8811 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8812 break;
8814 /* If we need to check memory accesses, call the library function. */
8815 if (flag_check_memory_usage)
8816 break;
8818 if (arglist == 0
8819 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8820 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8821 || TREE_CHAIN (arglist) == 0
8822 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8823 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8824 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8825 break;
8826 else if (!HAVE_cmpstrsi)
8827 break;
8829 tree arg1 = TREE_VALUE (arglist);
8830 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8831 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8832 rtx result;
8834 int arg1_align
8835 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8836 int arg2_align
8837 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8838 enum machine_mode insn_mode
8839 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
8841 /* If either arg isn't a pointer type (so we have no alignment), call the function. */
8842 if (arg1_align == 0 || arg2_align == 0)
8844 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
8845 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8846 break;
8849 /* Make a place to write the result of the instruction. */
8850 result = target;
8851 if (! (result != 0
8852 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
8853 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8854 result = gen_reg_rtx (insn_mode);
8856 emit_insn (gen_cmpstrsi (result,
8857 gen_rtx_MEM (BLKmode,
8858 expand_expr (arg1, NULL_RTX,
8859 ptr_mode,
8860 EXPAND_NORMAL)),
8861 gen_rtx_MEM (BLKmode,
8862 expand_expr (arg2, NULL_RTX,
8863 ptr_mode,
8864 EXPAND_NORMAL)),
8865 expand_expr (len, NULL_RTX, VOIDmode, 0),
8866 GEN_INT (MIN (arg1_align, arg2_align))));
8868 /* Return the value in the proper mode for this function. */
8869 mode = TYPE_MODE (TREE_TYPE (exp));
8870 if (GET_MODE (result) == mode)
8871 return result;
8872 else if (target != 0)
8874 convert_move (target, result, 0);
8875 return target;
8877 else
8878 return convert_to_mode (mode, result, 0);
8880 #else
8881 case BUILT_IN_STRCMP:
8882 case BUILT_IN_MEMCMP:
8883 break;
8884 #endif
8886 case BUILT_IN_SETJMP:
8887 if (arglist == 0
8888 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8889 break;
8890 else
8892 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
8893 VOIDmode, 0);
8894 rtx lab = gen_label_rtx ();
8895 rtx ret = expand_builtin_setjmp (buf_addr, target, lab, lab);
8896 emit_label (lab);
8897 return ret;
8900 /* __builtin_longjmp is passed a pointer to an array of five words.
8901 It's similar to the C library longjmp function but works with
8902 __builtin_setjmp above. */
8903 case BUILT_IN_LONGJMP:
8904 if (arglist == 0 || TREE_CHAIN (arglist) == 0
8905 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8906 break;
8907 else
8909 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
8910 VOIDmode, 0);
8911 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
8912 NULL_RTX, VOIDmode, 0);
8913 expand_builtin_longjmp (buf_addr, value);
8914 return const0_rtx;
8917 /* Various hooks for the DWARF 2 __throw routine. */
8918 case BUILT_IN_UNWIND_INIT:
8919 expand_builtin_unwind_init ();
8920 return const0_rtx;
8921 case BUILT_IN_FP:
8922 return frame_pointer_rtx;
8923 case BUILT_IN_SP:
8924 return stack_pointer_rtx;
8925 #ifdef DWARF2_UNWIND_INFO
8926 case BUILT_IN_DWARF_FP_REGNUM:
8927 return expand_builtin_dwarf_fp_regnum ();
8928 case BUILT_IN_DWARF_REG_SIZE:
8929 return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
8930 #endif
8931 case BUILT_IN_FROB_RETURN_ADDR:
8932 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
8933 case BUILT_IN_EXTRACT_RETURN_ADDR:
8934 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
8935 case BUILT_IN_SET_RETURN_ADDR_REG:
8936 expand_builtin_set_return_addr_reg (TREE_VALUE (arglist));
8937 return const0_rtx;
8938 case BUILT_IN_EH_STUB_OLD:
8939 return expand_builtin_eh_stub_old ();
8940 case BUILT_IN_EH_STUB:
8941 return expand_builtin_eh_stub ();
8942 case BUILT_IN_SET_EH_REGS:
8943 expand_builtin_set_eh_regs (TREE_VALUE (arglist),
8944 TREE_VALUE (TREE_CHAIN (arglist)));
8945 return const0_rtx;
8947 default: /* just do library call, if unknown builtin */
8948 error ("built-in function `%s' not currently supported",
8949 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
8952 /* The switch statement above can drop through to cause the function
8953 to be called normally. */
8955 return expand_call (exp, target, ignore);
8958 /* Built-in functions to perform an untyped call and return. */
8960 /* For each register that may be used for calling a function, this
8961 gives a mode used to copy the register's value. VOIDmode indicates
8962 the register is not used for calling a function. If the machine
8963 has register windows, this gives only the outbound registers.
8964 INCOMING_REGNO gives the corresponding inbound register. */
8965 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
8967 /* For each register that may be used for returning values, this gives
8968 a mode used to copy the register's value. VOIDmode indicates the
8969 register is not used for returning values. If the machine has
8970 register windows, this gives only the outbound registers.
8971 INCOMING_REGNO gives the corresponding inbound register. */
8972 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
8974 /* For each register that may be used for calling a function, this
8975 gives the offset of that register into the block returned by
8976 __builtin_apply_args. 0 indicates that the register is not
8977 used for calling a function. */
8978 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
8980 /* Return the offset of register REGNO into the block returned by
8981 __builtin_apply_args. This is not declared static, since it is
8982 needed in objc-act.c. */
8984 int
8985 apply_args_register_offset (regno)
8986 int regno;
8988 apply_args_size ();
8990 /* Arguments are always put in outgoing registers (in the argument
8991 block) where that makes sense. */
8992 #ifdef OUTGOING_REGNO
8993 regno = OUTGOING_REGNO(regno);
8994 #endif
8995 return apply_args_reg_offset[regno];
8998 /* Return the size required for the block returned by __builtin_apply_args,
8999 and initialize apply_args_mode. */
9001 static int
9002 apply_args_size ()
9004 static int size = -1;
9005 int align, regno;
9006 enum machine_mode mode;
9008 /* The values computed by this function never change. */
9009 if (size < 0)
9011 /* The first value is the incoming arg-pointer. */
9012 size = GET_MODE_SIZE (Pmode);
9014 /* The second value is the structure value address unless this is
9015 passed as an "invisible" first argument. */
9016 if (struct_value_rtx)
9017 size += GET_MODE_SIZE (Pmode);
9019 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9020 if (FUNCTION_ARG_REGNO_P (regno))
9022 /* Search for the proper mode for copying this register's
9023 value. I'm not sure this is right, but it works so far. */
9024 enum machine_mode best_mode = VOIDmode;
9026 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9027 mode != VOIDmode;
9028 mode = GET_MODE_WIDER_MODE (mode))
9029 if (HARD_REGNO_MODE_OK (regno, mode)
9030 && HARD_REGNO_NREGS (regno, mode) == 1)
9031 best_mode = mode;
9033 if (best_mode == VOIDmode)
9034 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9035 mode != VOIDmode;
9036 mode = GET_MODE_WIDER_MODE (mode))
9037 if (HARD_REGNO_MODE_OK (regno, mode)
9038 && (mov_optab->handlers[(int) mode].insn_code
9039 != CODE_FOR_nothing))
9040 best_mode = mode;
9042 mode = best_mode;
9043 if (mode == VOIDmode)
9044 abort ();
9046 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9047 if (size % align != 0)
9048 size = CEIL (size, align) * align;
9049 apply_args_reg_offset[regno] = size;
9050 size += GET_MODE_SIZE (mode);
9051 apply_args_mode[regno] = mode;
9053 else
9055 apply_args_mode[regno] = VOIDmode;
9056 apply_args_reg_offset[regno] = 0;
9059 return size;
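/* The resulting block layout is therefore (a sketch): the incoming
   arg pointer at offset 0, then the structure value address if
   struct_value_rtx is set, then one suitably aligned save slot per
   argument register, in register-number order. */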
9062 /* Return the size required for the block returned by __builtin_apply,
9063 and initialize apply_result_mode. */
9065 static int
9066 apply_result_size ()
9068 static int size = -1;
9069 int align, regno;
9070 enum machine_mode mode;
9072 /* The values computed by this function never change. */
9073 if (size < 0)
9075 size = 0;
9077 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9078 if (FUNCTION_VALUE_REGNO_P (regno))
9080 /* Search for the proper mode for copying this register's
9081 value. I'm not sure this is right, but it works so far. */
9082 enum machine_mode best_mode = VOIDmode;
9084 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9085 mode != TImode;
9086 mode = GET_MODE_WIDER_MODE (mode))
9087 if (HARD_REGNO_MODE_OK (regno, mode))
9088 best_mode = mode;
9090 if (best_mode == VOIDmode)
9091 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9092 mode != VOIDmode;
9093 mode = GET_MODE_WIDER_MODE (mode))
9094 if (HARD_REGNO_MODE_OK (regno, mode)
9095 && (mov_optab->handlers[(int) mode].insn_code
9096 != CODE_FOR_nothing))
9097 best_mode = mode;
9099 mode = best_mode;
9100 if (mode == VOIDmode)
9101 abort ();
9103 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9104 if (size % align != 0)
9105 size = CEIL (size, align) * align;
9106 size += GET_MODE_SIZE (mode);
9107 apply_result_mode[regno] = mode;
9109 else
9110 apply_result_mode[regno] = VOIDmode;
9112 /* Allow targets that use untyped_call and untyped_return to override
9113 the size so that machine-specific information can be stored here. */
9114 #ifdef APPLY_RESULT_SIZE
9115 size = APPLY_RESULT_SIZE;
9116 #endif
9118 return size;
9121 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9122 /* Create a vector describing the result block RESULT. If SAVEP is true,
9123 the result block is used to save the values; otherwise it is used to
9124 restore the values. */
9126 static rtx
9127 result_vector (savep, result)
9128 int savep;
9129 rtx result;
9131 int regno, size, align, nelts;
9132 enum machine_mode mode;
9133 rtx reg, mem;
9134 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9136 size = nelts = 0;
9137 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9138 if ((mode = apply_result_mode[regno]) != VOIDmode)
9140 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9141 if (size % align != 0)
9142 size = CEIL (size, align) * align;
9143 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
9144 mem = change_address (result, mode,
9145 plus_constant (XEXP (result, 0), size));
9146 savevec[nelts++] = (savep
9147 ? gen_rtx_SET (VOIDmode, mem, reg)
9148 : gen_rtx_SET (VOIDmode, reg, mem));
9149 size += GET_MODE_SIZE (mode);
9151 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
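/* The vector built here has roughly the shape
     (parallel [(set (mem:M1 ...) (reg:M1 ...))
                (set (mem:M2 ...) (reg:M2 ...)) ...])
   when SAVEP is nonzero, with the operands of each SET swapped
   when restoring.  */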
9153 #endif /* HAVE_untyped_call or HAVE_untyped_return */
9155 /* Save the state required to perform an untyped call with the same
9156 arguments as were passed to the current function. */
9158 static rtx
9159 expand_builtin_apply_args ()
9161 rtx registers;
9162 int size, align, regno;
9163 enum machine_mode mode;
9165 /* Create a block where the arg-pointer, structure value address,
9166 and argument registers can be saved. */
9167 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9169 /* Walk past the arg-pointer and structure value address. */
9170 size = GET_MODE_SIZE (Pmode);
9171 if (struct_value_rtx)
9172 size += GET_MODE_SIZE (Pmode);
9174 /* Save each register used in calling a function to the block. */
9175 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9176 if ((mode = apply_args_mode[regno]) != VOIDmode)
9178 rtx tem;
9180 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9181 if (size % align != 0)
9182 size = CEIL (size, align) * align;
9184 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
9186 #ifdef STACK_REGS
9187 /* For reg-stack.c's stack register housekeeping.
9188 Compare with a similar piece of code in function.c. */
9190 emit_insn (gen_rtx_USE (mode, tem));
9191 #endif
9193 emit_move_insn (change_address (registers, mode,
9194 plus_constant (XEXP (registers, 0),
9195 size)),
9196 tem);
9197 size += GET_MODE_SIZE (mode);
9200 /* Save the arg pointer to the block. */
9201 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9202 copy_to_reg (virtual_incoming_args_rtx));
9203 size = GET_MODE_SIZE (Pmode);
9205 /* Save the structure value address unless this is passed as an
9206 "invisible" first argument. */
9207 if (struct_value_incoming_rtx)
9209 emit_move_insn (change_address (registers, Pmode,
9210 plus_constant (XEXP (registers, 0),
9211 size)),
9212 copy_to_reg (struct_value_incoming_rtx));
9213 size += GET_MODE_SIZE (Pmode);
9216 /* Return the address of the block. */
9217 return copy_addr_to_reg (XEXP (registers, 0));
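/* The block built above is laid out as follows, matching the offsets
   computed in apply_args_size:

     offset 0:               the incoming arg pointer;
     GET_MODE_SIZE (Pmode):  the structure value address, if any;
     after that:             each argument register, each aligned
                             to the mode it was saved in.  */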
9220 /* Perform an untyped call and save the state required to perform an
9221 untyped return of whatever value was returned by the given function. */
9223 static rtx
9224 expand_builtin_apply (function, arguments, argsize)
9225 rtx function, arguments, argsize;
9227 int size, align, regno;
9228 enum machine_mode mode;
9229 rtx incoming_args, result, reg, dest, call_insn;
9230 rtx old_stack_level = 0;
9231 rtx call_fusage = 0;
9233 /* Create a block where the return registers can be saved. */
9234 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9236 /* ??? The argsize value should be adjusted here. */
9238 /* Fetch the arg pointer from the ARGUMENTS block. */
9239 incoming_args = gen_reg_rtx (Pmode);
9240 emit_move_insn (incoming_args,
9241 gen_rtx_MEM (Pmode, arguments));
9242 #ifndef STACK_GROWS_DOWNWARD
9243 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9244 incoming_args, 0, OPTAB_LIB_WIDEN);
9245 #endif
9247 /* Perform postincrements before actually calling the function. */
9248 emit_queue ();
9250 /* Push a new argument block and copy the arguments. */
9251 do_pending_stack_adjust ();
9253 /* Save the stack using the nonlocal mechanism if it is available.  */
9254 #ifdef HAVE_save_stack_nonlocal
9255 if (HAVE_save_stack_nonlocal)
9256 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
9257 else
9258 #endif
9259 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9261 /* Push a block of memory onto the stack to store the memory arguments.
9262 Save the address in a register, and copy the memory arguments. ??? I
9263 haven't figured out how the calling convention macros affect this,
9264 but it's likely that the source and/or destination addresses in
9265 the block copy will need updating in machine specific ways. */
9266 dest = allocate_dynamic_stack_space (argsize, 0, 0);
9267 emit_block_move (gen_rtx_MEM (BLKmode, dest),
9268 gen_rtx_MEM (BLKmode, incoming_args),
9269 argsize,
9270 PARM_BOUNDARY / BITS_PER_UNIT);
9272 /* Refer to the argument block. */
9273 apply_args_size ();
9274 arguments = gen_rtx_MEM (BLKmode, arguments);
9276 /* Walk past the arg-pointer and structure value address. */
9277 size = GET_MODE_SIZE (Pmode);
9278 if (struct_value_rtx)
9279 size += GET_MODE_SIZE (Pmode);
9281 /* Restore each of the registers previously saved. Make USE insns
9282 for each of these registers for use in making the call. */
9283 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9284 if ((mode = apply_args_mode[regno]) != VOIDmode)
9286 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9287 if (size % align != 0)
9288 size = CEIL (size, align) * align;
9289 reg = gen_rtx_REG (mode, regno);
9290 emit_move_insn (reg,
9291 change_address (arguments, mode,
9292 plus_constant (XEXP (arguments, 0),
9293 size)));
9295 use_reg (&call_fusage, reg);
9296 size += GET_MODE_SIZE (mode);
9299 /* Restore the structure value address unless this is passed as an
9300 "invisible" first argument. */
9301 size = GET_MODE_SIZE (Pmode);
9302 if (struct_value_rtx)
9304 rtx value = gen_reg_rtx (Pmode);
9305 emit_move_insn (value,
9306 change_address (arguments, Pmode,
9307 plus_constant (XEXP (arguments, 0),
9308 size)));
9309 emit_move_insn (struct_value_rtx, value);
9310 if (GET_CODE (struct_value_rtx) == REG)
9311 use_reg (&call_fusage, struct_value_rtx);
9312 size += GET_MODE_SIZE (Pmode);
9315 /* All arguments and registers used for the call are set up by now! */
9316 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9318 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
9319 and we don't want to load it into a register as an optimization,
9320 because prepare_call_address already did it if it should be done. */
9321 if (GET_CODE (function) != SYMBOL_REF)
9322 function = memory_address (FUNCTION_MODE, function);
9324 /* Generate the actual call instruction and save the return value. */
9325 #ifdef HAVE_untyped_call
9326 if (HAVE_untyped_call)
9327 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
9328 result, result_vector (1, result)));
9329 else
9330 #endif
9331 #ifdef HAVE_call_value
9332 if (HAVE_call_value)
9334 rtx valreg = 0;
9336 /* Locate the unique return register. It is not possible to
9337 express a call that sets more than one return register using
9338 call_value; use untyped_call for that. In fact, untyped_call
9339 only needs to save the return registers in the given block. */
9340 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9341 if ((mode = apply_result_mode[regno]) != VOIDmode)
9343 if (valreg)
9344 abort (); /* HAVE_untyped_call required. */
9345 valreg = gen_rtx_REG (mode, regno);
9348 emit_call_insn (gen_call_value (valreg,
9349 gen_rtx_MEM (FUNCTION_MODE, function),
9350 const0_rtx, NULL_RTX, const0_rtx));
9352 emit_move_insn (change_address (result, GET_MODE (valreg),
9353 XEXP (result, 0)),
9354 valreg);
9356 else
9357 #endif
9358 abort ();
9360 /* Find the CALL insn we just emitted. */
9361 for (call_insn = get_last_insn ();
9362 call_insn && GET_CODE (call_insn) != CALL_INSN;
9363 call_insn = PREV_INSN (call_insn))
9366 if (! call_insn)
9367 abort ();
9369 /* Put the register usage information on the CALL. If there is already
9370 some usage information, put ours at the end. */
9371 if (CALL_INSN_FUNCTION_USAGE (call_insn))
9373 rtx link;
9375 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9376 link = XEXP (link, 1))
9379 XEXP (link, 1) = call_fusage;
9381 else
9382 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
9384 /* Restore the stack. */
9385 #ifdef HAVE_save_stack_nonlocal
9386 if (HAVE_save_stack_nonlocal)
9387 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
9388 else
9389 #endif
9390 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9392 /* Return the address of the result block. */
9393 return copy_addr_to_reg (XEXP (result, 0));
9396 /* Perform an untyped return. */
9398 static void
9399 expand_builtin_return (result)
9400 rtx result;
9402 int size, align, regno;
9403 enum machine_mode mode;
9404 rtx reg;
9405 rtx call_fusage = 0;
9407 apply_result_size ();
9408 result = gen_rtx_MEM (BLKmode, result);
9410 #ifdef HAVE_untyped_return
9411 if (HAVE_untyped_return)
9413 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9414 emit_barrier ();
9415 return;
9417 #endif
9419 /* Restore the return value and note that each value is used. */
9420 size = 0;
9421 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9422 if ((mode = apply_result_mode[regno]) != VOIDmode)
9424 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9425 if (size % align != 0)
9426 size = CEIL (size, align) * align;
9427 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
9428 emit_move_insn (reg,
9429 change_address (result, mode,
9430 plus_constant (XEXP (result, 0),
9431 size)));
9433 push_to_sequence (call_fusage);
9434 emit_insn (gen_rtx_USE (VOIDmode, reg));
9435 call_fusage = get_insns ();
9436 end_sequence ();
9437 size += GET_MODE_SIZE (mode);
9440 /* Put the USE insns before the return. */
9441 emit_insns (call_fusage);
9443 /* Return whatever values were restored by jumping directly to the end
9444 of the function. */
9445 expand_null_return ();
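/* Taken together, the three builtins expanded above let a function
   forward its arguments to another function and return that function's
   value without knowing either signature.  A minimal GNU C sketch
   (TARGET and the argument-block size of 64 are illustrative guesses,
   not fixed by this implementation):  */
#if 0
double target ();

double
forward ()
{
  void *args = __builtin_apply_args ();
  void *result = __builtin_apply ((void (*) ()) target, args, 64);
  __builtin_return (result);
}
#endif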
9448 /* Expand code for a post- or pre- increment or decrement
9449 and return the RTX for the result.
9450 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9452 static rtx
9453 expand_increment (exp, post, ignore)
9454 register tree exp;
9455 int post, ignore;
9457 register rtx op0, op1;
9458 register rtx temp, value;
9459 register tree incremented = TREE_OPERAND (exp, 0);
9460 optab this_optab = add_optab;
9461 int icode;
9462 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9463 int op0_is_copy = 0;
9464 int single_insn = 0;
9465 /* 1 means we can't store into OP0 directly,
9466 because it is a subreg narrower than a word,
9467 and we don't dare clobber the rest of the word. */
9468 int bad_subreg = 0;
9470 /* Stabilize any component ref that might need to be
9471 evaluated more than once below. */
9472 if (!post
9473 || TREE_CODE (incremented) == BIT_FIELD_REF
9474 || (TREE_CODE (incremented) == COMPONENT_REF
9475 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9476 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9477 incremented = stabilize_reference (incremented);
9478 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9479 ones into save exprs so that they don't accidentally get evaluated
9480 more than once by the code below. */
9481 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9482 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9483 incremented = save_expr (incremented);
9485 /* Compute the operands as RTX.
9486 Note whether OP0 is the actual lvalue or a copy of it:
9487 I believe it is a copy iff it is a register or subreg
9488 and insns were generated in computing it. */
9490 temp = get_last_insn ();
9491 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9493 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9494 in place but instead must do sign- or zero-extension during assignment,
9495 so we copy it into a new register and let the code below use it as
9496 a copy.
9498 Note that we can safely modify this SUBREG since it is known not to be
9499 shared (it was made by the expand_expr call above). */
9501 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9503 if (post)
9504 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9505 else
9506 bad_subreg = 1;
9508 else if (GET_CODE (op0) == SUBREG
9509 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9511 /* We cannot increment this SUBREG in place. If we are
9512 post-incrementing, get a copy of the old value. Otherwise,
9513 just mark that we cannot increment in place. */
9514 if (post)
9515 op0 = copy_to_reg (op0);
9516 else
9517 bad_subreg = 1;
9520 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9521 && temp != get_last_insn ());
9522 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9523 EXPAND_MEMORY_USE_BAD);
9525 /* Decide whether incrementing or decrementing. */
9526 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9527 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9528 this_optab = sub_optab;
9530 /* Convert decrement by a constant into a negative increment. */
9531 if (this_optab == sub_optab
9532 && GET_CODE (op1) == CONST_INT)
9534 op1 = GEN_INT (- INTVAL (op1));
9535 this_optab = add_optab;
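/* Thus `x--' and `--x' reach the code below exactly as if they
   were increments by -1.  */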
9538 /* For a preincrement, see if we can do this with a single instruction. */
9539 if (!post)
9541 icode = (int) this_optab->handlers[(int) mode].insn_code;
9542 if (icode != (int) CODE_FOR_nothing
9543 /* Make sure that OP0 is valid for operands 0 and 1
9544 of the insn we want to queue. */
9545 && (*insn_operand_predicate[icode][0]) (op0, mode)
9546 && (*insn_operand_predicate[icode][1]) (op0, mode)
9547 && (*insn_operand_predicate[icode][2]) (op1, mode))
9548 single_insn = 1;
9551 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9552 then we cannot just increment OP0. We must therefore contrive to
9553 increment the original value. Then, for postincrement, we can return
9554 OP0 since it is a copy of the old value. For preincrement, expand here
9555 unless we can do it with a single insn.
9557 Likewise if storing directly into OP0 would clobber high bits
9558 we need to preserve (bad_subreg). */
9559 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9561 /* This is the easiest way to increment the value wherever it is.
9562 Problems with multiple evaluation of INCREMENTED are prevented
9563 because either (1) it is a component_ref or preincrement,
9564 in which case it was stabilized above, or (2) it is an array_ref
9565 with constant index in an array in a register, which is
9566 safe to reevaluate. */
9567 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9568 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9569 ? MINUS_EXPR : PLUS_EXPR),
9570 TREE_TYPE (exp),
9571 incremented,
9572 TREE_OPERAND (exp, 1));
9574 while (TREE_CODE (incremented) == NOP_EXPR
9575 || TREE_CODE (incremented) == CONVERT_EXPR)
9577 newexp = convert (TREE_TYPE (incremented), newexp);
9578 incremented = TREE_OPERAND (incremented, 0);
9581 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9582 return post ? op0 : temp;
9585 if (post)
9587 /* We have a true reference to the value in OP0.
9588 If there is an insn to add or subtract in this mode, queue it.
9589 Queueing the increment insn avoids the register shuffling
9590 that often results if we must increment now and first save
9591 the old value for subsequent use. */
9593 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9594 op0 = stabilize (op0);
9595 #endif
9597 icode = (int) this_optab->handlers[(int) mode].insn_code;
9598 if (icode != (int) CODE_FOR_nothing
9599 /* Make sure that OP0 is valid for operands 0 and 1
9600 of the insn we want to queue. */
9601 && (*insn_operand_predicate[icode][0]) (op0, mode)
9602 && (*insn_operand_predicate[icode][1]) (op0, mode))
9604 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9605 op1 = force_reg (mode, op1);
9607 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9609 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9611 rtx addr = (general_operand (XEXP (op0, 0), mode)
9612 ? force_reg (Pmode, XEXP (op0, 0))
9613 : copy_to_reg (XEXP (op0, 0)));
9614 rtx temp, result;
9616 op0 = change_address (op0, VOIDmode, addr);
9617 temp = force_reg (GET_MODE (op0), op0);
9618 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9619 op1 = force_reg (mode, op1);
9621 /* The increment queue is LIFO, thus we have to `queue'
9622 the instructions in reverse order. */
9623 enqueue_insn (op0, gen_move_insn (op0, temp));
9624 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9625 return result;
9629 /* Preincrement, or we can't increment with one simple insn. */
9630 if (post)
9631 /* Save a copy of the value before inc or dec, to return it later. */
9632 temp = value = copy_to_reg (op0);
9633 else
9634 /* Arrange to return the incremented value. */
9635 /* Copy the rtx because expand_binop will protect from the queue,
9636 and the results of that would be invalid for us to return
9637 if our caller does emit_queue before using our result. */
9638 temp = copy_rtx (value = op0);
9640 /* Increment however we can. */
9641 op1 = expand_binop (mode, this_optab, value, op1,
9642 flag_check_memory_usage ? NULL_RTX : op0,
9643 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9644 /* Make sure the value is stored into OP0. */
9645 if (op1 != op0)
9646 emit_move_insn (op0, op1);
9648 return temp;
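/* In source terms, the two cases above behave as follows: for
   `y = x++;' the old value is copied first (temp = x; x = x + 1;
   y = temp), while for `y = ++x;' the incremented value itself is
   returned (x = x + 1; y = x).  */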
9651 /* Expand all function calls contained within EXP, innermost ones first.
9652 But don't look within expressions that have sequence points.
9653 For each CALL_EXPR, record the rtx for its value
9654 in the CALL_EXPR_RTL field. */
9656 static void
9657 preexpand_calls (exp)
9658 tree exp;
9660 register int nops, i;
9661 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9663 if (! do_preexpand_calls)
9664 return;
9666 /* Only expressions and references can contain calls. */
9668 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9669 return;
9671 switch (TREE_CODE (exp))
9673 case CALL_EXPR:
9674 /* Do nothing if already expanded. */
9675 if (CALL_EXPR_RTL (exp) != 0
9676 /* Do nothing if the call returns a variable-sized object. */
9677 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
9678 /* Do nothing to built-in functions. */
9679 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9680 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9681 == FUNCTION_DECL)
9682 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9683 return;
9685 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9686 return;
9688 case COMPOUND_EXPR:
9689 case COND_EXPR:
9690 case TRUTH_ANDIF_EXPR:
9691 case TRUTH_ORIF_EXPR:
9692 /* If we find one of these, then we can be sure
9693 the adjust will be done for it (since it makes jumps).
9694 Do it now, so that if this is inside an argument
9695 of a function, we don't get the stack adjustment
9696 after some other args have already been pushed. */
9697 do_pending_stack_adjust ();
9698 return;
9700 case BLOCK:
9701 case RTL_EXPR:
9702 case WITH_CLEANUP_EXPR:
9703 case CLEANUP_POINT_EXPR:
9704 case TRY_CATCH_EXPR:
9705 return;
9707 case SAVE_EXPR:
9708 if (SAVE_EXPR_RTL (exp) != 0)
9709 return;
9711 default:
9712 break;
9715 nops = tree_code_length[(int) TREE_CODE (exp)];
9716 for (i = 0; i < nops; i++)
9717 if (TREE_OPERAND (exp, i) != 0)
9719 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9720 if (type == 'e' || type == '<' || type == '1' || type == '2'
9721 || type == 'r')
9722 preexpand_calls (TREE_OPERAND (exp, i));
9726 /* At the start of a function, record that we have no previously-pushed
9727 arguments waiting to be popped. */
9729 void
9730 init_pending_stack_adjust ()
9732 pending_stack_adjust = 0;
9735 /* When exiting from function, if safe, clear out any pending stack adjust
9736 so the adjustment won't get done.
9738 Note, if the current function calls alloca, then it must have a
9739 frame pointer regardless of the value of flag_omit_frame_pointer. */
9741 void
9742 clear_pending_stack_adjust ()
9744 #ifdef EXIT_IGNORE_STACK
9745 if (optimize > 0
9746 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9747 && EXIT_IGNORE_STACK
9748 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9749 && ! flag_inline_functions)
9750 pending_stack_adjust = 0;
9751 #endif
9754 /* Pop any previously-pushed arguments that have not been popped yet. */
9756 void
9757 do_pending_stack_adjust ()
9759 if (inhibit_defer_pop == 0)
9761 if (pending_stack_adjust != 0)
9762 adjust_stack (GEN_INT (pending_stack_adjust));
9763 pending_stack_adjust = 0;
9767 /* Expand conditional expressions. */
9769 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9770 LABEL is an rtx of code CODE_LABEL, in this function and all the
9771 functions here. */
9773 void
9774 jumpifnot (exp, label)
9775 tree exp;
9776 rtx label;
9778 do_jump (exp, label, NULL_RTX);
9781 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9783 void
9784 jumpif (exp, label)
9785 tree exp;
9786 rtx label;
9788 do_jump (exp, NULL_RTX, label);
9791 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9792 the result is zero, or IF_TRUE_LABEL if the result is one.
9793 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9794 meaning fall through in that case.
9796 do_jump always does any pending stack adjust except when it does not
9797 actually perform a jump. An example where there is no jump
9798 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9800 This function is responsible for optimizing cases such as
9801 &&, || and comparison operators in EXP. */
9803 void
9804 do_jump (exp, if_false_label, if_true_label)
9805 tree exp;
9806 rtx if_false_label, if_true_label;
9808 register enum tree_code code = TREE_CODE (exp);
9809 /* Some cases need to create a label to jump to
9810 in order to properly fall through.
9811 These cases set DROP_THROUGH_LABEL nonzero. */
9812 rtx drop_through_label = 0;
9813 rtx temp;
9814 rtx comparison = 0;
9815 int i;
9816 tree type;
9817 enum machine_mode mode;
9819 emit_queue ();
9821 switch (code)
9823 case ERROR_MARK:
9824 break;
9826 case INTEGER_CST:
9827 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9828 if (temp)
9829 emit_jump (temp);
9830 break;
9832 #if 0
9833 /* This is not true with #pragma weak */
9834 case ADDR_EXPR:
9835 /* The address of something can never be zero. */
9836 if (if_true_label)
9837 emit_jump (if_true_label);
9838 break;
9839 #endif
9841 case NOP_EXPR:
9842 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9843 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9844 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9845 goto normal;
9846 case CONVERT_EXPR:
9847 /* If we are narrowing the operand, we have to do the compare in the
9848 narrower mode. */
9849 if ((TYPE_PRECISION (TREE_TYPE (exp))
9850 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9851 goto normal;
9852 case NON_LVALUE_EXPR:
9853 case REFERENCE_EXPR:
9854 case ABS_EXPR:
9855 case NEGATE_EXPR:
9856 case LROTATE_EXPR:
9857 case RROTATE_EXPR:
9858 /* These cannot change zero->non-zero or vice versa. */
9859 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9860 break;
9862 #if 0
9863 /* This is never less insns than evaluating the PLUS_EXPR followed by
9864 a test and can be longer if the test is eliminated. */
9865 case PLUS_EXPR:
9866 /* Reduce to minus. */
9867 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9868 TREE_OPERAND (exp, 0),
9869 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9870 TREE_OPERAND (exp, 1))));
9871 /* Process as MINUS. */
9872 #endif
9874 case MINUS_EXPR:
9875 /* Non-zero iff operands of minus differ. */
9876 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
9877 TREE_OPERAND (exp, 0),
9878 TREE_OPERAND (exp, 1)),
9879 NE, NE);
9880 break;
9882 case BIT_AND_EXPR:
9883 /* If we are AND'ing with a small constant, do this comparison in the
9884 smallest type that fits. If the machine doesn't have comparisons
9885 that small, it will be converted back to the wider comparison.
9886 This helps if we are testing the sign bit of a narrower object.
9887 combine can't do this for us because it can't know whether a
9888 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9890 if (! SLOW_BYTE_ACCESS
9891 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9892 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9893 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
9894 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9895 && (type = type_for_mode (mode, 1)) != 0
9896 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9897 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9898 != CODE_FOR_nothing))
9900 do_jump (convert (type, exp), if_false_label, if_true_label);
9901 break;
9903 goto normal;
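/* For instance, `if (flags & 0x80)' with int FLAGS depends only on
   the low byte, so on a machine with QImode compares the test can
   be done as an 8-bit comparison instead of a full-word one.  */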
9905 case TRUTH_NOT_EXPR:
9906 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9907 break;
9909 case TRUTH_ANDIF_EXPR:
9910 if (if_false_label == 0)
9911 if_false_label = drop_through_label = gen_label_rtx ();
9912 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9913 start_cleanup_deferral ();
9914 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9915 end_cleanup_deferral ();
9916 break;
9918 case TRUTH_ORIF_EXPR:
9919 if (if_true_label == 0)
9920 if_true_label = drop_through_label = gen_label_rtx ();
9921 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9922 start_cleanup_deferral ();
9923 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9924 end_cleanup_deferral ();
9925 break;
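/* So for `if (a && b)' control falls through to test B only when A
   is nonzero, and for `if (a || b)' only when A is zero; no boolean
   value is ever materialized for the subexpressions.  */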
9927 case COMPOUND_EXPR:
9928 push_temp_slots ();
9929 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9930 preserve_temp_slots (NULL_RTX);
9931 free_temp_slots ();
9932 pop_temp_slots ();
9933 emit_queue ();
9934 do_pending_stack_adjust ();
9935 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9936 break;
9938 case COMPONENT_REF:
9939 case BIT_FIELD_REF:
9940 case ARRAY_REF:
9942 int bitsize, bitpos, unsignedp;
9943 enum machine_mode mode;
9944 tree type;
9945 tree offset;
9946 int volatilep = 0;
9947 int alignment;
9949 /* Get description of this reference. We don't actually care
9950 about the underlying object here. */
9951 get_inner_reference (exp, &bitsize, &bitpos, &offset,
9952 &mode, &unsignedp, &volatilep,
9953 &alignment);
9955 type = type_for_size (bitsize, unsignedp);
9956 if (! SLOW_BYTE_ACCESS
9957 && type != 0 && bitsize >= 0
9958 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9959 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9960 != CODE_FOR_nothing))
9962 do_jump (convert (type, exp), if_false_label, if_true_label);
9963 break;
9965 goto normal;
9968 case COND_EXPR:
9969 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9970 if (integer_onep (TREE_OPERAND (exp, 1))
9971 && integer_zerop (TREE_OPERAND (exp, 2)))
9972 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9974 else if (integer_zerop (TREE_OPERAND (exp, 1))
9975 && integer_onep (TREE_OPERAND (exp, 2)))
9976 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9978 else
9980 register rtx label1 = gen_label_rtx ();
9981 drop_through_label = gen_label_rtx ();
9983 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9985 start_cleanup_deferral ();
9986 /* Now the THEN-expression. */
9987 do_jump (TREE_OPERAND (exp, 1),
9988 if_false_label ? if_false_label : drop_through_label,
9989 if_true_label ? if_true_label : drop_through_label);
9990 /* In case the do_jump just above never jumps. */
9991 do_pending_stack_adjust ();
9992 emit_label (label1);
9994 /* Now the ELSE-expression. */
9995 do_jump (TREE_OPERAND (exp, 2),
9996 if_false_label ? if_false_label : drop_through_label,
9997 if_true_label ? if_true_label : drop_through_label);
9998 end_cleanup_deferral ();
10000 break;
10002 case EQ_EXPR:
10004 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10006 if (integer_zerop (TREE_OPERAND (exp, 1)))
10007 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10008 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10009 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10011 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10012 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10013 do_jump
10014 (fold
10015 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10016 fold (build (EQ_EXPR, TREE_TYPE (exp),
10017 fold (build1 (REALPART_EXPR,
10018 TREE_TYPE (inner_type),
10019 exp0)),
10020 fold (build1 (REALPART_EXPR,
10021 TREE_TYPE (inner_type),
10022 exp1)))),
10023 fold (build (EQ_EXPR, TREE_TYPE (exp),
10024 fold (build1 (IMAGPART_EXPR,
10025 TREE_TYPE (inner_type),
10026 exp0)),
10027 fold (build1 (IMAGPART_EXPR,
10028 TREE_TYPE (inner_type),
10029 exp1)))))),
10030 if_false_label, if_true_label);
10032 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10033 && !can_compare_p (TYPE_MODE (inner_type)))
10034 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10035 else
10036 comparison = compare (exp, EQ, EQ);
10037 break;
10040 case NE_EXPR:
10042 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10044 if (integer_zerop (TREE_OPERAND (exp, 1)))
10045 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10046 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10047 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10049 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10050 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10051 do_jump
10052 (fold
10053 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10054 fold (build (NE_EXPR, TREE_TYPE (exp),
10055 fold (build1 (REALPART_EXPR,
10056 TREE_TYPE (inner_type),
10057 exp0)),
10058 fold (build1 (REALPART_EXPR,
10059 TREE_TYPE (inner_type),
10060 exp1)))),
10061 fold (build (NE_EXPR, TREE_TYPE (exp),
10062 fold (build1 (IMAGPART_EXPR,
10063 TREE_TYPE (inner_type),
10064 exp0)),
10065 fold (build1 (IMAGPART_EXPR,
10066 TREE_TYPE (inner_type),
10067 exp1)))))),
10068 if_false_label, if_true_label);
10070 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10071 && !can_compare_p (TYPE_MODE (inner_type)))
10072 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10073 else
10074 comparison = compare (exp, NE, NE);
10075 break;
10078 case LT_EXPR:
10079 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10080 == MODE_INT)
10081 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10082 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10083 else
10084 comparison = compare (exp, LT, LTU);
10085 break;
10087 case LE_EXPR:
10088 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10089 == MODE_INT)
10090 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10091 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10092 else
10093 comparison = compare (exp, LE, LEU);
10094 break;
10096 case GT_EXPR:
10097 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10098 == MODE_INT)
10099 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10100 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10101 else
10102 comparison = compare (exp, GT, GTU);
10103 break;
10105 case GE_EXPR:
10106 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10107 == MODE_INT)
10108 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10109 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10110 else
10111 comparison = compare (exp, GE, GEU);
10112 break;
10114 default:
10115 normal:
10116 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10117 #if 0
10118 /* This is not needed any more and causes poor code since it causes
10119 comparisons and tests from non-SI objects to have different code
10120 sequences. */
10121 /* Copy to register to avoid generating bad insns by cse
10122 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10123 if (!cse_not_expected && GET_CODE (temp) == MEM)
10124 temp = copy_to_reg (temp);
10125 #endif
10126 do_pending_stack_adjust ();
10127 if (GET_CODE (temp) == CONST_INT)
10128 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10129 else if (GET_CODE (temp) == LABEL_REF)
10130 comparison = const_true_rtx;
10131 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10132 && !can_compare_p (GET_MODE (temp)))
10133 /* Note swapping the labels gives us not-equal. */
10134 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10135 else if (GET_MODE (temp) != VOIDmode)
10136 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10137 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10138 GET_MODE (temp), NULL_RTX, 0);
10139 else
10140 abort ();
10143 /* Do any postincrements in the expression that was tested. */
10144 emit_queue ();
10146 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10147 straight into a conditional jump instruction as the jump condition.
10148 Otherwise, all the work has been done already. */
10150 if (comparison == const_true_rtx)
10152 if (if_true_label)
10153 emit_jump (if_true_label);
10155 else if (comparison == const0_rtx)
10157 if (if_false_label)
10158 emit_jump (if_false_label);
10160 else if (comparison)
10161 do_jump_for_compare (comparison, if_false_label, if_true_label);
10163 if (drop_through_label)
10165 /* If do_jump produces code that might be jumped around,
10166 do any stack adjusts from that code, before the place
10167 where control merges in. */
10168 do_pending_stack_adjust ();
10169 emit_label (drop_through_label);
10173 /* Given a comparison expression EXP for values too wide to be compared
10174 with one insn, test the comparison and jump to the appropriate label.
10175 The code of EXP is ignored; we always test GT if SWAP is 0,
10176 and LT if SWAP is 1. */
10178 static void
10179 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10180 tree exp;
10181 int swap;
10182 rtx if_false_label, if_true_label;
10184 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10185 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10186 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10187 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10188 rtx drop_through_label = 0;
10189 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10190 int i;
10192 if (! if_true_label || ! if_false_label)
10193 drop_through_label = gen_label_rtx ();
10194 if (! if_true_label)
10195 if_true_label = drop_through_label;
10196 if (! if_false_label)
10197 if_false_label = drop_through_label;
10199 /* Compare a word at a time, high order first. */
10200 for (i = 0; i < nwords; i++)
10202 rtx comp;
10203 rtx op0_word, op1_word;
10205 if (WORDS_BIG_ENDIAN)
10207 op0_word = operand_subword_force (op0, i, mode);
10208 op1_word = operand_subword_force (op1, i, mode);
10210 else
10212 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10213 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10216 /* All but high-order word must be compared as unsigned. */
10217 comp = compare_from_rtx (op0_word, op1_word,
10218 (unsignedp || i > 0) ? GTU : GT,
10219 unsignedp, word_mode, NULL_RTX, 0);
10220 if (comp == const_true_rtx)
10221 emit_jump (if_true_label);
10222 else if (comp != const0_rtx)
10223 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10225 /* Consider lower words only if these are equal. */
10226 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10227 NULL_RTX, 0);
10228 if (comp == const_true_rtx)
10229 emit_jump (if_false_label);
10230 else if (comp != const0_rtx)
10231 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10234 if (if_false_label)
10235 emit_jump (if_false_label);
10236 if (drop_through_label)
10237 emit_label (drop_through_label);
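/* The scheme above, restated as ordinary C for a two-word unsigned
   value (an illustrative sketch, not part of the compiler):  */
#if 0
static int
gt_by_parts (unsigned long hi0, unsigned long lo0,
             unsigned long hi1, unsigned long lo1)
{
  if (hi0 > hi1)
    return 1;           /* High-order words decide when they differ.  */
  if (hi0 != hi1)
    return 0;
  return lo0 > lo1;     /* Low words matter only on equality, and are
                           always compared unsigned.  */
}
#endif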
10240 /* Compare OP0 with OP1, word at a time, in mode MODE.
10241 UNSIGNEDP says to do unsigned comparison.
10242 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10244 void
10245 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10246 enum machine_mode mode;
10247 int unsignedp;
10248 rtx op0, op1;
10249 rtx if_false_label, if_true_label;
10251 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10252 rtx drop_through_label = 0;
10253 int i;
10255 if (! if_true_label || ! if_false_label)
10256 drop_through_label = gen_label_rtx ();
10257 if (! if_true_label)
10258 if_true_label = drop_through_label;
10259 if (! if_false_label)
10260 if_false_label = drop_through_label;
10262 /* Compare a word at a time, high order first. */
10263 for (i = 0; i < nwords; i++)
10265 rtx comp;
10266 rtx op0_word, op1_word;
10268 if (WORDS_BIG_ENDIAN)
10270 op0_word = operand_subword_force (op0, i, mode);
10271 op1_word = operand_subword_force (op1, i, mode);
10273 else
10275 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10276 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10279 /* All but high-order word must be compared as unsigned. */
10280 comp = compare_from_rtx (op0_word, op1_word,
10281 (unsignedp || i > 0) ? GTU : GT,
10282 unsignedp, word_mode, NULL_RTX, 0);
10283 if (comp == const_true_rtx)
10284 emit_jump (if_true_label);
10285 else if (comp != const0_rtx)
10286 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10288 /* Consider lower words only if these are equal. */
10289 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10290 NULL_RTX, 0);
10291 if (comp == const_true_rtx)
10292 emit_jump (if_false_label);
10293 else if (comp != const0_rtx)
10294 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10297 if (if_false_label)
10298 emit_jump (if_false_label);
10299 if (drop_through_label)
10300 emit_label (drop_through_label);
10303 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10304 with one insn, test the comparison and jump to the appropriate label. */
10306 static void
10307 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10308 tree exp;
10309 rtx if_false_label, if_true_label;
10311 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10312 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10313 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10314 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10315 int i;
10316 rtx drop_through_label = 0;
10318 if (! if_false_label)
10319 drop_through_label = if_false_label = gen_label_rtx ();
10321 for (i = 0; i < nwords; i++)
10323 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10324 operand_subword_force (op1, i, mode),
10325 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10326 word_mode, NULL_RTX, 0);
10327 if (comp == const_true_rtx)
10328 emit_jump (if_false_label);
10329 else if (comp != const0_rtx)
10330 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10333 if (if_true_label)
10334 emit_jump (if_true_label);
10335 if (drop_through_label)
10336 emit_label (drop_through_label);
10339 /* Jump according to whether OP0 is 0.
10340 We assume that OP0 has an integer mode that is too wide
10341 for the available compare insns. */
10343 void
10344 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10345 rtx op0;
10346 rtx if_false_label, if_true_label;
10348 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10349 rtx part;
10350 int i;
10351 rtx drop_through_label = 0;
10353 /* The fastest way of doing this comparison on almost any machine is to
10354 "or" all the words and compare the result. If all have to be loaded
10355 from memory and this is a very wide item, it's possible this may
10356 be slower, but that's highly unlikely. */
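/* E.g. a DImode zero test on a 32-bit machine becomes
   `(xhigh | xlow) == 0': one IOR and a single compare, instead of
   two compares and two branches.  */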
10358 part = gen_reg_rtx (word_mode);
10359 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10360 for (i = 1; i < nwords && part != 0; i++)
10361 part = expand_binop (word_mode, ior_optab, part,
10362 operand_subword_force (op0, i, GET_MODE (op0)),
10363 part, 1, OPTAB_WIDEN);
10365 if (part != 0)
10367 rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
10368 NULL_RTX, 0);
10370 if (comp == const_true_rtx)
10371 emit_jump (if_false_label);
10372 else if (comp == const0_rtx)
10373 emit_jump (if_true_label);
10374 else
10375 do_jump_for_compare (comp, if_false_label, if_true_label);
10377 return;
10380 /* If we couldn't do the "or" simply, do this with a series of compares. */
10381 if (! if_false_label)
10382 drop_through_label = if_false_label = gen_label_rtx ();
10384 for (i = 0; i < nwords; i++)
10386 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
10387 GET_MODE (op0)),
10388 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10389 if (comp == const_true_rtx)
10390 emit_jump (if_false_label);
10391 else if (comp != const0_rtx)
10392 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10395 if (if_true_label)
10396 emit_jump (if_true_label);
10398 if (drop_through_label)
10399 emit_label (drop_through_label);
10402 /* Given a comparison expression in rtl form, output conditional branches to
10403 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
10405 static void
10406 do_jump_for_compare (comparison, if_false_label, if_true_label)
10407 rtx comparison, if_false_label, if_true_label;
10409 if (if_true_label)
10411 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10412 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
10413 else
10414 abort ();
10416 if (if_false_label)
10417 emit_jump (if_false_label);
10419 else if (if_false_label)
10421 rtx insn;
10422 rtx prev = get_last_insn ();
10423 rtx branch = 0;
10425 /* Output the branch with the opposite condition. Then try to invert
10426 what is generated. If more than one insn is a branch, or if the
10427 branch is not the last insn written, abort. If we can't invert
10428 the branch, make a true label, redirect this jump to it, emit
10429 a jump to the false label, and define the true label. */
10431 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10432 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
10433 else
10434 abort ();
10436 /* Here we get the first insn that was just emitted. It used to be the
10437 case that, on some machines, emitting the branch would discard
10438 the previous compare insn and emit a replacement. This isn't
10439 done anymore, but abort if we see that PREV is deleted. */
10441 if (prev == 0)
10442 insn = get_insns ();
10443 else if (INSN_DELETED_P (prev))
10444 abort ();
10445 else
10446 insn = NEXT_INSN (prev);
10448 for (; insn; insn = NEXT_INSN (insn))
10449 if (GET_CODE (insn) == JUMP_INSN)
10451 if (branch)
10452 abort ();
10453 branch = insn;
10456 if (branch != get_last_insn ())
10457 abort ();
10459 JUMP_LABEL (branch) = if_false_label;
10460 if (! invert_jump (branch, if_false_label))
10462 if_true_label = gen_label_rtx ();
10463 redirect_jump (branch, if_true_label);
10464 emit_jump (if_false_label);
10465 emit_label (if_true_label);
10470 /* Generate code for a comparison expression EXP
10471 (including code to compute the values to be compared)
10472 and set (CC0) according to the result.
10473 SIGNED_CODE should be the rtx operation for this comparison for
10474 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10476 We force a stack adjustment unless there are currently
10477 things pushed on the stack that aren't yet used. */
10479 static rtx
10480 compare (exp, signed_code, unsigned_code)
10481 register tree exp;
10482 enum rtx_code signed_code, unsigned_code;
10484 register rtx op0
10485 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10486 register rtx op1
10487 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10488 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
10489 register enum machine_mode mode = TYPE_MODE (type);
10490 int unsignedp = TREE_UNSIGNED (type);
10491 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
10493 #ifdef HAVE_canonicalize_funcptr_for_compare
10494 /* If function pointers need to be "canonicalized" before they can
10495 be reliably compared, then canonicalize them. */
10496 if (HAVE_canonicalize_funcptr_for_compare
10497 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10498 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10499 == FUNCTION_TYPE))
10501 rtx new_op0 = gen_reg_rtx (mode);
10503 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10504 op0 = new_op0;
10507 if (HAVE_canonicalize_funcptr_for_compare
10508 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10509 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10510 == FUNCTION_TYPE))
10512 rtx new_op1 = gen_reg_rtx (mode);
10514 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10515 op1 = new_op1;
10517 #endif
10519 return compare_from_rtx (op0, op1, code, unsignedp, mode,
10520 ((mode == BLKmode)
10521 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10522 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
10525 /* Like compare but expects the values to compare as two rtx's.
10526 The decision as to signed or unsigned comparison must be made by the caller.
10528 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10529 compared.
10531 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10532 size of MODE should be used. */
10534 rtx
10535 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10536 register rtx op0, op1;
10537 enum rtx_code code;
10538 int unsignedp;
10539 enum machine_mode mode;
10540 rtx size;
10541 int align;
10543 rtx tem;
10545 /* If one operand is constant, make it the second one. Only do this
10546 if the other operand is not constant as well. */
10548 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10549 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10551 tem = op0;
10552 op0 = op1;
10553 op1 = tem;
10554 code = swap_condition (code);
10557 if (flag_force_mem)
10559 op0 = force_not_mem (op0);
10560 op1 = force_not_mem (op1);
10563 do_pending_stack_adjust ();
10565 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10566 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10567 return tem;
10569 #if 0
10570 /* There's no need to do this now that combine.c can eliminate lots of
10571 sign extensions. This can be less efficient in certain cases on other
10572 machines. */
10574 /* If this is a signed equality comparison, we can do it as an
10575 unsigned comparison since zero-extension is cheaper than sign
10576 extension and comparisons with zero are done as unsigned. This is
10577 the case even on machines that can do fast sign extension, since
10578 zero-extension is easier to combine with other operations than
10579 sign-extension is. If we are comparing against a constant, we must
10580 convert it to what it would look like unsigned. */
10581 if ((code == EQ || code == NE) && ! unsignedp
10582 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10584 if (GET_CODE (op1) == CONST_INT
10585 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10586 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10587 unsignedp = 1;
10589 #endif
10591 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10593 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10596 /* Generate code to calculate EXP using a store-flag instruction
10597 and return an rtx for the result. EXP is either a comparison
10598 or a TRUTH_NOT_EXPR whose operand is a comparison.
10600 If TARGET is nonzero, store the result there if convenient.
10602 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10603 cheap.
10605 Return zero if there is no suitable set-flag instruction
10606 available on this machine.
10608 Once expand_expr has been called on the arguments of the comparison,
10609 we are committed to doing the store flag, since it is not safe to
10610 re-evaluate the expression. We emit the store-flag insn by calling
10611 emit_store_flag, but only expand the arguments if we have a reason
10612 to believe that emit_store_flag will be successful. If we think that
10613 it will, but it isn't, we have to simulate the store-flag with a
10614 set/jump/set sequence. */
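/* For example, on a machine with an `slt'-style instruction,
   `flag = (a < b);' can expand to a single insn that writes 0 or 1
   into FLAG with no branch at all.  The set/jump/set fallback at the
   end of this function amounts to
     flag = 1;  if (a < b) goto L;  flag = 0;  L: ;  */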
10616 static rtx
10617 do_store_flag (exp, target, mode, only_cheap)
10618 tree exp;
10619 rtx target;
10620 enum machine_mode mode;
10621 int only_cheap;
10623 enum rtx_code code;
10624 tree arg0, arg1, type;
10625 tree tem;
10626 enum machine_mode operand_mode;
10627 int invert = 0;
10628 int unsignedp;
10629 rtx op0, op1;
10630 enum insn_code icode;
10631 rtx subtarget = target;
10632 rtx result, label;
10634 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10635 result at the end. We can't simply invert the test since it would
10636 have already been inverted if it were valid. This case occurs for
10637 some floating-point comparisons. */
10639 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10640 invert = 1, exp = TREE_OPERAND (exp, 0);
10642 arg0 = TREE_OPERAND (exp, 0);
10643 arg1 = TREE_OPERAND (exp, 1);
10644 type = TREE_TYPE (arg0);
10645 operand_mode = TYPE_MODE (type);
10646 unsignedp = TREE_UNSIGNED (type);
10648 /* We won't bother with BLKmode store-flag operations because it would mean
10649 passing a lot of information to emit_store_flag. */
10650 if (operand_mode == BLKmode)
10651 return 0;
10653 /* We won't bother with store-flag operations involving function pointers
10654 when function pointers must be canonicalized before comparisons. */
10655 #ifdef HAVE_canonicalize_funcptr_for_compare
10656 if (HAVE_canonicalize_funcptr_for_compare
10657 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10658 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10659 == FUNCTION_TYPE))
10660 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10661 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10662 == FUNCTION_TYPE))))
10663 return 0;
10664 #endif
10666 STRIP_NOPS (arg0);
10667 STRIP_NOPS (arg1);
10669 /* Get the rtx comparison code to use. We know that EXP is a comparison
10670 operation of some type. Some comparisons against 1 and -1 can be
10671 converted to comparisons with zero. Do so here so that the tests
10672 below will be aware that we have a comparison with zero. These
10673 tests will not catch constants in the first operand, but constants
10674 are rarely passed as the first operand. */
10676 switch (TREE_CODE (exp))
10678 case EQ_EXPR:
10679 code = EQ;
10680 break;
10681 case NE_EXPR:
10682 code = NE;
10683 break;
10684 case LT_EXPR:
10685 if (integer_onep (arg1))
10686 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10687 else
10688 code = unsignedp ? LTU : LT;
10689 break;
10690 case LE_EXPR:
10691 if (! unsignedp && integer_all_onesp (arg1))
10692 arg1 = integer_zero_node, code = LT;
10693 else
10694 code = unsignedp ? LEU : LE;
10695 break;
10696 case GT_EXPR:
10697 if (! unsignedp && integer_all_onesp (arg1))
10698 arg1 = integer_zero_node, code = GE;
10699 else
10700 code = unsignedp ? GTU : GT;
10701 break;
10702 case GE_EXPR:
10703 if (integer_onep (arg1))
10704 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10705 else
10706 code = unsignedp ? GEU : GE;
10707 break;
10708 default:
10709 abort ();
10712 /* Put a constant second. */
10713 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10715 tem = arg0; arg0 = arg1; arg1 = tem;
10716 code = swap_condition (code);
10719 /* If this is an equality or inequality test of a single bit, we can
10720 do this by shifting the bit being tested to the low-order bit and
10721 masking the result with the constant 1. If the condition was EQ,
10722 we xor it with 1. This does not require an scc insn and is faster
10723 than an scc insn even if we have it. */
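/* For example, `(x & 8) != 0' becomes `(x >> 3) & 1', and
   `(x & 8) == 0' becomes `((x >> 3) & 1) ^ 1'.  */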
10725 if ((code == NE || code == EQ)
10726 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10727 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10729 tree inner = TREE_OPERAND (arg0, 0);
10730 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10731 int ops_unsignedp;
10733 /* If INNER is a right shift of a constant and it plus BITNUM does
10734 not overflow, adjust BITNUM and INNER. */
10736 if (TREE_CODE (inner) == RSHIFT_EXPR
10737 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10738 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10739 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
10740 < TYPE_PRECISION (type)))
10742 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10743 inner = TREE_OPERAND (inner, 0);
10746 /* If we are going to be able to omit the AND below, we must do our
10747 operations as unsigned. If we must use the AND, we have a choice.
10748 Normally unsigned is faster, but for some machines signed is. */
10749 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10750 #ifdef LOAD_EXTEND_OP
10751 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10752 #else
10753 : 1
10754 #endif
10755 );
10757 if (subtarget == 0 || GET_CODE (subtarget) != REG
10758 || GET_MODE (subtarget) != operand_mode
10759 || ! safe_from_p (subtarget, inner, 1))
10760 subtarget = 0;
10762 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10764 if (bitnum != 0)
10765 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10766 size_int (bitnum), subtarget, ops_unsignedp);
10768 if (GET_MODE (op0) != mode)
10769 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10771 if ((code == EQ && ! invert) || (code == NE && invert))
10772 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10773 ops_unsignedp, OPTAB_LIB_WIDEN);
10775 /* Put the AND last so it can combine with more things. */
10776 if (bitnum != TYPE_PRECISION (type) - 1)
10777 op0 = expand_and (op0, const1_rtx, subtarget);
10779 return op0;
10782 /* Now see if we are likely to be able to do this. Return if not. */
10783 if (! can_compare_p (operand_mode))
10784 return 0;
10785 icode = setcc_gen_code[(int) code];
10786 if (icode == CODE_FOR_nothing
10787 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
10789 /* We can only do this if it is one of the special cases that
10790 can be handled without an scc insn. */
10791 if ((code == LT && integer_zerop (arg1))
10792 || (! only_cheap && code == GE && integer_zerop (arg1)))
10794 else if (BRANCH_COST >= 0
10795 && ! only_cheap && (code == NE || code == EQ)
10796 && TREE_CODE (type) != REAL_TYPE
10797 && ((abs_optab->handlers[(int) operand_mode].insn_code
10798 != CODE_FOR_nothing)
10799 || (ffs_optab->handlers[(int) operand_mode].insn_code
10800 != CODE_FOR_nothing)))
10802 else
10803 return 0;
10806 preexpand_calls (exp);
10807 if (subtarget == 0 || GET_CODE (subtarget) != REG
10808 || GET_MODE (subtarget) != operand_mode
10809 || ! safe_from_p (subtarget, arg1, 1))
10810 subtarget = 0;
10812 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10813 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10815 if (target == 0)
10816 target = gen_reg_rtx (mode);
10818 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10819 because, if emit_store_flag does anything, it will succeed and
10820 OP0 and OP1 will not be used subsequently. */
10822 result = emit_store_flag (target, code,
10823 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10824 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10825 operand_mode, unsignedp, 1);
10827 if (result)
10829 if (invert)
10830 result = expand_binop (mode, xor_optab, result, const1_rtx,
10831 result, 0, OPTAB_LIB_WIDEN);
10832 return result;
10835 /* If this failed, we have to do this with set/compare/jump/set code. */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */
void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;
  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
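  /* For example, for a switch covering 5..10 the caller subtracts 5 from
     INDEX and passes RANGE = 5; the unsigned test INDEX > 5 then fires
     exactly when the original value was below 5 or above 10, because an
     underflow wraps around to a large unsigned number.  */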
  emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
  emit_jump_insn (gen_bgtu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif
  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
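  /* The address of entry INDEX is TABLE_LABEL plus INDEX times the size
     of one table entry.  */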
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

#endif /* HAVE_tablejump */