/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "defaults.h"
#include "toplev.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))
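
/* For example, CEIL (5, 4) == 2 and CEIL (8, 4) == 2: the number of
   whole Y-byte units needed to cover X bytes.  */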
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* Nonzero if the machine description has been fixed to accept
   CONSTANT_P_RTX patterns.  We will emit a warning and continue
   if we find we must actually use such a beast.  */
static int can_handle_constant_p;

/* Don't check memory usage, since code is being emitted to check memory
   usage.  Used when flag_check_memory_usage is true, to avoid infinite
   recursion.  */
static int in_check_memory_usage;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int len;
  int offset;
  int reverse;
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};

extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;
static rtx get_push_address	PROTO ((int));

static rtx enqueue_insn		PROTO((rtx, rtx));
static int queued_subexp_p	PROTO((rtx));
static void init_queue		PROTO((void));
static void move_by_pieces	PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1	PROTO((rtx (*) (rtx, ...), enum machine_mode,
				       struct move_by_pieces *));
static void clear_by_pieces	PROTO((rtx, int, int));
static void clear_by_pieces_1	PROTO((rtx (*) (rtx, ...), enum machine_mode,
				       struct clear_by_pieces *));
static int is_zeros_p		PROTO((tree));
static int mostly_zeros_p	PROTO((tree));
static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
					    tree, tree, int));
static void store_constructor	PROTO((tree, rtx, int));
static rtx store_field		PROTO((rtx, int, int, enum machine_mode, tree,
				       enum machine_mode, int, int, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PROTO((enum expand_modifier));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p		PROTO((rtx, tree, int));
static int fixed_type_p		PROTO((tree));
static rtx var_rtx		PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant	PROTO((tree, tree *));
static tree c_strlen		PROTO((tree));
static rtx get_memory_rtx	PROTO((tree));
static rtx expand_builtin	PROTO((tree, rtx, rtx,
				       enum machine_mode, int));
static int apply_args_size	PROTO((void));
static int apply_result_size	PROTO((void));
static rtx result_vector	PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply	PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment	PROTO((tree, int, int));
static void preexpand_calls	PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_for_compare	PROTO((rtx, rtx, rtx));
static rtx compare		PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag	PROTO((tree, rtx, enum machine_mode, int));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif
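
/* For illustration: on a target with 4-byte words and no movstr pattern,
   a constant-size copy of 32 bytes costs 8 word moves.  With the default
   ratio of 15 it is expanded inline by move_by_pieces below; under -Os
   (ratio 3) it falls through to the library call instead.  */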
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  char *free_point;

  start_sequence ();

  /* Since we are on the permanent obstack, we must be sure we save this
     spot AFTER we call start_sequence, since it will reuse the rtl it
     makes.  */
  free_point = (char *) oballoc (0);

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  /* Find out if CONSTANT_P_RTX is accepted.  */
  SET_DEST (pat) = gen_rtx_REG (TYPE_MODE (integer_type_node),
				FIRST_PSEUDO_REGISTER);
  SET_SRC (pat) = gen_rtx_CONSTANT_P_RTX (TYPE_MODE (integer_type_node),
					  SET_DEST (pat));
  if (recog (pat, insn, &num_clobbers) >= 0)
    can_handle_constant_p = 1;

  end_sequence ();
  obfree (free_point);
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var),
				  var, NULL_RTX, NULL_RTX, body,
				  pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
	  MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
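
/* A typical caller, sketched for illustration: any operand that might be
   a QUEUED is filtered immediately before being placed in an insn, with
   MODIFY nonzero only for an operand that will be stored into, e.g.

	to = protect_from_queue (to, 1);
	from = protect_from_queue (from, 0);
	emit_move_insn (to, from);

   as convert_move and emit_block_move do below.  */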
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	{
	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));
	}
      else
	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
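
/* For illustration: for a C expression such as `*p++ = x', the store is
   emitted immediately while the increment of p is queued via enqueue_insn;
   the resulting QUEUED rtx stands for the pre-increment value of p until
   emit_queue flushes the pending increments at the end of the statement.  */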
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }
  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
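
  /* For example, widening an SImode register to DImode on a 32-bit target
     with no extendsidi2 pattern: the low word of TO receives FROM, and the
     remaining word receives either const0_rtx (zero extension) or
     FROM >> 31, an arithmetic shift that replicates the sign bit
     (sign extension).  */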
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion */			/* SPEE 900220 */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
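
/* A worked example of the CONST_INT widening above: converting
   (const_int 255) from QImode with UNSIGNEDP clear, WIDTH is 8, so VAL
   is 0xff after masking; bit 7 is set, so the high-order bits are
   restored and GEN_INT (-1) is returned -- the QImode value 0xff read
   as signed is -1, which sign-extends correctly into the wider mode.  */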
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
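
/* For example, copying 7 bytes on a 32-bit target with 4-byte alignment
   proceeds from the widest usable mode downward: one SImode move for
   bytes 0-3, one HImode move for bytes 4-5, and a QImode move for byte 6,
   leaving data.len at zero.  */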
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx_MEM (mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

      from1
	= (data->autinc_from
	   ? gen_rtx_MEM (mode, data->from_addr)
	   : copy_rtx (change_address (data->from, mode,
				       plus_constant (data->from_addr,
						      data->offset))));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  rtx retval = 0;

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= (GET_MODE_MASK (mode) >> 1)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return 0;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      retval
	= emit_library_call_value (memcpy_libfunc, NULL_RTX, 0,
				   ptr_mode, 3, XEXP (x, 0), Pmode,
				   XEXP (y, 0), Pmode,
				   convert_to_mode (TYPE_MODE (sizetype), size,
						    TREE_UNSIGNED (sizetype)),
				   TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
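
/* A minimal usage sketch (DST_ADDR and SRC_ADDR are hypothetical
   pre-legitimized address rtx's):

	rtx x = gen_rtx_MEM (BLKmode, dst_addr);
	rtx y = gen_rtx_MEM (BLKmode, src_addr);
	emit_block_move (x, y, GEN_INT (32), 4);

   copies 32 bytes assuming 4-byte alignment, choosing among inline moves,
   a movstr pattern, or the memcpy/bcopy library call.  */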
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (change_address (x, mode, NULL),
		      gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
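
/* For example, with SIZE == 3 and 4-byte words on a BYTES_BIG_ENDIAN
   target, the register value is shifted left by (4 - 3) * 8 == 8 bits so
   that its three significant bytes land in the low-order memory addresses
   of the destination word.  */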
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  ALIGN is the known alignment of
   SRC in bits.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */

void
emit_group_load (dst, orig_src, ssize, align)
     rtx dst, orig_src;
     int align, ssize;
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));

  /* If we won't be loading directly from memory, protect the real source
     from strange tricks we might play.  */
  src = orig_src;
  if (GET_CODE (src) != MEM)
    {
      src = gen_reg_rtx (GET_MODE (orig_src));
      emit_move_insn (src, orig_src);
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + bytelen > ssize)
	{
	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  if (bytelen <= 0)
	    abort();
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
	  && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
	  && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i],
			  change_address (src, mode,
					  plus_constant (XEXP (src, 0),
							 bytepos)));
	}
      else
	{
	  tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
				       bytepos*BITS_PER_UNIT, 1, NULL_RTX,
				       mode, mode, align, ssize);
	}

      if (BYTES_BIG_ENDIAN && shift)
	{
	  expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
			tmps[i], 0, OPTAB_WIDEN);
	}
    }
  emit_queue();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
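
/* For illustration, a DST of the kind the Irix 6 ABI produces, with
   hypothetical register numbers: a 16-byte struct returned partly in a
   floating register might be described as

	(parallel [(expr_list (reg:DF 32) (const_int 0))
		   (expr_list (reg:DI 4) (const_int 8))])

   i.e. bytes 0-7 of SRC are loaded into register 32 and bytes 8-15 into
   register 4.  */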
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  ALIGN is the known alignment of DST.  */

void
emit_group_store (orig_dst, src, ssize, align)
     rtx orig_dst, src;
     int ssize, align;
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }
  emit_queue();

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) != MEM)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, const0_rtx);
    }
  else if (! MEM_IN_STRUCT_P (dst))
    {
      /* store_bit_field requires that memory operations have
	 mem_in_struct_p set; we might not.  */

      dst = copy_rtx (orig_dst);
      MEM_IN_STRUCT_P (dst) = 1;
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + bytelen > ssize)
	{
	  if (BYTES_BIG_ENDIAN)
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
			    tmps[i], 0, OPTAB_WIDEN);
	    }
	  bytelen = ssize - bytepos;
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (dst) == MEM
	  && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
	  && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  emit_move_insn (change_address (dst, mode,
					  plus_constant (XEXP (dst, 0),
							 bytepos)),
			  tmps[i]);
	}
      else
	{
	  store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT,
			   mode, tmps[i], align, ssize);
	}
    }
  emit_queue();

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
	use_reg (call_fusage, reg);
    }
}
2073 /* Generate several move instructions to clear LEN bytes of block TO.
2074 (A MEM rtx with BLKmode). The caller must pass TO through
2075 protect_from_queue before calling. ALIGN (in bytes) is the maximum alignment
2076 we can assume. */
2078 static void
2079 clear_by_pieces (to, len, align)
2080 rtx to;
2081 int len, align;
2083 struct clear_by_pieces data;
2084 rtx to_addr = XEXP (to, 0);
2085 int max_size = MOVE_MAX + 1;
2087 data.offset = 0;
2088 data.to_addr = to_addr;
2089 data.to = to;
2090 data.autinc_to
2091 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2092 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2094 data.explicit_inc_to = 0;
2095 data.reverse
2096 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2097 if (data.reverse) data.offset = len;
2098 data.len = len;
2100 data.to_struct = MEM_IN_STRUCT_P (to);
2102 /* If clearing requires more than two move insns,
2103 copy addresses to registers (to make displacements shorter)
2104 and use post-increment if available. */
2105 if (!data.autinc_to
2106 && move_by_pieces_ninsns (len, align) > 2)
2108 #ifdef HAVE_PRE_DECREMENT
2109 if (data.reverse && ! data.autinc_to)
2111 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2112 data.autinc_to = 1;
2113 data.explicit_inc_to = -1;
2115 #endif
2116 #ifdef HAVE_POST_INCREMENT
2117 if (! data.reverse && ! data.autinc_to)
2119 data.to_addr = copy_addr_to_reg (to_addr);
2120 data.autinc_to = 1;
2121 data.explicit_inc_to = 1;
2123 #endif
2124 if (!data.autinc_to && CONSTANT_P (to_addr))
2125 data.to_addr = copy_addr_to_reg (to_addr);
2128 if (! SLOW_UNALIGNED_ACCESS
2129 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2130 align = MOVE_MAX;
2132 /* First clear what we can in the largest integer mode, then go to
2133 successively smaller modes. */
2135 while (max_size > 1)
2137 enum machine_mode mode = VOIDmode, tmode;
2138 enum insn_code icode;
2140 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2141 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2142 if (GET_MODE_SIZE (tmode) < max_size)
2143 mode = tmode;
2145 if (mode == VOIDmode)
2146 break;
2148 icode = mov_optab->handlers[(int) mode].insn_code;
2149 if (icode != CODE_FOR_nothing
2150 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2151 GET_MODE_SIZE (mode)))
2152 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2154 max_size = GET_MODE_SIZE (mode);
2157 /* The code above should have handled everything. */
2158 if (data.len != 0)
2159 abort ();
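/* Illustrative sketch (not part of expr.c): the strategy of
   clear_by_pieces in plain C.  Stores are tried from the widest
   plausible size down to single bytes, each size used only while the
   remaining length allows it; the memset of one piece stands in for a
   single move insn.  All names here are hypothetical.  */
#include <string.h>

static void
clear_by_pieces_sketch (p, len)
     char *p;
     int len;
{
  int size;

  /* Largest "mode" first, then successively smaller ones.  */
  for (size = 8; size >= 1; size /= 2)
    while (len >= size)
      {
        memset (p, 0, size);    /* one store of SIZE bytes */
        p += size;
        len -= size;
      }
}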
2162 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2163 with move instructions for mode MODE. GENFUN is the gen_... function
2164 to make a move insn for that mode. DATA has all the other info. */
2166 static void
2167 clear_by_pieces_1 (genfun, mode, data)
2168 rtx (*genfun) PROTO ((rtx, ...));
2169 enum machine_mode mode;
2170 struct clear_by_pieces *data;
2172 register int size = GET_MODE_SIZE (mode);
2173 register rtx to1;
2175 while (data->len >= size)
2177 if (data->reverse) data->offset -= size;
2179 to1 = (data->autinc_to
2180 ? gen_rtx_MEM (mode, data->to_addr)
2181 : copy_rtx (change_address (data->to, mode,
2182 plus_constant (data->to_addr,
2183 data->offset))));
2184 MEM_IN_STRUCT_P (to1) = data->to_struct;
2186 #ifdef HAVE_PRE_DECREMENT
2187 if (data->explicit_inc_to < 0)
2188 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2189 #endif
2191 emit_insn ((*genfun) (to1, const0_rtx));
2192 #ifdef HAVE_POST_INCREMENT
2193 if (data->explicit_inc_to > 0)
2194 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2195 #endif
2197 if (! data->reverse) data->offset += size;
2199 data->len -= size;
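/* Illustrative sketch (not part of expr.c): the offset bookkeeping of
   clear_by_pieces_1 for the "reverse" case, where stores walk downward
   in memory, in plain C.  The hypothetical OFFSET starts at LEN and is
   pre-decremented before each store, mirroring data->offset above.  */
#include <string.h>

static void
clear_reverse_sketch (p, len, size)
     char *p;
     int len, size;
{
  int offset = len;             /* data.offset = len when reversed */

  while (len >= size)
    {
      offset -= size;           /* like explicit_inc_to < 0 */
      memset (p + offset, 0, size);
      len -= size;
    }
}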
2203 /* Write zeros through the storage of OBJECT.
2204 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2205 the maximum alignment we can assume, measured in bytes.
2207 If we call a function that returns the length of the block, return it. */
2209 rtx
2210 clear_storage (object, size, align)
2211 rtx object;
2212 rtx size;
2213 int align;
2215 rtx retval = 0;
2217 if (GET_MODE (object) == BLKmode)
2219 object = protect_from_queue (object, 1);
2220 size = protect_from_queue (size, 0);
2222 if (GET_CODE (size) == CONST_INT
2223 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2224 clear_by_pieces (object, INTVAL (size), align);
2226 else
2228 /* Try the most limited insn first, because there's no point
2229 including more than one in the machine description unless
2230 the more limited one has some advantage. */
2232 rtx opalign = GEN_INT (align);
2233 enum machine_mode mode;
2235 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2236 mode = GET_MODE_WIDER_MODE (mode))
2238 enum insn_code code = clrstr_optab[(int) mode];
2240 if (code != CODE_FOR_nothing
2241 /* We don't need MODE to be narrower than
2242 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2243 the mode mask, as it is returned by the macro, it will
2244 definitely be less than the actual mode mask. */
2245 && ((GET_CODE (size) == CONST_INT
2246 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2247 <= (GET_MODE_MASK (mode) >> 1)))
2248 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2249 && (insn_operand_predicate[(int) code][0] == 0
2250 || (*insn_operand_predicate[(int) code][0]) (object,
2251 BLKmode))
2252 && (insn_operand_predicate[(int) code][2] == 0
2253 || (*insn_operand_predicate[(int) code][2]) (opalign,
2254 VOIDmode)))
2256 rtx op1;
2257 rtx last = get_last_insn ();
2258 rtx pat;
2260 op1 = convert_to_mode (mode, size, 1);
2261 if (insn_operand_predicate[(int) code][1] != 0
2262 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2263 mode))
2264 op1 = copy_to_mode_reg (mode, op1);
2266 pat = GEN_FCN ((int) code) (object, op1, opalign);
2267 if (pat)
2269 emit_insn (pat);
2270 return 0;
2272 else
2273 delete_insns_since (last);
2278 #ifdef TARGET_MEM_FUNCTIONS
2279 retval
2280 = emit_library_call_value (memset_libfunc, NULL_RTX, 0,
2281 ptr_mode, 3,
2282 XEXP (object, 0), Pmode,
2283 const0_rtx,
2284 TYPE_MODE (integer_type_node),
2285 convert_to_mode
2286 (TYPE_MODE (sizetype), size,
2287 TREE_UNSIGNED (sizetype)),
2288 TYPE_MODE (sizetype));
2289 #else
2290 emit_library_call (bzero_libfunc, 0,
2291 VOIDmode, 2,
2292 XEXP (object, 0), Pmode,
2293 convert_to_mode
2294 (TYPE_MODE (integer_type_node), size,
2295 TREE_UNSIGNED (integer_type_node)),
2296 TYPE_MODE (integer_type_node));
2297 #endif
2300 else
2301 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2303 return retval;
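/* Illustrative sketch (not part of expr.c): the decision clear_storage
   makes, in plain C -- clear small blocks of known size inline and fall
   back to a library call otherwise.  INLINE_CLEAR_LIMIT is a made-up
   stand-in for the MOVE_RATIO test; all names are hypothetical.  */
#include <string.h>

#define INLINE_CLEAR_LIMIT 16

static void
clear_storage_sketch (p, len)
     char *p;
     int len;
{
  if (len <= INLINE_CLEAR_LIMIT)
    {
      /* "By pieces": a few explicit stores, no call overhead.  */
      int i;

      for (i = 0; i < len; i++)
        p[i] = 0;
    }
  else
    /* The memset/bzero library fallback taken above.  */
    memset (p, 0, len);
}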
2306 /* Generate code to copy Y into X.
2307 Both Y and X must have the same mode, except that
2308 Y can be a constant with VOIDmode.
2309 This mode cannot be BLKmode; use emit_block_move for that.
2311 Return the last instruction emitted. */
2313 rtx
2314 emit_move_insn (x, y)
2315 rtx x, y;
2317 enum machine_mode mode = GET_MODE (x);
2319 x = protect_from_queue (x, 1);
2320 y = protect_from_queue (y, 0);
2322 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2323 abort ();
2325 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2326 y = force_const_mem (mode, y);
2328 /* If X or Y are memory references, verify that their addresses are valid
2329 for the machine. */
2330 if (GET_CODE (x) == MEM
2331 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2332 && ! push_operand (x, GET_MODE (x)))
2333 || (flag_force_addr
2334 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2335 x = change_address (x, VOIDmode, XEXP (x, 0));
2337 if (GET_CODE (y) == MEM
2338 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2339 || (flag_force_addr
2340 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2341 y = change_address (y, VOIDmode, XEXP (y, 0));
2343 if (mode == BLKmode)
2344 abort ();
2346 return emit_move_insn_1 (x, y);
2349 /* Low level part of emit_move_insn.
2350 Called just like emit_move_insn, but assumes X and Y
2351 are basically valid. */
2353 rtx
2354 emit_move_insn_1 (x, y)
2355 rtx x, y;
2357 enum machine_mode mode = GET_MODE (x);
2358 enum machine_mode submode;
2359 enum mode_class class = GET_MODE_CLASS (mode);
2360 int i;
2362 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2363 return
2364 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2366 /* Expand complex moves by moving real part and imag part, if possible. */
2367 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2368 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2369 * BITS_PER_UNIT),
2370 (class == MODE_COMPLEX_INT
2371 ? MODE_INT : MODE_FLOAT),
2372 0))
2373 && (mov_optab->handlers[(int) submode].insn_code
2374 != CODE_FOR_nothing))
2376 /* Don't split destination if it is a stack push. */
2377 int stack = push_operand (x, GET_MODE (x));
2379 /* If this is a stack, push the highpart first, so it
2380 will be in the argument order.
2382 In that case, change_address is used only to convert
2383 the mode, not to change the address. */
2384 if (stack)
2386 /* Note that the real part always precedes the imag part in memory
2387 regardless of machine's endianness. */
2388 #ifdef STACK_GROWS_DOWNWARD
2389 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2390 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2391 gen_imagpart (submode, y)));
2392 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2393 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2394 gen_realpart (submode, y)));
2395 #else
2396 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2397 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2398 gen_realpart (submode, y)));
2399 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2400 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2401 gen_imagpart (submode, y)));
2402 #endif
2404 else
2406 /* Show the output dies here. */
2407 if (x != y)
2408 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2410 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2411 (gen_realpart (submode, x), gen_realpart (submode, y)));
2412 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2413 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2416 return get_last_insn ();
2419 /* This will handle any multi-word mode that lacks a move_insn pattern.
2420 However, you will get better code if you define such patterns,
2421 even if they must turn into multiple assembler instructions. */
2422 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2424 rtx last_insn = 0;
2426 #ifdef PUSH_ROUNDING
2428 /* If X is a push on the stack, do the push now and replace
2429 X with a reference to the stack pointer. */
2430 if (push_operand (x, GET_MODE (x)))
2432 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2433 x = change_address (x, VOIDmode, stack_pointer_rtx);
2435 #endif
2437 /* Show the output dies here. */
2438 if (x != y)
2439 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2441 for (i = 0;
2442 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2443 i++)
2445 rtx xpart = operand_subword (x, i, 1, mode);
2446 rtx ypart = operand_subword (y, i, 1, mode);
2448 /* If we can't get a part of Y, put Y into memory if it is a
2449 constant. Otherwise, force it into a register. If we still
2450 can't get a part of Y, abort. */
2451 if (ypart == 0 && CONSTANT_P (y))
2453 y = force_const_mem (mode, y);
2454 ypart = operand_subword (y, i, 1, mode);
2456 else if (ypart == 0)
2457 ypart = operand_subword_force (y, i, mode);
2459 if (xpart == 0 || ypart == 0)
2460 abort ();
2462 last_insn = emit_move_insn (xpart, ypart);
2465 return last_insn;
2467 else
2468 abort ();
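/* Illustrative sketch (not part of expr.c): the multi-word fallback of
   emit_move_insn_1 in plain C.  When no single insn moves the whole
   object, it is moved one word at a time; each memcpy below stands in
   for one emit_move_insn on a subword.  Assumes the buffers are padded
   to whole words, as multi-word machine modes are; names hypothetical. */
#include <string.h>

static void
multiword_move_sketch (x, y, nbytes)
     char *x, *y;
     int nbytes;
{
  int wordsize = sizeof (long);
  int nwords = (nbytes + wordsize - 1) / wordsize;
  int i;

  for (i = 0; i < nwords; i++)
    memcpy (x + i * wordsize, y + i * wordsize, wordsize);
}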
2471 /* Pushing data onto the stack. */
2473 /* Push a block of length SIZE (perhaps variable)
2474 and return an rtx to address the beginning of the block.
2475 Note that it is not possible for the value returned to be a QUEUED.
2476 The value may be virtual_outgoing_args_rtx.
2478 EXTRA is the number of bytes of padding to push in addition to SIZE.
2479 BELOW nonzero means this padding comes at low addresses;
2480 otherwise, the padding comes at high addresses. */
2482 rtx
2483 push_block (size, extra, below)
2484 rtx size;
2485 int extra, below;
2487 register rtx temp;
2489 size = convert_modes (Pmode, ptr_mode, size, 1);
2490 if (CONSTANT_P (size))
2491 anti_adjust_stack (plus_constant (size, extra));
2492 else if (GET_CODE (size) == REG && extra == 0)
2493 anti_adjust_stack (size);
2494 else
2496 rtx temp = copy_to_mode_reg (Pmode, size);
2497 if (extra != 0)
2498 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2499 temp, 0, OPTAB_LIB_WIDEN);
2500 anti_adjust_stack (temp);
2503 #ifdef STACK_GROWS_DOWNWARD
2504 temp = virtual_outgoing_args_rtx;
2505 if (extra != 0 && below)
2506 temp = plus_constant (temp, extra);
2507 #else
2508 if (GET_CODE (size) == CONST_INT)
2509 temp = plus_constant (virtual_outgoing_args_rtx,
2510 - INTVAL (size) - (below ? 0 : extra));
2511 else if (extra != 0 && !below)
2512 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2513 negate_rtx (Pmode, plus_constant (size, extra)));
2514 else
2515 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2516 negate_rtx (Pmode, size));
2517 #endif
2519 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
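/* Illustrative sketch (not part of expr.c): the address arithmetic at
   the end of push_block, in plain C.  SP_AFTER plays the role of
   virtual_outgoing_args_rtx after the stack adjustment, and GROWS_DOWN
   selects between the two preprocessor branches above.  All names here
   are hypothetical.  */
static char *
push_block_addr_sketch (sp_after, size, extra, below, grows_down)
     char *sp_after;
     int size, extra, below, grows_down;
{
  if (grows_down)
    /* The new space sits at the adjusted pointer; padding placed at
       low addresses moves the block start up by EXTRA.  */
    return sp_after + (below ? extra : 0);
  /* Stack grows upward: the block ends at the adjusted pointer, so
     back up over the data (and over EXTRA if the padding is above).  */
  return sp_after - size - (below ? 0 : extra);
}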
2522 rtx
2523 gen_push_operand ()
2525 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2528 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2529 block of SIZE bytes. */
2531 static rtx
2532 get_push_address (size)
2533 int size;
2535 register rtx temp;
2537 if (STACK_PUSH_CODE == POST_DEC)
2538 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2539 else if (STACK_PUSH_CODE == POST_INC)
2540 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2541 else
2542 temp = stack_pointer_rtx;
2544 return copy_to_reg (temp);
2547 /* Generate code to push X onto the stack, assuming it has mode MODE and
2548 type TYPE.
2549 MODE is redundant except when X is a CONST_INT (since they don't
2550 carry mode info).
2551 SIZE is an rtx for the size of data to be copied (in bytes),
2552 needed only if X is BLKmode.
2554 ALIGN (in bytes) is the maximum alignment we can assume.
2556 If PARTIAL and REG are both nonzero, then copy that many of the first
2557 words of X into registers starting with REG, and push the rest of X.
2558 The amount of space pushed is decreased by PARTIAL words,
2559 rounded *down* to a multiple of PARM_BOUNDARY.
2560 REG must be a hard register in this case.
2561 If REG is zero but PARTIAL is not, take all other actions for an
2562 argument partially in registers, but do not actually load any
2563 registers.
2565 EXTRA is the amount in bytes of extra space to leave next to this arg.
2566 This is ignored if an argument block has already been allocated.
2568 On a machine that lacks real push insns, ARGS_ADDR is the address of
2569 the bottom of the argument block for this call. We use indexing off there
2570 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2571 argument block has not been preallocated.
2573 ARGS_SO_FAR is the size of args previously pushed for this call.
2575 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2576 for arguments passed in registers. If nonzero, it will be the number
2577 of bytes required. */
2579 void
2580 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2581 args_addr, args_so_far, reg_parm_stack_space)
2582 register rtx x;
2583 enum machine_mode mode;
2584 tree type;
2585 rtx size;
2586 int align;
2587 int partial;
2588 rtx reg;
2589 int extra;
2590 rtx args_addr;
2591 rtx args_so_far;
2592 int reg_parm_stack_space;
2594 rtx xinner;
2595 enum direction stack_direction
2596 #ifdef STACK_GROWS_DOWNWARD
2597 = downward;
2598 #else
2599 = upward;
2600 #endif
2602 /* Decide where to pad the argument: `downward' for below,
2603 `upward' for above, or `none' for don't pad it.
2604 Default is below for small data on big-endian machines; else above. */
2605 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2607 /* Invert direction if stack is post-update. */
2608 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2609 if (where_pad != none)
2610 where_pad = (where_pad == downward ? upward : downward);
2612 xinner = x = protect_from_queue (x, 0);
2614 if (mode == BLKmode)
2616 /* Copy a block into the stack, entirely or partially. */
2618 register rtx temp;
2619 int used = partial * UNITS_PER_WORD;
2620 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2621 int skip;
2623 if (size == 0)
2624 abort ();
2626 used -= offset;
2628 /* USED is now the # of bytes we need not copy to the stack
2629 because registers will take care of them. */
2631 if (partial != 0)
2632 xinner = change_address (xinner, BLKmode,
2633 plus_constant (XEXP (xinner, 0), used));
2635 /* If the partial register-part of the arg counts in its stack size,
2636 skip the part of stack space corresponding to the registers.
2637 Otherwise, start copying to the beginning of the stack space,
2638 by setting SKIP to 0. */
2639 skip = (reg_parm_stack_space == 0) ? 0 : used;
2641 #ifdef PUSH_ROUNDING
2642 /* Do it with several push insns if that doesn't take lots of insns
2643 and if there is no difficulty with push insns that skip bytes
2644 on the stack for alignment purposes. */
2645 if (args_addr == 0
2646 && GET_CODE (size) == CONST_INT
2647 && skip == 0
2648 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2649 < MOVE_RATIO)
2650 /* Here we avoid the case of a structure whose weak alignment
2651 forces many pushes of a small amount of data,
2652 and such small pushes do rounding that causes trouble. */
2653 && ((! SLOW_UNALIGNED_ACCESS)
2654 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2655 || PUSH_ROUNDING (align) == align)
2656 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2658 /* Push padding now if padding above and stack grows down,
2659 or if padding below and stack grows up.
2660 But if space already allocated, this has already been done. */
2661 if (extra && args_addr == 0
2662 && where_pad != none && where_pad != stack_direction)
2663 anti_adjust_stack (GEN_INT (extra));
2665 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2666 INTVAL (size) - used, align);
2668 if (flag_check_memory_usage && ! in_check_memory_usage)
2670 rtx temp;
2672 in_check_memory_usage = 1;
2673 temp = get_push_address (INTVAL (size) - used);
2674 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2675 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2676 temp, ptr_mode,
2677 XEXP (xinner, 0), ptr_mode,
2678 GEN_INT (INTVAL (size) - used),
2679 TYPE_MODE (sizetype));
2680 else
2681 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2682 temp, ptr_mode,
2683 GEN_INT (INTVAL (size) - used),
2684 TYPE_MODE (sizetype),
2685 GEN_INT (MEMORY_USE_RW),
2686 TYPE_MODE (integer_type_node));
2687 in_check_memory_usage = 0;
2690 else
2691 #endif /* PUSH_ROUNDING */
2693 /* Otherwise make space on the stack and copy the data
2694 to the address of that space. */
2696 /* Deduct words put into registers from the size we must copy. */
2697 if (partial != 0)
2699 if (GET_CODE (size) == CONST_INT)
2700 size = GEN_INT (INTVAL (size) - used);
2701 else
2702 size = expand_binop (GET_MODE (size), sub_optab, size,
2703 GEN_INT (used), NULL_RTX, 0,
2704 OPTAB_LIB_WIDEN);
2707 /* Get the address of the stack space.
2708 In this case, we do not deal with EXTRA separately.
2709 A single stack adjust will do. */
2710 if (! args_addr)
2712 temp = push_block (size, extra, where_pad == downward);
2713 extra = 0;
2715 else if (GET_CODE (args_so_far) == CONST_INT)
2716 temp = memory_address (BLKmode,
2717 plus_constant (args_addr,
2718 skip + INTVAL (args_so_far)));
2719 else
2720 temp = memory_address (BLKmode,
2721 plus_constant (gen_rtx_PLUS (Pmode,
2722 args_addr,
2723 args_so_far),
2724 skip));
2725 if (flag_check_memory_usage && ! in_check_memory_usage)
2727 rtx target;
2729 in_check_memory_usage = 1;
2730 target = copy_to_reg (temp);
2731 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2732 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2733 target, ptr_mode,
2734 XEXP (xinner, 0), ptr_mode,
2735 size, TYPE_MODE (sizetype));
2736 else
2737 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2738 target, ptr_mode,
2739 size, TYPE_MODE (sizetype),
2740 GEN_INT (MEMORY_USE_RW),
2741 TYPE_MODE (integer_type_node));
2742 in_check_memory_usage = 0;
2745 /* TEMP is the address of the block. Copy the data there. */
2746 if (GET_CODE (size) == CONST_INT
2747 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2748 < MOVE_RATIO))
2750 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
2751 INTVAL (size), align);
2752 goto ret;
2754 else
2756 rtx opalign = GEN_INT (align);
2757 enum machine_mode mode;
2758 rtx target = gen_rtx_MEM (BLKmode, temp);
2760 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2761 mode != VOIDmode;
2762 mode = GET_MODE_WIDER_MODE (mode))
2764 enum insn_code code = movstr_optab[(int) mode];
2766 if (code != CODE_FOR_nothing
2767 && ((GET_CODE (size) == CONST_INT
2768 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2769 <= (GET_MODE_MASK (mode) >> 1)))
2770 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2771 && (insn_operand_predicate[(int) code][0] == 0
2772 || ((*insn_operand_predicate[(int) code][0])
2773 (target, BLKmode)))
2774 && (insn_operand_predicate[(int) code][1] == 0
2775 || ((*insn_operand_predicate[(int) code][1])
2776 (xinner, BLKmode)))
2777 && (insn_operand_predicate[(int) code][3] == 0
2778 || ((*insn_operand_predicate[(int) code][3])
2779 (opalign, VOIDmode))))
2781 rtx op2 = convert_to_mode (mode, size, 1);
2782 rtx last = get_last_insn ();
2783 rtx pat;
2785 if (insn_operand_predicate[(int) code][2] != 0
2786 && ! ((*insn_operand_predicate[(int) code][2])
2787 (op2, mode)))
2788 op2 = copy_to_mode_reg (mode, op2);
2790 pat = GEN_FCN ((int) code) (target, xinner,
2791 op2, opalign);
2792 if (pat)
2794 emit_insn (pat);
2795 goto ret;
2797 else
2798 delete_insns_since (last);
2803 #ifndef ACCUMULATE_OUTGOING_ARGS
2804 /* If the source is referenced relative to the stack pointer,
2805 copy it to another register to stabilize it. We do not need
2806 to do this if we know that we won't be changing sp. */
2808 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2809 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2810 temp = copy_to_reg (temp);
2811 #endif
2813 /* Make inhibit_defer_pop nonzero around the library call
2814 to force it to pop the bcopy-arguments right away. */
2815 NO_DEFER_POP;
2816 #ifdef TARGET_MEM_FUNCTIONS
2817 emit_library_call (memcpy_libfunc, 0,
2818 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2819 convert_to_mode (TYPE_MODE (sizetype),
2820 size, TREE_UNSIGNED (sizetype)),
2821 TYPE_MODE (sizetype));
2822 #else
2823 emit_library_call (bcopy_libfunc, 0,
2824 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2825 convert_to_mode (TYPE_MODE (integer_type_node),
2826 size,
2827 TREE_UNSIGNED (integer_type_node)),
2828 TYPE_MODE (integer_type_node));
2829 #endif
2830 OK_DEFER_POP;
2833 else if (partial > 0)
2835 /* Scalar partly in registers. */
2837 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2838 int i;
2839 int not_stack;
2840 /* # words of start of argument
2841 that we must make space for but need not store. */
2842 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2843 int args_offset = INTVAL (args_so_far);
2844 int skip;
2846 /* Push padding now if padding above and stack grows down,
2847 or if padding below and stack grows up.
2848 But if space already allocated, this has already been done. */
2849 if (extra && args_addr == 0
2850 && where_pad != none && where_pad != stack_direction)
2851 anti_adjust_stack (GEN_INT (extra));
2853 /* If we make space by pushing it, we might as well push
2854 the real data. Otherwise, we can leave OFFSET nonzero
2855 and leave the space uninitialized. */
2856 if (args_addr == 0)
2857 offset = 0;
2859 /* Now NOT_STACK gets the number of words that we don't need to
2860 allocate on the stack. */
2861 not_stack = partial - offset;
2863 /* If the partial register-part of the arg counts in its stack size,
2864 skip the part of stack space corresponding to the registers.
2865 Otherwise, start copying to the beginning of the stack space,
2866 by setting SKIP to 0. */
2867 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
2869 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2870 x = validize_mem (force_const_mem (mode, x));
2872 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2873 SUBREGs of such registers are not allowed. */
2874 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2875 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2876 x = copy_to_reg (x);
2878 /* Loop over all the words allocated on the stack for this arg. */
2879 /* We can do it by words, because any scalar bigger than a word
2880 has a size that is a multiple of a word. */
2881 #ifndef PUSH_ARGS_REVERSED
2882 for (i = not_stack; i < size; i++)
2883 #else
2884 for (i = size - 1; i >= not_stack; i--)
2885 #endif
2886 if (i >= not_stack + offset)
2887 emit_push_insn (operand_subword_force (x, i, mode),
2888 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2889 0, args_addr,
2890 GEN_INT (args_offset + ((i - not_stack + skip)
2891 * UNITS_PER_WORD)),
2892 reg_parm_stack_space);
2894 else
2896 rtx addr;
2897 rtx target = NULL_RTX;
2899 /* Push padding now if padding above and stack grows down,
2900 or if padding below and stack grows up.
2901 But if space already allocated, this has already been done. */
2902 if (extra && args_addr == 0
2903 && where_pad != none && where_pad != stack_direction)
2904 anti_adjust_stack (GEN_INT (extra));
2906 #ifdef PUSH_ROUNDING
2907 if (args_addr == 0)
2908 addr = gen_push_operand ();
2909 else
2910 #endif
2912 if (GET_CODE (args_so_far) == CONST_INT)
2913 addr
2914 = memory_address (mode,
2915 plus_constant (args_addr,
2916 INTVAL (args_so_far)));
2917 else
2918 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
2919 args_so_far));
2920 target = addr;
2923 emit_move_insn (gen_rtx_MEM (mode, addr), x);
2925 if (flag_check_memory_usage && ! in_check_memory_usage)
2927 in_check_memory_usage = 1;
2928 if (target == 0)
2929 target = get_push_address (GET_MODE_SIZE (mode));
2931 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2932 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2933 target, ptr_mode,
2934 XEXP (x, 0), ptr_mode,
2935 GEN_INT (GET_MODE_SIZE (mode)),
2936 TYPE_MODE (sizetype));
2937 else
2938 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2939 target, ptr_mode,
2940 GEN_INT (GET_MODE_SIZE (mode)),
2941 TYPE_MODE (sizetype),
2942 GEN_INT (MEMORY_USE_RW),
2943 TYPE_MODE (integer_type_node));
2944 in_check_memory_usage = 0;
2948 ret:
2949 /* If part should go in registers, copy that part
2950 into the appropriate registers. Do this now, at the end,
2951 since mem-to-mem copies above may do function calls. */
2952 if (partial > 0 && reg != 0)
2954 /* Handle calls that pass values in multiple non-contiguous locations.
2955 The Irix 6 ABI has examples of this. */
2956 if (GET_CODE (reg) == PARALLEL)
2957 emit_group_load (reg, x, -1, align); /* ??? size? */
2958 else
2959 move_block_to_reg (REGNO (reg), x, partial, mode);
2962 if (extra && args_addr == 0 && where_pad == stack_direction)
2963 anti_adjust_stack (GEN_INT (extra));
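/* Illustrative sketch (not part of expr.c): the bookkeeping
   emit_push_insn does for a BLKmode argument passed partly in
   registers.  WORDSIZE_SK and PARM_ALIGN_SK are made-up stand-ins for
   UNITS_PER_WORD and PARM_BOUNDARY / BITS_PER_UNIT; everything here is
   a hypothetical standalone form of the arithmetic above.  */
#define WORDSIZE_SK 4
#define PARM_ALIGN_SK 8

static void
partial_arg_sketch (size, partial, reg_parm_stack_space, copy_bytes, skip)
     int size, partial, reg_parm_stack_space;
     int *copy_bytes, *skip;
{
  int used = partial * WORDSIZE_SK;     /* bytes the registers carry */

  used -= used % PARM_ALIGN_SK;         /* round down to a parm boundary */
  *copy_bytes = size - used;            /* what still goes on the stack */
  /* If the register part counts in the arg's stack size, start storing
     past it; otherwise store from the beginning of the space.  */
  *skip = reg_parm_stack_space ? used : 0;
}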
2966 /* Expand an assignment that stores the value of FROM into TO.
2967 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2968 (This may contain a QUEUED rtx;
2969 if the value is constant, this rtx is a constant.)
2970 Otherwise, the returned value is NULL_RTX.
2972 SUGGEST_REG is no longer actually used.
2973 It used to mean, copy the value through a register
2974 and return that register, if that is possible.
2975 We now use WANT_VALUE to decide whether to do this. */
2977 rtx
2978 expand_assignment (to, from, want_value, suggest_reg)
2979 tree to, from;
2980 int want_value;
2981 int suggest_reg;
2983 register rtx to_rtx = 0;
2984 rtx result;
2986 /* Don't crash if the lhs of the assignment was erroneous. */
2988 if (TREE_CODE (to) == ERROR_MARK)
2990 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2991 return want_value ? result : NULL_RTX;
2994 /* Assignment of a structure component needs special treatment
2995 if the structure component's rtx is not simply a MEM.
2996 Assignment of an array element at a constant index, and assignment of
2997 an array element in an unaligned packed structure field, has the same
2998 problem. */
3000 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3001 || TREE_CODE (to) == ARRAY_REF)
3003 enum machine_mode mode1;
3004 int bitsize;
3005 int bitpos;
3006 tree offset;
3007 int unsignedp;
3008 int volatilep = 0;
3009 tree tem;
3010 int alignment;
3012 push_temp_slots ();
3013 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3014 &unsignedp, &volatilep, &alignment);
3016 /* If we are going to use store_bit_field and extract_bit_field,
3017 make sure to_rtx will be safe for multiple use. */
3019 if (mode1 == VOIDmode && want_value)
3020 tem = stabilize_reference (tem);
3022 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3023 if (offset != 0)
3025 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3027 if (GET_CODE (to_rtx) != MEM)
3028 abort ();
3030 if (GET_MODE (offset_rtx) != ptr_mode)
3032 #ifdef POINTERS_EXTEND_UNSIGNED
3033 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3034 #else
3035 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3036 #endif
3039 if (GET_CODE (to_rtx) == MEM
3040 && GET_MODE (to_rtx) == BLKmode
3041 && bitsize
3042 && (bitpos % bitsize) == 0
3043 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3044 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3046 rtx temp = change_address (to_rtx, mode1,
3047 plus_constant (XEXP (to_rtx, 0),
3048 (bitpos /
3049 BITS_PER_UNIT)));
3050 if (GET_CODE (XEXP (temp, 0)) == REG)
3051 to_rtx = temp;
3052 else
3053 to_rtx = change_address (to_rtx, mode1,
3054 force_reg (GET_MODE (XEXP (temp, 0)),
3055 XEXP (temp, 0)));
3056 bitpos = 0;
3059 to_rtx = change_address (to_rtx, VOIDmode,
3060 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3061 force_reg (ptr_mode, offset_rtx)));
3063 if (volatilep)
3065 if (GET_CODE (to_rtx) == MEM)
3067 /* When the offset is zero, to_rtx is the address of the
3068 structure we are storing into, and hence may be shared.
3069 We must make a new MEM before setting the volatile bit. */
3070 if (offset == 0)
3071 to_rtx = copy_rtx (to_rtx);
3073 MEM_VOLATILE_P (to_rtx) = 1;
3075 #if 0 /* This was turned off because, when a field is volatile
3076 in an object which is not volatile, the object may be in a register,
3077 and then we would abort over here. */
3078 else
3079 abort ();
3080 #endif
3083 if (TREE_CODE (to) == COMPONENT_REF
3084 && TREE_READONLY (TREE_OPERAND (to, 1)))
3086 if (offset == 0)
3087 to_rtx = copy_rtx (to_rtx);
3089 RTX_UNCHANGING_P (to_rtx) = 1;
3092 /* Check the access. */
3093 if (flag_check_memory_usage && GET_CODE (to_rtx) == MEM)
3095 rtx to_addr;
3096 int size;
3097 int best_mode_size;
3098 enum machine_mode best_mode;
3100 best_mode = get_best_mode (bitsize, bitpos,
3101 TYPE_ALIGN (TREE_TYPE (tem)),
3102 mode1, volatilep);
3103 if (best_mode == VOIDmode)
3104 best_mode = QImode;
3106 best_mode_size = GET_MODE_BITSIZE (best_mode);
3107 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3108 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3109 size *= GET_MODE_SIZE (best_mode);
3111 /* Check the access right of the pointer. */
3112 if (size)
3113 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3114 to_addr, ptr_mode,
3115 GEN_INT (size), TYPE_MODE (sizetype),
3116 GEN_INT (MEMORY_USE_WO),
3117 TYPE_MODE (integer_type_node));
3120 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3121 (want_value
3122 /* Spurious cast makes HPUX compiler happy. */
3123 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3124 : VOIDmode),
3125 unsignedp,
3126 /* Required alignment of containing datum. */
3127 alignment,
3128 int_size_in_bytes (TREE_TYPE (tem)));
3129 preserve_temp_slots (result);
3130 free_temp_slots ();
3131 pop_temp_slots ();
3133 /* If the value is meaningful, convert RESULT to the proper mode.
3134 Otherwise, return nothing. */
3135 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3136 TYPE_MODE (TREE_TYPE (from)),
3137 result,
3138 TREE_UNSIGNED (TREE_TYPE (to)))
3139 : NULL_RTX);
3142 /* If the rhs is a function call and its value is not an aggregate,
3143 call the function before we start to compute the lhs.
3144 This is needed for correct code for cases such as
3145 val = setjmp (buf) on machines where reference to val
3146 requires loading up part of an address in a separate insn.
3148 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3149 a promoted variable where the zero- or sign- extension needs to be done.
3150 Handling this in the normal way is safe because no computation is done
3151 before the call. */
3152 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3153 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3154 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3156 rtx value;
3158 push_temp_slots ();
3159 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3160 if (to_rtx == 0)
3161 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3163 /* Handle calls that return values in multiple non-contiguous locations.
3164 The Irix 6 ABI has examples of this. */
3165 if (GET_CODE (to_rtx) == PARALLEL)
3166 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3167 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3168 else if (GET_MODE (to_rtx) == BLKmode)
3169 emit_block_move (to_rtx, value, expr_size (from),
3170 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3171 else
3172 emit_move_insn (to_rtx, value);
3173 preserve_temp_slots (to_rtx);
3174 free_temp_slots ();
3175 pop_temp_slots ();
3176 return want_value ? to_rtx : NULL_RTX;
3179 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3180 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3182 if (to_rtx == 0)
3184 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3185 if (GET_CODE (to_rtx) == MEM)
3186 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3189 /* Don't move directly into a return register. */
3190 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3192 rtx temp;
3194 push_temp_slots ();
3195 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3196 emit_move_insn (to_rtx, temp);
3197 preserve_temp_slots (to_rtx);
3198 free_temp_slots ();
3199 pop_temp_slots ();
3200 return want_value ? to_rtx : NULL_RTX;
3203 /* In case we are returning the contents of an object which overlaps
3204 the place the value is being stored, use a safe function when copying
3205 a value through a pointer into a structure value return block. */
3206 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3207 && current_function_returns_struct
3208 && !current_function_returns_pcc_struct)
3210 rtx from_rtx, size;
3212 push_temp_slots ();
3213 size = expr_size (from);
3214 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3215 EXPAND_MEMORY_USE_DONT);
3217 /* Copy the rights of the bitmap. */
3218 if (flag_check_memory_usage)
3219 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3220 XEXP (to_rtx, 0), ptr_mode,
3221 XEXP (from_rtx, 0), ptr_mode,
3222 convert_to_mode (TYPE_MODE (sizetype),
3223 size, TREE_UNSIGNED (sizetype)),
3224 TYPE_MODE (sizetype));
3226 #ifdef TARGET_MEM_FUNCTIONS
3227 emit_library_call (memcpy_libfunc, 0,
3228 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3229 XEXP (from_rtx, 0), Pmode,
3230 convert_to_mode (TYPE_MODE (sizetype),
3231 size, TREE_UNSIGNED (sizetype)),
3232 TYPE_MODE (sizetype));
3233 #else
3234 emit_library_call (bcopy_libfunc, 0,
3235 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3236 XEXP (to_rtx, 0), Pmode,
3237 convert_to_mode (TYPE_MODE (integer_type_node),
3238 size, TREE_UNSIGNED (integer_type_node)),
3239 TYPE_MODE (integer_type_node));
3240 #endif
3242 preserve_temp_slots (to_rtx);
3243 free_temp_slots ();
3244 pop_temp_slots ();
3245 return want_value ? to_rtx : NULL_RTX;
3248 /* Compute FROM and store the value in the rtx we got. */
3250 push_temp_slots ();
3251 result = store_expr (from, to_rtx, want_value);
3252 preserve_temp_slots (result);
3253 free_temp_slots ();
3254 pop_temp_slots ();
3255 return want_value ? result : NULL_RTX;
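/* Illustrative sketch (not part of expr.c): the CEIL computation used
   for the memory-usage check in expand_assignment -- how many bytes a
   store of BITSIZE bits at bit offset BITPOS touches when performed in
   accesses of MODE_BITS bits each.  Hypothetical standalone form; 8
   stands in for BITS_PER_UNIT.  E.g. a 10-bit field at bit offset 6,
   accessed byte-wise, touches 2 bytes.  */
static int
bitfield_span_bytes_sketch (bitpos, bitsize, mode_bits)
     int bitpos, bitsize, mode_bits;
{
  int units = ((bitpos % mode_bits) + bitsize + mode_bits - 1) / mode_bits;

  return units * (mode_bits / 8);
}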
3258 /* Generate code for computing expression EXP,
3259 and storing the value into TARGET.
3260 TARGET may contain a QUEUED rtx.
3262 If WANT_VALUE is nonzero, return a copy of the value
3263 not in TARGET, so that we can be sure to use the proper
3264 value in a containing expression even if TARGET has something
3265 else stored in it. If possible, we copy the value through a pseudo
3266 and return that pseudo. Or, if the value is constant, we try to
3267 return the constant. In some cases, we return a pseudo
3268 copied *from* TARGET.
3270 If the mode is BLKmode then we may return TARGET itself.
3271 It turns out that in BLKmode it doesn't cause a problem,
3272 because C has no operators that could combine two different
3273 assignments into the same BLKmode object with different values
3274 with no sequence point. Will other languages need this to
3275 be more thorough?
3277 If WANT_VALUE is 0, we return NULL, to make sure
3278 to catch quickly any cases where the caller uses the value
3279 and fails to set WANT_VALUE. */
3281 rtx
3282 store_expr (exp, target, want_value)
3283 register tree exp;
3284 register rtx target;
3285 int want_value;
3287 register rtx temp;
3288 int dont_return_target = 0;
3290 if (TREE_CODE (exp) == COMPOUND_EXPR)
3292 /* Perform first part of compound expression, then assign from second
3293 part. */
3294 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3295 emit_queue ();
3296 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3298 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3300 /* For conditional expression, get safe form of the target. Then
3301 test the condition, doing the appropriate assignment on either
3302 side. This avoids the creation of unnecessary temporaries.
3303 For non-BLKmode, it is more efficient not to do this. */
3305 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3307 emit_queue ();
3308 target = protect_from_queue (target, 1);
3310 do_pending_stack_adjust ();
3311 NO_DEFER_POP;
3312 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3313 start_cleanup_deferral ();
3314 store_expr (TREE_OPERAND (exp, 1), target, 0);
3315 end_cleanup_deferral ();
3316 emit_queue ();
3317 emit_jump_insn (gen_jump (lab2));
3318 emit_barrier ();
3319 emit_label (lab1);
3320 start_cleanup_deferral ();
3321 store_expr (TREE_OPERAND (exp, 2), target, 0);
3322 end_cleanup_deferral ();
3323 emit_queue ();
3324 emit_label (lab2);
3325 OK_DEFER_POP;
3327 return want_value ? target : NULL_RTX;
3329 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3330 && GET_MODE (target) != BLKmode)
3331 /* If target is in memory and caller wants value in a register instead,
3332 arrange that. Pass TARGET as target for expand_expr so that,
3333 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3334 We know expand_expr will not use the target in that case.
3335 Don't do this if TARGET is volatile because we are supposed
3336 to write it and then read it. */
3338 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3339 GET_MODE (target), 0);
3340 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3341 temp = copy_to_reg (temp);
3342 dont_return_target = 1;
3344 else if (queued_subexp_p (target))
3345 /* If target contains a postincrement, let's not risk
3346 using it as the place to generate the rhs. */
3348 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3350 /* Expand EXP into a new pseudo. */
3351 temp = gen_reg_rtx (GET_MODE (target));
3352 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3354 else
3355 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3357 /* If target is volatile, ANSI requires accessing the value
3358 *from* the target, if it is accessed. So make that happen.
3359 In no case return the target itself. */
3360 if (! MEM_VOLATILE_P (target) && want_value)
3361 dont_return_target = 1;
3363 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3364 /* If this is a scalar in a register that is stored in a wider mode
3365 than the declared mode, compute the result into its declared mode
3366 and then convert to the wider mode. Our value is the computed
3367 expression. */
3369 /* If we don't want a value, we can do the conversion inside EXP,
3370 which will often result in some optimizations. Do the conversion
3371 in two steps: first change the signedness, if needed, then
3372 the extend. But don't do this if the type of EXP is a subtype
3373 of something else since then the conversion might involve
3374 more than just converting modes. */
3375 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3376 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3378 if (TREE_UNSIGNED (TREE_TYPE (exp))
3379 != SUBREG_PROMOTED_UNSIGNED_P (target))
3380 exp
3381 = convert
3382 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3383 TREE_TYPE (exp)),
3384 exp);
3386 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3387 SUBREG_PROMOTED_UNSIGNED_P (target)),
3388 exp);
3391 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3393 /* If TEMP is a volatile MEM and we want a result value, make
3394 the access now so it gets done only once. Likewise if
3395 it contains TARGET. */
3396 if (GET_CODE (temp) == MEM && want_value
3397 && (MEM_VOLATILE_P (temp)
3398 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3399 temp = copy_to_reg (temp);
3401 /* If TEMP is a VOIDmode constant, use convert_modes to make
3402 sure that we properly convert it. */
3403 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3404 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3405 TYPE_MODE (TREE_TYPE (exp)), temp,
3406 SUBREG_PROMOTED_UNSIGNED_P (target));
3408 convert_move (SUBREG_REG (target), temp,
3409 SUBREG_PROMOTED_UNSIGNED_P (target));
3410 return want_value ? temp : NULL_RTX;
3412 else
3414 temp = expand_expr (exp, target, GET_MODE (target), 0);
3415 /* Return TARGET if it's a specified hardware register.
3416 If TARGET is a volatile mem ref, either return TARGET
3417 or return a reg copied *from* TARGET; ANSI requires this.
3419 Otherwise, if TEMP is not TARGET, return TEMP
3420 if it is constant (for efficiency),
3421 or if we really want the correct value. */
3422 if (!(target && GET_CODE (target) == REG
3423 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3424 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3425 && ! rtx_equal_p (temp, target)
3426 && (CONSTANT_P (temp) || want_value))
3427 dont_return_target = 1;
3430 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3431 the same as that of TARGET, adjust the constant. This is needed, for
3432 example, in case it is a CONST_DOUBLE and we want only a word-sized
3433 value. */
3434 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3435 && TREE_CODE (exp) != ERROR_MARK
3436 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3437 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3438 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3440 if (flag_check_memory_usage
3441 && GET_CODE (target) == MEM
3442 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3444 if (GET_CODE (temp) == MEM)
3445 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3446 XEXP (target, 0), ptr_mode,
3447 XEXP (temp, 0), ptr_mode,
3448 expr_size (exp), TYPE_MODE (sizetype));
3449 else
3450 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3451 XEXP (target, 0), ptr_mode,
3452 expr_size (exp), TYPE_MODE (sizetype),
3453 GEN_INT (MEMORY_USE_WO),
3454 TYPE_MODE (integer_type_node));
3457 /* If value was not generated in the target, store it there.
3458 Convert the value to TARGET's type first if necessary. */
3460 if (! rtx_equal_p (temp, target) && TREE_CODE (exp) != ERROR_MARK)
3462 target = protect_from_queue (target, 1);
3463 if (GET_MODE (temp) != GET_MODE (target)
3464 && GET_MODE (temp) != VOIDmode)
3466 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3467 if (dont_return_target)
3469 /* In this case, we will return TEMP,
3470 so make sure it has the proper mode.
3471 But don't forget to store the value into TARGET. */
3472 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3473 emit_move_insn (target, temp);
3475 else
3476 convert_move (target, temp, unsignedp);
3479 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3481 /* Handle copying a string constant into an array.
3482 The string constant may be shorter than the array.
3483 So copy just the string's actual length, and clear the rest. */
3484 rtx size;
3485 rtx addr;
3487 /* Get the size of the data type of the string,
3488 which is actually the size of the target. */
3489 size = expr_size (exp);
3490 if (GET_CODE (size) == CONST_INT
3491 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3492 emit_block_move (target, temp, size,
3493 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3494 else
3496 /* Compute the size of the data to copy from the string. */
3497 tree copy_size
3498 = size_binop (MIN_EXPR,
3499 make_tree (sizetype, size),
3500 convert (sizetype,
3501 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3502 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3503 VOIDmode, 0);
3504 rtx label = 0;
3506 /* Copy that much. */
3507 emit_block_move (target, temp, copy_size_rtx,
3508 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3510 /* Figure out how much is left in TARGET that we have to clear.
3511 Do all calculations in ptr_mode. */
3513 addr = XEXP (target, 0);
3514 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3516 if (GET_CODE (copy_size_rtx) == CONST_INT)
3518 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3519 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3521 else
3523 addr = force_reg (ptr_mode, addr);
3524 addr = expand_binop (ptr_mode, add_optab, addr,
3525 copy_size_rtx, NULL_RTX, 0,
3526 OPTAB_LIB_WIDEN);
3528 size = expand_binop (ptr_mode, sub_optab, size,
3529 copy_size_rtx, NULL_RTX, 0,
3530 OPTAB_LIB_WIDEN);
3532 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3533 GET_MODE (size), 0, 0);
3534 label = gen_label_rtx ();
3535 emit_jump_insn (gen_blt (label));
3538 if (size != const0_rtx)
3540 /* Be sure we can write on ADDR. */
3541 if (flag_check_memory_usage)
3542 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3543 addr, ptr_mode,
3544 size, TYPE_MODE (sizetype),
3545 GEN_INT (MEMORY_USE_WO),
3546 TYPE_MODE (integer_type_node));
3547 #ifdef TARGET_MEM_FUNCTIONS
3548 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3549 addr, ptr_mode,
3550 const0_rtx, TYPE_MODE (integer_type_node),
3551 convert_to_mode (TYPE_MODE (sizetype),
3552 size,
3553 TREE_UNSIGNED (sizetype)),
3554 TYPE_MODE (sizetype));
3555 #else
3556 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3557 addr, ptr_mode,
3558 convert_to_mode (TYPE_MODE (integer_type_node),
3559 size,
3560 TREE_UNSIGNED (integer_type_node)),
3561 TYPE_MODE (integer_type_node));
3562 #endif
3565 if (label)
3566 emit_label (label);
3569 /* Handle calls that return values in multiple non-contiguous locations.
3570 The Irix 6 ABI has examples of this. */
3571 else if (GET_CODE (target) == PARALLEL)
3572 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3573 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3574 else if (GET_MODE (temp) == BLKmode)
3575 emit_block_move (target, temp, expr_size (exp),
3576 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3577 else
3578 emit_move_insn (target, temp);
3581 /* If we don't want a value, return NULL_RTX. */
3582 if (! want_value)
3583 return NULL_RTX;
3585 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3586 ??? The latter test doesn't seem to make sense. */
3587 else if (dont_return_target && GET_CODE (temp) != MEM)
3588 return temp;
3590 /* Return TARGET itself if it is a hard register. */
3591 else if (want_value && GET_MODE (target) != BLKmode
3592 && ! (GET_CODE (target) == REG
3593 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3594 return copy_to_reg (target);
3596 else
3597 return target;
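/* Illustrative sketch (not part of expr.c): the STRING_CST case of
   store_expr in plain C -- copy the string constant's actual bytes into
   the array, then clear whatever is left.  The memcpy plays the role of
   emit_block_move and the memset that of the clearing library call; all
   names here are hypothetical.  */
#include <string.h>

static void
store_string_sketch (target, str, strbytes, targetbytes)
     char *target;
     const char *str;
     int strbytes, targetbytes;
{
  int copy = strbytes < targetbytes ? strbytes : targetbytes;

  memcpy (target, str, copy);
  if (targetbytes > copy)
    memset (target + copy, 0, targetbytes - copy);
}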
3600 /* Return 1 if EXP just contains zeros. */
3602 static int
3603 is_zeros_p (exp)
3604 tree exp;
3606 tree elt;
3608 switch (TREE_CODE (exp))
3610 case CONVERT_EXPR:
3611 case NOP_EXPR:
3612 case NON_LVALUE_EXPR:
3613 return is_zeros_p (TREE_OPERAND (exp, 0));
3615 case INTEGER_CST:
3616 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3618 case COMPLEX_CST:
3619 return
3620 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3622 case REAL_CST:
3623 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
3625 case CONSTRUCTOR:
3626 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3627 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3628 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3629 if (! is_zeros_p (TREE_VALUE (elt)))
3630 return 0;
3632 return 1;
3634 default:
3635 return 0;
3639 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
3641 static int
3642 mostly_zeros_p (exp)
3643 tree exp;
3645 if (TREE_CODE (exp) == CONSTRUCTOR)
3647 int elts = 0, zeros = 0;
3648 tree elt = CONSTRUCTOR_ELTS (exp);
3649 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3651 /* If there are no ranges of true bits, it is all zero. */
3652 return elt == NULL_TREE;
3654 for (; elt; elt = TREE_CHAIN (elt))
3656 /* We do not handle the case where the index is a RANGE_EXPR,
3657 so the statistic will be somewhat inaccurate.
3658 We do make a more accurate count in store_constructor itself,
3659 so since this function is only used for nested array elements,
3660 this should be close enough. */
3661 if (mostly_zeros_p (TREE_VALUE (elt)))
3662 zeros++;
3663 elts++;
3666 return 4 * zeros >= 3 * elts;
3669 return is_zeros_p (exp);
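/* Illustrative sketch (not part of expr.c): the threshold used by
   mostly_zeros_p, kept in integer arithmetic so nothing rounds.  Three
   zeros out of four elements passes (12 >= 12); two out of three does
   not (8 >= 9 fails).  */
static int
mostly_zeros_threshold_sketch (zeros, elts)
     int zeros, elts;
{
  return 4 * zeros >= 3 * elts;
}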
3672 /* Helper function for store_constructor.
3673 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3674 TYPE is the type of the CONSTRUCTOR, not the element type.
3675 CLEARED is as for store_constructor.
3677 This provides a recursive shortcut back to store_constructor when it isn't
3678 necessary to go through store_field. This is so that we can pass through
3679 the cleared field to let store_constructor know that we may not have to
3680 clear a substructure if the outer structure has already been cleared. */
3682 static void
3683 store_constructor_field (target, bitsize, bitpos,
3684 mode, exp, type, cleared)
3685 rtx target;
3686 int bitsize, bitpos;
3687 enum machine_mode mode;
3688 tree exp, type;
3689 int cleared;
3691 if (TREE_CODE (exp) == CONSTRUCTOR
3692 && bitpos % BITS_PER_UNIT == 0
3693 /* If we have a non-zero bitpos for a register target, then we just
3694 let store_field do the bitfield handling. This is unlikely to
3695 generate unnecessary clear instructions anyway. */
3696 && (bitpos == 0 || GET_CODE (target) == MEM))
3698 if (bitpos != 0)
3699 target = change_address (target, VOIDmode,
3700 plus_constant (XEXP (target, 0),
3701 bitpos / BITS_PER_UNIT));
3702 store_constructor (exp, target, cleared);
3704 else
3705 store_field (target, bitsize, bitpos, mode, exp,
3706 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3707 int_size_in_bytes (type));
3710 /* Store the value of constructor EXP into the rtx TARGET.
3711 TARGET is either a REG or a MEM.
3712 CLEARED is true if TARGET is known to have been zeroed. */
3714 static void
3715 store_constructor (exp, target, cleared)
3716 tree exp;
3717 rtx target;
3718 int cleared;
3720 tree type = TREE_TYPE (exp);
3722 /* We know our target cannot conflict, since safe_from_p has been called. */
3723 #if 0
3724 /* Don't try copying piece by piece into a hard register
3725 since that is vulnerable to being clobbered by EXP.
3726 Instead, construct in a pseudo register and then copy it all. */
3727 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3729 rtx temp = gen_reg_rtx (GET_MODE (target));
3730 store_constructor (exp, temp, 0);
3731 emit_move_insn (target, temp);
3732 return;
3734 #endif
3736 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3737 || TREE_CODE (type) == QUAL_UNION_TYPE)
3739 register tree elt;
3741 /* Inform later passes that the whole union value is dead. */
3742 if (TREE_CODE (type) == UNION_TYPE
3743 || TREE_CODE (type) == QUAL_UNION_TYPE)
3744 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3746 /* If we are building a static constructor into a register,
3747 set the initial value as zero so we can fold the value into
3748 a constant. But if more than one register is involved,
3749 this probably loses. */
3750 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3751 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3753 if (! cleared)
3754 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
3756 cleared = 1;
3759 /* If the constructor has fewer fields than the structure
3760 or if we are initializing the structure to mostly zeros,
3761 clear the whole structure first. */
3762 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3763 != list_length (TYPE_FIELDS (type)))
3764 || mostly_zeros_p (exp))
3766 if (! cleared)
3767 clear_storage (target, expr_size (exp),
3768 TYPE_ALIGN (type) / BITS_PER_UNIT);
3770 cleared = 1;
3772 else
3773 /* Inform later passes that the old value is dead. */
3774 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3776 /* Store each element of the constructor into
3777 the corresponding field of TARGET. */
3779 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3781 register tree field = TREE_PURPOSE (elt);
3782 register enum machine_mode mode;
3783 int bitsize;
3784 int bitpos = 0;
3785 int unsignedp;
3786 tree pos, constant = 0, offset = 0;
3787 rtx to_rtx = target;
3789 /* Just ignore missing fields.
3790 We cleared the whole structure, above,
3791 if any fields are missing. */
3792 if (field == 0)
3793 continue;
3795 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3796 continue;
3798 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3799 unsignedp = TREE_UNSIGNED (field);
3800 mode = DECL_MODE (field);
3801 if (DECL_BIT_FIELD (field))
3802 mode = VOIDmode;
3804 pos = DECL_FIELD_BITPOS (field);
3805 if (TREE_CODE (pos) == INTEGER_CST)
3806 constant = pos;
3807 else if (TREE_CODE (pos) == PLUS_EXPR
3808 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3809 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3810 else
3811 offset = pos;
3813 if (constant)
3814 bitpos = TREE_INT_CST_LOW (constant);
3816 if (offset)
3818 rtx offset_rtx;
3820 if (contains_placeholder_p (offset))
3821 offset = build (WITH_RECORD_EXPR, sizetype,
3822 offset, make_tree (TREE_TYPE (exp), target));
3824 offset = size_binop (FLOOR_DIV_EXPR, offset,
3825 size_int (BITS_PER_UNIT));
3827 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3828 if (GET_CODE (to_rtx) != MEM)
3829 abort ();
3831 if (GET_MODE (offset_rtx) != ptr_mode)
3833 #ifdef POINTERS_EXTEND_UNSIGNED
3834 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3835 #else
3836 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3837 #endif
3840 to_rtx
3841 = change_address (to_rtx, VOIDmode,
3842 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3843 force_reg (ptr_mode, offset_rtx)));
3845 if (TREE_READONLY (field))
3847 if (GET_CODE (to_rtx) == MEM)
3848 to_rtx = copy_rtx (to_rtx);
3850 RTX_UNCHANGING_P (to_rtx) = 1;
3853 store_constructor_field (to_rtx, bitsize, bitpos,
3854 mode, TREE_VALUE (elt), type, cleared);
3857 else if (TREE_CODE (type) == ARRAY_TYPE)
3859 register tree elt;
3860 register int i;
3861 int need_to_clear;
3862 tree domain = TYPE_DOMAIN (type);
3863 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3864 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3865 tree elttype = TREE_TYPE (type);
3867 /* If the constructor has fewer elements than the array,
3868 clear the whole array first. Similarly if this is
3869 a static constructor of a non-BLKmode object. */
3870 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3871 need_to_clear = 1;
3872 else
3874 HOST_WIDE_INT count = 0, zero_count = 0;
3875 need_to_clear = 0;
3876 /* This loop is a more accurate version of the loop in
3877 mostly_zeros_p (it handles RANGE_EXPR in an index).
3878 It is also needed to check for missing elements. */
3879 for (elt = CONSTRUCTOR_ELTS (exp);
3880 elt != NULL_TREE;
3881 elt = TREE_CHAIN (elt))
3883 tree index = TREE_PURPOSE (elt);
3884 HOST_WIDE_INT this_node_count;
3885 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3887 tree lo_index = TREE_OPERAND (index, 0);
3888 tree hi_index = TREE_OPERAND (index, 1);
3889 if (TREE_CODE (lo_index) != INTEGER_CST
3890 || TREE_CODE (hi_index) != INTEGER_CST)
3892 need_to_clear = 1;
3893 break;
3895 this_node_count = TREE_INT_CST_LOW (hi_index)
3896 - TREE_INT_CST_LOW (lo_index) + 1;
3898 else
3899 this_node_count = 1;
3900 count += this_node_count;
3901 if (mostly_zeros_p (TREE_VALUE (elt)))
3902 zero_count += this_node_count;
3904 /* Clear the entire array first if there are any missing elements,
3905 or if the incidence of zero elements is >= 75%. */
3906 if (count < maxelt - minelt + 1
3907 || 4 * zero_count >= 3 * count)
3908 need_to_clear = 1;
3910 if (need_to_clear)
3912 if (! cleared)
3913 clear_storage (target, expr_size (exp),
3914 TYPE_ALIGN (type) / BITS_PER_UNIT);
3915 cleared = 1;
3917 else
3918 /* Inform later passes that the old value is dead. */
3919 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3921 /* Store each element of the constructor into
3922 the corresponding element of TARGET, determined
3923 by counting the elements. */
3924 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3925 elt;
3926 elt = TREE_CHAIN (elt), i++)
3928 register enum machine_mode mode;
3929 int bitsize;
3930 int bitpos;
3931 int unsignedp;
3932 tree value = TREE_VALUE (elt);
3933 tree index = TREE_PURPOSE (elt);
3934 rtx xtarget = target;
3936 if (cleared && is_zeros_p (value))
3937 continue;
3939 mode = TYPE_MODE (elttype);
3940 bitsize = GET_MODE_BITSIZE (mode);
3941 unsignedp = TREE_UNSIGNED (elttype);
3943 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3945 tree lo_index = TREE_OPERAND (index, 0);
3946 tree hi_index = TREE_OPERAND (index, 1);
3947 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3948 struct nesting *loop;
3949 HOST_WIDE_INT lo, hi, count;
3950 tree position;
3952 /* If the range is constant and "small" (at most two elements, or at most 40 bytes of elements in all), unroll the loop; a non-memory TARGET is always unrolled. */
3953 if (TREE_CODE (lo_index) == INTEGER_CST
3954 && TREE_CODE (hi_index) == INTEGER_CST
3955 && (lo = TREE_INT_CST_LOW (lo_index),
3956 hi = TREE_INT_CST_LOW (hi_index),
3957 count = hi - lo + 1,
3958 (GET_CODE (target) != MEM
3959 || count <= 2
3960 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3961 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3962 <= 40 * 8))))
3964 lo -= minelt; hi -= minelt;
3965 for (; lo <= hi; lo++)
3967 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3968 store_constructor_field (target, bitsize, bitpos,
3969 mode, value, type, cleared);
3972 else
3974 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3975 loop_top = gen_label_rtx ();
3976 loop_end = gen_label_rtx ();
3978 unsignedp = TREE_UNSIGNED (domain);
3980 index = build_decl (VAR_DECL, NULL_TREE, domain);
3982 DECL_RTL (index) = index_r
3983 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3984 &unsignedp, 0));
3986 if (TREE_CODE (value) == SAVE_EXPR
3987 && SAVE_EXPR_RTL (value) == 0)
3989 /* Make sure value gets expanded once before the
3990 loop. */
3991 expand_expr (value, const0_rtx, VOIDmode, 0);
3992 emit_queue ();
3994 store_expr (lo_index, index_r, 0);
3995 loop = expand_start_loop (0);
3997 /* Assign value to element index. */
3998 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3999 size_int (BITS_PER_UNIT));
4000 position = size_binop (MULT_EXPR,
4001 size_binop (MINUS_EXPR, index,
4002 TYPE_MIN_VALUE (domain)),
4003 position);
4004 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4005 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4006 xtarget = change_address (target, mode, addr);
4007 if (TREE_CODE (value) == CONSTRUCTOR)
4008 store_constructor (value, xtarget, cleared);
4009 else
4010 store_expr (value, xtarget, 0);
4012 expand_exit_loop_if_false (loop,
4013 build (LT_EXPR, integer_type_node,
4014 index, hi_index));
4016 expand_increment (build (PREINCREMENT_EXPR,
4017 TREE_TYPE (index),
4018 index, integer_one_node), 0, 0);
4019 expand_end_loop ();
4020 emit_label (loop_end);
4022 /* Needed by stupid register allocation, to extend the
4023 lifetime of pseudo-regs used by TARGET past the end
4024 of the loop. */
4025 emit_insn (gen_rtx_USE (GET_MODE (target), target));
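/* Rough sketch (an assumption about the emitted code, not a quote)
   of the loop built above for a non-constant range [LO .. HI]:

       index = lo;
     top:
       *(base + (index - min_index) * elt_size) = value;
       if (! (index < hi)) goto end;
       ++index;
       goto top;
     end:

   where BASE is TARGET's address and ELT_SIZE the element size in
   bytes, as computed by the size_binop calls above.  */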
4028 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
4029 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
4031 rtx pos_rtx, addr;
4032 tree position;
4034 if (index == 0)
4035 index = size_int (i);
4037 if (minelt)
4038 index = size_binop (MINUS_EXPR, index,
4039 TYPE_MIN_VALUE (domain));
4040 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4041 size_int (BITS_PER_UNIT));
4042 position = size_binop (MULT_EXPR, index, position);
4043 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4044 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4045 xtarget = change_address (target, mode, addr);
4046 store_expr (value, xtarget, 0);
4048 else
4050 if (index != 0)
4051 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4052 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4053 else
4054 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4055 store_constructor_field (target, bitsize, bitpos,
4056 mode, value, type, cleared);
4060 /* Set constructor assignments. */
4061 else if (TREE_CODE (type) == SET_TYPE)
4063 tree elt = CONSTRUCTOR_ELTS (exp);
4064 int nbytes = int_size_in_bytes (type), nbits;
4065 tree domain = TYPE_DOMAIN (type);
4066 tree domain_min, domain_max, bitlength;
4068 /* The default implementation strategy is to extract the constant
4069 parts of the constructor, use that to initialize the target,
4070 and then "or" in whatever non-constant ranges we need in addition.
4072 If a large set is all zero or all ones, it is
4073 probably better to set it using memset (if available) or bzero.
4074 Also, if a large set has just a single range, it may be
4075 better to first clear the whole set (using
4076 bzero/memset) and then set the bits we want. */
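/* Example (illustrative only): a 64-bit set with BITS_PER_WORD == 32
   whose constant members are 0, 1 and 40 is built as two words,
   0x00000003 and 0x00000100 (bit numbering permitting), each stored
   with one emit_move_insn by the word loop below; a non-constant
   range such as [i .. j] would then be or'ed in via __setbits.  */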
4078 /* Check for all zeros. */
4079 if (elt == NULL_TREE)
4081 if (!cleared)
4082 clear_storage (target, expr_size (exp),
4083 TYPE_ALIGN (type) / BITS_PER_UNIT);
4084 return;
4087 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4088 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4089 bitlength = size_binop (PLUS_EXPR,
4090 size_binop (MINUS_EXPR, domain_max, domain_min),
4091 size_one_node);
4093 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4094 abort ();
4095 nbits = TREE_INT_CST_LOW (bitlength);
4097 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4098 are "complicated" (more than one range), initialize (the
4099 constant parts) by copying from a constant. */
4100 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4101 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4103 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4104 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4105 char *bit_buffer = (char *) alloca (nbits);
4106 HOST_WIDE_INT word = 0;
4107 int bit_pos = 0;
4108 int ibit = 0;
4109 int offset = 0; /* In bytes from beginning of set. */
4110 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4111 for (;;)
4113 if (bit_buffer[ibit])
4115 if (BYTES_BIG_ENDIAN)
4116 word |= (1 << (set_word_size - 1 - bit_pos));
4117 else
4118 word |= 1 << bit_pos;
4120 bit_pos++; ibit++;
4121 if (bit_pos >= set_word_size || ibit == nbits)
4123 if (word != 0 || ! cleared)
4125 rtx datum = GEN_INT (word);
4126 rtx to_rtx;
4127 /* The assumption here is that it is safe to use
4128 XEXP if the set is multi-word, but not if
4129 it's single-word. */
4130 if (GET_CODE (target) == MEM)
4132 to_rtx = plus_constant (XEXP (target, 0), offset);
4133 to_rtx = change_address (target, mode, to_rtx);
4135 else if (offset == 0)
4136 to_rtx = target;
4137 else
4138 abort ();
4139 emit_move_insn (to_rtx, datum);
4141 if (ibit == nbits)
4142 break;
4143 word = 0;
4144 bit_pos = 0;
4145 offset += set_word_size / BITS_PER_UNIT;
4149 else if (!cleared)
4151 /* Don't bother clearing storage if the set is all ones. */
4152 if (TREE_CHAIN (elt) != NULL_TREE
4153 || (TREE_PURPOSE (elt) == NULL_TREE
4154 ? nbits != 1
4155 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4156 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4157 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4158 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4159 != nbits))))
4160 clear_storage (target, expr_size (exp),
4161 TYPE_ALIGN (type) / BITS_PER_UNIT);
4164 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4166 /* start of range of element or NULL */
4167 tree startbit = TREE_PURPOSE (elt);
4168 /* end of range of element, or element value */
4169 tree endbit = TREE_VALUE (elt);
4170 #ifdef TARGET_MEM_FUNCTIONS
4171 HOST_WIDE_INT startb, endb;
4172 #endif
4173 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4175 bitlength_rtx = expand_expr (bitlength,
4176 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4178 /* handle non-range tuple element like [ expr ] */
4179 if (startbit == NULL_TREE)
4181 startbit = save_expr (endbit);
4182 endbit = startbit;
4184 startbit = convert (sizetype, startbit);
4185 endbit = convert (sizetype, endbit);
4186 if (! integer_zerop (domain_min))
4188 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4189 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4191 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4192 EXPAND_CONST_ADDRESS);
4193 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4194 EXPAND_CONST_ADDRESS);
4196 if (REG_P (target))
4198 targetx = assign_stack_temp (GET_MODE (target),
4199 GET_MODE_SIZE (GET_MODE (target)),
4200 0);
4201 emit_move_insn (targetx, target);
4203 else if (GET_CODE (target) == MEM)
4204 targetx = target;
4205 else
4206 abort ();
4208 #ifdef TARGET_MEM_FUNCTIONS
4209 /* Optimization: If startbit and endbit are
4210 constants divisible by BITS_PER_UNIT,
4211 call memset instead. */
4212 if (TREE_CODE (startbit) == INTEGER_CST
4213 && TREE_CODE (endbit) == INTEGER_CST
4214 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4215 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4217 emit_library_call (memset_libfunc, 0,
4218 VOIDmode, 3,
4219 plus_constant (XEXP (targetx, 0),
4220 startb / BITS_PER_UNIT),
4221 Pmode,
4222 constm1_rtx, TYPE_MODE (integer_type_node),
4223 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4224 TYPE_MODE (sizetype));
4226 else
4227 #endif
4229 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4230 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4231 bitlength_rtx, TYPE_MODE (sizetype),
4232 startbit_rtx, TYPE_MODE (sizetype),
4233 endbit_rtx, TYPE_MODE (sizetype));
4235 if (REG_P (target))
4236 emit_move_insn (target, targetx);
4240 else
4241 abort ();
4244 /* Store the value of EXP (an expression tree)
4245 into a subfield of TARGET which has mode MODE and occupies
4246 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4247 If MODE is VOIDmode, it means that we are storing into a bit-field.
4249 If VALUE_MODE is VOIDmode, return nothing in particular.
4250 UNSIGNEDP is not used in this case.
4252 Otherwise, return an rtx for the value stored. This rtx
4253 has mode VALUE_MODE if that is convenient to do.
4254 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4256 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4257 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
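/* Illustrative call (assumed values, not from the source): to store
   EXP into a 5-bit field that begins 3 bits into TARGET,

       store_field (target, 5, 3, VOIDmode, exp, VOIDmode, 0,
                    align, total_size);

   VOIDmode for MODE selects the bit-field path below, and VOIDmode
   for VALUE_MODE says the caller does not want the value back.  */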
4259 static rtx
4260 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4261 unsignedp, align, total_size)
4262 rtx target;
4263 int bitsize, bitpos;
4264 enum machine_mode mode;
4265 tree exp;
4266 enum machine_mode value_mode;
4267 int unsignedp;
4268 int align;
4269 int total_size;
4271 HOST_WIDE_INT width_mask = 0;
4273 if (TREE_CODE (exp) == ERROR_MARK)
4274 return const0_rtx;
4276 if (bitsize < HOST_BITS_PER_WIDE_INT)
4277 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
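/* E.g. a BITSIZE of 5 yields a WIDTH_MASK of 0x1f; it is used near
   the end of this function to rebuild the stored value by masking
   or shifting, instead of refetching the bit-field from memory.  */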
4279 /* If we are storing into an unaligned field of an aligned union that is
4280 in a register, we may have the mode of TARGET being an integer mode but
4281 MODE == BLKmode. In that case, get an aligned object whose size and
4282 alignment are the same as TARGET and store TARGET into it (we can avoid
4283 the store if the field being stored is the entire width of TARGET). Then
4284 call ourselves recursively to store the field into a BLKmode version of
4285 that object. Finally, load from the object into TARGET. This is not
4286 very efficient in general, but should only be slightly more expensive
4287 than the otherwise-required unaligned accesses. Perhaps this can be
4288 cleaned up later. */
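/* Sketch of the round trip just described (an assumption; TARGET is,
   say, a DImode pseudo holding a union):

       object = assign_stack_temp (DImode, 8, 0);    -- memory twin
       emit_move_insn (object, target);              -- spill
       store_field (blk_object, ...);                -- store via memory
       emit_move_insn (target, object);              -- copy back

   so the unaligned store happens in memory, never in the register.  */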
4290 if (mode == BLKmode
4291 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4293 rtx object = assign_stack_temp (GET_MODE (target),
4294 GET_MODE_SIZE (GET_MODE (target)), 0);
4295 rtx blk_object = copy_rtx (object);
4297 MEM_IN_STRUCT_P (object) = 1;
4298 MEM_IN_STRUCT_P (blk_object) = 1;
4299 PUT_MODE (blk_object, BLKmode);
4301 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4302 emit_move_insn (object, target);
4304 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4305 align, total_size);
4307 /* Even though we aren't returning target, we need to
4308 give it the updated value. */
4309 emit_move_insn (target, object);
4311 return blk_object;
4314 /* If the structure is in a register or if the component
4315 is a bit field, we cannot use addressing to access it.
4316 Use bit-field techniques or SUBREG to store in it. */
4318 if (mode == VOIDmode
4319 || (mode != BLKmode && ! direct_store[(int) mode])
4320 || GET_CODE (target) == REG
4321 || GET_CODE (target) == SUBREG
4322 /* If the field isn't aligned enough to store as an ordinary memref,
4323 store it as a bit field. */
4324 || (SLOW_UNALIGNED_ACCESS
4325 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4326 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4328 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4330 /* If BITSIZE is narrower than the size of the type of EXP
4331 we will be narrowing TEMP. Normally, what's wanted are the
4332 low-order bits. However, if EXP's type is a record and this is
4333 a big-endian machine, we want the upper BITSIZE bits. */
4334 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4335 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4336 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4337 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4338 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4339 - bitsize),
4340 temp, 1);
4342 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4343 MODE. */
4344 if (mode != VOIDmode && mode != BLKmode
4345 && mode != TYPE_MODE (TREE_TYPE (exp)))
4346 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4348 /* If the modes of TARGET and TEMP are both BLKmode, both
4349 must be in memory and BITPOS must be aligned on a byte
4350 boundary. If so, we simply do a block copy. */
4351 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4353 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4354 || bitpos % BITS_PER_UNIT != 0)
4355 abort ();
4357 target = change_address (target, VOIDmode,
4358 plus_constant (XEXP (target, 0),
4359 bitpos / BITS_PER_UNIT));
4361 emit_block_move (target, temp,
4362 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4363 / BITS_PER_UNIT),
4364 1);
4366 return value_mode == VOIDmode ? const0_rtx : target;
4369 /* Store the value in the bitfield. */
4370 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4371 if (value_mode != VOIDmode)
4373 /* The caller wants an rtx for the value. */
4374 /* If possible, avoid refetching from the bitfield itself. */
4375 if (width_mask != 0
4376 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4378 tree count;
4379 enum machine_mode tmode;
4381 if (unsignedp)
4382 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4383 tmode = GET_MODE (temp);
4384 if (tmode == VOIDmode)
4385 tmode = value_mode;
4386 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4387 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4388 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4390 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4391 NULL_RTX, value_mode, 0, align,
4392 total_size);
4394 return const0_rtx;
4396 else
4398 rtx addr = XEXP (target, 0);
4399 rtx to_rtx;
4401 /* If a value is wanted, it must be the lhs;
4402 so make the address stable for multiple use. */
4404 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4405 && ! CONSTANT_ADDRESS_P (addr)
4406 /* A frame-pointer reference is already stable. */
4407 && ! (GET_CODE (addr) == PLUS
4408 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4409 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4410 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4411 addr = copy_to_reg (addr);
4413 /* Now build a reference to just the desired component. */
4415 to_rtx = copy_rtx (change_address (target, mode,
4416 plus_constant (addr,
4417 (bitpos
4418 / BITS_PER_UNIT))));
4419 MEM_IN_STRUCT_P (to_rtx) = 1;
4421 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4425 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4426 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4427 ARRAY_REFs and find the ultimate containing object, which we return.
4429 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4430 bit position, and *PUNSIGNEDP to the signedness of the field.
4431 If the position of the field is variable, we store a tree
4432 giving the variable offset (in units) in *POFFSET.
4433 This offset is in addition to the bit position.
4434 If the position is not variable, we store 0 in *POFFSET.
4435 We set *PALIGNMENT to the alignment in bytes of the address that will be
4436 computed. This is the alignment of the thing we return if *POFFSET
4437 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4439 If any of the extraction expressions is volatile,
4440 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4442 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4443 is a mode that can be used to access the field. In that case, *PBITSIZE
4444 is redundant.
4446 If the field describes a variable-sized object, *PMODE is set to
4447 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4448 this case, but the address of the object can be found. */
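/* Illustrative example (assumed layout): for EXP == a.b[i].c, where
   C is a 3-bit field 10 bits into its record, this returns the decl
   A, sets *PBITSIZE to 3, *PBITPOS to 10 plus the constant part of
   the enclosing offsets, *POFFSET to a tree for the bytes
   contributed by the variable index I, and *PMODE to VOIDmode,
   since C is a bit-field.  */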
4450 tree
4451 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4452 punsignedp, pvolatilep, palignment)
4453 tree exp;
4454 int *pbitsize;
4455 int *pbitpos;
4456 tree *poffset;
4457 enum machine_mode *pmode;
4458 int *punsignedp;
4459 int *pvolatilep;
4460 int *palignment;
4462 tree orig_exp = exp;
4463 tree size_tree = 0;
4464 enum machine_mode mode = VOIDmode;
4465 tree offset = integer_zero_node;
4466 int alignment = BIGGEST_ALIGNMENT;
4468 if (TREE_CODE (exp) == COMPONENT_REF)
4470 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4471 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4472 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4473 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4475 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4477 size_tree = TREE_OPERAND (exp, 1);
4478 *punsignedp = TREE_UNSIGNED (exp);
4480 else
4482 mode = TYPE_MODE (TREE_TYPE (exp));
4483 *pbitsize = GET_MODE_BITSIZE (mode);
4484 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4487 if (size_tree)
4489 if (TREE_CODE (size_tree) != INTEGER_CST)
4490 mode = BLKmode, *pbitsize = -1;
4491 else
4492 *pbitsize = TREE_INT_CST_LOW (size_tree);
4495 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4496 and find the ultimate containing object. */
4498 *pbitpos = 0;
4500 while (1)
4502 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4504 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4505 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4506 : TREE_OPERAND (exp, 2));
4507 tree constant = integer_zero_node, var = pos;
4509 /* If this field hasn't been filled in yet, don't go
4510 past it. This should only happen when folding expressions
4511 made during type construction. */
4512 if (pos == 0)
4513 break;
4515 /* Assume here that the offset is a multiple of a unit.
4516 If not, there should be an explicitly added constant. */
4517 if (TREE_CODE (pos) == PLUS_EXPR
4518 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4519 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4520 else if (TREE_CODE (pos) == INTEGER_CST)
4521 constant = pos, var = integer_zero_node;
4523 *pbitpos += TREE_INT_CST_LOW (constant);
4524 offset = size_binop (PLUS_EXPR, offset,
4525 size_binop (EXACT_DIV_EXPR, var,
4526 size_int (BITS_PER_UNIT)));
4529 else if (TREE_CODE (exp) == ARRAY_REF)
4531 /* This code is based on the code in case ARRAY_REF in expand_expr
4532 below. We assume here that the size of an array element is
4533 always an integral multiple of BITS_PER_UNIT. */
4535 tree index = TREE_OPERAND (exp, 1);
4536 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4537 tree low_bound
4538 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4539 tree index_type = TREE_TYPE (index);
4540 tree xindex;
4542 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4544 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4545 index);
4546 index_type = TREE_TYPE (index);
4549 if (! integer_zerop (low_bound))
4550 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4552 if (TREE_CODE (index) == INTEGER_CST)
4554 index = convert (sbitsizetype, index);
4555 index_type = TREE_TYPE (index);
4558 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
4559 convert (sbitsizetype,
4560 TYPE_SIZE (TREE_TYPE (exp)))));
4562 if (TREE_CODE (xindex) == INTEGER_CST
4563 && TREE_INT_CST_HIGH (xindex) == 0)
4564 *pbitpos += TREE_INT_CST_LOW (xindex);
4565 else
4567 /* Either the bit offset calculated above is not constant, or
4568 it overflowed. In either case, redo the multiplication
4569 against the size in units. This is especially important
4570 in the non-constant case to avoid a division at runtime. */
4571 xindex = fold (build (MULT_EXPR, ssizetype, index,
4572 convert (ssizetype,
4573 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
4575 if (contains_placeholder_p (xindex))
4576 xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
4578 offset = size_binop (PLUS_EXPR, offset, xindex);
4581 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4582 && ! ((TREE_CODE (exp) == NOP_EXPR
4583 || TREE_CODE (exp) == CONVERT_EXPR)
4584 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4585 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4586 != UNION_TYPE))
4587 && (TYPE_MODE (TREE_TYPE (exp))
4588 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4589 break;
4591 /* If any reference in the chain is volatile, the effect is volatile. */
4592 if (TREE_THIS_VOLATILE (exp))
4593 *pvolatilep = 1;
4595 /* If the offset is non-constant already, then we can't assume any
4596 alignment more than the alignment here. */
4597 if (! integer_zerop (offset))
4598 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4600 exp = TREE_OPERAND (exp, 0);
4603 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4604 alignment = MIN (alignment, DECL_ALIGN (exp));
4605 else if (TREE_TYPE (exp) != 0)
4606 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4608 if (integer_zerop (offset))
4609 offset = 0;
4611 if (offset != 0 && contains_placeholder_p (offset))
4612 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4614 *pmode = mode;
4615 *poffset = offset;
4616 *palignment = alignment / BITS_PER_UNIT;
4617 return exp;
4620 /* Subroutine of expand_expr: compute memory_usage from modifier. */
4621 static enum memory_use_mode
4622 get_memory_usage_from_modifier (modifier)
4623 enum expand_modifier modifier;
4625 switch (modifier)
4627 case EXPAND_NORMAL:
4628 case EXPAND_SUM:
4629 return MEMORY_USE_RO;
4630 break;
4631 case EXPAND_MEMORY_USE_WO:
4632 return MEMORY_USE_WO;
4633 break;
4634 case EXPAND_MEMORY_USE_RW:
4635 return MEMORY_USE_RW;
4636 break;
4637 case EXPAND_MEMORY_USE_DONT:
4638 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
4639 MEMORY_USE_DONT, because they are modifiers to a call of
4640 expand_expr in the ADDR_EXPR case of expand_expr. */
4641 case EXPAND_CONST_ADDRESS:
4642 case EXPAND_INITIALIZER:
4643 return MEMORY_USE_DONT;
4644 case EXPAND_MEMORY_USE_BAD:
4645 default:
4646 abort ();
4650 /* Given an rtx VALUE that may contain additions and multiplications,
4651 return an equivalent value that just refers to a register or memory.
4652 This is done by generating instructions to perform the arithmetic
4653 and returning a pseudo-register containing the value.
4655 The returned value may be a REG, SUBREG, MEM or constant. */
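/* Example (illustrative): given VALUE == (plus:SI (reg:SI 100)
   (const_int 4)), force_operand emits an add insn and returns a
   pseudo holding the sum; a bare (reg ...) or (const_int ...) is
   returned unchanged, since it already satisfies the contract.  */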
4657 rtx
4658 force_operand (value, target)
4659 rtx value, target;
4661 register optab binoptab = 0;
4662 /* Use a temporary to force order of execution of calls to
4663 `force_operand'. */
4664 rtx tmp;
4665 register rtx op2;
4666 /* Use subtarget as the target for operand 0 of a binary operation. */
4667 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4669 /* Check for a PIC address load. */
4670 if (flag_pic
4671 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
4672 && XEXP (value, 0) == pic_offset_table_rtx
4673 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
4674 || GET_CODE (XEXP (value, 1)) == LABEL_REF
4675 || GET_CODE (XEXP (value, 1)) == CONST))
4677 if (!subtarget)
4678 subtarget = gen_reg_rtx (GET_MODE (value));
4679 emit_move_insn (subtarget, value);
4680 return subtarget;
4683 if (GET_CODE (value) == PLUS)
4684 binoptab = add_optab;
4685 else if (GET_CODE (value) == MINUS)
4686 binoptab = sub_optab;
4687 else if (GET_CODE (value) == MULT)
4689 op2 = XEXP (value, 1);
4690 if (!CONSTANT_P (op2)
4691 && !(GET_CODE (op2) == REG && op2 != subtarget))
4692 subtarget = 0;
4693 tmp = force_operand (XEXP (value, 0), subtarget);
4694 return expand_mult (GET_MODE (value), tmp,
4695 force_operand (op2, NULL_RTX),
4696 target, 0);
4699 if (binoptab)
4701 op2 = XEXP (value, 1);
4702 if (!CONSTANT_P (op2)
4703 && !(GET_CODE (op2) == REG && op2 != subtarget))
4704 subtarget = 0;
4705 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4707 binoptab = add_optab;
4708 op2 = negate_rtx (GET_MODE (value), op2);
4711 /* Check for an addition with OP2 a constant integer and our first
4712 operand a PLUS of a virtual register and something else. In that
4713 case, we want to emit the sum of the virtual register and the
4714 constant first and then add the other value. This allows virtual
4715 register instantiation to simply modify the constant rather than
4716 creating another one around this addition. */
4717 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4718 && GET_CODE (XEXP (value, 0)) == PLUS
4719 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4720 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4721 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4723 rtx temp = expand_binop (GET_MODE (value), binoptab,
4724 XEXP (XEXP (value, 0), 0), op2,
4725 subtarget, 0, OPTAB_LIB_WIDEN);
4726 return expand_binop (GET_MODE (value), binoptab, temp,
4727 force_operand (XEXP (XEXP (value, 0), 1), 0),
4728 target, 0, OPTAB_LIB_WIDEN);
4731 tmp = force_operand (XEXP (value, 0), subtarget);
4732 return expand_binop (GET_MODE (value), binoptab, tmp,
4733 force_operand (op2, NULL_RTX),
4734 target, 0, OPTAB_LIB_WIDEN);
4735 /* We give UNSIGNEDP = 0 to expand_binop
4736 because the only operations we are expanding here are signed ones. */
4738 return value;
4741 /* Subroutine of expand_expr:
4742 save the non-copied parts (LIST) of an expr (LHS), and return a list
4743 which can restore these values to their previous values,
4744 should something modify their storage. */
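/* Illustrative shape of the result (an assumption about typical
   callers): if LIST names fields B and C of LHS, the returned chain
   is

       TREE_LIST (purpose lhs.c, value RTL_EXPR holding saved C)
         -> TREE_LIST (purpose lhs.b, value RTL_EXPR holding saved B)

   and each saved value has already been copied to a temporary by the
   store_expr call below.  */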
4746 static tree
4747 save_noncopied_parts (lhs, list)
4748 tree lhs;
4749 tree list;
4751 tree tail;
4752 tree parts = 0;
4754 for (tail = list; tail; tail = TREE_CHAIN (tail))
4755 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4756 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4757 else
4759 tree part = TREE_VALUE (tail);
4760 tree part_type = TREE_TYPE (part);
4761 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
4762 rtx target = assign_temp (part_type, 0, 1, 1);
4763 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
4764 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
4765 parts = tree_cons (to_be_saved,
4766 build (RTL_EXPR, part_type, NULL_TREE,
4767 (tree) target),
4768 parts);
4769 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4771 return parts;
4774 /* Subroutine of expand_expr:
4775 record the non-copied parts (LIST) of an expr (LHS), and return a list
4776 which specifies the initial values of these parts. */
4778 static tree
4779 init_noncopied_parts (lhs, list)
4780 tree lhs;
4781 tree list;
4783 tree tail;
4784 tree parts = 0;
4786 for (tail = list; tail; tail = TREE_CHAIN (tail))
4787 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4788 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4789 else
4791 tree part = TREE_VALUE (tail);
4792 tree part_type = TREE_TYPE (part);
4793 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
4794 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4796 return parts;
4799 /* Subroutine of expand_expr: return nonzero iff there is no way that
4800 EXP can reference X, which is being modified. TOP_P is nonzero if this
4801 call is going to be used to determine whether we need a temporary
4802 for EXP, as opposed to a recursive call to this function.
4804 It is always safe for this routine to return zero since it merely
4805 searches for optimization opportunities. */
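/* For example (illustrative): if X is a pseudo register and EXP is
   B + 1, nothing in EXP can name X, so the caller may compute EXP
   directly into X.  But if X is a MEM and EXP contains *P, the two
   might overlap, so zero is returned and a temporary must be used.  */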
4807 static int
4808 safe_from_p (x, exp, top_p)
4809 rtx x;
4810 tree exp;
4811 int top_p;
4813 rtx exp_rtl = 0;
4814 int i, nops;
4815 static int save_expr_count;
4816 static int save_expr_size = 0;
4817 static tree *save_expr_rewritten;
4818 static tree save_expr_trees[256];
4820 if (x == 0
4821 /* If EXP has varying size, we MUST use a target since we currently
4822 have no way of allocating temporaries of variable size
4823 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4824 So we assume here that something at a higher level has prevented a
4825 clash. This is somewhat bogus, but the best we can do. Only
4826 do this when X is BLKmode and when we are at the top level. */
4827 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4828 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
4829 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
4830 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
4831 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
4832 != INTEGER_CST)
4833 && GET_MODE (x) == BLKmode))
4834 return 1;
4836 if (top_p && save_expr_size == 0)
4838 int rtn;
4840 save_expr_count = 0;
4841 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
4842 save_expr_rewritten = &save_expr_trees[0];
4844 rtn = safe_from_p (x, exp, 1);
4846 for (i = 0; i < save_expr_count; ++i)
4848 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
4849 abort ();
4850 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
4853 save_expr_size = 0;
4855 return rtn;
4858 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
4859 find the underlying pseudo. */
4860 if (GET_CODE (x) == SUBREG)
4862 x = SUBREG_REG (x);
4863 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4864 return 0;
4867 /* If X is a location in the outgoing argument area, it is always safe. */
4868 if (GET_CODE (x) == MEM
4869 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4870 || (GET_CODE (XEXP (x, 0)) == PLUS
4871 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
4872 return 1;
4874 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4876 case 'd':
4877 exp_rtl = DECL_RTL (exp);
4878 break;
4880 case 'c':
4881 return 1;
4883 case 'x':
4884 if (TREE_CODE (exp) == TREE_LIST)
4885 return ((TREE_VALUE (exp) == 0
4886 || safe_from_p (x, TREE_VALUE (exp), 0))
4887 && (TREE_CHAIN (exp) == 0
4888 || safe_from_p (x, TREE_CHAIN (exp), 0)));
4889 else if (TREE_CODE (exp) == ERROR_MARK)
4890 return 1; /* An already-visited SAVE_EXPR? */
4891 else
4892 return 0;
4894 case '1':
4895 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
4897 case '2':
4898 case '<':
4899 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
4900 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
4902 case 'e':
4903 case 'r':
4904 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4905 the expression. If it is set, we conflict iff we are that rtx or
4906 both are in memory. Otherwise, we check all operands of the
4907 expression recursively. */
4909 switch (TREE_CODE (exp))
4911 case ADDR_EXPR:
4912 return (staticp (TREE_OPERAND (exp, 0))
4913 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
4914 || TREE_STATIC (exp));
4916 case INDIRECT_REF:
4917 if (GET_CODE (x) == MEM)
4918 return 0;
4919 break;
4921 case CALL_EXPR:
4922 exp_rtl = CALL_EXPR_RTL (exp);
4923 if (exp_rtl == 0)
4925 /* Assume that the call will clobber all hard registers and
4926 all of memory. */
4927 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4928 || GET_CODE (x) == MEM)
4929 return 0;
4932 break;
4934 case RTL_EXPR:
4935 /* If a sequence exists, we would have to scan every instruction
4936 in the sequence to see if it was safe. This is probably not
4937 worthwhile. */
4938 if (RTL_EXPR_SEQUENCE (exp))
4939 return 0;
4941 exp_rtl = RTL_EXPR_RTL (exp);
4942 break;
4944 case WITH_CLEANUP_EXPR:
4945 exp_rtl = RTL_EXPR_RTL (exp);
4946 break;
4948 case CLEANUP_POINT_EXPR:
4949 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
4951 case SAVE_EXPR:
4952 exp_rtl = SAVE_EXPR_RTL (exp);
4953 if (exp_rtl)
4954 break;
4956 /* This SAVE_EXPR might appear many times in the top-level
4957 safe_from_p() expression, and if it has a complex
4958 subexpression, examining it multiple times could result
4959 in a combinatorial explosion. E.g. on an Alpha
4960 running at least 200MHz, a Fortran test case compiled with
4961 optimization took about 28 minutes to compile -- even though
4962 it was only a few lines long, and the complicated line causing
4963 so much time to be spent in the earlier version of safe_from_p()
4964 had only 293 or so unique nodes.
4966 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
4967 where it is so we can turn it back in the top-level safe_from_p()
4968 when we're done. */
4970 /* For now, don't bother re-sizing the array. */
4971 if (save_expr_count >= save_expr_size)
4972 return 0;
4973 save_expr_rewritten[save_expr_count++] = exp;
4974 TREE_SET_CODE (exp, ERROR_MARK);
4976 nops = tree_code_length[(int) SAVE_EXPR];
4977 for (i = 0; i < nops; i++)
4978 if (TREE_OPERAND (exp, i) != 0
4979 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
4980 return 0;
4981 return 1;
4983 case BIND_EXPR:
4984 /* The only operand we look at is operand 1. The rest aren't
4985 part of the expression. */
4986 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
4988 case METHOD_CALL_EXPR:
4989 /* This takes an rtx argument, but shouldn't appear here. */
4990 abort ();
4992 default:
4993 break;
4996 /* If we have an rtx, we do not need to scan our operands. */
4997 if (exp_rtl)
4998 break;
5000 nops = tree_code_length[(int) TREE_CODE (exp)];
5001 for (i = 0; i < nops; i++)
5002 if (TREE_OPERAND (exp, i) != 0
5003 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5004 return 0;
5007 /* If we have an rtl, find any enclosed object. Then see if we conflict
5008 with it. */
5009 if (exp_rtl)
5011 if (GET_CODE (exp_rtl) == SUBREG)
5013 exp_rtl = SUBREG_REG (exp_rtl);
5014 if (GET_CODE (exp_rtl) == REG
5015 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5016 return 0;
5019 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5020 are memory and EXP is not readonly. */
5021 return ! (rtx_equal_p (x, exp_rtl)
5022 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5023 && ! TREE_READONLY (exp)));
5026 /* If we reach here, it is safe. */
5027 return 1;
5030 /* Subroutine of expand_expr: return nonzero iff EXP is an
5031 expression whose type is statically determinable. */
5033 static int
5034 fixed_type_p (exp)
5035 tree exp;
5037 if (TREE_CODE (exp) == PARM_DECL
5038 || TREE_CODE (exp) == VAR_DECL
5039 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5040 || TREE_CODE (exp) == COMPONENT_REF
5041 || TREE_CODE (exp) == ARRAY_REF)
5042 return 1;
5043 return 0;
5046 /* Subroutine of expand_expr: return rtx if EXP is a
5047 variable or parameter; else return 0. */
5049 static rtx
5050 var_rtx (exp)
5051 tree exp;
5053 STRIP_NOPS (exp);
5054 switch (TREE_CODE (exp))
5056 case PARM_DECL:
5057 case VAR_DECL:
5058 return DECL_RTL (exp);
5059 default:
5060 return 0;
5064 #ifdef MAX_INTEGER_COMPUTATION_MODE
5065 void
5066 check_max_integer_computation_mode (exp)
5067 tree exp;
5069 enum tree_code code = TREE_CODE (exp);
5070 enum machine_mode mode;
5072 /* First check the type of the overall operation. We need only look at
5073 unary, binary and relational operations. */
5074 if (TREE_CODE_CLASS (code) == '1'
5075 || TREE_CODE_CLASS (code) == '2'
5076 || TREE_CODE_CLASS (code) == '<')
5078 mode = TYPE_MODE (TREE_TYPE (exp));
5079 if (GET_MODE_CLASS (mode) == MODE_INT
5080 && mode > MAX_INTEGER_COMPUTATION_MODE)
5081 fatal ("unsupported wide integer operation");
5084 /* Check operand of a unary op. */
5085 if (TREE_CODE_CLASS (code) == '1')
5087 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5088 if (GET_MODE_CLASS (mode) == MODE_INT
5089 && mode > MAX_INTEGER_COMPUTATION_MODE)
5090 fatal ("unsupported wide integer operation");
5093 /* Check operands of a binary/comparison op. */
5094 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5096 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5097 if (GET_MODE_CLASS (mode) == MODE_INT
5098 && mode > MAX_INTEGER_COMPUTATION_MODE)
5099 fatal ("unsupported wide integer operation");
5101 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5102 if (GET_MODE_CLASS (mode) == MODE_INT
5103 && mode > MAX_INTEGER_COMPUTATION_MODE)
5104 fatal ("unsupported wide integer operation");
5107 #endif
5110 /* expand_expr: generate code for computing expression EXP.
5111 An rtx for the computed value is returned. The value is never null.
5112 In the case of a void EXP, const0_rtx is returned.
5114 The value may be stored in TARGET if TARGET is nonzero.
5115 TARGET is just a suggestion; callers must assume that
5116 the rtx returned may not be the same as TARGET.
5118 If TARGET is CONST0_RTX, it means that the value will be ignored.
5120 If TMODE is not VOIDmode, it suggests generating the
5121 result in mode TMODE. But this is done only when convenient.
5122 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5123 TMODE is just a suggestion; callers must assume that
5124 the rtx returned may not have mode TMODE.
5126 Note that TARGET may have neither TMODE nor MODE. In that case, it
5127 probably will not be used.
5129 If MODIFIER is EXPAND_SUM then when EXP is an addition
5130 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5131 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5132 products as above, or REG or MEM, or constant.
5133 Ordinarily in such cases we would output mul or add instructions
5134 and then return a pseudo reg containing the sum.
5136 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5137 it also marks a label as absolutely required (it can't be dead).
5138 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5139 This is used for outputting expressions used in initializers.
5141 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5142 with a constant address even if that address is not normally legitimate.
5143 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
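/* Typical calls (illustrative): a statement whose value is ignored
   is expanded with

       expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);

   while address arithmetic that may stay in (PLUS ...) form, e.g.
   inside an initializer, passes EXPAND_SUM or EXPAND_INITIALIZER.  */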
5145 rtx
5146 expand_expr (exp, target, tmode, modifier)
5147 register tree exp;
5148 rtx target;
5149 enum machine_mode tmode;
5150 enum expand_modifier modifier;
5152 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
5153 This is static so it will be accessible to our recursive callees. */
5154 static tree placeholder_list = 0;
5155 register rtx op0, op1, temp;
5156 tree type = TREE_TYPE (exp);
5157 int unsignedp = TREE_UNSIGNED (type);
5158 register enum machine_mode mode = TYPE_MODE (type);
5159 register enum tree_code code = TREE_CODE (exp);
5160 optab this_optab;
5161 /* Use subtarget as the target for operand 0 of a binary operation. */
5162 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5163 rtx original_target = target;
5164 int ignore = (target == const0_rtx
5165 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5166 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5167 || code == COND_EXPR)
5168 && TREE_CODE (type) == VOID_TYPE));
5169 tree context;
5170 /* Used by check-memory-usage to make modifier read only. */
5171 enum expand_modifier ro_modifier;
5173 /* Make a read-only version of the modifier. */
5174 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5175 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5176 ro_modifier = modifier;
5177 else
5178 ro_modifier = EXPAND_NORMAL;
5180 /* Don't use hard regs as subtargets, because the combiner
5181 can only handle pseudo regs. */
5182 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5183 subtarget = 0;
5184 /* Avoid subtargets inside loops,
5185 since they hide some invariant expressions. */
5186 if (preserve_subexpressions_p ())
5187 subtarget = 0;
5189 /* If we are going to ignore this result, we need only do something
5190 if there is a side-effect somewhere in the expression. If there
5191 is, short-circuit the most common cases here. Note that we must
5192 not call expand_expr with anything but const0_rtx in case this
5193 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5195 if (ignore)
5197 if (! TREE_SIDE_EFFECTS (exp))
5198 return const0_rtx;
5200 /* Ensure we reference a volatile object even if value is ignored. */
5201 if (TREE_THIS_VOLATILE (exp)
5202 && TREE_CODE (exp) != FUNCTION_DECL
5203 && mode != VOIDmode && mode != BLKmode)
5205 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5206 if (GET_CODE (temp) == MEM)
5207 temp = copy_to_reg (temp);
5208 return const0_rtx;
5211 if (TREE_CODE_CLASS (code) == '1')
5212 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5213 VOIDmode, ro_modifier);
5214 else if (TREE_CODE_CLASS (code) == '2'
5215 || TREE_CODE_CLASS (code) == '<')
5217 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5218 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5219 return const0_rtx;
5221 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5222 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5223 /* If the second operand has no side effects, just evaluate
5224 the first. */
5225 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5226 VOIDmode, ro_modifier);
5228 target = 0;
5231 #ifdef MAX_INTEGER_COMPUTATION_MODE
5232 if (target)
5234 enum machine_mode mode = GET_MODE (target);
5236 if (GET_MODE_CLASS (mode) == MODE_INT
5237 && mode > MAX_INTEGER_COMPUTATION_MODE)
5238 fatal ("unsupported wide integer operation");
5241 if (GET_MODE_CLASS (tmode) == MODE_INT
5242 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5243 fatal ("unsupported wide integer operation");
5245 check_max_integer_computation_mode (exp);
5246 #endif
5248 /* If will do cse, generate all results into pseudo registers
5249 since 1) that allows cse to find more things
5250 and 2) otherwise cse could produce an insn the machine
5251 cannot support. */
5253 if (! cse_not_expected && mode != BLKmode && target
5254 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5255 target = subtarget;
5257 switch (code)
5259 case LABEL_DECL:
5261 tree function = decl_function_context (exp);
5262 /* Handle using a label in a containing function. */
5263 if (function != current_function_decl
5264 && function != inline_function_decl && function != 0)
5266 struct function *p = find_function_data (function);
5267 /* Allocate in the memory associated with the function
5268 that the label is in. */
5269 push_obstacks (p->function_obstack,
5270 p->function_maybepermanent_obstack);
5272 p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5273 label_rtx (exp),
5274 p->forced_labels);
5275 pop_obstacks ();
5277 else if (modifier == EXPAND_INITIALIZER)
5278 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5279 label_rtx (exp), forced_labels);
5280 temp = gen_rtx_MEM (FUNCTION_MODE,
5281 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5282 if (function != current_function_decl
5283 && function != inline_function_decl && function != 0)
5284 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5285 return temp;
5288 case PARM_DECL:
5289 if (DECL_RTL (exp) == 0)
5291 error_with_decl (exp, "prior parameter's size depends on `%s'");
5292 return CONST0_RTX (mode);
5295 /* ... fall through ... */
5297 case VAR_DECL:
5298 /* If a static var's type was incomplete when the decl was written,
5299 but the type is complete now, lay out the decl now. */
5300 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5301 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5303 push_obstacks_nochange ();
5304 end_temporary_allocation ();
5305 layout_decl (exp, 0);
5306 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5307 pop_obstacks ();
5310 /* Only check automatic variables. Currently, function arguments are
5311 not checked (this can be done at compile-time with prototypes).
5312 Aggregates are not checked. */
5313 if (flag_check_memory_usage && code == VAR_DECL
5314 && GET_CODE (DECL_RTL (exp)) == MEM
5315 && DECL_CONTEXT (exp) != NULL_TREE
5316 && ! TREE_STATIC (exp)
5317 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5319 enum memory_use_mode memory_usage;
5320 memory_usage = get_memory_usage_from_modifier (modifier);
5322 if (memory_usage != MEMORY_USE_DONT)
5323 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5324 XEXP (DECL_RTL (exp), 0), ptr_mode,
5325 GEN_INT (int_size_in_bytes (type)),
5326 TYPE_MODE (sizetype),
5327 GEN_INT (memory_usage),
5328 TYPE_MODE (integer_type_node));
5331 /* ... fall through ... */
5333 case FUNCTION_DECL:
5334 case RESULT_DECL:
5335 if (DECL_RTL (exp) == 0)
5336 abort ();
5338 /* Ensure the variable is marked as used even if it doesn't go
5339 through a parser. If it hasn't been used yet, write out an
5340 external definition. */
5341 if (! TREE_USED (exp))
5343 assemble_external (exp);
5344 TREE_USED (exp) = 1;
5347 /* Show we haven't gotten RTL for this yet. */
5348 temp = 0;
5350 /* Handle variables inherited from containing functions. */
5351 context = decl_function_context (exp);
5353 /* We treat inline_function_decl as an alias for the current function
5354 because that is the inline function whose vars, types, etc.
5355 are being merged into the current function.
5356 See expand_inline_function. */
5358 if (context != 0 && context != current_function_decl
5359 && context != inline_function_decl
5360 /* If var is static, we don't need a static chain to access it. */
5361 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5362 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5364 rtx addr;
5366 /* Mark as non-local and addressable. */
5367 DECL_NONLOCAL (exp) = 1;
5368 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5369 abort ();
5370 mark_addressable (exp);
5371 if (GET_CODE (DECL_RTL (exp)) != MEM)
5372 abort ();
5373 addr = XEXP (DECL_RTL (exp), 0);
5374 if (GET_CODE (addr) == MEM)
5375 addr = gen_rtx_MEM (Pmode,
5376 fix_lexical_addr (XEXP (addr, 0), exp));
5377 else
5378 addr = fix_lexical_addr (addr, exp);
5379 temp = change_address (DECL_RTL (exp), mode, addr);
5382 /* This is the case of an array whose size is to be determined
5383 from its initializer, while the initializer is still being parsed.
5384 See expand_decl. */
5386 else if (GET_CODE (DECL_RTL (exp)) == MEM
5387 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5388 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5389 XEXP (DECL_RTL (exp), 0));
5391 /* If DECL_RTL is memory, we are in the normal case and either
5392 the address is not valid or it is not a register and -fforce-addr
5393 is specified, get the address into a register. */
5395 else if (GET_CODE (DECL_RTL (exp)) == MEM
5396 && modifier != EXPAND_CONST_ADDRESS
5397 && modifier != EXPAND_SUM
5398 && modifier != EXPAND_INITIALIZER
5399 && (! memory_address_p (DECL_MODE (exp),
5400 XEXP (DECL_RTL (exp), 0))
5401 || (flag_force_addr
5402 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5403 temp = change_address (DECL_RTL (exp), VOIDmode,
5404 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5406 /* If we got something, return it. But first, set the alignment
5407 if the address is a register. */
5408 if (temp != 0)
5410 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5411 mark_reg_pointer (XEXP (temp, 0),
5412 DECL_ALIGN (exp) / BITS_PER_UNIT);
5414 return temp;
5417 /* If the mode of DECL_RTL does not match that of the decl, it
5418 must be a promoted value. We return a SUBREG of the wanted mode,
5419 but mark it so that we know that it was already extended. */
5421 if (GET_CODE (DECL_RTL (exp)) == REG
5422 && GET_MODE (DECL_RTL (exp)) != mode)
5424 /* Get the signedness used for this variable. Ensure we get the
5425 same mode we got when the variable was declared. */
5426 if (GET_MODE (DECL_RTL (exp))
5427 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5428 abort ();
5430 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5431 SUBREG_PROMOTED_VAR_P (temp) = 1;
5432 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5433 return temp;
5436 return DECL_RTL (exp);
5438 case INTEGER_CST:
5439 return immed_double_const (TREE_INT_CST_LOW (exp),
5440 TREE_INT_CST_HIGH (exp),
5441 mode);
5443 case CONST_DECL:
5444 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5445 EXPAND_MEMORY_USE_BAD);
5447 case REAL_CST:
5448 /* If optimized, generate immediate CONST_DOUBLE
5449 which will be turned into memory by reload if necessary.
5451 We used to force a register so that loop.c could see it. But
5452 this does not allow gen_* patterns to perform optimizations with
5453 the constants. It also produces two insns in cases like "x = 1.0;".
5454 On most machines, floating-point constants are not permitted in
5455 many insns, so we'd end up copying it to a register in any case.
5457 Now, we do the copying in expand_binop, if appropriate. */
5458 return immed_real_const (exp);
5460 case COMPLEX_CST:
5461 case STRING_CST:
5462 if (! TREE_CST_RTL (exp))
5463 output_constant_def (exp);
5465 /* TREE_CST_RTL probably contains a constant address.
5466 On RISC machines where a constant address isn't valid,
5467 make some insns to get that address into a register. */
5468 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5469 && modifier != EXPAND_CONST_ADDRESS
5470 && modifier != EXPAND_INITIALIZER
5471 && modifier != EXPAND_SUM
5472 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5473 || (flag_force_addr
5474 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5475 return change_address (TREE_CST_RTL (exp), VOIDmode,
5476 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5477 return TREE_CST_RTL (exp);
5479 case EXPR_WITH_FILE_LOCATION:
5481 rtx to_return;
5482 char *saved_input_filename = input_filename;
5483 int saved_lineno = lineno;
5484 input_filename = EXPR_WFL_FILENAME (exp);
5485 lineno = EXPR_WFL_LINENO (exp);
5486 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
5487 emit_line_note (input_filename, lineno);
5488 /* Possibly avoid switching back and forth here. */
5489 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
5490 input_filename = saved_input_filename;
5491 lineno = saved_lineno;
5492 return to_return;
5495 case SAVE_EXPR:
5496 context = decl_function_context (exp);
5498 /* If this SAVE_EXPR was at global context, assume we are an
5499 initialization function and move it into our context. */
5500 if (context == 0)
5501 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5503 /* We treat inline_function_decl as an alias for the current function
5504 because that is the inline function whose vars, types, etc.
5505 are being merged into the current function.
5506 See expand_inline_function. */
5507 if (context == current_function_decl || context == inline_function_decl)
5508 context = 0;
5510 /* If this is non-local, handle it. */
5511 if (context)
5513 /* The following call just exists to abort if the context is
5514 not of a containing function. */
5515 find_function_data (context);
5517 temp = SAVE_EXPR_RTL (exp);
5518 if (temp && GET_CODE (temp) == REG)
5520 put_var_into_stack (exp);
5521 temp = SAVE_EXPR_RTL (exp);
5523 if (temp == 0 || GET_CODE (temp) != MEM)
5524 abort ();
5525 return change_address (temp, mode,
5526 fix_lexical_addr (XEXP (temp, 0), exp));
5528 if (SAVE_EXPR_RTL (exp) == 0)
5530 if (mode == VOIDmode)
5531 temp = const0_rtx;
5532 else
5533 temp = assign_temp (type, 3, 0, 0);
5535 SAVE_EXPR_RTL (exp) = temp;
5536 if (!optimize && GET_CODE (temp) == REG)
5537 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
5538 save_expr_regs);
5540 /* If the mode of TEMP does not match that of the expression, it
5541 must be a promoted value. We pass store_expr a SUBREG of the
5542 wanted mode but mark it so that we know that it was already
5543 extended. Note that `unsignedp' was modified above in
5544 this case. */
5546 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5548 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5549 SUBREG_PROMOTED_VAR_P (temp) = 1;
5550 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5553 if (temp == const0_rtx)
5554 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5555 EXPAND_MEMORY_USE_BAD);
5556 else
5557 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5559 TREE_USED (exp) = 1;
5562 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5563 must be a promoted value. We return a SUBREG of the wanted mode,
5564 but mark it so that we know that it was already extended. */
5566 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5567 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5569 /* Compute the signedness and make the proper SUBREG. */
5570 promote_mode (type, mode, &unsignedp, 0);
5571 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5572 SUBREG_PROMOTED_VAR_P (temp) = 1;
5573 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5574 return temp;
5577 return SAVE_EXPR_RTL (exp);
5579 case UNSAVE_EXPR:
5581 rtx temp;
5582 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5583 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5584 return temp;
5587 case PLACEHOLDER_EXPR:
5589 tree placeholder_expr;
5591 /* If there is an object on the head of the placeholder list,
5592 see if some object in it is of type TYPE or a pointer to it. For
5593 further information, see tree.def. */
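/* Illustration (not from the source): if TYPE is "struct S" and the
   placeholder list holds an object of type "struct S *", the first
   scan below finds no object of the right type, and the second scan
   wraps the pointer in an INDIRECT_REF of type "struct S", giving
   the PLACEHOLDER_EXPR a concrete object to stand for.  */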
5594 for (placeholder_expr = placeholder_list;
5595 placeholder_expr != 0;
5596 placeholder_expr = TREE_CHAIN (placeholder_expr))
5598 tree need_type = TYPE_MAIN_VARIANT (type);
5599 tree object = 0;
5600 tree old_list = placeholder_list;
5601 tree elt;
5603 /* Find the outermost reference that is of the type we want.
5604 If none, see if any object has a type that is a pointer to
5605 the type we want. */
5606 for (elt = TREE_PURPOSE (placeholder_expr);
5607 elt != 0 && object == 0;
5608 elt
5609 = ((TREE_CODE (elt) == COMPOUND_EXPR
5610 || TREE_CODE (elt) == COND_EXPR)
5611 ? TREE_OPERAND (elt, 1)
5612 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5613 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5614 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5615 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5616 ? TREE_OPERAND (elt, 0) : 0))
5617 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5618 object = elt;
5620 for (elt = TREE_PURPOSE (placeholder_expr);
5621 elt != 0 && object == 0;
5622 elt
5623 = ((TREE_CODE (elt) == COMPOUND_EXPR
5624 || TREE_CODE (elt) == COND_EXPR)
5625 ? TREE_OPERAND (elt, 1)
5626 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5627 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5628 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5629 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5630 ? TREE_OPERAND (elt, 0) : 0))
5631 if (POINTER_TYPE_P (TREE_TYPE (elt))
5632 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
5633 == need_type))
5634 object = build1 (INDIRECT_REF, need_type, elt);
5636 if (object != 0)
5638 /* Expand this object, skipping the list entries before the one
5639 in which it was found, in case it is also a PLACEHOLDER_EXPR.
5640 In that case, we want to translate it using subsequent
5641 entries. */
5642 placeholder_list = TREE_CHAIN (placeholder_expr);
5643 temp = expand_expr (object, original_target, tmode,
5644 ro_modifier);
5645 placeholder_list = old_list;
5646 return temp;
5651 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5652 abort ();
5654 case WITH_RECORD_EXPR:
5655 /* Put the object on the placeholder list, expand our first operand,
5656 and pop the list. */
5657 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5658 placeholder_list);
5659 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
5660 tmode, ro_modifier);
5661 placeholder_list = TREE_CHAIN (placeholder_list);
5662 return target;
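/* EXIT_EXPR leaves the innermost loop when its operand is true;
   since expand_exit_loop_if_false exits on a false condition, the
   operand is inverted first.  LOOP_EXPR expands to an endless loop
   that only such an exit can leave.  */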
5664 case EXIT_EXPR:
5665 expand_exit_loop_if_false (NULL_PTR,
5666 invert_truthvalue (TREE_OPERAND (exp, 0)));
5667 return const0_rtx;
5669 case LOOP_EXPR:
5670 push_temp_slots ();
5671 expand_start_loop (1);
5672 expand_expr_stmt (TREE_OPERAND (exp, 0));
5673 expand_end_loop ();
5674 pop_temp_slots ();
5676 return const0_rtx;
5678 case BIND_EXPR:
5680 tree vars = TREE_OPERAND (exp, 0);
5681 int vars_need_expansion = 0;
5683 /* Need to open a binding contour here because
5684 if there are any cleanups they must be contained here. */
5685 expand_start_bindings (0);
5687 /* Mark the corresponding BLOCK for output in its proper place. */
5688 if (TREE_OPERAND (exp, 2) != 0
5689 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5690 insert_block (TREE_OPERAND (exp, 2));
5692 /* If VARS have not yet been expanded, expand them now. */
5693 while (vars)
5695 if (DECL_RTL (vars) == 0)
5697 vars_need_expansion = 1;
5698 expand_decl (vars);
5700 expand_decl_init (vars);
5701 vars = TREE_CHAIN (vars);
5704 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
5706 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5708 return temp;
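/* An RTL_EXPR carries an insn sequence that was generated earlier.
   The sequence may be emitted only once; after emitting it we replace
   it with const0_rtx, so a second attempt to expand the same
   RTL_EXPR aborts instead of duplicating insns.  */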
5711 case RTL_EXPR:
5712 if (RTL_EXPR_SEQUENCE (exp))
5714 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5715 abort ();
5716 emit_insns (RTL_EXPR_SEQUENCE (exp));
5717 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5719 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
5720 free_temps_for_rtl_expr (exp);
5721 return RTL_EXPR_RTL (exp);
5723 case CONSTRUCTOR:
5724 /* If we don't need the result, just ensure we evaluate any
5725 subexpressions. */
5726 if (ignore)
5728 tree elt;
5729 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
5730 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
5731 EXPAND_MEMORY_USE_BAD);
5732 return const0_rtx;
5735 /* All elts simple constants => refer to a constant in memory. But
5736 if this is a non-BLKmode mode, let it store a field at a time
5737 since that should make a CONST_INT or CONST_DOUBLE when we
5738 fold. Likewise, if we have a target we can use, it is best to
5739 store directly into the target unless the type is large enough
5740 that memcpy will be used. If we are making an initializer and
5741 all operands are constant, put it in memory as well. */
5742 else if ((TREE_STATIC (exp)
5743 && ((mode == BLKmode
5744 && ! (target != 0 && safe_from_p (target, exp, 1)))
5745 || TREE_ADDRESSABLE (exp)
5746 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5747 && (move_by_pieces_ninsns
5748 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5749 TYPE_ALIGN (type) / BITS_PER_UNIT)
5750 > MOVE_RATIO)
5751 && ! mostly_zeros_p (exp))))
5752 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
5754 rtx constructor = output_constant_def (exp);
5755 if (modifier != EXPAND_CONST_ADDRESS
5756 && modifier != EXPAND_INITIALIZER
5757 && modifier != EXPAND_SUM
5758 && (! memory_address_p (GET_MODE (constructor),
5759 XEXP (constructor, 0))
5760 || (flag_force_addr
5761 && GET_CODE (XEXP (constructor, 0)) != REG)))
5762 constructor = change_address (constructor, VOIDmode,
5763 XEXP (constructor, 0));
5764 return constructor;
5767 else
5769 /* Handle calls that pass values in multiple non-contiguous
5770 locations. The Irix 6 ABI has examples of this. */
5771 if (target == 0 || ! safe_from_p (target, exp, 1)
5772 || GET_CODE (target) == PARALLEL)
5774 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5775 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5776 else
5777 target = assign_temp (type, 0, 1, 1);
5780 if (TREE_READONLY (exp))
5782 if (GET_CODE (target) == MEM)
5783 target = copy_rtx (target);
5785 RTX_UNCHANGING_P (target) = 1;
5788 store_constructor (exp, target, 0);
5789 return target;
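/* For an INDIRECT_REF, *P, we expand P to an address, build a MEM,
   and then annotate the MEM: whether it is part of an aggregate,
   whether it is volatile, and which alias set it belongs to.  */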
5792 case INDIRECT_REF:
5794 tree exp1 = TREE_OPERAND (exp, 0);
5795 tree exp2;
5796 tree index;
5797 tree string = string_constant (exp1, &index);
5798 int i;
5800 /* Try to optimize reads from const strings. */
5801 if (string
5802 && TREE_CODE (string) == STRING_CST
5803 && TREE_CODE (index) == INTEGER_CST
5804 && !TREE_INT_CST_HIGH (index)
5805 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
5806 && GET_MODE_CLASS (mode) == MODE_INT
5807 && GET_MODE_SIZE (mode) == 1
5808 && modifier != EXPAND_MEMORY_USE_WO)
5809 return GEN_INT (TREE_STRING_POINTER (string)[i]);
5811 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5812 op0 = memory_address (mode, op0);
5814 if (flag_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5816 enum memory_use_mode memory_usage;
5817 memory_usage = get_memory_usage_from_modifier (modifier);
5819 if (memory_usage != MEMORY_USE_DONT)
5821 in_check_memory_usage = 1;
5822 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5823 op0, ptr_mode,
5824 GEN_INT (int_size_in_bytes (type)),
5825 TYPE_MODE (sizetype),
5826 GEN_INT (memory_usage),
5827 TYPE_MODE (integer_type_node));
5828 in_check_memory_usage = 0;
5832 temp = gen_rtx_MEM (mode, op0);
5833 /* If address was computed by addition,
5834 mark this as an element of an aggregate. */
5835 if (TREE_CODE (exp1) == PLUS_EXPR
5836 || (TREE_CODE (exp1) == SAVE_EXPR
5837 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
5838 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
5839 || (TREE_CODE (exp1) == ADDR_EXPR
5840 && (exp2 = TREE_OPERAND (exp1, 0))
5841 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
5842 MEM_IN_STRUCT_P (temp) = 1;
5844 /* If the pointer is actually a REFERENCE_TYPE, this could be pointing
5845 into some aggregate too. In theory we could fold this into the
5846 previous check and use rtx_addr_varies_p there too.
5848 However, this seems safer. */
5849 if (!MEM_IN_STRUCT_P (temp)
5850 && (TREE_CODE (TREE_TYPE (exp1)) == REFERENCE_TYPE
5851 /* This may have been an array reference to the first element
5852 that was optimized away from being an addition. */
5853 || (TREE_CODE (exp1) == NOP_EXPR
5854 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp1, 0)))
5855 == REFERENCE_TYPE)
5856 || ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp1, 0)))
5857 == POINTER_TYPE)
5858 && (AGGREGATE_TYPE_P
5859 (TREE_TYPE (TREE_TYPE
5860 (TREE_OPERAND (exp1, 0))))))))))
5861 MEM_IN_STRUCT_P (temp) = ! rtx_addr_varies_p (temp);
5863 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
5864 MEM_ALIAS_SET (temp) = get_alias_set (exp);
5866 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5867 here, because, in C and C++, the fact that a location is accessed
5868 through a pointer to const does not mean that the value there can
5869 never change. Languages where it can never change should
5870 also set TREE_STATIC. */
5871 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
5872 return temp;
5875 case ARRAY_REF:
5876 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5877 abort ();
5880 tree array = TREE_OPERAND (exp, 0);
5881 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5882 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5883 tree index = TREE_OPERAND (exp, 1);
5884 tree index_type = TREE_TYPE (index);
5885 HOST_WIDE_INT i;
5887 /* Optimize the special-case of a zero lower bound.
5889 We convert the low_bound to sizetype to avoid some problems
5890 with constant folding. (E.g. suppose the lower bound is 1,
5891 and its mode is QI. Without the conversion, (ARRAY
5892 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5893 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5895 But sizetype isn't quite right either (especially if
5896 the lowbound is negative). FIXME */
5898 if (! integer_zerop (low_bound))
5899 index = fold (build (MINUS_EXPR, index_type, index,
5900 convert (sizetype, low_bound)));
5902 /* Fold an expression like: "foo"[2].
5903 This is not done in fold so it won't happen inside &.
5904 Don't fold if this is for wide characters since it's too
5905 difficult to do correctly and this is a very rare case. */
5907 if (TREE_CODE (array) == STRING_CST
5908 && TREE_CODE (index) == INTEGER_CST
5909 && !TREE_INT_CST_HIGH (index)
5910 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
5911 && GET_MODE_CLASS (mode) == MODE_INT
5912 && GET_MODE_SIZE (mode) == 1)
5913 return GEN_INT (TREE_STRING_POINTER (array)[i]);
5915 /* If this is a constant index into a constant array,
5916 just get the value from the array. Handle both the cases when
5917 we have an explicit constructor and when our operand is a variable
5918 that was declared const. */
5920 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5922 if (TREE_CODE (index) == INTEGER_CST
5923 && TREE_INT_CST_HIGH (index) == 0)
5925 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5927 i = TREE_INT_CST_LOW (index);
5928 while (elem && i--)
5929 elem = TREE_CHAIN (elem);
5930 if (elem)
5931 return expand_expr (fold (TREE_VALUE (elem)), target,
5932 tmode, ro_modifier);
5936 else if (optimize >= 1
5937 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5938 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5939 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5941 if (TREE_CODE (index) == INTEGER_CST)
5943 tree init = DECL_INITIAL (array);
5945 i = TREE_INT_CST_LOW (index);
5946 if (TREE_CODE (init) == CONSTRUCTOR)
5948 tree elem = CONSTRUCTOR_ELTS (init);
5950 while (elem
5951 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
5952 elem = TREE_CHAIN (elem);
5953 if (elem)
5954 return expand_expr (fold (TREE_VALUE (elem)), target,
5955 tmode, ro_modifier);
5957 else if (TREE_CODE (init) == STRING_CST
5958 && TREE_INT_CST_HIGH (index) == 0
5959 && (TREE_INT_CST_LOW (index)
5960 < TREE_STRING_LENGTH (init)))
5961 return (GEN_INT
5962 (TREE_STRING_POINTER
5963 (init)[TREE_INT_CST_LOW (index)]));
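/* So at -O and above, a read such as tbl[2] from
   "static const int tbl[] = {2, 3, 5};" is replaced by the
   constant 5 with no memory reference at all.  */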
5968 /* ... fall through ... */
5970 case COMPONENT_REF:
5971 case BIT_FIELD_REF:
5972 /* If the operand is a CONSTRUCTOR, we can just extract the
5973 appropriate field if it is present. Don't do this if we have
5974 already written the data since we want to refer to that copy
5975 and varasm.c assumes that's what we'll do. */
5976 if (code != ARRAY_REF
5977 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5978 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
5980 tree elt;
5982 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5983 elt = TREE_CHAIN (elt))
5984 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
5985 /* We can normally use the value of the field in the
5986 CONSTRUCTOR. However, if this is a bitfield in
5987 an integral mode that we can fit in a HOST_WIDE_INT,
5988 we must mask only the number of bits in the bitfield,
5989 since this is done implicitly by the constructor. If
5990 the bitfield does not meet either of those conditions,
5991 we can't do this optimization. */
5992 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
5993 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
5994 == MODE_INT)
5995 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
5996 <= HOST_BITS_PER_WIDE_INT))))
5998 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
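/* A bit-field value taken from the CONSTRUCTOR has not been
   truncated to its width, so do that here: unsigned fields are
   masked with an AND; signed fields are sign-extended by a left
   shift followed by an arithmetic right shift.  */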
5999 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6001 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
6003 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6005 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6006 op0 = expand_and (op0, op1, target);
6008 else
6010 enum machine_mode imode
6011 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6012 tree count
6013 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6014 0);
6016 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6017 target, 0);
6018 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6019 target, 0);
6023 return op0;
6028 enum machine_mode mode1;
6029 int bitsize;
6030 int bitpos;
6031 tree offset;
6032 int volatilep = 0;
6033 int alignment;
6034 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6035 &mode1, &unsignedp, &volatilep,
6036 &alignment);
6038 /* If we got back the original object, something is wrong. Perhaps
6039 we are evaluating an expression too early. In any event, don't
6040 infinitely recurse. */
6041 if (tem == exp)
6042 abort ();
6044 /* If TEM's type is a union of variable size, pass TARGET to the inner
6045 computation, since it will need a temporary and TARGET is known
6046 to suffice. This occurs in unchecked conversion in Ada. */
6048 op0 = expand_expr (tem,
6049 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6050 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6051 != INTEGER_CST)
6052 ? target : NULL_RTX),
6053 VOIDmode,
6054 modifier == EXPAND_INITIALIZER
6055 ? modifier : EXPAND_NORMAL);
6057 /* If this is a constant, put it into a register if it is a
6058 legitimate constant and memory if it isn't. */
6059 if (CONSTANT_P (op0))
6061 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6062 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
6063 op0 = force_reg (mode, op0);
6064 else
6065 op0 = validize_mem (force_const_mem (mode, op0));
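/* If the reference includes a variable offset, convert the offset
   to ptr_mode if necessary and add it into the address of the MEM.  */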
6068 if (offset != 0)
6070 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6072 if (GET_CODE (op0) != MEM)
6073 abort ();
6075 if (GET_MODE (offset_rtx) != ptr_mode)
6077 #ifdef POINTERS_EXTEND_UNSIGNED
6078 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6079 #else
6080 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6081 #endif
6084 if (GET_CODE (op0) == MEM
6085 && GET_MODE (op0) == BLKmode
6086 && bitsize
6087 && (bitpos % bitsize) == 0
6088 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6089 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6091 rtx temp = change_address (op0, mode1,
6092 plus_constant (XEXP (op0, 0),
6093 (bitpos /
6094 BITS_PER_UNIT)));
6095 if (GET_CODE (XEXP (temp, 0)) == REG)
6096 op0 = temp;
6097 else
6098 op0 = change_address (op0, mode1,
6099 force_reg (GET_MODE (XEXP (temp, 0)),
6100 XEXP (temp, 0)));
6101 bitpos = 0;
6105 op0 = change_address (op0, VOIDmode,
6106 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6107 force_reg (ptr_mode, offset_rtx)));
6110 /* Don't forget about volatility even if this is a bitfield. */
6111 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6113 op0 = copy_rtx (op0);
6114 MEM_VOLATILE_P (op0) = 1;
6117 /* Check the access. */
6118 if (flag_check_memory_usage && GET_CODE (op0) == MEM)
6120 enum memory_use_mode memory_usage;
6121 memory_usage = get_memory_usage_from_modifier (modifier);
6123 if (memory_usage != MEMORY_USE_DONT)
6125 rtx to;
6126 int size;
6128 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6129 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6131 /* Check the access right of the pointer. */
6132 if (size > BITS_PER_UNIT)
6133 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6134 to, ptr_mode,
6135 GEN_INT (size / BITS_PER_UNIT),
6136 TYPE_MODE (sizetype),
6137 GEN_INT (memory_usage),
6138 TYPE_MODE (integer_type_node));
6142 /* In cases where an aligned union has an unaligned object
6143 as a field, we might be extracting a BLKmode value from
6144 an integer-mode (e.g., SImode) object. Handle this case
6145 by doing the extract into an object as wide as the field
6146 (which we know to be the width of a basic mode), then
6147 storing into memory, and changing the mode to BLKmode.
6148 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6149 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6150 if (mode1 == VOIDmode
6151 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6152 || (modifier != EXPAND_CONST_ADDRESS
6153 && modifier != EXPAND_INITIALIZER
6154 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6155 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6156 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6157 /* If the field isn't aligned enough to fetch as a memref,
6158 fetch it as a bit field. */
6159 || (SLOW_UNALIGNED_ACCESS
6160 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
6161 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
6163 enum machine_mode ext_mode = mode;
6165 if (ext_mode == BLKmode)
6166 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6168 if (ext_mode == BLKmode)
6170 /* In this case, BITPOS must start at a byte boundary and
6171 TARGET, if specified, must be a MEM. */
6172 if (GET_CODE (op0) != MEM
6173 || (target != 0 && GET_CODE (target) != MEM)
6174 || bitpos % BITS_PER_UNIT != 0)
6175 abort ();
6177 op0 = change_address (op0, VOIDmode,
6178 plus_constant (XEXP (op0, 0),
6179 bitpos / BITS_PER_UNIT));
6180 if (target == 0)
6181 target = assign_temp (type, 0, 1, 1);
6183 emit_block_move (target, op0,
6184 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6185 / BITS_PER_UNIT),
6186 1);
6188 return target;
6191 op0 = validize_mem (op0);
6193 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6194 mark_reg_pointer (XEXP (op0, 0), alignment);
6196 op0 = extract_bit_field (op0, bitsize, bitpos,
6197 unsignedp, target, ext_mode, ext_mode,
6198 alignment,
6199 int_size_in_bytes (TREE_TYPE (tem)));
6201 /* If the result is a record type and BITSIZE is narrower than
6202 the mode of OP0, an integral mode, and this is a big endian
6203 machine, we must put the field into the high-order bits. */
6204 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6205 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6206 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6207 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6208 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6209 - bitsize),
6210 op0, 1);
6212 if (mode == BLKmode)
6214 rtx new = assign_stack_temp (ext_mode,
6215 bitsize / BITS_PER_UNIT, 0);
6217 emit_move_insn (new, op0);
6218 op0 = copy_rtx (new);
6219 PUT_MODE (op0, BLKmode);
6220 MEM_IN_STRUCT_P (op0) = 1;
6223 return op0;
6226 /* If the result is BLKmode, use that to access the object
6227 now as well. */
6228 if (mode == BLKmode)
6229 mode1 = BLKmode;
6231 /* Get a reference to just this component. */
6232 if (modifier == EXPAND_CONST_ADDRESS
6233 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6234 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6235 (bitpos / BITS_PER_UNIT)));
6236 else
6237 op0 = change_address (op0, mode1,
6238 plus_constant (XEXP (op0, 0),
6239 (bitpos / BITS_PER_UNIT)));
6241 if (GET_CODE (op0) == MEM)
6242 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6244 if (GET_CODE (XEXP (op0, 0)) == REG)
6245 mark_reg_pointer (XEXP (op0, 0), alignment);
6247 MEM_IN_STRUCT_P (op0) = 1;
6248 MEM_VOLATILE_P (op0) |= volatilep;
6249 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6250 || modifier == EXPAND_CONST_ADDRESS
6251 || modifier == EXPAND_INITIALIZER)
6252 return op0;
6253 else if (target == 0)
6254 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6256 convert_move (target, op0, unsignedp);
6257 return target;
6260 /* Intended for a reference to a buffer of a file-object in Pascal.
6261 But it's not certain that a special tree code will really be
6262 necessary for these. INDIRECT_REF might work for them. */
6263 case BUFFER_REF:
6264 abort ();
6266 case IN_EXPR:
6268 /* Pascal set IN expression.
6270 Algorithm:
6271 rlo = set_low - (set_low%bits_per_word);
6272 the_word = set [ (index - rlo)/bits_per_word ];
6273 bit_index = index % bits_per_word;
6274 bitmask = 1 << bit_index;
6275 return !!(the_word & bitmask); */
6277 tree set = TREE_OPERAND (exp, 0);
6278 tree index = TREE_OPERAND (exp, 1);
6279 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6280 tree set_type = TREE_TYPE (set);
6281 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6282 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6283 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6284 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6285 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6286 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6287 rtx setaddr = XEXP (setval, 0);
6288 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6289 rtx rlow;
6290 rtx diff, quo, rem, addr, bit, result;
6292 preexpand_calls (exp);
6294 /* If domain is empty, answer is no. Likewise if index is constant
6295 and out of bounds. */
6296 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6297 && TREE_CODE (set_low_bound) == INTEGER_CST
6298 && tree_int_cst_lt (set_high_bound, set_low_bound))
6299 || (TREE_CODE (index) == INTEGER_CST
6300 && TREE_CODE (set_low_bound) == INTEGER_CST
6301 && tree_int_cst_lt (index, set_low_bound))
6302 || (TREE_CODE (set_high_bound) == INTEGER_CST
6303 && TREE_CODE (index) == INTEGER_CST
6304 && tree_int_cst_lt (set_high_bound, index))))
6305 return const0_rtx;
6307 if (target == 0)
6308 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6310 /* If we get here, we have to generate the code for both cases
6311 (in range and out of range). */
6313 op0 = gen_label_rtx ();
6314 op1 = gen_label_rtx ();
6316 if (! (GET_CODE (index_val) == CONST_INT
6317 && GET_CODE (lo_r) == CONST_INT))
6319 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
6320 GET_MODE (index_val), iunsignedp, 0);
6321 emit_jump_insn (gen_blt (op1));
6324 if (! (GET_CODE (index_val) == CONST_INT
6325 && GET_CODE (hi_r) == CONST_INT))
6327 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
6328 GET_MODE (index_val), iunsignedp, 0);
6329 emit_jump_insn (gen_bgt (op1));
6332 /* Calculate the element number of bit zero in the first word
6333 of the set. */
6334 if (GET_CODE (lo_r) == CONST_INT)
6335 rlow = GEN_INT (INTVAL (lo_r)
6336 & ~ ((HOST_WIDE_INT) BITS_PER_UNIT - 1));
6337 else
6338 rlow = expand_binop (index_mode, and_optab, lo_r,
6339 GEN_INT (~ ((HOST_WIDE_INT) BITS_PER_UNIT - 1)),
6340 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6342 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6343 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6345 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6346 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6347 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6348 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6350 addr = memory_address (byte_mode,
6351 expand_binop (index_mode, add_optab, diff,
6352 setaddr, NULL_RTX, iunsignedp,
6353 OPTAB_LIB_WIDEN));
6355 /* Extract the bit we want to examine. */
6356 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6357 gen_rtx_MEM (byte_mode, addr),
6358 make_tree (TREE_TYPE (index), rem),
6359 NULL_RTX, 1);
6360 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6361 GET_MODE (target) == byte_mode ? target : 0,
6362 1, OPTAB_LIB_WIDEN);
6364 if (result != target)
6365 convert_move (target, result, 1);
6367 /* Output the code to handle the out-of-range case. */
6368 emit_jump (op0);
6369 emit_label (op1);
6370 emit_move_insn (target, const0_rtx);
6371 emit_label (op0);
6372 return target;
6375 case WITH_CLEANUP_EXPR:
6376 if (RTL_EXPR_RTL (exp) == 0)
6378 RTL_EXPR_RTL (exp)
6379 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6380 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6382 /* That's it for this cleanup. */
6383 TREE_OPERAND (exp, 2) = 0;
6385 return RTL_EXPR_RTL (exp);
6387 case CLEANUP_POINT_EXPR:
6389 extern int temp_slot_level;
6390 /* Start a new binding layer that will keep track of all cleanup
6391 actions to be performed. */
6392 expand_start_bindings (0);
6394 target_temp_slot_level = temp_slot_level;
6396 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6397 /* If we're going to use this value, load it up now. */
6398 if (! ignore)
6399 op0 = force_not_mem (op0);
6400 preserve_temp_slots (op0);
6401 expand_end_bindings (NULL_TREE, 0, 0);
6403 return op0;
6405 case CALL_EXPR:
6406 /* Check for a built-in function. */
6407 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6408 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6409 == FUNCTION_DECL)
6410 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6411 return expand_builtin (exp, target, subtarget, tmode, ignore);
6413 /* If this call was expanded already by preexpand_calls,
6414 just return the result we got. */
6415 if (CALL_EXPR_RTL (exp) != 0)
6416 return CALL_EXPR_RTL (exp);
6418 return expand_call (exp, target, ignore);
6420 case NON_LVALUE_EXPR:
6421 case NOP_EXPR:
6422 case CONVERT_EXPR:
6423 case REFERENCE_EXPR:
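/* A conversion to a union type stores the operand into the
   beginning of the union; reading back the whole union then
   reinterprets those bits, much like a bit-for-bit cast.  */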
6424 if (TREE_CODE (type) == UNION_TYPE)
6426 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6427 if (target == 0)
6429 if (mode != BLKmode)
6430 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6431 else
6432 target = assign_temp (type, 0, 1, 1);
6435 if (GET_CODE (target) == MEM)
6436 /* Store data into beginning of memory target. */
6437 store_expr (TREE_OPERAND (exp, 0),
6438 change_address (target, TYPE_MODE (valtype), 0), 0);
6440 else if (GET_CODE (target) == REG)
6441 /* Store this field into a union of the proper type. */
6442 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6443 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6444 VOIDmode, 0, 1,
6445 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
6446 else
6447 abort ();
6449 /* Return the entire union. */
6450 return target;
6453 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6455 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
6456 ro_modifier);
6458 /* If the signedness of the conversion differs and OP0 is
6459 a promoted SUBREG, clear that indication since we now
6460 have to do the proper extension. */
6461 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6462 && GET_CODE (op0) == SUBREG)
6463 SUBREG_PROMOTED_VAR_P (op0) = 0;
6465 return op0;
6468 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6469 if (GET_MODE (op0) == mode)
6470 return op0;
6472 /* If OP0 is a constant, just convert it into the proper mode. */
6473 if (CONSTANT_P (op0))
6474 return
6475 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6476 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6478 if (modifier == EXPAND_INITIALIZER)
6479 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
6481 if (target == 0)
6482 return
6483 convert_to_mode (mode, op0,
6484 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6485 else
6486 convert_move (target, op0,
6487 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6488 return target;
6490 case PLUS_EXPR:
6491 /* We come here from MINUS_EXPR when the second operand is a
6492 constant. */
6493 plus_expr:
6494 this_optab = add_optab;
6496 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6497 something else, make sure we add the register to the constant and
6498 then to the other thing. This case can occur during strength
6499 reduction and doing it this way will produce better code if the
6500 frame pointer or argument pointer is eliminated.
6502 fold-const.c will ensure that the constant is always in the inner
6503 PLUS_EXPR, so the only case we need to do anything about is if
6504 sp, ap, or fp is our second argument, in which case we must swap
6505 the innermost first argument and our second argument. */
6507 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6508 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6509 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6510 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6511 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6512 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6514 tree t = TREE_OPERAND (exp, 1);
6516 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6517 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6520 /* If the result is to be ptr_mode and we are adding an integer to
6521 something, we might be forming a constant. So try to use
6522 plus_constant. If it produces a sum and we can't accept it,
6523 use force_operand. This allows P = &ARR[const] to generate
6524 efficient code on machines where a SYMBOL_REF is not a valid
6525 address.
6527 If this is an EXPAND_SUM call, always return the sum. */
6528 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
6529 || mode == ptr_mode)
6531 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6532 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6533 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6535 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6536 EXPAND_SUM);
6537 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
6538 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6539 op1 = force_operand (op1, target);
6540 return op1;
6543 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6544 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
6545 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6547 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6548 EXPAND_SUM);
6549 if (! CONSTANT_P (op0))
6551 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6552 VOIDmode, modifier);
6553 /* Don't go to both_summands if modifier
6554 says it's not right to return a PLUS. */
6555 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6556 goto binop2;
6557 goto both_summands;
6559 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
6560 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6561 op0 = force_operand (op0, target);
6562 return op0;
6566 /* No sense saving up arithmetic to be done
6567 if it's all in the wrong mode to form part of an address.
6568 And force_operand won't know whether to sign-extend or
6569 zero-extend. */
6570 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6571 || mode != ptr_mode)
6572 goto binop;
6574 preexpand_calls (exp);
6575 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6576 subtarget = 0;
6578 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
6579 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
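/* Combine the two summands, keeping the result in address-canonical
   form: any MULT first and any constant term last, so the RTL we
   return matches what memory_address expects to see.  */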
6581 both_summands:
6582 /* Make sure any term that's a sum with a constant comes last. */
6583 if (GET_CODE (op0) == PLUS
6584 && CONSTANT_P (XEXP (op0, 1)))
6586 temp = op0;
6587 op0 = op1;
6588 op1 = temp;
6590 /* If adding to a sum including a constant,
6591 associate it to put the constant outside. */
6592 if (GET_CODE (op1) == PLUS
6593 && CONSTANT_P (XEXP (op1, 1)))
6595 rtx constant_term = const0_rtx;
6597 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6598 if (temp != 0)
6599 op0 = temp;
6600 /* Ensure that MULT comes first if there is one. */
6601 else if (GET_CODE (op0) == MULT)
6602 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
6603 else
6604 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
6606 /* Let's also eliminate constants from op0 if possible. */
6607 op0 = eliminate_constant_term (op0, &constant_term);
6609 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6610 their sum should be a constant. Form it into OP1, since the
6611 result we want will then be OP0 + OP1. */
6613 temp = simplify_binary_operation (PLUS, mode, constant_term,
6614 XEXP (op1, 1));
6615 if (temp != 0)
6616 op1 = temp;
6617 else
6618 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
6621 /* Put a constant term last and put a multiplication first. */
6622 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6623 temp = op1, op1 = op0, op0 = temp;
6625 temp = simplify_binary_operation (PLUS, mode, op0, op1);
6626 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
6628 case MINUS_EXPR:
6629 /* For initializers, we are allowed to return a MINUS of two
6630 symbolic constants. Here we handle all cases when both operands
6631 are constant. */
6632 /* Handle difference of two symbolic constants,
6633 for the sake of an initializer. */
6634 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6635 && really_constant_p (TREE_OPERAND (exp, 0))
6636 && really_constant_p (TREE_OPERAND (exp, 1)))
6638 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
6639 VOIDmode, ro_modifier);
6640 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6641 VOIDmode, ro_modifier);
6643 /* If the last operand is a CONST_INT, use plus_constant of
6644 the negated constant. Else make the MINUS. */
6645 if (GET_CODE (op1) == CONST_INT)
6646 return plus_constant (op0, - INTVAL (op1));
6647 else
6648 return gen_rtx_MINUS (mode, op0, op1);
6650 /* Convert A - const to A + (-const). */
6651 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6653 tree negated = fold (build1 (NEGATE_EXPR, type,
6654 TREE_OPERAND (exp, 1)));
6656 /* Deal with the case where we can't negate the constant
6657 in TYPE. */
6658 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6660 tree newtype = signed_type (type);
6661 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6662 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6663 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6665 if (! TREE_OVERFLOW (newneg))
6666 return expand_expr (convert (type,
6667 build (PLUS_EXPR, newtype,
6668 newop0, newneg)),
6669 target, tmode, ro_modifier);
6671 else
6673 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
6674 goto plus_expr;
6677 this_optab = sub_optab;
6678 goto binop;
6680 case MULT_EXPR:
6681 preexpand_calls (exp);
6682 /* If first operand is constant, swap them.
6683 Thus the following special case checks need only
6684 check the second operand. */
6685 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6687 register tree t1 = TREE_OPERAND (exp, 0);
6688 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6689 TREE_OPERAND (exp, 1) = t1;
6692 /* Attempt to return something suitable for generating an
6693 indexed address, for machines that support that. */
6695 if (modifier == EXPAND_SUM && mode == ptr_mode
6696 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6697 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6699 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6700 EXPAND_SUM);
6702 /* Apply distributive law if OP0 is x+c. */
6703 if (GET_CODE (op0) == PLUS
6704 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
6705 return gen_rtx_PLUS (mode,
6706 gen_rtx_MULT (mode, XEXP (op0, 0),
6707 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
6708 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6709 * INTVAL (XEXP (op0, 1))));
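/* Otherwise force the multiplicand into a register, so that the
   MULT we return can appear as part of an address.  */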
6711 if (GET_CODE (op0) != REG)
6712 op0 = force_operand (op0, NULL_RTX);
6713 if (GET_CODE (op0) != REG)
6714 op0 = copy_to_mode_reg (mode, op0);
6716 return gen_rtx_MULT (mode, op0,
6717 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
6720 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6721 subtarget = 0;
6723 /* Check for multiplying things that have been extended
6724 from a narrower type. If this machine supports multiplying
6725 in that narrower type with a result in the desired type,
6726 do it that way, and avoid the explicit type-conversion. */
6727 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6728 && TREE_CODE (type) == INTEGER_TYPE
6729 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6730 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6731 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6732 && int_fits_type_p (TREE_OPERAND (exp, 1),
6733 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6734 /* Don't use a widening multiply if a shift will do. */
6735 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
6736 > HOST_BITS_PER_WIDE_INT)
6737 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6738 ||
6739 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6740 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6741 ==
6742 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6743 /* If both operands are extended, they must either both
6744 be zero-extended or both be sign-extended. */
6745 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6746 ==
6747 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6749 enum machine_mode innermode
6750 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
6751 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6752 ? smul_widen_optab : umul_widen_optab);
6753 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6754 ? umul_widen_optab : smul_widen_optab);
6755 if (mode == GET_MODE_WIDER_MODE (innermode))
6757 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6759 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6760 NULL_RTX, VOIDmode, 0);
6761 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6762 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6763 VOIDmode, 0);
6764 else
6765 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6766 NULL_RTX, VOIDmode, 0);
6767 goto binop2;
6769 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6770 && innermode == word_mode)
6772 rtx htem;
6773 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6774 NULL_RTX, VOIDmode, 0);
6775 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6776 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6777 VOIDmode, 0);
6778 else
6779 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6780 NULL_RTX, VOIDmode, 0);
6781 temp = expand_binop (mode, other_optab, op0, op1, target,
6782 unsignedp, OPTAB_LIB_WIDEN);
6783 htem = expand_mult_highpart_adjust (innermode,
6784 gen_highpart (innermode, temp),
6785 op0, op1,
6786 gen_highpart (innermode, temp),
6787 unsignedp);
6788 emit_move_insn (gen_highpart (innermode, temp), htem);
6789 return temp;
6793 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6794 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6795 return expand_mult (mode, op0, op1, target, unsignedp);
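/* All the division and modulus codes funnel into expand_divmod;
   its first argument selects the result: 0 for the quotient,
   1 for the remainder.  */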
6797 case TRUNC_DIV_EXPR:
6798 case FLOOR_DIV_EXPR:
6799 case CEIL_DIV_EXPR:
6800 case ROUND_DIV_EXPR:
6801 case EXACT_DIV_EXPR:
6802 preexpand_calls (exp);
6803 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6804 subtarget = 0;
6805 /* Possible optimization: compute the dividend with EXPAND_SUM
6806 then if the divisor is constant can optimize the case
6807 where some terms of the dividend have coeffs divisible by it. */
6808 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6809 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6810 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6812 case RDIV_EXPR:
6813 this_optab = flodiv_optab;
6814 goto binop;
6816 case TRUNC_MOD_EXPR:
6817 case FLOOR_MOD_EXPR:
6818 case CEIL_MOD_EXPR:
6819 case ROUND_MOD_EXPR:
6820 preexpand_calls (exp);
6821 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6822 subtarget = 0;
6823 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6824 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6825 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6827 case FIX_ROUND_EXPR:
6828 case FIX_FLOOR_EXPR:
6829 case FIX_CEIL_EXPR:
6830 abort (); /* Not used for C. */
6832 case FIX_TRUNC_EXPR:
6833 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6834 if (target == 0)
6835 target = gen_reg_rtx (mode);
6836 expand_fix (target, op0, unsignedp);
6837 return target;
6839 case FLOAT_EXPR:
6840 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6841 if (target == 0)
6842 target = gen_reg_rtx (mode);
6843 /* expand_float can't figure out what to do if FROM has VOIDmode.
6844 So give it the correct mode. With -O, cse will optimize this. */
6845 if (GET_MODE (op0) == VOIDmode)
6846 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6847 op0);
6848 expand_float (target, op0,
6849 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6850 return target;
6852 case NEGATE_EXPR:
6853 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6854 temp = expand_unop (mode, neg_optab, op0, target, 0);
6855 if (temp == 0)
6856 abort ();
6857 return temp;
6859 case ABS_EXPR:
6860 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6862 /* Handle complex values specially. */
6863 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6864 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6865 return expand_complex_abs (mode, op0, target, unsignedp);
6867 /* Unsigned abs is simply the operand. Testing here means we don't
6868 risk generating incorrect code below. */
6869 if (TREE_UNSIGNED (type))
6870 return op0;
6872 return expand_abs (mode, op0, target, unsignedp,
6873 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
6875 case MAX_EXPR:
6876 case MIN_EXPR:
6877 target = original_target;
6878 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
6879 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
6880 || GET_MODE (target) != mode
6881 || (GET_CODE (target) == REG
6882 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6883 target = gen_reg_rtx (mode);
6884 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6885 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6887 /* First try to do it with a special MIN or MAX instruction.
6888 If that does not win, use a conditional jump to select the proper
6889 value. */
6890 this_optab = (TREE_UNSIGNED (type)
6891 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6892 : (code == MIN_EXPR ? smin_optab : smax_optab));
6894 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6895 OPTAB_WIDEN);
6896 if (temp != 0)
6897 return temp;
6899 /* At this point, a MEM target is no longer useful; we will get better
6900 code without it. */
6902 if (GET_CODE (target) == MEM)
6903 target = gen_reg_rtx (mode);
6905 if (target != op0)
6906 emit_move_insn (target, op0);
6908 op0 = gen_label_rtx ();
6910 /* If this mode is an integer too wide to compare properly,
6911 compare word by word. Rely on cse to optimize constant cases. */
6912 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
6914 if (code == MAX_EXPR)
6915 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6916 target, op1, NULL_RTX, op0);
6917 else
6918 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6919 op1, target, NULL_RTX, op0);
6920 emit_move_insn (target, op1);
6922 else
6924 if (code == MAX_EXPR)
6925 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6926 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6927 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
6928 else
6929 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6930 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6931 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
6932 if (temp == const0_rtx)
6933 emit_move_insn (target, op1);
6934 else if (temp != const_true_rtx)
6936 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6937 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6938 else
6939 abort ();
6940 emit_move_insn (target, op1);
6943 emit_label (op0);
6944 return target;
6946 case BIT_NOT_EXPR:
6947 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6948 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6949 if (temp == 0)
6950 abort ();
6951 return temp;
6953 case FFS_EXPR:
6954 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6955 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6956 if (temp == 0)
6957 abort ();
6958 return temp;
6960 /* ??? Can optimize bitwise operations with one arg constant.
6961 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6962 and (a bitwise1 b) bitwise2 b (etc.)
6963 but that is probably not worthwhile. */
6965 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6966 boolean values when we want in all cases to compute both of them. In
6967 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6968 as actual zero-or-1 values and then bitwise anding. In cases where
6969 there cannot be any side effects, better code would be made by
6970 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6971 how to recognize those cases. */
6973 case TRUTH_AND_EXPR:
6974 case BIT_AND_EXPR:
6975 this_optab = and_optab;
6976 goto binop;
6978 case TRUTH_OR_EXPR:
6979 case BIT_IOR_EXPR:
6980 this_optab = ior_optab;
6981 goto binop;
6983 case TRUTH_XOR_EXPR:
6984 case BIT_XOR_EXPR:
6985 this_optab = xor_optab;
6986 goto binop;
6988 case LSHIFT_EXPR:
6989 case RSHIFT_EXPR:
6990 case LROTATE_EXPR:
6991 case RROTATE_EXPR:
6992 preexpand_calls (exp);
6993 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6994 subtarget = 0;
6995 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6996 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6997 unsignedp);
6999 /* Could determine the answer when only additive constants differ. Also,
7000 the addition of one can be handled by changing the condition. */
7001 case LT_EXPR:
7002 case LE_EXPR:
7003 case GT_EXPR:
7004 case GE_EXPR:
7005 case EQ_EXPR:
7006 case NE_EXPR:
7007 preexpand_calls (exp);
7008 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7009 if (temp != 0)
7010 return temp;
7012 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7013 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7014 && original_target
7015 && GET_CODE (original_target) == REG
7016 && (GET_MODE (original_target)
7017 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7019 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7020 VOIDmode, 0);
7022 if (temp != original_target)
7023 temp = copy_to_reg (temp);
7025 op1 = gen_label_rtx ();
7026 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
7027 GET_MODE (temp), unsignedp, 0);
7028 emit_jump_insn (gen_beq (op1));
7029 emit_move_insn (temp, const1_rtx);
7030 emit_label (op1);
7031 return temp;
7034 /* If no set-flag instruction, must generate a conditional
7035 store into a temporary variable. Drop through
7036 and handle this like && and ||. */
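/* The scheme is: clear the target, jump around a store of 1 when
   the condition is false, and rely on jumpifnot to short-circuit
   the subexpressions of && and ||.  */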
7038 case TRUTH_ANDIF_EXPR:
7039 case TRUTH_ORIF_EXPR:
7040 if (! ignore
7041 && (target == 0 || ! safe_from_p (target, exp, 1)
7042 /* Make sure we don't have a hard reg (such as function's return
7043 value) live across basic blocks, if not optimizing. */
7044 || (!optimize && GET_CODE (target) == REG
7045 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7046 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7048 if (target)
7049 emit_clr_insn (target);
7051 op1 = gen_label_rtx ();
7052 jumpifnot (exp, op1);
7054 if (target)
7055 emit_0_to_1_insn (target);
7057 emit_label (op1);
7058 return ignore ? const0_rtx : target;
7060 case TRUTH_NOT_EXPR:
7061 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7062 /* The parser is careful to generate TRUTH_NOT_EXPR
7063 only with operands that are always zero or one. */
7064 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7065 target, 1, OPTAB_LIB_WIDEN);
7066 if (temp == 0)
7067 abort ();
7068 return temp;
7070 case COMPOUND_EXPR:
7071 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7072 emit_queue ();
7073 return expand_expr (TREE_OPERAND (exp, 1),
7074 (ignore ? const0_rtx : target),
7075 VOIDmode, 0);
7077 case COND_EXPR:
7078 /* If we would have a "singleton" (see below) were it not for a
7079 conversion in each arm, bring that conversion back out. */
7080 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7081 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7082 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7083 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7085 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7086 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7088 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7089 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7090 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7091 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7092 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7093 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7094 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7095 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7096 return expand_expr (build1 (NOP_EXPR, type,
7097 build (COND_EXPR, TREE_TYPE (true),
7098 TREE_OPERAND (exp, 0),
7099 true, false)),
7100 target, tmode, modifier);
7104 /* Note that COND_EXPRs whose type is a structure or union
7105 are required to be constructed to contain assignments of
7106 a temporary variable, so that we can evaluate them here
7107 for side effect only. If type is void, we must do likewise. */
7109 /* If an arm of the branch requires a cleanup,
7110 only that cleanup is performed. */
7112 tree singleton = 0;
7113 tree binary_op = 0, unary_op = 0;
7115 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7116 convert it to our mode, if necessary. */
7117 if (integer_onep (TREE_OPERAND (exp, 1))
7118 && integer_zerop (TREE_OPERAND (exp, 2))
7119 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7121 if (ignore)
7123 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7124 ro_modifier);
7125 return const0_rtx;
7128 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7129 if (GET_MODE (op0) == mode)
7130 return op0;
7132 if (target == 0)
7133 target = gen_reg_rtx (mode);
7134 convert_move (target, op0, unsignedp);
7135 return target;
7138 /* Check for X ? A + B : A. If we have this, we can copy A to the
7139 output and conditionally add B. Similarly for unary operations.
7140 Don't do this if X has side-effects because those side effects
7141 might affect A or B and the "?" operation is a sequence point in
7142 ANSI. (operand_equal_p tests for side effects.) */
7144 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7145 && operand_equal_p (TREE_OPERAND (exp, 2),
7146 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7147 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7148 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7149 && operand_equal_p (TREE_OPERAND (exp, 1),
7150 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7151 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7152 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7153 && operand_equal_p (TREE_OPERAND (exp, 2),
7154 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7155 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7156 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7157 && operand_equal_p (TREE_OPERAND (exp, 1),
7158 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7159 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7161 /* If we are not to produce a result, we have no target. Otherwise,
7162 if a target was specified use it; it will not be used as an
7163 intermediate target unless it is safe. If no target, use a
7164 temporary. */
7166 if (ignore)
7167 temp = 0;
7168 else if (original_target
7169 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7170 || (singleton && GET_CODE (original_target) == REG
7171 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7172 && original_target == var_rtx (singleton)))
7173 && GET_MODE (original_target) == mode
7174 #ifdef HAVE_conditional_move
7175 && (! can_conditionally_move_p (mode)
7176 || GET_CODE (original_target) == REG
7177 || TREE_ADDRESSABLE (type))
7178 #endif
7179 && ! (GET_CODE (original_target) == MEM
7180 && MEM_VOLATILE_P (original_target)))
7181 temp = original_target;
7182 else if (TREE_ADDRESSABLE (type))
7183 abort ();
7184 else
7185 temp = assign_temp (type, 0, 0, 1);
7187 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7188 do the test of X as a store-flag operation, do this as
7189 A + ((X != 0) << log C). Similarly for other simple binary
7190 operators. Only do for C == 1 if BRANCH_COST is low. */
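/* For example, X ? A + 4 : A can then be computed without a branch
   as A + ((X != 0) << 2), when BRANCH_COST makes that profitable.  */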
7191 if (temp && singleton && binary_op
7192 && (TREE_CODE (binary_op) == PLUS_EXPR
7193 || TREE_CODE (binary_op) == MINUS_EXPR
7194 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7195 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7196 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7197 : integer_onep (TREE_OPERAND (binary_op, 1)))
7198 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7200 rtx result;
7201 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7202 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7203 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7204 : xor_optab);
7206 /* If we had X ? A : A + 1, do this as A + (X == 0).
7208 We have to invert the truth value here and then put it
7209 back later if do_store_flag fails. We cannot simply copy
7210 TREE_OPERAND (exp, 0) to another variable and modify that
7211 because invert_truthvalue can modify the tree pointed to
7212 by its argument. */
7213 if (singleton == TREE_OPERAND (exp, 1))
7214 TREE_OPERAND (exp, 0)
7215 = invert_truthvalue (TREE_OPERAND (exp, 0));
7217 result = do_store_flag (TREE_OPERAND (exp, 0),
7218 (safe_from_p (temp, singleton, 1)
7219 ? temp : NULL_RTX),
7220 mode, BRANCH_COST <= 1);
7222 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7223 result = expand_shift (LSHIFT_EXPR, mode, result,
7224 build_int_2 (tree_log2
7225 (TREE_OPERAND
7226 (binary_op, 1)),
7227 0),
7228 (safe_from_p (temp, singleton, 1)
7229 ? temp : NULL_RTX), 0);
7231 if (result)
7233 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7234 return expand_binop (mode, boptab, op1, result, temp,
7235 unsignedp, OPTAB_LIB_WIDEN);
7237 else if (singleton == TREE_OPERAND (exp, 1))
7238 TREE_OPERAND (exp, 0)
7239 = invert_truthvalue (TREE_OPERAND (exp, 0));
7242 do_pending_stack_adjust ();
7243 NO_DEFER_POP;
7244 op0 = gen_label_rtx ();
7246 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7248 if (temp != 0)
7250 /* If the target conflicts with the other operand of the
7251 binary op, we can't use it. Also, we can't use the target
7252 if it is a hard register, because evaluating the condition
7253 might clobber it. */
7254 if ((binary_op
7255 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7256 || (GET_CODE (temp) == REG
7257 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7258 temp = gen_reg_rtx (mode);
7259 store_expr (singleton, temp, 0);
7261 else
7262 expand_expr (singleton,
7263 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7264 if (singleton == TREE_OPERAND (exp, 1))
7265 jumpif (TREE_OPERAND (exp, 0), op0);
7266 else
7267 jumpifnot (TREE_OPERAND (exp, 0), op0);
7269 start_cleanup_deferral ();
7270 if (binary_op && temp == 0)
7271 /* Just touch the other operand. */
7272 expand_expr (TREE_OPERAND (binary_op, 1),
7273 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7274 else if (binary_op)
7275 store_expr (build (TREE_CODE (binary_op), type,
7276 make_tree (type, temp),
7277 TREE_OPERAND (binary_op, 1)),
7278 temp, 0);
7279 else
7280 store_expr (build1 (TREE_CODE (unary_op), type,
7281 make_tree (type, temp)),
7282 temp, 0);
7283 op1 = op0;
7285 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7286 comparison operator. If we have one of these cases, set the
7287 output to A, branch on A (cse will merge these two references),
7288 then set the output to FOO. */
7289 else if (temp
7290 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7291 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7292 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7293 TREE_OPERAND (exp, 1), 0)
7294 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7295 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7296 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7298 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7299 temp = gen_reg_rtx (mode);
7300 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7301 jumpif (TREE_OPERAND (exp, 0), op0);
7303 start_cleanup_deferral ();
7304 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7305 op1 = op0;
7307 else if (temp
7308 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7309 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7310 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7311 TREE_OPERAND (exp, 2), 0)
7312 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7313 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7314 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7316 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7317 temp = gen_reg_rtx (mode);
7318 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7319 jumpifnot (TREE_OPERAND (exp, 0), op0);
7321 start_cleanup_deferral ();
7322 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7323 op1 = op0;
7325 else
7327 op1 = gen_label_rtx ();
7328 jumpifnot (TREE_OPERAND (exp, 0), op0);
7330 start_cleanup_deferral ();
7331 if (temp != 0)
7332 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7333 else
7334 expand_expr (TREE_OPERAND (exp, 1),
7335 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7336 end_cleanup_deferral ();
7337 emit_queue ();
7338 emit_jump_insn (gen_jump (op1));
7339 emit_barrier ();
7340 emit_label (op0);
7341 start_cleanup_deferral ();
7342 if (temp != 0)
7343 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7344 else
7345 expand_expr (TREE_OPERAND (exp, 2),
7346 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7349 end_cleanup_deferral ();
7351 emit_queue ();
7352 emit_label (op1);
7353 OK_DEFER_POP;
7355 return temp;
7358 case TARGET_EXPR:
7360 /* Something needs to be initialized, but we didn't know
7361 where that thing was when building the tree. For example,
7362 it could be the return value of a function, or a parameter
7363 to a function which is laid out on the stack, or a temporary
7364 variable which must be passed by reference.
7366 We guarantee that the expression will either be constructed
7367 or copied into our original target. */
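/* A typical source of a TARGET_EXPR, as a sketch only: the C++
   initialization

       S s = f ();

   where f returns S by value; f then constructs its result directly
   in the storage for s (the "slot"), or the result is copied there
   when direct construction is not possible.  */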
7369 tree slot = TREE_OPERAND (exp, 0);
7370 tree cleanups = NULL_TREE;
7371 tree exp1;
7373 if (TREE_CODE (slot) != VAR_DECL)
7374 abort ();
7376 if (! ignore)
7377 target = original_target;
7379 if (target == 0)
7381 if (DECL_RTL (slot) != 0)
7383 target = DECL_RTL (slot);
7384 /* If we have already expanded the slot, don't do
7385 it again. (mrs) */
7386 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7387 return target;
7389 else
7391 target = assign_temp (type, 2, 0, 1);
7392 /* All temp slots at this level must not conflict. */
7393 preserve_temp_slots (target);
7394 DECL_RTL (slot) = target;
7395 if (TREE_ADDRESSABLE (slot))
7397 TREE_ADDRESSABLE (slot) = 0;
7398 mark_addressable (slot);
7401 /* Since SLOT is not known to the called function
7402 to belong to its stack frame, we must build an explicit
7403 cleanup. This case occurs when we must build up a reference
7404 to pass as an argument. In this case, it is
7405 very likely that such a reference need not be
7406 built here. */
7408 if (TREE_OPERAND (exp, 2) == 0)
7409 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
7410 cleanups = TREE_OPERAND (exp, 2);
7413 else
7415 /* This case does occur when expanding a parameter which
7416 needs to be constructed on the stack. The target
7417 is the actual stack address that we want to initialize.
7418 The function we call will perform the cleanup in this case. */
7420 /* If we have already assigned it space, use that space,
7421 not the target that we were passed in, as our target
7422 parameter is only a hint. */
7423 if (DECL_RTL (slot) != 0)
7425 target = DECL_RTL (slot);
7426 /* If we have already expanded the slot, don't do
7427 it again. (mrs) */
7428 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7429 return target;
7431 else
7433 DECL_RTL (slot) = target;
7434 /* If we must have an addressable slot, then make sure that
7435 the RTL that we just stored in slot is OK. */
7436 if (TREE_ADDRESSABLE (slot))
7438 TREE_ADDRESSABLE (slot) = 0;
7439 mark_addressable (slot);
7444 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
7445 /* Mark it as expanded. */
7446 TREE_OPERAND (exp, 1) = NULL_TREE;
7448 TREE_USED (slot) = 1;
7449 store_expr (exp1, target, 0);
7451 expand_decl_cleanup (NULL_TREE, cleanups);
7453 return target;
7456 case INIT_EXPR:
7458 tree lhs = TREE_OPERAND (exp, 0);
7459 tree rhs = TREE_OPERAND (exp, 1);
7460 tree noncopied_parts = 0;
7461 tree lhs_type = TREE_TYPE (lhs);
7463 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7464 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7465 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7466 TYPE_NONCOPIED_PARTS (lhs_type));
7467 while (noncopied_parts != 0)
7469 expand_assignment (TREE_VALUE (noncopied_parts),
7470 TREE_PURPOSE (noncopied_parts), 0, 0);
7471 noncopied_parts = TREE_CHAIN (noncopied_parts);
7473 return temp;
7476 case MODIFY_EXPR:
7478 /* If lhs is complex, expand calls in rhs before computing it.
7479 That's so we don't compute a pointer and save it over a call.
7480 If lhs is simple, compute it first so we can give it as a
7481 target if the rhs is just a call. This avoids an extra temp and copy
7482 and prevents a partial subsumption which makes bad code.
7483 Actually we could treat component_ref's of vars like vars. */
7485 tree lhs = TREE_OPERAND (exp, 0);
7486 tree rhs = TREE_OPERAND (exp, 1);
7487 tree noncopied_parts = 0;
7488 tree lhs_type = TREE_TYPE (lhs);
7490 temp = 0;
7492 if (TREE_CODE (lhs) != VAR_DECL
7493 && TREE_CODE (lhs) != RESULT_DECL
7494 && TREE_CODE (lhs) != PARM_DECL
7495 && ! (TREE_CODE (lhs) == INDIRECT_REF
7496 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7497 preexpand_calls (exp);
7499 /* Check for |= or &= of a bitfield of size one into another bitfield
7500 of size one. In this case, (unless we need the result of the
7501 assignment) we can do this more efficiently with a
7502 test followed by an assignment, if necessary.
7504 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7505 things change so that we do, this code should be enhanced to
7506 support it. */
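/* Sketch of the transformation (not original commentary): for
   one-bit fields a and b,

       s.a |= s.b;   becomes   if (s.b) s.a = 1;
       s.a &= s.b;   becomes   if (! s.b) s.a = 0;

   since OR with 0 and AND with 1 leave the field unchanged.  */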
7507 if (ignore
7508 && TREE_CODE (lhs) == COMPONENT_REF
7509 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7510 || TREE_CODE (rhs) == BIT_AND_EXPR)
7511 && TREE_OPERAND (rhs, 0) == lhs
7512 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7513 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7514 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7516 rtx label = gen_label_rtx ();
7518 do_jump (TREE_OPERAND (rhs, 1),
7519 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7520 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7521 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7522 (TREE_CODE (rhs) == BIT_IOR_EXPR
7523 ? integer_one_node
7524 : integer_zero_node)),
7525 0, 0);
7526 do_pending_stack_adjust ();
7527 emit_label (label);
7528 return const0_rtx;
7531 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7532 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7533 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7534 TYPE_NONCOPIED_PARTS (lhs_type));
7536 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7537 while (noncopied_parts != 0)
7539 expand_assignment (TREE_PURPOSE (noncopied_parts),
7540 TREE_VALUE (noncopied_parts), 0, 0);
7541 noncopied_parts = TREE_CHAIN (noncopied_parts);
7543 return temp;
7546 case RETURN_EXPR:
7547 if (!TREE_OPERAND (exp, 0))
7548 expand_null_return ();
7549 else
7550 expand_return (TREE_OPERAND (exp, 0));
7551 return const0_rtx;
7553 case PREINCREMENT_EXPR:
7554 case PREDECREMENT_EXPR:
7555 return expand_increment (exp, 0, ignore);
7557 case POSTINCREMENT_EXPR:
7558 case POSTDECREMENT_EXPR:
7559 /* Faster to treat as pre-increment if result is not used. */
7560 return expand_increment (exp, ! ignore, ignore);
7562 case ADDR_EXPR:
7563 /* If nonzero, TEMP will be set to the address of something that might
7564 be a MEM corresponding to a stack slot. */
7565 temp = 0;
7567 /* Are we taking the address of a nested function? */
7568 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7569 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7570 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
7571 && ! TREE_STATIC (exp))
7573 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7574 op0 = force_operand (op0, target);
7576 /* If we are taking the address of something erroneous, just
7577 return a zero. */
7578 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7579 return const0_rtx;
7580 else
7582 /* We make sure to pass const0_rtx down if we came in with
7583 ignore set, to avoid running the cleanups twice. */
7584 op0 = expand_expr (TREE_OPERAND (exp, 0),
7585 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7586 (modifier == EXPAND_INITIALIZER
7587 ? modifier : EXPAND_CONST_ADDRESS));
7589 /* If we are going to ignore the result, OP0 will have been set
7590 to const0_rtx, so just return it. Don't get confused and
7591 think we are taking the address of the constant. */
7592 if (ignore)
7593 return op0;
7595 op0 = protect_from_queue (op0, 0);
7597 /* We would like the object in memory. If it is a constant,
7598 we can have it be statically allocated into memory. For
7599 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7600 memory and store the value into it. */
7602 if (CONSTANT_P (op0))
7603 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7604 op0);
7605 else if (GET_CODE (op0) == MEM)
7607 mark_temp_addr_taken (op0);
7608 temp = XEXP (op0, 0);
7611 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7612 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7614 /* If this object is in a register, it must not
7615 be BLKmode. */
7616 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7617 rtx memloc = assign_temp (inner_type, 1, 1, 1);
7619 mark_temp_addr_taken (memloc);
7620 emit_move_insn (memloc, op0);
7621 op0 = memloc;
7624 if (GET_CODE (op0) != MEM)
7625 abort ();
7627 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7629 temp = XEXP (op0, 0);
7630 #ifdef POINTERS_EXTEND_UNSIGNED
7631 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7632 && mode == ptr_mode)
7633 temp = convert_memory_address (ptr_mode, temp);
7634 #endif
7635 return temp;
7638 op0 = force_operand (XEXP (op0, 0), target);
7641 if (flag_force_addr && GET_CODE (op0) != REG)
7642 op0 = force_reg (Pmode, op0);
7644 if (GET_CODE (op0) == REG
7645 && ! REG_USERVAR_P (op0))
7646 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
7648 /* If we might have had a temp slot, add an equivalent address
7649 for it. */
7650 if (temp != 0)
7651 update_temp_slot_address (temp, op0);
7653 #ifdef POINTERS_EXTEND_UNSIGNED
7654 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7655 && mode == ptr_mode)
7656 op0 = convert_memory_address (ptr_mode, op0);
7657 #endif
7659 return op0;
7661 case ENTRY_VALUE_EXPR:
7662 abort ();
7664 /* COMPLEX type for Extended Pascal & Fortran */
7665 case COMPLEX_EXPR:
7667 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7668 rtx insns;
7670 /* Get the rtx code of the operands. */
7671 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7672 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7674 if (! target)
7675 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7677 start_sequence ();
7679 /* Move the real (op0) and imaginary (op1) parts to their location. */
7680 emit_move_insn (gen_realpart (mode, target), op0);
7681 emit_move_insn (gen_imagpart (mode, target), op1);
7683 insns = get_insns ();
7684 end_sequence ();
7686 /* Complex construction should appear as a single unit. */
7687 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7688 each with a separate pseudo as destination.
7689 It's not correct for flow to treat them as a unit. */
7690 if (GET_CODE (target) != CONCAT)
7691 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7692 else
7693 emit_insns (insns);
7695 return target;
7698 case REALPART_EXPR:
7699 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7700 return gen_realpart (mode, op0);
7702 case IMAGPART_EXPR:
7703 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7704 return gen_imagpart (mode, op0);
7706 case CONJ_EXPR:
7708 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7709 rtx imag_t;
7710 rtx insns;
7712 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7714 if (! target)
7715 target = gen_reg_rtx (mode);
7717 start_sequence ();
7719 /* Store the realpart and the negated imagpart to target. */
7720 emit_move_insn (gen_realpart (partmode, target),
7721 gen_realpart (partmode, op0));
7723 imag_t = gen_imagpart (partmode, target);
7724 temp = expand_unop (partmode, neg_optab,
7725 gen_imagpart (partmode, op0), imag_t, 0);
7726 if (temp != imag_t)
7727 emit_move_insn (imag_t, temp);
7729 insns = get_insns ();
7730 end_sequence ();
7732 /* Conjugate should appear as a single unit.
7733 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
7734 each with a separate pseudo as destination.
7735 It's not correct for flow to treat them as a unit. */
7736 if (GET_CODE (target) != CONCAT)
7737 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7738 else
7739 emit_insns (insns);
7741 return target;
7744 case TRY_CATCH_EXPR:
7746 tree handler = TREE_OPERAND (exp, 1);
7748 expand_eh_region_start ();
7750 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7752 expand_eh_region_end (handler);
7754 return op0;
7757 case POPDCC_EXPR:
7759 rtx dcc = get_dynamic_cleanup_chain ();
7760 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
7761 return const0_rtx;
7764 case POPDHC_EXPR:
7766 rtx dhc = get_dynamic_handler_chain ();
7767 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
7768 return const0_rtx;
7771 case ERROR_MARK:
7772 op0 = CONST0_RTX (tmode);
7773 if (op0 != 0)
7774 return op0;
7775 return const0_rtx;
7777 default:
7778 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7781 /* Here to do an ordinary binary operator, generating an instruction
7782 from the optab already placed in `this_optab'. */
7783 binop:
7784 preexpand_calls (exp);
7785 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7786 subtarget = 0;
7787 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7788 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7789 binop2:
7790 temp = expand_binop (mode, this_optab, op0, op1, target,
7791 unsignedp, OPTAB_LIB_WIDEN);
7792 if (temp == 0)
7793 abort ();
7794 return temp;
7799 /* Return the alignment in bits of EXP, a pointer valued expression.
7800 But don't return more than MAX_ALIGN no matter what.
7801 The alignment returned is, by default, the alignment of the thing that
7802 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7804 Otherwise, look at the expression to see if we can do better, i.e., if the
7805 expression is actually pointing at an object whose alignment is tighter. */
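/* Worked example (a sketch, assuming a 64-bit-aligned double): for

       double d;  ... (char *) &d ...

   the NOP_EXPR case strips the cast, and the ADDR_EXPR case then
   returns DECL_ALIGN (d) -- typically 64 -- rather than the 8-bit
   alignment implied by the char * type.  */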
7807 static int
7808 get_pointer_alignment (exp, max_align)
7809 tree exp;
7810 unsigned max_align;
7812 unsigned align, inner;
7814 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7815 return 0;
7817 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7818 align = MIN (align, max_align);
7820 while (1)
7822 switch (TREE_CODE (exp))
7824 case NOP_EXPR:
7825 case CONVERT_EXPR:
7826 case NON_LVALUE_EXPR:
7827 exp = TREE_OPERAND (exp, 0);
7828 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7829 return align;
7830 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7831 align = MIN (inner, max_align);
7832 break;
7834 case PLUS_EXPR:
7835 /* If sum of pointer + int, restrict our maximum alignment to that
7836 imposed by the integer. If not, we can't do any better than
7837 ALIGN. */
7838 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7839 return align;
7841 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7842 & (max_align - 1))
7843 != 0)
7844 max_align >>= 1;
7846 exp = TREE_OPERAND (exp, 0);
7847 break;
7849 case ADDR_EXPR:
7850 /* See what we are pointing at and look at its alignment. */
7851 exp = TREE_OPERAND (exp, 0);
7852 if (TREE_CODE (exp) == FUNCTION_DECL)
7853 align = FUNCTION_BOUNDARY;
7854 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7855 align = DECL_ALIGN (exp);
7856 #ifdef CONSTANT_ALIGNMENT
7857 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7858 align = CONSTANT_ALIGNMENT (exp, align);
7859 #endif
7860 return MIN (align, max_align);
7862 default:
7863 return align;
7868 /* Return the tree node and offset if a given argument corresponds to
7869 a string constant. */
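/* e.g. (sketch): for an argument representing "abcd" + 2, i.e.
   PLUS_EXPR (ADDR_EXPR (STRING_CST "abcd"), 2) in either operand
   order, the STRING_CST is returned and *PTR_OFFSET is set to 2.  */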
7871 static tree
7872 string_constant (arg, ptr_offset)
7873 tree arg;
7874 tree *ptr_offset;
7876 STRIP_NOPS (arg);
7878 if (TREE_CODE (arg) == ADDR_EXPR
7879 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7881 *ptr_offset = integer_zero_node;
7882 return TREE_OPERAND (arg, 0);
7884 else if (TREE_CODE (arg) == PLUS_EXPR)
7886 tree arg0 = TREE_OPERAND (arg, 0);
7887 tree arg1 = TREE_OPERAND (arg, 1);
7889 STRIP_NOPS (arg0);
7890 STRIP_NOPS (arg1);
7892 if (TREE_CODE (arg0) == ADDR_EXPR
7893 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7895 *ptr_offset = arg1;
7896 return TREE_OPERAND (arg0, 0);
7898 else if (TREE_CODE (arg1) == ADDR_EXPR
7899 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7901 *ptr_offset = arg0;
7902 return TREE_OPERAND (arg1, 0);
7906 return 0;
7909 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7910 way, because it could contain a zero byte in the middle.
7911 TREE_STRING_LENGTH is the size of the character array, not the string.
7913 Unfortunately, string_constant can't access the values of const char
7914 arrays with initializers, so neither can we here. */
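/* e.g. (sketch): c_strlen of "hello" + 2 yields size_int (3), while
   "foo\0bar" with a non-constant offset returns 0 and the caller
   falls back to a run-time strlen call.  */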
7916 static tree
7917 c_strlen (src)
7918 tree src;
7920 tree offset_node;
7921 int offset, max;
7922 char *ptr;
7924 src = string_constant (src, &offset_node);
7925 if (src == 0)
7926 return 0;
7927 max = TREE_STRING_LENGTH (src);
7928 ptr = TREE_STRING_POINTER (src);
7929 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7931 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7932 compute the offset to the following null if we don't know where to
7933 start searching for it. */
7934 int i;
7935 for (i = 0; i < max; i++)
7936 if (ptr[i] == 0)
7937 return 0;
7938 /* We don't know the starting offset, but we do know that the string
7939 has no internal zero bytes. We can assume that the offset falls
7940 within the bounds of the string; otherwise, the programmer deserves
7941 what he gets. Subtract the offset from the length of the string,
7942 and return that. */
7943 /* This would perhaps not be valid if we were dealing with named
7944 arrays in addition to literal string constants. */
7945 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7948 /* We have a known offset into the string. Start searching there for
7949 a null character. */
7950 if (offset_node == 0)
7951 offset = 0;
7952 else
7954 /* Did we get a long long offset? If so, punt. */
7955 if (TREE_INT_CST_HIGH (offset_node) != 0)
7956 return 0;
7957 offset = TREE_INT_CST_LOW (offset_node);
7959 /* If the offset is known to be out of bounds, warn, and call strlen at
7960 runtime. */
7961 if (offset < 0 || offset > max)
7963 warning ("offset outside bounds of constant string");
7964 return 0;
7966 /* Use strlen to search for the first zero byte. Since any strings
7967 constructed with build_string will have nulls appended, we win even
7968 if we get handed something like (char[4])"abcd".
7970 Since OFFSET is our starting index into the string, no further
7971 calculation is needed. */
7972 return size_int (strlen (ptr + offset));
7975 rtx
7976 expand_builtin_return_addr (fndecl_code, count, tem)
7977 enum built_in_function fndecl_code;
7978 int count;
7979 rtx tem;
7981 int i;
7983 /* Some machines need special handling before we can access
7984 arbitrary frames. For example, on the sparc, we must first flush
7985 all register windows to the stack. */
7986 #ifdef SETUP_FRAME_ADDRESSES
7987 if (count > 0)
7988 SETUP_FRAME_ADDRESSES ();
7989 #endif
7991 /* On the sparc, the return address is not in the frame, it is in a
7992 register. There is no way to access it off of the current frame
7993 pointer, but it can be accessed off the previous frame pointer by
7994 reading the value from the register window save area. */
7995 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7996 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
7997 count--;
7998 #endif
8000 /* Scan back COUNT frames to the specified frame. */
8001 for (i = 0; i < count; i++)
8003 /* Assume the dynamic chain pointer is in the word that the
8004 frame address points to, unless otherwise specified. */
8005 #ifdef DYNAMIC_CHAIN_ADDRESS
8006 tem = DYNAMIC_CHAIN_ADDRESS (tem);
8007 #endif
8008 tem = memory_address (Pmode, tem);
8009 tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
8012 /* For __builtin_frame_address, return what we've got. */
8013 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
8014 return tem;
8016 /* For __builtin_return_address, get the return address from that
8017 frame. */
8018 #ifdef RETURN_ADDR_RTX
8019 tem = RETURN_ADDR_RTX (count, tem);
8020 #else
8021 tem = memory_address (Pmode,
8022 plus_constant (tem, GET_MODE_SIZE (Pmode)));
8023 tem = gen_rtx_MEM (Pmode, tem);
8024 #endif
8025 return tem;
8028 /* __builtin_setjmp is passed a pointer to an array of five words (not
8029 all will be used on all machines). It operates similarly to the C
8030 library function of the same name, but is more efficient. Much of
8031 the code below (and for longjmp) is copied from the handling of
8032 non-local gotos.
8034 NOTE: This is intended for use by GNAT and the exception handling
8035 scheme in the compiler, and will only work in the way they use
8036 it. */
8038 rtx
8039 expand_builtin_setjmp (buf_addr, target, first_label, next_label)
8040 rtx buf_addr;
8041 rtx target;
8042 rtx first_label, next_label;
8044 rtx lab1 = gen_label_rtx ();
8045 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
8046 enum machine_mode value_mode;
8047 rtx stack_save;
8049 value_mode = TYPE_MODE (integer_type_node);
8051 #ifdef POINTERS_EXTEND_UNSIGNED
8052 buf_addr = convert_memory_address (Pmode, buf_addr);
8053 #endif
8055 buf_addr = force_reg (Pmode, buf_addr);
8057 if (target == 0 || GET_CODE (target) != REG
8058 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8059 target = gen_reg_rtx (value_mode);
8061 emit_queue ();
8063 /* We store the frame pointer and the address of lab1 in the buffer
8064 and use the rest of it for the stack save area, which is
8065 machine-dependent. */
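/* Resulting buffer layout, in Pmode words (a sketch, not normative):
   buf[0]        frame pointer (BUILTIN_SETJMP_FRAME_VALUE)
   buf[1]        address of the receiver label lab1
   buf[2] onward machine-dependent stack save area, in sa_mode  */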
8067 #ifndef BUILTIN_SETJMP_FRAME_VALUE
8068 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
8069 #endif
8071 emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
8072 BUILTIN_SETJMP_FRAME_VALUE);
8073 emit_move_insn (validize_mem
8074 (gen_rtx_MEM (Pmode,
8075 plus_constant (buf_addr,
8076 GET_MODE_SIZE (Pmode)))),
8077 gen_rtx_LABEL_REF (Pmode, lab1));
8079 stack_save = gen_rtx_MEM (sa_mode,
8080 plus_constant (buf_addr,
8081 2 * GET_MODE_SIZE (Pmode)));
8082 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8084 /* If there is further processing to do, do it. */
8085 #ifdef HAVE_builtin_setjmp_setup
8086 if (HAVE_builtin_setjmp_setup)
8087 emit_insn (gen_builtin_setjmp_setup (buf_addr));
8088 #endif
8090 /* Set TARGET to zero and branch to the first-time-through label. */
8091 emit_move_insn (target, const0_rtx);
8092 emit_jump_insn (gen_jump (first_label));
8093 emit_barrier ();
8094 emit_label (lab1);
8096 /* Tell flow about the strange goings on. */
8097 current_function_has_nonlocal_label = 1;
8099 /* Getting here clobbers the FP, so we have to make sure it's
8100 marked as used by this function. */
8101 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8103 /* Mark the static chain as clobbered here so life information
8104 doesn't get messed up for it. */
8105 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
8107 /* Now put in the code to restore the frame pointer, and argument
8108 pointer, if needed. The code below is from expand_end_bindings
8109 in stmt.c; see detailed documentation there. */
8110 #ifdef HAVE_nonlocal_goto
8111 if (! HAVE_nonlocal_goto)
8112 #endif
8113 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8115 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8116 if (fixed_regs[ARG_POINTER_REGNUM])
8118 #ifdef ELIMINABLE_REGS
8119 int i;
8120 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8122 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8123 if (elim_regs[i].from == ARG_POINTER_REGNUM
8124 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8125 break;
8127 if (i == sizeof elim_regs / sizeof elim_regs [0])
8128 #endif
8130 /* Now restore our arg pointer from the address at which it
8131 was saved in our stack frame.
8132 If space hasn't been allocated for it yet, make
8133 some now. */
8134 if (arg_pointer_save_area == 0)
8135 arg_pointer_save_area
8136 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8137 emit_move_insn (virtual_incoming_args_rtx,
8138 copy_to_reg (arg_pointer_save_area));
8141 #endif
8143 #ifdef HAVE_builtin_setjmp_receiver
8144 if (HAVE_builtin_setjmp_receiver)
8145 emit_insn (gen_builtin_setjmp_receiver (lab1));
8146 else
8147 #endif
8148 #ifdef HAVE_nonlocal_goto_receiver
8149 if (HAVE_nonlocal_goto_receiver)
8150 emit_insn (gen_nonlocal_goto_receiver ());
8151 else
8152 #endif
8154 ; /* Nothing */
8157 /* Set TARGET, and branch to the next-time-through label. */
8158 emit_move_insn (target, const1_rtx);
8159 emit_jump_insn (gen_jump (next_label));
8160 emit_barrier ();
8162 return target;
8165 void
8166 expand_builtin_longjmp (buf_addr, value)
8167 rtx buf_addr, value;
8169 rtx fp, lab, stack;
8170 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
8172 #ifdef POINTERS_EXTEND_UNSIGNED
8173 buf_addr = convert_memory_address (Pmode, buf_addr);
8174 #endif
8175 buf_addr = force_reg (Pmode, buf_addr);
8177 /* We used to store value in static_chain_rtx, but that fails if pointers
8178 are smaller than integers. We instead require that the user pass
8179 a second argument of 1, because that is what builtin_setjmp will
8180 return. This also makes EH slightly more efficient, since we are no
8181 longer copying around a value that we don't care about. */
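/* So the only accepted form is (sketch):  __builtin_longjmp (buf, 1);
   anything else aborts just below.  */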
8182 if (value != const1_rtx)
8183 abort ();
8185 #ifdef HAVE_builtin_longjmp
8186 if (HAVE_builtin_longjmp)
8187 emit_insn (gen_builtin_longjmp (buf_addr));
8188 else
8189 #endif
8191 fp = gen_rtx_MEM (Pmode, buf_addr);
8192 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
8193 GET_MODE_SIZE (Pmode)));
8195 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
8196 2 * GET_MODE_SIZE (Pmode)));
8198 /* Pick up FP, label, and SP from the block and jump. This code is
8199 from expand_goto in stmt.c; see there for detailed comments. */
8200 #if HAVE_nonlocal_goto
8201 if (HAVE_nonlocal_goto)
8202 /* We have to pass a value to the nonlocal_goto pattern that will
8203 get copied into the static_chain pointer, but it does not matter
8204 what that value is, because builtin_setjmp does not use it. */
8205 emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
8206 else
8207 #endif
8209 lab = copy_to_reg (lab);
8211 emit_move_insn (hard_frame_pointer_rtx, fp);
8212 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
8214 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8215 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
8216 emit_indirect_jump (lab);
8221 static rtx
8222 get_memory_rtx (exp)
8223 tree exp;
8225 rtx mem;
8226 int is_aggregate;
8228 mem = gen_rtx_MEM (BLKmode,
8229 memory_address (BLKmode,
8230 expand_expr (exp, NULL_RTX,
8231 ptr_mode, EXPAND_SUM)));
8233 RTX_UNCHANGING_P (mem) = TREE_READONLY (exp);
8235 /* Figure out the type of the object pointed to. Set MEM_IN_STRUCT_P
8236 if the value is the address of a structure or if the expression is
8237 cast to a pointer to structure type. */
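/* e.g. (sketch): for an argument of the form (struct S *) p, the
   NOP_EXPR loop below sees a cast to pointer-to-aggregate and sets
   MEM_IN_STRUCT_P; for &arr, the ADDR_EXPR branch inspects the
   pointed-to array type directly.  */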
8238 is_aggregate = 0;
8240 while (TREE_CODE (exp) == NOP_EXPR)
8242 tree cast_type = TREE_TYPE (exp);
8243 if (TREE_CODE (cast_type) == POINTER_TYPE
8244 && AGGREGATE_TYPE_P (TREE_TYPE (cast_type)))
8246 is_aggregate = 1;
8247 break;
8249 exp = TREE_OPERAND (exp, 0);
8252 if (is_aggregate == 0)
8254 tree type;
8256 if (TREE_CODE (exp) == ADDR_EXPR)
8257 /* If this is the address of an object, check whether the
8258 object is an array. */
8259 type = TREE_TYPE (TREE_OPERAND (exp, 0));
8260 else
8261 type = TREE_TYPE (TREE_TYPE (exp));
8262 is_aggregate = AGGREGATE_TYPE_P (type);
8265 MEM_IN_STRUCT_P (mem) = is_aggregate;
8266 return mem;
8270 /* Expand an expression EXP that calls a built-in function,
8271 with result going to TARGET if that's convenient
8272 (and in mode MODE if that's convenient).
8273 SUBTARGET may be used as the target for computing one of EXP's operands.
8274 IGNORE is nonzero if the value is to be ignored. */
8276 #define CALLED_AS_BUILT_IN(NODE) \
8277 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
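/* Sketch of the effect: "__builtin_strlen (s)" may be open-coded even
   at -O0, while a plain "strlen (s)" is expanded inline only when
   optimizing; see the CALLED_AS_BUILT_IN tests below.  */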
8279 static rtx
8280 expand_builtin (exp, target, subtarget, mode, ignore)
8281 tree exp;
8282 rtx target;
8283 rtx subtarget;
8284 enum machine_mode mode;
8285 int ignore;
8287 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8288 tree arglist = TREE_OPERAND (exp, 1);
8289 rtx op0;
8290 rtx lab1, insns;
8291 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
8292 optab builtin_optab;
8294 switch (DECL_FUNCTION_CODE (fndecl))
8296 case BUILT_IN_ABS:
8297 case BUILT_IN_LABS:
8298 case BUILT_IN_FABS:
8299 /* build_function_call changes these into ABS_EXPR. */
8300 abort ();
8302 case BUILT_IN_SIN:
8303 case BUILT_IN_COS:
8304 /* Treat these like sqrt, but only if the user asks for them. */
8305 if (! flag_fast_math)
8306 break;
8307 case BUILT_IN_FSQRT:
8308 /* If not optimizing, call the library function. */
8309 if (! optimize)
8310 break;
8312 if (arglist == 0
8313 /* Arg could be wrong type if user redeclared this fcn wrong. */
8314 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
8315 break;
8317 /* Stabilize and compute the argument. */
8318 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8319 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8321 exp = copy_node (exp);
8322 arglist = copy_node (arglist);
8323 TREE_OPERAND (exp, 1) = arglist;
8324 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8326 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8328 /* Make a suitable register to place result in. */
8329 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8331 emit_queue ();
8332 start_sequence ();
8334 switch (DECL_FUNCTION_CODE (fndecl))
8336 case BUILT_IN_SIN:
8337 builtin_optab = sin_optab; break;
8338 case BUILT_IN_COS:
8339 builtin_optab = cos_optab; break;
8340 case BUILT_IN_FSQRT:
8341 builtin_optab = sqrt_optab; break;
8342 default:
8343 abort ();
8346 /* Compute into TARGET.
8347 Set TARGET to wherever the result comes back. */
8348 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8349 builtin_optab, op0, target, 0);
8351 /* If we were unable to expand via the builtin, stop the
8352 sequence (without outputting the insns) and break, causing
8353 a call to the library function. */
8354 if (target == 0)
8356 end_sequence ();
8357 break;
8360 /* Check the results by default. But if flag_fast_math is turned on,
8361 then assume sqrt will always be called with valid arguments. */
8363 if (! flag_fast_math)
8365 /* Don't define the builtin FP instructions
8366 if your machine is not IEEE. */
8367 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8368 abort ();
8370 lab1 = gen_label_rtx ();
8372 /* Test the result; if it is NaN, set errno=EDOM because
8373 the argument was not in the domain. */
8374 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
8375 emit_jump_insn (gen_beq (lab1));
8377 #ifdef TARGET_EDOM
8379 #ifdef GEN_ERRNO_RTX
8380 rtx errno_rtx = GEN_ERRNO_RTX;
8381 #else
8382 rtx errno_rtx
8383 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
8384 #endif
8386 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8388 #else
8389 /* We can't set errno=EDOM directly; let the library call do it.
8390 Pop the arguments right away in case the call gets deleted. */
8391 NO_DEFER_POP;
8392 expand_call (exp, target, 0);
8393 OK_DEFER_POP;
8394 #endif
8396 emit_label (lab1);
8399 /* Output the entire sequence. */
8400 insns = get_insns ();
8401 end_sequence ();
8402 emit_insns (insns);
8404 return target;
8406 case BUILT_IN_FMOD:
8407 break;
8409 /* __builtin_apply_args returns a block of memory allocated on
8410 the stack into which is stored the arg pointer, structure
8411 value address, static chain, and all the registers that might
8412 possibly be used in performing a function call. The code is
8413 moved to the start of the function so the incoming values are
8414 saved. */
8415 case BUILT_IN_APPLY_ARGS:
8416 /* Don't do __builtin_apply_args more than once in a function.
8417 Save the result of the first call and reuse it. */
8418 if (apply_args_value != 0)
8419 return apply_args_value;
8421 /* When this function is called, it means that registers must be
8422 saved on entry to this function. So we migrate the
8423 call to the first insn of this function. */
8424 rtx temp;
8425 rtx seq;
8427 start_sequence ();
8428 temp = expand_builtin_apply_args ();
8429 seq = get_insns ();
8430 end_sequence ();
8432 apply_args_value = temp;
8434 /* Put the sequence after the NOTE that starts the function.
8435 If this is inside a SEQUENCE, make the outer-level insn
8436 chain current, so the code is placed at the start of the
8437 function. */
8438 push_topmost_sequence ();
8439 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8440 pop_topmost_sequence ();
8441 return temp;
8444 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8445 FUNCTION with a copy of the parameters described by
8446 ARGUMENTS, and ARGSIZE. It returns a block of memory
8447 allocated on the stack into which is stored all the registers
8448 that might possibly be used for returning the result of a
8449 function. ARGUMENTS is the value returned by
8450 __builtin_apply_args. ARGSIZE is the number of bytes of
8451 arguments that must be copied. ??? How should this value be
8452 computed? We'll also need a safe worst case value for varargs
8453 functions. */
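/* Typical use, as a sketch only (64 is an arbitrary caller-chosen
   bound on the argument block size, not something computed here):

       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*) ()) fn, args, 64);
       __builtin_return (ret);  */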
8454 case BUILT_IN_APPLY:
8455 if (arglist == 0
8456 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8457 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
8458 || TREE_CHAIN (arglist) == 0
8459 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8460 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8461 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8462 return const0_rtx;
8463 else
8465 int i;
8466 tree t;
8467 rtx ops[3];
8469 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8470 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8472 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8475 /* __builtin_return (RESULT) causes the function to return the
8476 value described by RESULT. RESULT is the address of the block of
8477 memory returned by __builtin_apply. */
8478 case BUILT_IN_RETURN:
8479 if (arglist
8480 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8481 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8482 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8483 NULL_RTX, VOIDmode, 0));
8484 return const0_rtx;
8486 case BUILT_IN_SAVEREGS:
8487 /* Don't do __builtin_saveregs more than once in a function.
8488 Save the result of the first call and reuse it. */
8489 if (saveregs_value != 0)
8490 return saveregs_value;
8492 /* When this function is called, it means that registers must be
8493 saved on entry to this function. So we migrate the
8494 call to the first insn of this function. */
8495 rtx temp;
8496 rtx seq;
8498 /* Now really call the function. `expand_call' does not call
8499 expand_builtin, so there is no danger of infinite recursion here. */
8500 start_sequence ();
8502 #ifdef EXPAND_BUILTIN_SAVEREGS
8503 /* Do whatever the machine needs done in this case. */
8504 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8505 #else
8506 /* The register where the function returns its value
8507 is likely to have something else in it, such as an argument.
8508 So preserve that register around the call. */
8510 if (value_mode != VOIDmode)
8512 rtx valreg = hard_libcall_value (value_mode);
8513 rtx saved_valreg = gen_reg_rtx (value_mode);
8515 emit_move_insn (saved_valreg, valreg);
8516 temp = expand_call (exp, target, ignore);
8517 emit_move_insn (valreg, saved_valreg);
8519 else
8520 /* Generate the call, putting the value in a pseudo. */
8521 temp = expand_call (exp, target, ignore);
8522 #endif
8524 seq = get_insns ();
8525 end_sequence ();
8527 saveregs_value = temp;
8529 /* Put the sequence after the NOTE that starts the function.
8530 If this is inside a SEQUENCE, make the outer-level insn
8531 chain current, so the code is placed at the start of the
8532 function. */
8533 push_topmost_sequence ();
8534 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8535 pop_topmost_sequence ();
8536 return temp;
8539 /* __builtin_args_info (N) returns word N of the arg space info
8540 for the current function. The number and meanings of words
8541 are controlled by the definition of CUMULATIVE_ARGS. */
8542 case BUILT_IN_ARGS_INFO:
8544 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8545 int *word_ptr = (int *) &current_function_args_info;
8546 #if 0
8547 /* These are used by the code below that is #if 0'ed away. */
8548 int i;
8549 tree type, elts, result;
8550 #endif
8552 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8553 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8554 __FILE__, __LINE__);
8556 if (arglist != 0)
8558 tree arg = TREE_VALUE (arglist);
8559 if (TREE_CODE (arg) != INTEGER_CST)
8560 error ("argument of `__builtin_args_info' must be constant");
8561 else
8563 int wordnum = TREE_INT_CST_LOW (arg);
8565 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8566 error ("argument of `__builtin_args_info' out of range");
8567 else
8568 return GEN_INT (word_ptr[wordnum]);
8571 else
8572 error ("missing argument in `__builtin_args_info'");
8574 return const0_rtx;
8576 #if 0
8577 for (i = 0; i < nwords; i++)
8578 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
8580 type = build_array_type (integer_type_node,
8581 build_index_type (build_int_2 (nwords, 0)));
8582 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8583 TREE_CONSTANT (result) = 1;
8584 TREE_STATIC (result) = 1;
8585 result = build (INDIRECT_REF, build_pointer_type (type), result);
8586 TREE_CONSTANT (result) = 1;
8587 return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
8588 #endif
8591 /* Return the address of the first anonymous stack arg. */
8592 case BUILT_IN_NEXT_ARG:
8594 tree fntype = TREE_TYPE (current_function_decl);
8596 if ((TYPE_ARG_TYPES (fntype) == 0
8597 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8598 == void_type_node))
8599 && ! current_function_varargs)
8601 error ("`va_start' used in function with fixed args");
8602 return const0_rtx;
8605 if (arglist)
8607 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8608 tree arg = TREE_VALUE (arglist);
8610 /* Strip off all nops for the sake of the comparison. This
8611 is not quite the same as STRIP_NOPS. It does more.
8612 We must also strip off INDIRECT_EXPR for C++ reference
8613 parameters. */
8614 while (TREE_CODE (arg) == NOP_EXPR
8615 || TREE_CODE (arg) == CONVERT_EXPR
8616 || TREE_CODE (arg) == NON_LVALUE_EXPR
8617 || TREE_CODE (arg) == INDIRECT_REF)
8618 arg = TREE_OPERAND (arg, 0);
8619 if (arg != last_parm)
8620 warning ("second parameter of `va_start' not last named argument");
8622 else if (! current_function_varargs)
8623 /* Evidently an out of date version of <stdarg.h>; can't validate
8624 va_start's second argument, but can still work as intended. */
8625 warning ("`__builtin_next_arg' called without an argument");
8628 return expand_binop (Pmode, add_optab,
8629 current_function_internal_arg_pointer,
8630 current_function_arg_offset_rtx,
8631 NULL_RTX, 0, OPTAB_LIB_WIDEN);
8633 case BUILT_IN_CLASSIFY_TYPE:
8634 if (arglist != 0)
8636 tree type = TREE_TYPE (TREE_VALUE (arglist));
8637 enum tree_code code = TREE_CODE (type);
8638 if (code == VOID_TYPE)
8639 return GEN_INT (void_type_class);
8640 if (code == INTEGER_TYPE)
8641 return GEN_INT (integer_type_class);
8642 if (code == CHAR_TYPE)
8643 return GEN_INT (char_type_class);
8644 if (code == ENUMERAL_TYPE)
8645 return GEN_INT (enumeral_type_class);
8646 if (code == BOOLEAN_TYPE)
8647 return GEN_INT (boolean_type_class);
8648 if (code == POINTER_TYPE)
8649 return GEN_INT (pointer_type_class);
8650 if (code == REFERENCE_TYPE)
8651 return GEN_INT (reference_type_class);
8652 if (code == OFFSET_TYPE)
8653 return GEN_INT (offset_type_class);
8654 if (code == REAL_TYPE)
8655 return GEN_INT (real_type_class);
8656 if (code == COMPLEX_TYPE)
8657 return GEN_INT (complex_type_class);
8658 if (code == FUNCTION_TYPE)
8659 return GEN_INT (function_type_class);
8660 if (code == METHOD_TYPE)
8661 return GEN_INT (method_type_class);
8662 if (code == RECORD_TYPE)
8663 return GEN_INT (record_type_class);
8664 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8665 return GEN_INT (union_type_class);
8666 if (code == ARRAY_TYPE)
8668 if (TYPE_STRING_FLAG (type))
8669 return GEN_INT (string_type_class);
8670 else
8671 return GEN_INT (array_type_class);
8673 if (code == SET_TYPE)
8674 return GEN_INT (set_type_class);
8675 if (code == FILE_TYPE)
8676 return GEN_INT (file_type_class);
8677 if (code == LANG_TYPE)
8678 return GEN_INT (lang_type_class);
8680 return GEN_INT (no_type_class);
8682 case BUILT_IN_CONSTANT_P:
8683 if (arglist == 0)
8684 return const0_rtx;
8685 else
8687 tree arg = TREE_VALUE (arglist);
8689 STRIP_NOPS (arg);
8690 if (really_constant_p (arg)
8691 || (TREE_CODE (arg) == ADDR_EXPR
8692 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST))
8693 return const1_rtx;
8695 /* Only emit CONSTANT_P_RTX if CSE will be run.
8696 Moreover, we don't want to expand trees that have side effects,
8697 as the original __builtin_constant_p did not evaluate its
8698 argument at all, and we would break existing usage by changing
8699 this. This quirk was generally useful, eliminating a bit of hair
8700 in the writing of the macros that use this function. Now the
8701 same thing can be better accomplished in an inline function. */
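/* e.g. (sketch): __builtin_constant_p (3 * 7) yields const1_rtx here,
   while __builtin_constant_p (x) for a variable x becomes a
   CONSTANT_P_RTX for cse to resolve, or const0_rtx when cse will not
   be run.  */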
8703 if (! cse_not_expected && ! TREE_SIDE_EFFECTS (arg))
8705 /* Lazy fixup of old code: issue a warning and fail the test. */
8706 if (! can_handle_constant_p)
8708 warning ("Delayed evaluation of __builtin_constant_p not supported on this target.");
8709 warning ("Please report this as a bug to egcs-bugs@cygnus.com.");
8710 return const0_rtx;
8712 return gen_rtx_CONSTANT_P_RTX (TYPE_MODE (integer_type_node),
8713 expand_expr (arg, NULL_RTX,
8714 VOIDmode, 0));
8717 return const0_rtx;
8720 case BUILT_IN_FRAME_ADDRESS:
8721 /* The argument must be a nonnegative integer constant.
8722 It counts the number of frames to scan up the stack.
8723 The value is the address of that frame. */
8724 case BUILT_IN_RETURN_ADDRESS:
8725 /* The argument must be a nonnegative integer constant.
8726 It counts the number of frames to scan up the stack.
8727 The value is the return address saved in that frame. */
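/* e.g. __builtin_return_address (0) is the current function's own
   return address; the count must be a nonnegative literal such as 0
   or 1 (an illustrative note, not original commentary).  */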
8728 if (arglist == 0)
8729 /* Warning about missing arg was already issued. */
8730 return const0_rtx;
8731 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
8732 || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8734 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8735 error ("invalid arg to `__builtin_frame_address'");
8736 else
8737 error ("invalid arg to `__builtin_return_address'");
8738 return const0_rtx;
8740 else
8742 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8743 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8744 hard_frame_pointer_rtx);
8746 /* Some ports cannot access arbitrary stack frames. */
8747 if (tem == NULL)
8749 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8750 warning ("unsupported arg to `__builtin_frame_address'");
8751 else
8752 warning ("unsupported arg to `__builtin_return_address'");
8753 return const0_rtx;
8756 /* For __builtin_frame_address, return what we've got. */
8757 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8758 return tem;
8760 if (GET_CODE (tem) != REG)
8761 tem = copy_to_reg (tem);
8762 return tem;
8765 /* Returns the address of the area where the structure is returned,
8766 or 0 otherwise. */
8767 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
8768 if (arglist != 0
8769 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
8770 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
8771 return const0_rtx;
8772 else
8773 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
8775 case BUILT_IN_ALLOCA:
8776 if (arglist == 0
8777 /* Arg could be non-integer if user redeclared this fcn wrong. */
8778 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8779 break;
8781 /* Compute the argument. */
8782 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8784 /* Allocate the desired space. */
8785 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
8787 case BUILT_IN_FFS:
8788 /* If not optimizing, call the library function. */
8789 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8790 break;
8792 if (arglist == 0
8793 /* Arg could be non-integer if user redeclared this fcn wrong. */
8794 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8795 break;
8797 /* Compute the argument. */
8798 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8799 /* Compute ffs, into TARGET if possible.
8800 Set TARGET to wherever the result comes back. */
8801 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8802 ffs_optab, op0, target, 1);
8803 if (target == 0)
8804 abort ();
8805 return target;
8807 case BUILT_IN_STRLEN:
8808 /* If not optimizing, call the library function. */
8809 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8810 break;
8812 if (arglist == 0
8813 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8814 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8815 break;
8816 else
8818 tree src = TREE_VALUE (arglist);
8819 tree len = c_strlen (src);
8821 int align
8822 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8824 rtx result, src_rtx, char_rtx;
8825 enum machine_mode insn_mode = value_mode, char_mode;
8826 enum insn_code icode;
8828 /* If the length is known, just return it. */
8829 if (len != 0)
8830 return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
8832 /* If SRC is not a pointer type, don't do this operation inline. */
8833 if (align == 0)
8834 break;
8836 /* Call a function if we can't compute strlen in the right mode. */
8838 while (insn_mode != VOIDmode)
8840 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8841 if (icode != CODE_FOR_nothing)
8842 break;
8844 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8846 if (insn_mode == VOIDmode)
8847 break;
8849 /* Make a place to write the result of the instruction. */
8850 result = target;
8851 if (! (result != 0
8852 && GET_CODE (result) == REG
8853 && GET_MODE (result) == insn_mode
8854 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8855 result = gen_reg_rtx (insn_mode);
8857 /* Make sure the operands are acceptable to the predicates. */
8859 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8860 result = gen_reg_rtx (insn_mode);
8861 src_rtx = memory_address (BLKmode,
8862 expand_expr (src, NULL_RTX, ptr_mode,
8863 EXPAND_NORMAL));
8865 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8866 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
8868 /* Check that the string is readable and has an end. */
8869 if (flag_check_memory_usage)
8870 emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
8871 src_rtx, ptr_mode,
8872 GEN_INT (MEMORY_USE_RO),
8873 TYPE_MODE (integer_type_node));
8875 char_rtx = const0_rtx;
8876 char_mode = insn_operand_mode[(int)icode][2];
8877 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8878 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
8880 emit_insn (GEN_FCN (icode) (result,
8881 gen_rtx_MEM (BLKmode, src_rtx),
8882 char_rtx, GEN_INT (align)));
8884 /* Return the value in the proper mode for this function. */
8885 if (GET_MODE (result) == value_mode)
8886 return result;
8887 else if (target != 0)
8889 convert_move (target, result, 0);
8890 return target;
8892 else
8893 return convert_to_mode (value_mode, result, 0);
8896 case BUILT_IN_STRCPY:
8897 /* If not optimizing, call the library function. */
8898 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8899 break;
8901 if (arglist == 0
8902 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8903 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8904 || TREE_CHAIN (arglist) == 0
8905 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8906 break;
8907 else
8909 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
8911 if (len == 0)
8912 break;
8914 len = size_binop (PLUS_EXPR, len, integer_one_node);
8916 chainon (arglist, build_tree_list (NULL_TREE, len));
8919 /* Drops in (falls through to the memcpy case below). */
8920 case BUILT_IN_MEMCPY:
8921 /* If not optimizing, call the library function. */
8922 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8923 break;
8925 if (arglist == 0
8926 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8927 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8928 || TREE_CHAIN (arglist) == 0
8929 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8930 != POINTER_TYPE)
8931 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8932 || (TREE_CODE (TREE_TYPE (TREE_VALUE
8933 (TREE_CHAIN (TREE_CHAIN (arglist)))))
8934 != INTEGER_TYPE))
8935 break;
8936 else
8938 tree dest = TREE_VALUE (arglist);
8939 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8940 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8942 int src_align
8943 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8944 int dest_align
8945 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8946 rtx dest_mem, src_mem, dest_addr, len_rtx;
8948 /* If either SRC or DEST is not a pointer type, don't do
8949 this operation in-line. */
8950 if (src_align == 0 || dest_align == 0)
8952 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8953 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8954 break;
8957 dest_mem = get_memory_rtx (dest);
8958 src_mem = get_memory_rtx (src);
8959 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
8961 /* Just copy the rights of SRC to the rights of DEST. */
8962 if (flag_check_memory_usage)
8963 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
8964 XEXP (dest_mem, 0), ptr_mode,
8965 XEXP (src_mem, 0), ptr_mode,
8966 len_rtx, TYPE_MODE (sizetype));
8968 /* Copy word part most expediently. */
8969 dest_addr
8970 = emit_block_move (dest_mem, src_mem, len_rtx,
8971 MIN (src_align, dest_align));
8973 if (dest_addr == 0)
8974 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
8976 return dest_addr;
8979 case BUILT_IN_MEMSET:
8980 /* If not optimizing, call the library function. */
8981 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8982 break;
8984 if (arglist == 0
8985 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8986 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8987 || TREE_CHAIN (arglist) == 0
8988 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8989 != INTEGER_TYPE)
8990 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8991 || (INTEGER_TYPE
8992 != (TREE_CODE (TREE_TYPE
8993 (TREE_VALUE
8994 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
8995 break;
8996 else
8998 tree dest = TREE_VALUE (arglist);
8999 tree val = TREE_VALUE (TREE_CHAIN (arglist));
9000 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9002 int dest_align
9003 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9004 rtx dest_mem, dest_addr, len_rtx;
9006 /* If DEST is not a pointer type, don't do this
9007 operation in-line. */
9008 if (dest_align == 0)
9009 break;
9011 /* If the arguments have side-effects, then we can only evaluate
9012 them at most once. The following code evaluates them twice if
9013 they are not constants because we break out to expand_call
9014 in that case. They can't be constants if they have side-effects
9015 so we can check for that first. Alternatively, we could call
9016 save_expr to make multiple evaluation safe. */
9017 if (TREE_SIDE_EFFECTS (val) || TREE_SIDE_EFFECTS (len))
9018 break;
9020 /* If VAL is not 0, don't do this operation in-line. */
9021 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
9022 break;
9024 /* If LEN does not expand to a constant, don't do this
9025 operation in-line. */
9026 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9027 if (GET_CODE (len_rtx) != CONST_INT)
9028 break;
9030 dest_mem = get_memory_rtx (dest);
9032 /* Just check that DEST is writable and mark it as readable. */
9033 if (flag_check_memory_usage)
9034 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
9035 XEXP (dest_mem, 0), ptr_mode,
9036 len_rtx, TYPE_MODE (sizetype),
9037 GEN_INT (MEMORY_USE_WO),
9038 TYPE_MODE (integer_type_node));
9041 dest_addr = clear_storage (dest_mem, len_rtx, dest_align);
9043 if (dest_addr == 0)
9044 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
9046 return dest_addr;
9049 /* These comparison functions need an instruction that returns an actual
9050 index. An ordinary compare that just sets the condition codes
9051 is not enough. */
9052 #ifdef HAVE_cmpstrsi
9053 case BUILT_IN_STRCMP:
9054 /* If not optimizing, call the library function. */
9055 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9056 break;
9058 /* If we need to check memory accesses, call the library function. */
9059 if (flag_check_memory_usage)
9060 break;
9062 if (arglist == 0
9063 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9064 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9065 || TREE_CHAIN (arglist) == 0
9066 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9067 break;
9068 else if (!HAVE_cmpstrsi)
9069 break;
9071 tree arg1 = TREE_VALUE (arglist);
9072 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9073 tree len, len2;
9075 len = c_strlen (arg1);
9076 if (len)
9077 len = size_binop (PLUS_EXPR, integer_one_node, len);
9078 len2 = c_strlen (arg2);
9079 if (len2)
9080 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
9082 /* If we don't have a constant length for the first, use the length
9083 of the second, if we know it. We don't require a constant for
9084 this case; some cost analysis could be done if both are available
9085 but neither is constant. For now, assume they're equally cheap.
9087 If both strings have constant lengths, use the smaller. This
9088 could arise if optimization results in strcmp being called with
9089 two fixed strings, or if the code was machine-generated. We should
9090 add some code to the `memcmp' handler below to deal with such
9091 situations, someday. */
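/* e.g. (sketch): for strcmp (s, "ab") only the second length is
   constant, so the cmpstrsi length operand becomes 3 (strlen + 1);
   comparing past the terminating null cannot change the result, so
   the shorter constant bound is safe.  */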
9092 if (!len || TREE_CODE (len) != INTEGER_CST)
9094 if (len2)
9095 len = len2;
9096 else if (len == 0)
9097 break;
9099 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
9101 if (tree_int_cst_lt (len2, len))
9102 len = len2;
9105 chainon (arglist, build_tree_list (NULL_TREE, len));
9108 /* Drops in (falls through to the memcmp case below). */
9109 case BUILT_IN_MEMCMP:
9110 /* If not optimizing, call the library function. */
9111 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9112 break;
9114 /* If we need to check memory accesses, call the library function. */
9115 if (flag_check_memory_usage)
9116 break;
9118 if (arglist == 0
9119 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9120 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9121 || TREE_CHAIN (arglist) == 0
9122 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
9123 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9124 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
9125 break;
9126 else if (!HAVE_cmpstrsi)
9127 break;
9129 tree arg1 = TREE_VALUE (arglist);
9130 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9131 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9132 rtx result;
9134 int arg1_align
9135 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9136 int arg2_align
9137 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9138 enum machine_mode insn_mode
9139 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
9141 /* If either argument is not really a pointer (so no alignment is known), call the function. */
9142 if (arg1_align == 0 || arg2_align == 0)
9144 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
9145 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9146 break;
9149 /* Make a place to write the result of the instruction. */
9150 result = target;
9151 if (! (result != 0
9152 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
9153 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9154 result = gen_reg_rtx (insn_mode);
9156 emit_insn (gen_cmpstrsi (result, get_memory_rtx (arg1),
9157 get_memory_rtx (arg2),
9158 expand_expr (len, NULL_RTX, VOIDmode, 0),
9159 GEN_INT (MIN (arg1_align, arg2_align))));
9161 /* Return the value in the proper mode for this function. */
9162 mode = TYPE_MODE (TREE_TYPE (exp));
9163 if (GET_MODE (result) == mode)
9164 return result;
9165 else if (target != 0)
9167 convert_move (target, result, 0);
9168 return target;
9170 else
9171 return convert_to_mode (mode, result, 0);
9173 #else
9174 case BUILT_IN_STRCMP:
9175 case BUILT_IN_MEMCMP:
9176 break;
9177 #endif
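#if 0
/* Editorial sketch, not compiler code: the length selection made by the
   BUILT_IN_STRCMP case above when c_strlen knows both arguments.  Each
   candidate is strlen + 1, so the terminating null participates in the
   comparison; the function name is hypothetical.  */
static unsigned long
strcmp_inline_length (unsigned long len1, unsigned long len2)
{
  /* When both lengths are known constants the smaller suffices: the
     strings must differ at or before the shorter one's null.  */
  return len1 < len2 ? len1 : len2;
}
#endif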
9179 case BUILT_IN_SETJMP:
9180 if (arglist == 0
9181 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9182 break;
9183 else
9185 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9186 VOIDmode, 0);
9187 rtx lab = gen_label_rtx ();
9188 rtx ret = expand_builtin_setjmp (buf_addr, target, lab, lab);
9189 emit_label (lab);
9190 return ret;
9193 /* __builtin_longjmp is passed a pointer to an array of five words.
9194 It's similar to the C library longjmp function but works with
9195 __builtin_setjmp above. */
9196 case BUILT_IN_LONGJMP:
9197 if (arglist == 0 || TREE_CHAIN (arglist) == 0
9198 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9199 break;
9200 else
9202 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9203 VOIDmode, 0);
9204 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
9205 NULL_RTX, VOIDmode, 0);
9207 if (value != const1_rtx)
9209 error ("__builtin_longjmp second argument must be 1");
9210 return const0_rtx;
9213 expand_builtin_longjmp (buf_addr, value);
9214 return const0_rtx;
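#if 0
/* Editorial sketch, not compiler code: the pairing these two cases
   expand.  The buffer is the five-word array described above, and the
   second __builtin_longjmp argument must be the literal 1, exactly as
   the check above enforces.  */
static void *jmp_buffer[5];

static int
builtin_setjmp_example (void)
{
  if (__builtin_setjmp (jmp_buffer) == 0)
    __builtin_longjmp (jmp_buffer, 1);  /* reenters the setjmp, which then returns non-zero */
  return 1;
}
#endif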
9217 case BUILT_IN_TRAP:
9218 #ifdef HAVE_trap
9219 if (HAVE_trap)
9220 emit_insn (gen_trap ());
9221 else
9222 #endif
9223 error ("__builtin_trap not supported by this target");
9224 emit_barrier ();
9225 return const0_rtx;
9227 /* Various hooks for the DWARF 2 __throw routine. */
9228 case BUILT_IN_UNWIND_INIT:
9229 expand_builtin_unwind_init ();
9230 return const0_rtx;
9231 case BUILT_IN_FP:
9232 return frame_pointer_rtx;
9233 case BUILT_IN_SP:
9234 return stack_pointer_rtx;
9235 #ifdef DWARF2_UNWIND_INFO
9236 case BUILT_IN_DWARF_FP_REGNUM:
9237 return expand_builtin_dwarf_fp_regnum ();
9238 case BUILT_IN_DWARF_REG_SIZE:
9239 return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
9240 #endif
9241 case BUILT_IN_FROB_RETURN_ADDR:
9242 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
9243 case BUILT_IN_EXTRACT_RETURN_ADDR:
9244 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
9245 case BUILT_IN_SET_RETURN_ADDR_REG:
9246 expand_builtin_set_return_addr_reg (TREE_VALUE (arglist));
9247 return const0_rtx;
9248 case BUILT_IN_EH_STUB_OLD:
9249 return expand_builtin_eh_stub_old ();
9250 case BUILT_IN_EH_STUB:
9251 return expand_builtin_eh_stub ();
9252 case BUILT_IN_SET_EH_REGS:
9253 expand_builtin_set_eh_regs (TREE_VALUE (arglist),
9254 TREE_VALUE (TREE_CHAIN (arglist)));
9255 return const0_rtx;
9257 default: /* Just emit the library call if the builtin is unknown. */
9258 error ("built-in function `%s' not currently supported",
9259 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9262 /* The switch statement above can drop through to cause the function
9263 to be called normally. */
9265 return expand_call (exp, target, ignore);
9268 /* Built-in functions to perform an untyped call and return. */
9270 /* For each register that may be used for calling a function, this
9271 gives a mode used to copy the register's value. VOIDmode indicates
9272 the register is not used for calling a function. If the machine
9273 has register windows, this gives only the outbound registers.
9274 INCOMING_REGNO gives the corresponding inbound register. */
9275 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
9277 /* For each register that may be used for returning values, this gives
9278 a mode used to copy the register's value. VOIDmode indicates the
9279 register is not used for returning values. If the machine has
9280 register windows, this gives only the outbound registers.
9281 INCOMING_REGNO gives the corresponding inbound register. */
9282 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
9284 /* For each register that may be used for calling a function, this
9285 gives the offset of that register into the block returned by
9286 __builtin_apply_args. 0 indicates that the register is not
9287 used for calling a function. */
9288 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9290 /* Return the offset of register REGNO into the block returned by
9291 __builtin_apply_args. This is not declared static, since it is
9292 needed in objc-act.c. */
9294 int
9295 apply_args_register_offset (regno)
9296 int regno;
9298 apply_args_size ();
9300 /* Arguments are always put in outgoing registers (in the argument
9301 block) where that makes sense. */
9302 #ifdef OUTGOING_REGNO
9303 regno = OUTGOING_REGNO(regno);
9304 #endif
9305 return apply_args_reg_offset[regno];
9308 /* Return the size required for the block returned by __builtin_apply_args,
9309 and initialize apply_args_mode. */
9311 static int
9312 apply_args_size ()
9314 static int size = -1;
9315 int align, regno;
9316 enum machine_mode mode;
9318 /* The values computed by this function never change. */
9319 if (size < 0)
9321 /* The first value is the incoming arg-pointer. */
9322 size = GET_MODE_SIZE (Pmode);
9324 /* The second value is the structure value address unless this is
9325 passed as an "invisible" first argument. */
9326 if (struct_value_rtx)
9327 size += GET_MODE_SIZE (Pmode);
9329 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9330 if (FUNCTION_ARG_REGNO_P (regno))
9332 /* Search for the proper mode for copying this register's
9333 value. I'm not sure this is right, but it works so far. */
9334 enum machine_mode best_mode = VOIDmode;
9336 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9337 mode != VOIDmode;
9338 mode = GET_MODE_WIDER_MODE (mode))
9339 if (HARD_REGNO_MODE_OK (regno, mode)
9340 && HARD_REGNO_NREGS (regno, mode) == 1)
9341 best_mode = mode;
9343 if (best_mode == VOIDmode)
9344 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9345 mode != VOIDmode;
9346 mode = GET_MODE_WIDER_MODE (mode))
9347 if (HARD_REGNO_MODE_OK (regno, mode)
9348 && (mov_optab->handlers[(int) mode].insn_code
9349 != CODE_FOR_nothing))
9350 best_mode = mode;
9352 mode = best_mode;
9353 if (mode == VOIDmode)
9354 abort ();
9356 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9357 if (size % align != 0)
9358 size = CEIL (size, align) * align;
9359 apply_args_reg_offset[regno] = size;
9360 size += GET_MODE_SIZE (mode);
9361 apply_args_mode[regno] = mode;
9363 else
9365 apply_args_mode[regno] = VOIDmode;
9366 apply_args_reg_offset[regno] = 0;
9369 return size;
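#if 0
/* Editorial sketch, not compiler code: the alignment round-up that the
   `size = CEIL (size, align) * align' lines in these size computations
   perform before each register is laid into the block.  */
static int
round_up_to_alignment (int size, int align)
{
  /* Smallest multiple of ALIGN that is >= SIZE.  */
  return ((size + align - 1) / align) * align;
}
#endif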
9372 /* Return the size required for the block returned by __builtin_apply,
9373 and initialize apply_result_mode. */
9375 static int
9376 apply_result_size ()
9378 static int size = -1;
9379 int align, regno;
9380 enum machine_mode mode;
9382 /* The values computed by this function never change. */
9383 if (size < 0)
9385 size = 0;
9387 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9388 if (FUNCTION_VALUE_REGNO_P (regno))
9390 /* Search for the proper mode for copying this register's
9391 value. I'm not sure this is right, but it works so far. */
9392 enum machine_mode best_mode = VOIDmode;
9394 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9395 mode != TImode;
9396 mode = GET_MODE_WIDER_MODE (mode))
9397 if (HARD_REGNO_MODE_OK (regno, mode))
9398 best_mode = mode;
9400 if (best_mode == VOIDmode)
9401 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9402 mode != VOIDmode;
9403 mode = GET_MODE_WIDER_MODE (mode))
9404 if (HARD_REGNO_MODE_OK (regno, mode)
9405 && (mov_optab->handlers[(int) mode].insn_code
9406 != CODE_FOR_nothing))
9407 best_mode = mode;
9409 mode = best_mode;
9410 if (mode == VOIDmode)
9411 abort ();
9413 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9414 if (size % align != 0)
9415 size = CEIL (size, align) * align;
9416 size += GET_MODE_SIZE (mode);
9417 apply_result_mode[regno] = mode;
9419 else
9420 apply_result_mode[regno] = VOIDmode;
9422 /* Allow targets that use untyped_call and untyped_return to override
9423 the size so that machine-specific information can be stored here. */
9424 #ifdef APPLY_RESULT_SIZE
9425 size = APPLY_RESULT_SIZE;
9426 #endif
9428 return size;
9431 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9432 /* Create a vector describing the result block RESULT. If SAVEP is true,
9433 the result block is used to save the values; otherwise it is used to
9434 restore the values. */
9436 static rtx
9437 result_vector (savep, result)
9438 int savep;
9439 rtx result;
9441 int regno, size, align, nelts;
9442 enum machine_mode mode;
9443 rtx reg, mem;
9444 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9446 size = nelts = 0;
9447 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9448 if ((mode = apply_result_mode[regno]) != VOIDmode)
9450 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9451 if (size % align != 0)
9452 size = CEIL (size, align) * align;
9453 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
9454 mem = change_address (result, mode,
9455 plus_constant (XEXP (result, 0), size));
9456 savevec[nelts++] = (savep
9457 ? gen_rtx_SET (VOIDmode, mem, reg)
9458 : gen_rtx_SET (VOIDmode, reg, mem));
9459 size += GET_MODE_SIZE (mode);
9461 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
9463 #endif /* HAVE_untyped_call or HAVE_untyped_return */
9465 /* Save the state required to perform an untyped call with the same
9466 arguments as were passed to the current function. */
9468 static rtx
9469 expand_builtin_apply_args ()
9471 rtx registers;
9472 int size, align, regno;
9473 enum machine_mode mode;
9475 /* Create a block where the arg-pointer, structure value address,
9476 and argument registers can be saved. */
9477 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9479 /* Walk past the arg-pointer and structure value address. */
9480 size = GET_MODE_SIZE (Pmode);
9481 if (struct_value_rtx)
9482 size += GET_MODE_SIZE (Pmode);
9484 /* Save each register used in calling a function to the block. */
9485 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9486 if ((mode = apply_args_mode[regno]) != VOIDmode)
9488 rtx tem;
9490 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9491 if (size % align != 0)
9492 size = CEIL (size, align) * align;
9494 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
9496 #ifdef STACK_REGS
9497 /* For reg-stack.c's stack register housekeeping.
9498 Compare with a similar piece of code in function.c. */
9500 emit_insn (gen_rtx_USE (mode, tem));
9501 #endif
9503 emit_move_insn (change_address (registers, mode,
9504 plus_constant (XEXP (registers, 0),
9505 size)),
9506 tem);
9507 size += GET_MODE_SIZE (mode);
9510 /* Save the arg pointer to the block. */
9511 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9512 copy_to_reg (virtual_incoming_args_rtx));
9513 size = GET_MODE_SIZE (Pmode);
9515 /* Save the structure value address unless this is passed as an
9516 "invisible" first argument. */
9517 if (struct_value_incoming_rtx)
9519 emit_move_insn (change_address (registers, Pmode,
9520 plus_constant (XEXP (registers, 0),
9521 size)),
9522 copy_to_reg (struct_value_incoming_rtx));
9523 size += GET_MODE_SIZE (Pmode);
9526 /* Return the address of the block. */
9527 return copy_addr_to_reg (XEXP (registers, 0));
9530 /* Perform an untyped call and save the state required to perform an
9531 untyped return of whatever value was returned by the given function. */
9533 static rtx
9534 expand_builtin_apply (function, arguments, argsize)
9535 rtx function, arguments, argsize;
9537 int size, align, regno;
9538 enum machine_mode mode;
9539 rtx incoming_args, result, reg, dest, call_insn;
9540 rtx old_stack_level = 0;
9541 rtx call_fusage = 0;
9543 /* Create a block where the return registers can be saved. */
9544 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9546 /* ??? The argsize value should be adjusted here. */
9548 /* Fetch the arg pointer from the ARGUMENTS block. */
9549 incoming_args = gen_reg_rtx (Pmode);
9550 emit_move_insn (incoming_args,
9551 gen_rtx_MEM (Pmode, arguments));
9552 #ifndef STACK_GROWS_DOWNWARD
9553 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9554 incoming_args, 0, OPTAB_LIB_WIDEN);
9555 #endif
9557 /* Perform postincrements before actually calling the function. */
9558 emit_queue ();
9560 /* Push a new argument block and copy the arguments. */
9561 do_pending_stack_adjust ();
9563 /* Save the stack level, using the nonlocal mechanism if available. */
9564 #ifdef HAVE_save_stack_nonlocal
9565 if (HAVE_save_stack_nonlocal)
9566 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
9567 else
9568 #endif
9569 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9571 /* Push a block of memory onto the stack to store the memory arguments.
9572 Save the address in a register, and copy the memory arguments. ??? I
9573 haven't figured out how the calling convention macros affect this,
9574 but it's likely that the source and/or destination addresses in
9575 the block copy will need updating in machine-specific ways. */
9576 dest = allocate_dynamic_stack_space (argsize, 0, 0);
9577 emit_block_move (gen_rtx_MEM (BLKmode, dest),
9578 gen_rtx_MEM (BLKmode, incoming_args),
9579 argsize,
9580 PARM_BOUNDARY / BITS_PER_UNIT);
9582 /* Refer to the argument block. */
9583 apply_args_size ();
9584 arguments = gen_rtx_MEM (BLKmode, arguments);
9586 /* Walk past the arg-pointer and structure value address. */
9587 size = GET_MODE_SIZE (Pmode);
9588 if (struct_value_rtx)
9589 size += GET_MODE_SIZE (Pmode);
9591 /* Restore each of the registers previously saved. Make USE insns
9592 for each of these registers for use in making the call. */
9593 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9594 if ((mode = apply_args_mode[regno]) != VOIDmode)
9596 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9597 if (size % align != 0)
9598 size = CEIL (size, align) * align;
9599 reg = gen_rtx_REG (mode, regno);
9600 emit_move_insn (reg,
9601 change_address (arguments, mode,
9602 plus_constant (XEXP (arguments, 0),
9603 size)));
9605 use_reg (&call_fusage, reg);
9606 size += GET_MODE_SIZE (mode);
9609 /* Restore the structure value address unless this is passed as an
9610 "invisible" first argument. */
9611 size = GET_MODE_SIZE (Pmode);
9612 if (struct_value_rtx)
9614 rtx value = gen_reg_rtx (Pmode);
9615 emit_move_insn (value,
9616 change_address (arguments, Pmode,
9617 plus_constant (XEXP (arguments, 0),
9618 size)));
9619 emit_move_insn (struct_value_rtx, value);
9620 if (GET_CODE (struct_value_rtx) == REG)
9621 use_reg (&call_fusage, struct_value_rtx);
9622 size += GET_MODE_SIZE (Pmode);
9625 /* All arguments and registers used for the call are set up by now! */
9626 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9628 /* Ensure the address is valid. A SYMBOL_REF is already valid, so nothing
9629 need be done, and we don't want to load it into a register as an
9630 optimization, because prepare_call_address already did that if needed. */
9631 if (GET_CODE (function) != SYMBOL_REF)
9632 function = memory_address (FUNCTION_MODE, function);
9634 /* Generate the actual call instruction and save the return value. */
9635 #ifdef HAVE_untyped_call
9636 if (HAVE_untyped_call)
9637 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
9638 result, result_vector (1, result)));
9639 else
9640 #endif
9641 #ifdef HAVE_call_value
9642 if (HAVE_call_value)
9644 rtx valreg = 0;
9646 /* Locate the unique return register. It is not possible to
9647 express a call that sets more than one return register using
9648 call_value; use untyped_call for that. In fact, untyped_call
9649 only needs to save the return registers in the given block. */
9650 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9651 if ((mode = apply_result_mode[regno]) != VOIDmode)
9653 if (valreg)
9654 abort (); /* HAVE_untyped_call required. */
9655 valreg = gen_rtx_REG (mode, regno);
9658 emit_call_insn (gen_call_value (valreg,
9659 gen_rtx_MEM (FUNCTION_MODE, function),
9660 const0_rtx, NULL_RTX, const0_rtx));
9662 emit_move_insn (change_address (result, GET_MODE (valreg),
9663 XEXP (result, 0)),
9664 valreg);
9666 else
9667 #endif
9668 abort ();
9670 /* Find the CALL insn we just emitted. */
9671 for (call_insn = get_last_insn ();
9672 call_insn && GET_CODE (call_insn) != CALL_INSN;
9673 call_insn = PREV_INSN (call_insn))
9674 ;
9676 if (! call_insn)
9677 abort ();
9679 /* Put the register usage information on the CALL. If there is already
9680 some usage information, put ours at the end. */
9681 if (CALL_INSN_FUNCTION_USAGE (call_insn))
9683 rtx link;
9685 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9686 link = XEXP (link, 1))
9687 ;
9689 XEXP (link, 1) = call_fusage;
9691 else
9692 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
9694 /* Restore the stack. */
9695 #ifdef HAVE_save_stack_nonlocal
9696 if (HAVE_save_stack_nonlocal)
9697 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
9698 else
9699 #endif
9700 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9702 /* Return the address of the result block. */
9703 return copy_addr_to_reg (XEXP (result, 0));
9706 /* Perform an untyped return. */
9708 static void
9709 expand_builtin_return (result)
9710 rtx result;
9712 int size, align, regno;
9713 enum machine_mode mode;
9714 rtx reg;
9715 rtx call_fusage = 0;
9717 apply_result_size ();
9718 result = gen_rtx_MEM (BLKmode, result);
9720 #ifdef HAVE_untyped_return
9721 if (HAVE_untyped_return)
9723 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9724 emit_barrier ();
9725 return;
9727 #endif
9729 /* Restore the return value and note that each value is used. */
9730 size = 0;
9731 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9732 if ((mode = apply_result_mode[regno]) != VOIDmode)
9734 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9735 if (size % align != 0)
9736 size = CEIL (size, align) * align;
9737 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
9738 emit_move_insn (reg,
9739 change_address (result, mode,
9740 plus_constant (XEXP (result, 0),
9741 size)));
9743 push_to_sequence (call_fusage);
9744 emit_insn (gen_rtx_USE (VOIDmode, reg));
9745 call_fusage = get_insns ();
9746 end_sequence ();
9747 size += GET_MODE_SIZE (mode);
9750 /* Put the USE insns before the return. */
9751 emit_insns (call_fusage);
9753 /* Return whatever value was restored by jumping directly to the end
9754 of the function. */
9755 expand_null_return ();
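#if 0
/* Editorial sketch, not compiler code: how the three builtins expanded
   above combine into a generic forwarding function.  The callee and the
   argument-block size (64 here) are hypothetical; the pointer returned
   by __builtin_apply_args is the block laid out by
   expand_builtin_apply_args.  */
extern double callee ();

static double
forward_call (void)
{
  void *args = __builtin_apply_args ();
  void *result = __builtin_apply ((void (*) ()) callee, args, 64);
  __builtin_return (result);
}
#endif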
9758 /* Expand code for a post- or pre- increment or decrement
9759 and return the RTX for the result.
9760 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9762 static rtx
9763 expand_increment (exp, post, ignore)
9764 register tree exp;
9765 int post, ignore;
9767 register rtx op0, op1;
9768 register rtx temp, value;
9769 register tree incremented = TREE_OPERAND (exp, 0);
9770 optab this_optab = add_optab;
9771 int icode;
9772 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9773 int op0_is_copy = 0;
9774 int single_insn = 0;
9775 /* 1 means we can't store into OP0 directly,
9776 because it is a subreg narrower than a word,
9777 and we don't dare clobber the rest of the word. */
9778 int bad_subreg = 0;
9780 /* Stabilize any component ref that might need to be
9781 evaluated more than once below. */
9782 if (!post
9783 || TREE_CODE (incremented) == BIT_FIELD_REF
9784 || (TREE_CODE (incremented) == COMPONENT_REF
9785 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9786 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9787 incremented = stabilize_reference (incremented);
9788 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9789 ones into save exprs so that they don't accidentally get evaluated
9790 more than once by the code below. */
9791 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9792 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9793 incremented = save_expr (incremented);
9795 /* Compute the operands as RTX.
9796 Note whether OP0 is the actual lvalue or a copy of it:
9797 I believe it is a copy iff it is a register or subreg
9798 and insns were generated in computing it. */
9800 temp = get_last_insn ();
9801 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9803 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9804 in place but instead must do sign- or zero-extension during assignment,
9805 so we copy it into a new register and let the code below use it as
9806 a copy.
9808 Note that we can safely modify this SUBREG since it is known not to be
9809 shared (it was made by the expand_expr call above). */
9811 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9813 if (post)
9814 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9815 else
9816 bad_subreg = 1;
9818 else if (GET_CODE (op0) == SUBREG
9819 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9821 /* We cannot increment this SUBREG in place. If we are
9822 post-incrementing, get a copy of the old value. Otherwise,
9823 just mark that we cannot increment in place. */
9824 if (post)
9825 op0 = copy_to_reg (op0);
9826 else
9827 bad_subreg = 1;
9830 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9831 && temp != get_last_insn ());
9832 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9833 EXPAND_MEMORY_USE_BAD);
9835 /* Decide whether incrementing or decrementing. */
9836 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9837 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9838 this_optab = sub_optab;
9840 /* Convert decrement by a constant into a negative increment. */
9841 if (this_optab == sub_optab
9842 && GET_CODE (op1) == CONST_INT)
9844 op1 = GEN_INT (- INTVAL (op1));
9845 this_optab = add_optab;
9848 /* For a preincrement, see if we can do this with a single instruction. */
9849 if (!post)
9851 icode = (int) this_optab->handlers[(int) mode].insn_code;
9852 if (icode != (int) CODE_FOR_nothing
9853 /* Make sure that OP0 is valid for operands 0 and 1
9854 of the insn we want to queue. */
9855 && (*insn_operand_predicate[icode][0]) (op0, mode)
9856 && (*insn_operand_predicate[icode][1]) (op0, mode)
9857 && (*insn_operand_predicate[icode][2]) (op1, mode))
9858 single_insn = 1;
9861 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9862 then we cannot just increment OP0. We must therefore contrive to
9863 increment the original value. Then, for postincrement, we can return
9864 OP0 since it is a copy of the old value. For preincrement, expand here
9865 unless we can do it with a single insn.
9867 Likewise if storing directly into OP0 would clobber high bits
9868 we need to preserve (bad_subreg). */
9869 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9871 /* This is the easiest way to increment the value wherever it is.
9872 Problems with multiple evaluation of INCREMENTED are prevented
9873 because either (1) it is a component_ref or preincrement,
9874 in which case it was stabilized above, or (2) it is an array_ref
9875 with constant index in an array in a register, which is
9876 safe to reevaluate. */
9877 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9878 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9879 ? MINUS_EXPR : PLUS_EXPR),
9880 TREE_TYPE (exp),
9881 incremented,
9882 TREE_OPERAND (exp, 1));
9884 while (TREE_CODE (incremented) == NOP_EXPR
9885 || TREE_CODE (incremented) == CONVERT_EXPR)
9887 newexp = convert (TREE_TYPE (incremented), newexp);
9888 incremented = TREE_OPERAND (incremented, 0);
9891 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9892 return post ? op0 : temp;
9895 if (post)
9897 /* We have a true reference to the value in OP0.
9898 If there is an insn to add or subtract in this mode, queue it.
9899 Queueing the increment insn avoids the register shuffling
9900 that often results if we must increment now and first save
9901 the old value for subsequent use. */
9903 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9904 op0 = stabilize (op0);
9905 #endif
9907 icode = (int) this_optab->handlers[(int) mode].insn_code;
9908 if (icode != (int) CODE_FOR_nothing
9909 /* Make sure that OP0 is valid for operands 0 and 1
9910 of the insn we want to queue. */
9911 && (*insn_operand_predicate[icode][0]) (op0, mode)
9912 && (*insn_operand_predicate[icode][1]) (op0, mode))
9914 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9915 op1 = force_reg (mode, op1);
9917 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9919 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9921 rtx addr = (general_operand (XEXP (op0, 0), mode)
9922 ? force_reg (Pmode, XEXP (op0, 0))
9923 : copy_to_reg (XEXP (op0, 0)));
9924 rtx temp, result;
9926 op0 = change_address (op0, VOIDmode, addr);
9927 temp = force_reg (GET_MODE (op0), op0);
9928 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9929 op1 = force_reg (mode, op1);
9931 /* The increment queue is LIFO, thus we have to `queue'
9932 the instructions in reverse order. */
9933 enqueue_insn (op0, gen_move_insn (op0, temp));
9934 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9935 return result;
9939 /* Preincrement, or we can't increment with one simple insn. */
9940 if (post)
9941 /* Save a copy of the value before inc or dec, to return it later. */
9942 temp = value = copy_to_reg (op0);
9943 else
9944 /* Arrange to return the incremented value. */
9945 /* Copy the rtx because expand_binop will protect from the queue,
9946 and the results of that would be invalid for us to return
9947 if our caller does emit_queue before using our result. */
9948 temp = copy_rtx (value = op0);
9950 /* Increment however we can. */
9951 op1 = expand_binop (mode, this_optab, value, op1,
9952 flag_check_memory_usage ? NULL_RTX : op0,
9953 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9954 /* Make sure the value is stored into OP0. */
9955 if (op1 != op0)
9956 emit_move_insn (op0, op1);
9958 return temp;
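#if 0
/* Editorial sketch, not compiler code: the source-level semantics the
   code above preserves.  A postincrement yields the old value, which is
   why OP0 gets copied before the add is queued; a preincrement yields
   the updated value itself.  */
static int
increment_example (void)
{
  int i = 5;
  int post = i++;	/* post is 5; i becomes 6 */
  int pre = ++i;	/* i becomes 7; pre is 7 */
  return post + pre;	/* 12 */
}
#endif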
9961 /* Expand all function calls contained within EXP, innermost ones first.
9962 But don't look within expressions that have sequence points.
9963 For each CALL_EXPR, record the rtx for its value
9964 in the CALL_EXPR_RTL field. */
9966 static void
9967 preexpand_calls (exp)
9968 tree exp;
9970 register int nops, i;
9971 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9973 if (! do_preexpand_calls)
9974 return;
9976 /* Only expressions and references can contain calls. */
9978 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9979 return;
9981 switch (TREE_CODE (exp))
9983 case CALL_EXPR:
9984 /* Do nothing if already expanded. */
9985 if (CALL_EXPR_RTL (exp) != 0
9986 /* Do nothing if the call returns a variable-sized object. */
9987 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
9988 /* Do nothing to built-in functions. */
9989 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9990 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9991 == FUNCTION_DECL)
9992 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9993 return;
9995 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9996 return;
9998 case COMPOUND_EXPR:
9999 case COND_EXPR:
10000 case TRUTH_ANDIF_EXPR:
10001 case TRUTH_ORIF_EXPR:
10002 /* If we find one of these, then we can be sure
10003 the adjust will be done for it (since it makes jumps).
10004 Do it now, so that if this is inside an argument
10005 of a function, we don't get the stack adjustment
10006 after some other args have already been pushed. */
10007 do_pending_stack_adjust ();
10008 return;
10010 case BLOCK:
10011 case RTL_EXPR:
10012 case WITH_CLEANUP_EXPR:
10013 case CLEANUP_POINT_EXPR:
10014 case TRY_CATCH_EXPR:
10015 return;
10017 case SAVE_EXPR:
10018 if (SAVE_EXPR_RTL (exp) != 0)
10019 return;
10021 default:
10022 break;
10025 nops = tree_code_length[(int) TREE_CODE (exp)];
10026 for (i = 0; i < nops; i++)
10027 if (TREE_OPERAND (exp, i) != 0)
10029 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
10030 if (type == 'e' || type == '<' || type == '1' || type == '2'
10031 || type == 'r')
10032 preexpand_calls (TREE_OPERAND (exp, i));
10036 /* At the start of a function, record that we have no previously-pushed
10037 arguments waiting to be popped. */
10039 void
10040 init_pending_stack_adjust ()
10042 pending_stack_adjust = 0;
10045 /* When exiting from a function, if safe, clear out any pending stack adjust
10046 so the adjustment won't get done.
10048 Note, if the current function calls alloca, then it must have a
10049 frame pointer regardless of the value of flag_omit_frame_pointer. */
10051 void
10052 clear_pending_stack_adjust ()
10054 #ifdef EXIT_IGNORE_STACK
10055 if (optimize > 0
10056 && (! flag_omit_frame_pointer || current_function_calls_alloca)
10057 && EXIT_IGNORE_STACK
10058 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
10059 && ! flag_inline_functions)
10060 pending_stack_adjust = 0;
10061 #endif
10064 /* Pop any previously-pushed arguments that have not been popped yet. */
10066 void
10067 do_pending_stack_adjust ()
10069 if (inhibit_defer_pop == 0)
10071 if (pending_stack_adjust != 0)
10072 adjust_stack (GEN_INT (pending_stack_adjust));
10073 pending_stack_adjust = 0;
10077 /* Expand conditional expressions. */
10079 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
10080 LABEL is an rtx of code CODE_LABEL, in this function and all the
10081 functions here. */
10083 void
10084 jumpifnot (exp, label)
10085 tree exp;
10086 rtx label;
10088 do_jump (exp, label, NULL_RTX);
10091 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
10093 void
10094 jumpif (exp, label)
10095 tree exp;
10096 rtx label;
10098 do_jump (exp, NULL_RTX, label);
10101 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
10102 the result is zero, or IF_TRUE_LABEL if the result is one.
10103 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
10104 meaning fall through in that case.
10106 do_jump always does any pending stack adjust except when it does not
10107 actually perform a jump. An example where there is no jump
10108 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
10110 This function is responsible for optimizing cases such as
10111 &&, || and comparison operators in EXP. */
10113 void
10114 do_jump (exp, if_false_label, if_true_label)
10115 tree exp;
10116 rtx if_false_label, if_true_label;
10118 register enum tree_code code = TREE_CODE (exp);
10119 /* Some cases need to create a label to jump to
10120 in order to properly fall through.
10121 These cases set DROP_THROUGH_LABEL nonzero. */
10122 rtx drop_through_label = 0;
10123 rtx temp;
10124 rtx comparison = 0;
10125 int i;
10126 tree type;
10127 enum machine_mode mode;
10129 #ifdef MAX_INTEGER_COMPUTATION_MODE
10130 check_max_integer_computation_mode (exp);
10131 #endif
10133 emit_queue ();
10135 switch (code)
10137 case ERROR_MARK:
10138 break;
10140 case INTEGER_CST:
10141 temp = integer_zerop (exp) ? if_false_label : if_true_label;
10142 if (temp)
10143 emit_jump (temp);
10144 break;
10146 #if 0
10147 /* This is not true with #pragma weak */
10148 case ADDR_EXPR:
10149 /* The address of something can never be zero. */
10150 if (if_true_label)
10151 emit_jump (if_true_label);
10152 break;
10153 #endif
10155 case NOP_EXPR:
10156 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
10157 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
10158 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
10159 goto normal;
10160 case CONVERT_EXPR:
10161 /* If we are narrowing the operand, we have to do the compare in the
10162 narrower mode. */
10163 if ((TYPE_PRECISION (TREE_TYPE (exp))
10164 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10165 goto normal;
10166 case NON_LVALUE_EXPR:
10167 case REFERENCE_EXPR:
10168 case ABS_EXPR:
10169 case NEGATE_EXPR:
10170 case LROTATE_EXPR:
10171 case RROTATE_EXPR:
10172 /* These cannot change zero->non-zero or vice versa. */
10173 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10174 break;
10176 #if 0
10177 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
10178 a test and can be longer if the test is eliminated. */
10179 case PLUS_EXPR:
10180 /* Reduce to minus. */
10181 exp = build (MINUS_EXPR, TREE_TYPE (exp),
10182 TREE_OPERAND (exp, 0),
10183 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
10184 TREE_OPERAND (exp, 1))));
10185 /* Process as MINUS. */
10186 #endif
10188 case MINUS_EXPR:
10189 /* Non-zero iff operands of minus differ. */
10190 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
10191 TREE_OPERAND (exp, 0),
10192 TREE_OPERAND (exp, 1)),
10193 NE, NE);
10194 break;
10196 case BIT_AND_EXPR:
10197 /* If we are AND'ing with a small constant, do this comparison in the
10198 smallest type that fits. If the machine doesn't have comparisons
10199 that small, it will be converted back to the wider comparison.
10200 This helps if we are testing the sign bit of a narrower object.
10201 combine can't do this for us because it can't know whether a
10202 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
10204 if (! SLOW_BYTE_ACCESS
10205 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10206 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10207 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
10208 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10209 && (type = type_for_mode (mode, 1)) != 0
10210 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10211 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10212 != CODE_FOR_nothing))
10214 do_jump (convert (type, exp), if_false_label, if_true_label);
10215 break;
10217 goto normal;
10219 case TRUTH_NOT_EXPR:
10220 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10221 break;
10223 case TRUTH_ANDIF_EXPR:
10224 if (if_false_label == 0)
10225 if_false_label = drop_through_label = gen_label_rtx ();
10226 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10227 start_cleanup_deferral ();
10228 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10229 end_cleanup_deferral ();
10230 break;
10232 case TRUTH_ORIF_EXPR:
10233 if (if_true_label == 0)
10234 if_true_label = drop_through_label = gen_label_rtx ();
10235 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10236 start_cleanup_deferral ();
10237 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10238 end_cleanup_deferral ();
10239 break;
10241 case COMPOUND_EXPR:
10242 push_temp_slots ();
10243 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10244 preserve_temp_slots (NULL_RTX);
10245 free_temp_slots ();
10246 pop_temp_slots ();
10247 emit_queue ();
10248 do_pending_stack_adjust ();
10249 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10250 break;
10252 case COMPONENT_REF:
10253 case BIT_FIELD_REF:
10254 case ARRAY_REF:
10256 int bitsize, bitpos, unsignedp;
10257 enum machine_mode mode;
10258 tree type;
10259 tree offset;
10260 int volatilep = 0;
10261 int alignment;
10263 /* Get description of this reference. We don't actually care
10264 about the underlying object here. */
10265 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10266 &mode, &unsignedp, &volatilep,
10267 &alignment);
10269 type = type_for_size (bitsize, unsignedp);
10270 if (! SLOW_BYTE_ACCESS
10271 && type != 0 && bitsize >= 0
10272 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10273 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10274 != CODE_FOR_nothing))
10276 do_jump (convert (type, exp), if_false_label, if_true_label);
10277 break;
10279 goto normal;
10282 case COND_EXPR:
10283 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10284 if (integer_onep (TREE_OPERAND (exp, 1))
10285 && integer_zerop (TREE_OPERAND (exp, 2)))
10286 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10288 else if (integer_zerop (TREE_OPERAND (exp, 1))
10289 && integer_onep (TREE_OPERAND (exp, 2)))
10290 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10292 else
10294 register rtx label1 = gen_label_rtx ();
10295 drop_through_label = gen_label_rtx ();
10297 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10299 start_cleanup_deferral ();
10300 /* Now the THEN-expression. */
10301 do_jump (TREE_OPERAND (exp, 1),
10302 if_false_label ? if_false_label : drop_through_label,
10303 if_true_label ? if_true_label : drop_through_label);
10304 /* In case the do_jump just above never jumps. */
10305 do_pending_stack_adjust ();
10306 emit_label (label1);
10308 /* Now the ELSE-expression. */
10309 do_jump (TREE_OPERAND (exp, 2),
10310 if_false_label ? if_false_label : drop_through_label,
10311 if_true_label ? if_true_label : drop_through_label);
10312 end_cleanup_deferral ();
10314 break;
10316 case EQ_EXPR:
10318 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10320 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10321 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10323 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10324 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10325 do_jump
10326 (fold
10327 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10328 fold (build (EQ_EXPR, TREE_TYPE (exp),
10329 fold (build1 (REALPART_EXPR,
10330 TREE_TYPE (inner_type),
10331 exp0)),
10332 fold (build1 (REALPART_EXPR,
10333 TREE_TYPE (inner_type),
10334 exp1)))),
10335 fold (build (EQ_EXPR, TREE_TYPE (exp),
10336 fold (build1 (IMAGPART_EXPR,
10337 TREE_TYPE (inner_type),
10338 exp0)),
10339 fold (build1 (IMAGPART_EXPR,
10340 TREE_TYPE (inner_type),
10341 exp1)))))),
10342 if_false_label, if_true_label);
10345 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10346 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10348 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10349 && !can_compare_p (TYPE_MODE (inner_type)))
10350 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10351 else
10352 comparison = compare (exp, EQ, EQ);
10353 break;
10356 case NE_EXPR:
10358 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10360 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10361 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10363 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10364 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10365 do_jump
10366 (fold
10367 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10368 fold (build (NE_EXPR, TREE_TYPE (exp),
10369 fold (build1 (REALPART_EXPR,
10370 TREE_TYPE (inner_type),
10371 exp0)),
10372 fold (build1 (REALPART_EXPR,
10373 TREE_TYPE (inner_type),
10374 exp1)))),
10375 fold (build (NE_EXPR, TREE_TYPE (exp),
10376 fold (build1 (IMAGPART_EXPR,
10377 TREE_TYPE (inner_type),
10378 exp0)),
10379 fold (build1 (IMAGPART_EXPR,
10380 TREE_TYPE (inner_type),
10381 exp1)))))),
10382 if_false_label, if_true_label);
10385 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10386 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10388 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10389 && !can_compare_p (TYPE_MODE (inner_type)))
10390 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10391 else
10392 comparison = compare (exp, NE, NE);
10393 break;
10396 case LT_EXPR:
10397 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10398 == MODE_INT)
10399 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10400 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10401 else
10402 comparison = compare (exp, LT, LTU);
10403 break;
10405 case LE_EXPR:
10406 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10407 == MODE_INT)
10408 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10409 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10410 else
10411 comparison = compare (exp, LE, LEU);
10412 break;
10414 case GT_EXPR:
10415 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10416 == MODE_INT)
10417 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10418 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10419 else
10420 comparison = compare (exp, GT, GTU);
10421 break;
10423 case GE_EXPR:
10424 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10425 == MODE_INT)
10426 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10427 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10428 else
10429 comparison = compare (exp, GE, GEU);
10430 break;
10432 default:
10433 normal:
10434 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10435 #if 0
10436 /* This is not needed any more and causes poor code since it causes
10437 comparisons and tests from non-SI objects to have different code
10438 sequences. */
10439 /* Copy to register to avoid generating bad insns by cse
10440 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10441 if (!cse_not_expected && GET_CODE (temp) == MEM)
10442 temp = copy_to_reg (temp);
10443 #endif
10444 do_pending_stack_adjust ();
10445 if (GET_CODE (temp) == CONST_INT)
10446 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10447 else if (GET_CODE (temp) == LABEL_REF)
10448 comparison = const_true_rtx;
10449 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10450 && !can_compare_p (GET_MODE (temp)))
10451 /* Note swapping the labels gives us not-equal. */
10452 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10453 else if (GET_MODE (temp) != VOIDmode)
10454 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10455 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10456 GET_MODE (temp), NULL_RTX, 0);
10457 else
10458 abort ();
10461 /* Do any postincrements in the expression that was tested. */
10462 emit_queue ();
10464 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10465 straight into a conditional jump instruction as the jump condition.
10466 Otherwise, all the work has been done already. */
10468 if (comparison == const_true_rtx)
10470 if (if_true_label)
10471 emit_jump (if_true_label);
10473 else if (comparison == const0_rtx)
10475 if (if_false_label)
10476 emit_jump (if_false_label);
10478 else if (comparison)
10479 do_jump_for_compare (comparison, if_false_label, if_true_label);
10481 if (drop_through_label)
10483 /* If do_jump produces code that might be jumped around,
10484 do any stack adjusts from that code, before the place
10485 where control merges in. */
10486 do_pending_stack_adjust ();
10487 emit_label (drop_through_label);
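#if 0
/* Editorial sketch, not compiler code: the branch shape the
   TRUTH_ANDIF_EXPR case above produces for `a && b' when a false label
   exists.  Both operands jump to the same label, and the second is
   never evaluated once the first is known to be zero.  */
static int
andif_jump_shape (int a, int b)
{
  if (a == 0)
    goto if_false;
  if (b == 0)
    goto if_false;
  return 1;
if_false:
  return 0;
}
#endif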
10491 /* Given a comparison expression EXP for values too wide to be compared
10492 with one insn, test the comparison and jump to the appropriate label.
10493 The code of EXP is ignored; we always test GT if SWAP is 0,
10494 and LT if SWAP is 1. */
10496 static void
10497 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10498 tree exp;
10499 int swap;
10500 rtx if_false_label, if_true_label;
10502 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10503 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10504 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10505 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10506 rtx drop_through_label = 0;
10507 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10508 int i;
10510 if (! if_true_label || ! if_false_label)
10511 drop_through_label = gen_label_rtx ();
10512 if (! if_true_label)
10513 if_true_label = drop_through_label;
10514 if (! if_false_label)
10515 if_false_label = drop_through_label;
10517 /* Compare a word at a time, high order first. */
10518 for (i = 0; i < nwords; i++)
10520 rtx comp;
10521 rtx op0_word, op1_word;
10523 if (WORDS_BIG_ENDIAN)
10525 op0_word = operand_subword_force (op0, i, mode);
10526 op1_word = operand_subword_force (op1, i, mode);
10528 else
10530 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10531 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10534 /* All but high-order word must be compared as unsigned. */
10535 comp = compare_from_rtx (op0_word, op1_word,
10536 (unsignedp || i > 0) ? GTU : GT,
10537 unsignedp, word_mode, NULL_RTX, 0);
10538 if (comp == const_true_rtx)
10539 emit_jump (if_true_label);
10540 else if (comp != const0_rtx)
10541 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10543 /* Consider lower words only if these are equal. */
10544 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10545 NULL_RTX, 0);
10546 if (comp == const_true_rtx)
10547 emit_jump (if_false_label);
10548 else if (comp != const0_rtx)
10549 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10552 if (if_false_label)
10553 emit_jump (if_false_label);
10554 if (drop_through_label)
10555 emit_label (drop_through_label);
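#if 0
/* Editorial sketch, not compiler code: the word-at-a-time `greater'
   test implemented above, for an unsigned value stored as NWORDS words
   with the high-order word first.  Only when a word pair compares equal
   is the next lower pair examined.  */
static int
greater_by_parts (unsigned long *op0, unsigned long *op1, int nwords)
{
  int i;
  for (i = 0; i < nwords; i++)
    {
      if (op0[i] > op1[i])
	return 1;		/* decided: greater */
      if (op0[i] != op1[i])
	return 0;		/* decided: less */
    }
  return 0;			/* every word equal: not greater */
}
#endif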
10558 /* Compare OP0 with OP1, word at a time, in mode MODE.
10559 UNSIGNEDP says to do unsigned comparison.
10560 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10562 void
10563 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10564 enum machine_mode mode;
10565 int unsignedp;
10566 rtx op0, op1;
10567 rtx if_false_label, if_true_label;
10569 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10570 rtx drop_through_label = 0;
10571 int i;
10573 if (! if_true_label || ! if_false_label)
10574 drop_through_label = gen_label_rtx ();
10575 if (! if_true_label)
10576 if_true_label = drop_through_label;
10577 if (! if_false_label)
10578 if_false_label = drop_through_label;
10580 /* Compare a word at a time, high order first. */
10581 for (i = 0; i < nwords; i++)
10583 rtx comp;
10584 rtx op0_word, op1_word;
10586 if (WORDS_BIG_ENDIAN)
10588 op0_word = operand_subword_force (op0, i, mode);
10589 op1_word = operand_subword_force (op1, i, mode);
10591 else
10593 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10594 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10597 /* All but high-order word must be compared as unsigned. */
10598 comp = compare_from_rtx (op0_word, op1_word,
10599 (unsignedp || i > 0) ? GTU : GT,
10600 unsignedp, word_mode, NULL_RTX, 0);
10601 if (comp == const_true_rtx)
10602 emit_jump (if_true_label);
10603 else if (comp != const0_rtx)
10604 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10606 /* Consider lower words only if these are equal. */
10607 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10608 NULL_RTX, 0);
10609 if (comp == const_true_rtx)
10610 emit_jump (if_false_label);
10611 else if (comp != const0_rtx)
10612 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10615 if (if_false_label)
10616 emit_jump (if_false_label);
10617 if (drop_through_label)
10618 emit_label (drop_through_label);
10621 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10622 with one insn, test the comparison and jump to the appropriate label. */
10624 static void
10625 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10626 tree exp;
10627 rtx if_false_label, if_true_label;
10629 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10630 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10631 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10632 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10633 int i;
10634 rtx drop_through_label = 0;
10636 if (! if_false_label)
10637 drop_through_label = if_false_label = gen_label_rtx ();
10639 for (i = 0; i < nwords; i++)
10641 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10642 operand_subword_force (op1, i, mode),
10643 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10644 word_mode, NULL_RTX, 0);
10645 if (comp == const_true_rtx)
10646 emit_jump (if_false_label);
10647 else if (comp != const0_rtx)
10648 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10651 if (if_true_label)
10652 emit_jump (if_true_label);
10653 if (drop_through_label)
10654 emit_label (drop_through_label);
10657 /* Jump according to whether OP0 is 0.
10658 We assume that OP0 has an integer mode that is too wide
10659 for the available compare insns. */
10661 void
10662 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10663 rtx op0;
10664 rtx if_false_label, if_true_label;
10666 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10667 rtx part;
10668 int i;
10669 rtx drop_through_label = 0;
10671 /* The fastest way of doing this comparison on almost any machine is to
10672 "or" all the words and compare the result. If all have to be loaded
10673 from memory and this is a very wide item, it's possible this may
10674 be slower, but that's highly unlikely. */
10676 part = gen_reg_rtx (word_mode);
10677 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10678 for (i = 1; i < nwords && part != 0; i++)
10679 part = expand_binop (word_mode, ior_optab, part,
10680 operand_subword_force (op0, i, GET_MODE (op0)),
10681 part, 1, OPTAB_WIDEN);
10683 if (part != 0)
10685 rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
10686 NULL_RTX, 0);
10688 if (comp == const_true_rtx)
10689 emit_jump (if_false_label);
10690 else if (comp == const0_rtx)
10691 emit_jump (if_true_label);
10692 else
10693 do_jump_for_compare (comp, if_false_label, if_true_label);
10695 return;
10698 /* If we couldn't do the "or" simply, do this with a series of compares. */
10699 if (! if_false_label)
10700 drop_through_label = if_false_label = gen_label_rtx ();
10702 for (i = 0; i < nwords; i++)
10704 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
10705 GET_MODE (op0)),
10706 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10707 if (comp == const_true_rtx)
10708 emit_jump (if_false_label);
10709 else if (comp != const0_rtx)
10710 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10713 if (if_true_label)
10714 emit_jump (if_true_label);
10716 if (drop_through_label)
10717 emit_label (drop_through_label);
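#if 0
/* Editorial sketch, not compiler code: the "or all the words" zero test
   preferred above when a wide integer cannot be compared in one insn.  */
static int
zero_by_parts (unsigned long *op0, int nwords)
{
  unsigned long part = op0[0];
  int i;
  for (i = 1; i < nwords; i++)
    part |= op0[i];		/* fold every word into one */
  return part == 0;		/* a single compare then decides the jump */
}
#endif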
10720 /* Given a comparison expression in rtl form, output conditional branches to
10721 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
10723 static void
10724 do_jump_for_compare (comparison, if_false_label, if_true_label)
10725 rtx comparison, if_false_label, if_true_label;
10727 if (if_true_label)
10729 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10730 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
10731 else
10732 abort ();
10734 if (if_false_label)
10735 emit_jump (if_false_label);
10737 else if (if_false_label)
10739 rtx insn;
10740 rtx prev = get_last_insn ();
10741 rtx branch = 0;
10743 /* Output the branch with the opposite condition. Then try to invert
10744 what is generated. If more than one insn is a branch, or if the
10745 branch is not the last insn written, abort. If we can't invert
10746 the branch, make a true label, redirect this jump to it,
10747 emit a jump to the false label, and define the true label. */
10749 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10750 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
10751 else
10752 abort ();
10754 /* Here we get the first insn that was just emitted. It used to be the
10755 case that, on some machines, emitting the branch would discard
10756 the previous compare insn and emit a replacement. This isn't
10757 done anymore, but abort if we see that PREV is deleted. */
10759 if (prev == 0)
10760 insn = get_insns ();
10761 else if (INSN_DELETED_P (prev))
10762 abort ();
10763 else
10764 insn = NEXT_INSN (prev);
10766 for (; insn; insn = NEXT_INSN (insn))
10767 if (GET_CODE (insn) == JUMP_INSN)
10769 if (branch)
10770 abort ();
10771 branch = insn;
10774 if (branch != get_last_insn ())
10775 abort ();
10777 JUMP_LABEL (branch) = if_false_label;
10778 if (! invert_jump (branch, if_false_label))
10780 if_true_label = gen_label_rtx ();
10781 redirect_jump (branch, if_true_label);
10782 emit_jump (if_false_label);
10783 emit_label (if_true_label);
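#if 0
/* Editorial sketch, not compiler code: the fallback shape emitted just
   above when the branch cannot be inverted -- the branch keeps its
   original sense but is routed around an unconditional jump to the
   false label.  */
static int
uninvertible_branch_shape (int cond)
{
  if (cond)
    goto if_true;		/* the branch we could not invert */
  goto if_false;		/* unconditional jump to the false label */
if_true:
  return 1;
if_false:
  return 0;
}
#endif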
10788 /* Generate code for a comparison expression EXP
10789 (including code to compute the values to be compared)
10790 and set (CC0) according to the result.
10791 SIGNED_CODE should be the rtx operation for this comparison for
10792 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10794 We force a stack adjustment unless there are currently
10795 things pushed on the stack that aren't yet used. */
10797 static rtx
10798 compare (exp, signed_code, unsigned_code)
10799 register tree exp;
10800 enum rtx_code signed_code, unsigned_code;
10802 register rtx op0
10803 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10804 register rtx op1
10805 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10806 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
10807 register enum machine_mode mode = TYPE_MODE (type);
10808 int unsignedp = TREE_UNSIGNED (type);
10809 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
10811 #ifdef HAVE_canonicalize_funcptr_for_compare
10812 /* If function pointers need to be "canonicalized" before they can
10813 be reliably compared, then canonicalize them. */
10814 if (HAVE_canonicalize_funcptr_for_compare
10815 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10816 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10817 == FUNCTION_TYPE))
10819 rtx new_op0 = gen_reg_rtx (mode);
10821 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10822 op0 = new_op0;
10825 if (HAVE_canonicalize_funcptr_for_compare
10826 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10827 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10828 == FUNCTION_TYPE))
10830 rtx new_op1 = gen_reg_rtx (mode);
10832 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10833 op1 = new_op1;
10835 #endif
10837 return compare_from_rtx (op0, op1, code, unsignedp, mode,
10838 ((mode == BLKmode)
10839 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10840 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
10843 /* Like compare but expects the values to compare as two rtx's.
10844 The decision as to signed or unsigned comparison must be made by the caller.
10846 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10847 compared.
10849 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10850 size of MODE should be used. */
10852 rtx
10853 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10854 register rtx op0, op1;
10855 enum rtx_code code;
10856 int unsignedp;
10857 enum machine_mode mode;
10858 rtx size;
10859 int align;
10861 rtx tem;
10863 /* If one operand is constant, make it the second one. Only do this
10864 if the other operand is not constant as well. */
10866 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10867 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10869 tem = op0;
10870 op0 = op1;
10871 op1 = tem;
10872 code = swap_condition (code);
10875 if (flag_force_mem)
10877 op0 = force_not_mem (op0);
10878 op1 = force_not_mem (op1);
10881 do_pending_stack_adjust ();
10883 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10884 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10885 return tem;
#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}

/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
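  /* For example, a signed "x < 1" becomes "x <= 0", and "x >= 1"
     becomes "x > 0", so the zero-comparison special cases below
     apply to them as well.  */
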
  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
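  /* For example, "(x & 0x20) != 0" becomes "(x >> 5) & 1", and
     "(x & 0x20) == 0" becomes "((x >> 5) & 1) ^ 1".  */
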
  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */
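      /* E.g., "((x >> 3) & 4) != 0" tests bit 2 of (x >> 3), which is
         just bit 5 of x, so a single shift by 5 suffices.  */
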
      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
              < TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
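      /* (LOAD_EXTEND_OP, where defined, says how the target extends
         sub-word memory loads to a full word; picking the matching
         signedness here can let a later pass delete an extension.)  */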
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (subtarget == 0 || GET_CODE (subtarget) != REG
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
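  /* That is: set TARGET to the "true" value, branch past the correcting
     store when the comparison holds, else fall through and store the
     "false" value.  (With INVERT the two constants trade places.)  */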
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}

/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
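  /* (An original index below the lower bound has wrapped around to a
     huge unsigned value, so the single unsigned "INDEX > RANGE" test
     below rejects both out-of-range directions at once.)  */
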
  emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
  emit_jump_insn (gen_bgtu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
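  /* The address of the wanted table entry is
     TABLE_LABEL + INDEX * (size of one table element), computed
     below as a PLUS of a MULT and a LABEL_REF.  */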
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

#endif /* HAVE_tablejump */