/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
#include "config.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"

#include "bytecode.h"
#include "bc-opcode.h"
#include "bc-typecd.h"
#include "bc-optab.h"
#include "bc-emit.h"
#define CEIL(x,y) (((x) + (y) - 1) / (y))

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int len;
  int offset;
  int reverse;
};

/* Used to generate bytecodes: keep track of size of local variables,
   as well as depth of arithmetic stack.  (Notice that variables are
   stored on the machine's stack, not the arithmetic stack.)  */

int local_vars_size;
extern int stack_depth;
extern int max_stack_depth;
extern struct obstack permanent_obstack;
static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
				    struct move_by_pieces *));
static void group_insns PROTO((rtx));
static void store_constructor PROTO((tree, rtx));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
			      enum machine_mode, int, int, int));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree));
static int fixed_type_p PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx, enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int));
rtx bc_expand_increment PROTO((struct increment_operator *, tree));
tree bc_runtime_type_code PROTO((tree));
rtx bc_allocate_local PROTO((int, int));
void bc_store_memory PROTO((tree, tree));
tree bc_expand_component_address PROTO((tree));
tree bc_expand_address PROTO((tree));
void bc_expand_constructor PROTO((tree));
void bc_adjust_stack PROTO((int));
tree bc_canonicalize_array_ref PROTO((tree));
void bc_load_memory PROTO((tree, tree));
void bc_load_externaddr PROTO((rtx));
void bc_load_externaddr_id PROTO((tree, int));
void bc_load_localaddr PROTO((rtx));
void bc_load_parmaddr PROTO((rtx));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
static void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS 0
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif

/* Maps used to convert modes to const, load, and store bytecodes.  */
enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
/* Initialize maps used to convert modes to const, load, and store
   bytecodes.  */
void
bc_init_mode_to_opcode_maps ()
{
  int mode;

  for (mode = 0; mode < MAX_MACHINE_MODE; mode++)
    mode_to_const_map[mode] =
      mode_to_load_map[mode] =
	mode_to_store_map[mode] = neverneverland;

#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
  mode_to_const_map[(enum machine_mode) SYM] = CONST; \
  mode_to_load_map[(enum machine_mode) SYM] = LOAD; \
  mode_to_store_map[(enum machine_mode) SYM] = STORE;

#include "modemap.def"
#undef DEF_MODEMAP
}
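
/* For illustration only (not part of the original source): each line of
   modemap.def expands through DEF_MODEMAP above.  A hypothetical entry
   such as

       DEF_MODEMAP (SImode, constSI, constSI, constSI, loadSI, storeSI)

   would fill in the three maps for SImode with the const, load, and
   store opcodes named in its last three arguments; the real opcode
   names are the ones listed in modemap.def.  */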

/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx (REG, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}
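
#if 0
/* Illustrative sketch, not part of the original file: how an expression
   like `a[i++]' might use the queue.  The helper name is hypothetical;
   gen_add2_insn is the same generator used by move_by_pieces_1 below.  */
static rtx
example_queue_increment (var)
     rtx var;
{
  /* Queue "var = var + 1" for later.  The QUEUED rtx returned stands
     for the pre-increment value of VAR until emit_queue runs.  */
  return enqueue_insn (var, gen_add2_insn (var, const1_rtx));
}
#endif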

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...))
	 to facilitate use of autoincrement.
	 Make a copy of the contents of the memory location
	 rather than a copy of the address, but not
	 if the value is of mode BLKmode.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  XEXP (x, 0) = QUEUED_VAR (y);
	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (x));
	      emit_insn_before (gen_move_insn (temp, x),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return x;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
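
#if 0
/* Illustrative sketch, not part of the original file: any operand that
   might contain a QUEUED must be laundered through protect_from_queue
   just before being placed in an insn.  The helper name is hypothetical.  */
static void
example_protected_move (dest, src)
     rtx dest, src;
{
  dest = protect_from_queue (dest, 1);	/* will be modified */
  src = protect_from_queue (src, 0);	/* read only */
  emit_move_insn (dest, src);
}
#endif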

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
	|| queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

/* Verify that the queue is empty at the start of a function.  */

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

#ifdef HAVE_extendqfhf2
      if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfsf2
      if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfdf2
      if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfxf2
      if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqftf2
      if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendhfsf2
      if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfdf2
      if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfxf2
      if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhftf2
      if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsfxf2
      if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddfxf2
      if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;
	    }
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion */			/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi
      if (HAVE_truncsipsi)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi
	  if (HAVE_extendpsisi)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if ((can_extend_p (to_mode, intermediate, unsignedp)
		 != CODE_FOR_nothing)
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
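
#if 0
/* Illustrative sketch, not part of the original file: widening a
   QImode pseudo into a fresh SImode pseudo with sign-extension.  */
static rtx
example_widen (qireg)
     rtx qireg;			/* assumed to have mode QImode */
{
  rtx sireg = gen_reg_rtx (SImode);
  convert_move (sireg, qireg, 0);	/* 0 = signed, so SIGN_EXTEND */
  return sireg;
}
#endif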

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case, we must be narrowing
     the operand.  */

  if (GET_CODE (x) == CONST_INT
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || GET_CODE (x) == REG)))))
    return gen_lowpart (mode, x);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
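
/* A worked example of the CONST_INT special case above (illustrative,
   not part of the original file): with a 32-bit HOST_WIDE_INT,
   converting (const_int -1) to an unsigned 64-bit integer mode must
   produce a CONST_DOUBLE whose high word is zero (0x00000000ffffffff),
   not the all-ones value that gen_lowpart alone would give.  */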

/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
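
/* A worked example of the count above (illustrative, not part of the
   original file): on a target with 4-byte words and align == 4, moving
   l == 10 bytes costs two SImode moves (8 bytes), one HImode move
   (2 bytes), and no QImode moves: 3 insns total.  emit_block_move
   compares such counts against MOVE_RATIO to choose between
   move_by_pieces and a block-move insn or library call.  */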

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      from1 =
	(data->autinc_from
	 ? gen_rtx (MEM, mode, data->from_addr)
	 : change_address (data->from, mode,
			   plus_constant (data->from_addr, data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}

/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && (unsigned HOST_WIDE_INT) INTVAL (size) <= GET_MODE_MASK (mode)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
			 XEXP (y, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#endif
    }
}
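
#if 0
/* Illustrative sketch, not part of the original file: copying one
   BLKmode object to another.  DST and SRC are hypothetical BLKmode
   MEMs of a 12-byte aggregate, assumed to be word-aligned.  */
emit_block_move (dst, src, GEN_INT (12), UNITS_PER_WORD);
#endif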

/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  last = get_last_insn ();
  pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
			   GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
  rtx pat, last;

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx (REG, word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  last = get_last_insn ();
  pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
			    GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}

/* Mark NREGS consecutive regs, starting at REGNO, as being live now.  */

void
use_regs (regno, nregs)
     int regno;
     int nregs;
{
  int i;

  for (i = 0; i < nregs; i++)
    emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
}

/* Mark the instructions since PREV as a libcall block.
   Add a REG_LIBCALL note to the first insn after PREV and a REG_RETVAL
   note to the most recent insn.  */

static void
group_insns (prev)
     rtx prev;
{
  rtx insn_first;
  rtx insn_last;

  /* Find the instructions to mark.  */
  if (prev)
    insn_first = NEXT_INSN (prev);
  else
    insn_first = get_insns ();

  insn_last = get_last_insn ();

  REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
				   REG_NOTES (insn_last));

  REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
				    REG_NOTES (insn_first));
}

/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */

void
clear_storage (object, size)
     rtx object;
     int size;
{
  if (GET_MODE (object) == BLKmode)
    {
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 0,
			 VOIDmode, 3,
			 XEXP (object, 0), Pmode, const0_rtx, Pmode,
			 GEN_INT (size), Pmode);
#else
      emit_library_call (bzero_libfunc, 0,
			 VOIDmode, 2,
			 XEXP (object, 0), Pmode,
			 GEN_INT (size), Pmode);
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
}

/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  return emit_move_insn_1 (x, y);
}

/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
    submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
			     (class == MODE_COMPLEX_INT
			      ? MODE_INT : MODE_FLOAT),
			     0);

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && submode != BLKmode
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));
      rtx prev = get_last_insn ();

      /* Tell flow that the whole of the destination is being set.  */
      if (GET_CODE (x) == REG)
	emit_insn (gen_rtx (CLOBBER, VOIDmode, x));

      /* If this is a stack, push the highpart first, so it
	 will be in the argument order.

	 In that case, change_address is used only to convert
	 the mode, not to change the address.  */
      if (stack)
	{
#ifdef STACK_GROWS_DOWNWARD
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_highpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_lowpart (submode, y)));
#else
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_lowpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_highpart (submode, y)));
#endif
	}
      else
	{
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_highpart (submode, x), gen_highpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_lowpart (submode, x), gen_lowpart (submode, y)));
	}

      group_insns (prev);

      return get_last_insn ();
    }

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx prev_insn = get_last_insn ();

      for (i = 0;
	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	   i++)
	{
	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))
	    {
	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);
	    }
	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)
	    abort ();

	  last_insn = emit_move_insn (xpart, ypart);
	}
      /* Mark these insns as a libcall block.  */
      group_insns (prev_insn);

      return last_insn;
    }
  else
    abort ();
}

/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
			  - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

/* Return an rtx, built from STACK_PUSH_CODE, that addresses the next
   slot pushed on the stack.  */

rtx
gen_push_operand ()
{
  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}
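
#if 0
/* Illustrative sketch, not part of the original file: gen_push_operand
   yields the (pre_dec (reg sp)) or (pre_inc (reg sp)) address selected
   by STACK_PUSH_CODE, so a word push can be written as a move into
   (MEM (gen_push_operand ())), much as emit_push_insn below does for
   BLKmode pushes.  SOME_REG here is a hypothetical word_mode value.  */
emit_move_insn (gen_rtx (MEM, word_mode, gen_push_operand ()), some_reg);
#endif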

/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.  */

void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
		args_addr, args_so_far)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
	abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (partial != 0)
	xinner = change_address (xinner, BLKmode,
				 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = used;
#endif

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && GET_CODE (size) == CONST_INT
	  && skip == 0
	  && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
	      < MOVE_RATIO)
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
	      || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
	      || PUSH_ROUNDING (align) == align)
	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
			  INTVAL (size) - used, align);
	}
      else
#endif /* PUSH_ROUNDING */
	{
	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (GET_CODE (size) == CONST_INT)
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     GEN_INT (used), NULL_RTX, 0,
				     OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == downward);
	      extra = 0;
	    }
	  else if (GET_CODE (args_so_far) == CONST_INT)
	    temp = memory_address (BLKmode,
				   plus_constant (args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (gen_rtx (PLUS, Pmode,
							   args_addr, args_so_far),
						  skip));

	  /* TEMP is the address of the block.  Copy the data there.  */
	  if (GET_CODE (size) == CONST_INT
	      && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
		  < MOVE_RATIO))
	    {
	      move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
			      INTVAL (size), align);
	      goto ret;
	    }
	  /* Try the most limited insn first, because there's no point
	     including more than one in the machine description unless
	     the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
	  if (HAVE_movstrqi
	      && GET_CODE (size) == CONST_INT
	      && ((unsigned) INTVAL (size)
		  < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
	    {
	      rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat != 0)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif
#ifdef HAVE_movstrhi
	  if (HAVE_movstrhi
	      && GET_CODE (size) == CONST_INT
	      && ((unsigned) INTVAL (size)
		  < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
	    {
	      rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2094 xinner, size, GEN_INT (align));
2095 if (pat != 0)
2097 emit_insn (pat);
2098 goto ret;
2101 #endif
2102 #ifdef HAVE_movstrsi
2103 if (HAVE_movstrsi)
2105 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2106 xinner, size, GEN_INT (align));
2107 if (pat != 0)
2109 emit_insn (pat);
2110 goto ret;
2113 #endif
2114 #ifdef HAVE_movstrdi
2115 if (HAVE_movstrdi)
2117 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2118 xinner, size, GEN_INT (align));
2119 if (pat != 0)
2121 emit_insn (pat);
2122 goto ret;
2125 #endif
2127 #ifndef ACCUMULATE_OUTGOING_ARGS
2128 /* If the source is referenced relative to the stack pointer,
2129 copy it to another register to stabilize it. We do not need
2130 to do this if we know that we won't be changing sp. */
2132 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2133 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2134 temp = copy_to_reg (temp);
2135 #endif
2137 /* Make inhibit_defer_pop nonzero around the library call
2138 to force it to pop the bcopy-arguments right away. */
2139 NO_DEFER_POP;
2140 #ifdef TARGET_MEM_FUNCTIONS
2141 emit_library_call (memcpy_libfunc, 0,
2142 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2143 convert_to_mode (TYPE_MODE (sizetype),
2144 size, TREE_UNSIGNED (sizetype)),
2145 TYPE_MODE (sizetype));
2146 #else
2147 emit_library_call (bcopy_libfunc, 0,
2148 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2149 convert_to_mode (TYPE_MODE (sizetype),
2150 size, TREE_UNSIGNED (sizetype)),
2151 TYPE_MODE (sizetype));
2152 #endif
2153 OK_DEFER_POP;
2156 else if (partial > 0)
2158 /* Scalar partly in registers. */
2160 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2161 int i;
2162 int not_stack;
2163 /* # words of start of argument
2164 that we must make space for but need not store. */
2165 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2166 int args_offset = INTVAL (args_so_far);
2167 int skip;
2169 /* Push padding now if padding above and stack grows down,
2170 or if padding below and stack grows up.
2171 But if space already allocated, this has already been done. */
2172 if (extra && args_addr == 0
2173 && where_pad != none && where_pad != stack_direction)
2174 anti_adjust_stack (GEN_INT (extra));
2176 /* If we make space by pushing it, we might as well push
2177 the real data. Otherwise, we can leave OFFSET nonzero
2178 and leave the space uninitialized. */
2179 if (args_addr == 0)
2180 offset = 0;
2182 /* Now NOT_STACK gets the number of words that we don't need to
2183 allocate on the stack. */
2184 not_stack = partial - offset;
2186 /* If the partial register-part of the arg counts in its stack size,
2187 skip the part of stack space corresponding to the registers.
2188 Otherwise, start copying to the beginning of the stack space,
2189 by setting SKIP to 0. */
2190 #ifndef REG_PARM_STACK_SPACE
2191 skip = 0;
2192 #else
2193 skip = not_stack;
2194 #endif
2196 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2197 x = validize_mem (force_const_mem (mode, x));
2199 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2200 SUBREGs of such registers are not allowed. */
2201 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2202 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2203 x = copy_to_reg (x);
2205 /* Loop over all the words allocated on the stack for this arg. */
2206 /* We can do it by words, because any scalar bigger than a word
2207 has a size a multiple of a word. */
2208 #ifndef PUSH_ARGS_REVERSED
2209 for (i = not_stack; i < size; i++)
2210 #else
2211 for (i = size - 1; i >= not_stack; i--)
2212 #endif
2213 if (i >= not_stack + offset)
2214 emit_push_insn (operand_subword_force (x, i, mode),
2215 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2216 0, args_addr,
2217 GEN_INT (args_offset + ((i - not_stack + skip)
2218 * UNITS_PER_WORD)));
2220 else
2222 rtx addr;
2224 /* Push padding now if padding above and stack grows down,
2225 or if padding below and stack grows up.
2226 But if space already allocated, this has already been done. */
2227 if (extra && args_addr == 0
2228 && where_pad != none && where_pad != stack_direction)
2229 anti_adjust_stack (GEN_INT (extra));
2231 #ifdef PUSH_ROUNDING
2232 if (args_addr == 0)
2233 addr = gen_push_operand ();
2234 else
2235 #endif
2236 if (GET_CODE (args_so_far) == CONST_INT)
2237 addr
2238 = memory_address (mode,
2239 plus_constant (args_addr, INTVAL (args_so_far)));
2240 else
2241 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2242 args_so_far));
2244 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2247 ret:
2248 /* If part should go in registers, copy that part
2249 into the appropriate registers. Do this now, at the end,
2250 since mem-to-mem copies above may do function calls. */
2251 if (partial > 0 && reg != 0)
2252 move_block_to_reg (REGNO (reg), x, partial, mode);
2254 if (extra && args_addr == 0 && where_pad == stack_direction)
2255 anti_adjust_stack (GEN_INT (extra));
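/* A minimal call sketch for the common case, assuming a word-sized
   constant argument with no partial-register part, no padding, and no
   preallocated argument block:

     emit_push_insn (GEN_INT (42), SImode, integer_type_node, NULL_RTX,
                     GET_MODE_SIZE (SImode), 0, NULL_RTX, 0,
                     NULL_RTX, const0_rtx);

   SIZE is needed only for BLKmode, PARTIAL and REG of zero mean no
   registers take part of the value, and ARGS_ADDR of zero selects
   true push insns.  */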
2258 /* Expand an assignment that stores the value of FROM into TO.
2259 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2260 (This may contain a QUEUED rtx;
2261 if the value is constant, this rtx is a constant.)
2262 Otherwise, the returned value is NULL_RTX.
2264 SUGGEST_REG is no longer actually used.
2265 It used to mean, copy the value through a register
2266 and return that register, if that is possible.
2267 We now use WANT_VALUE to decide whether to do this. */
2269 rtx
2270 expand_assignment (to, from, want_value, suggest_reg)
2271 tree to, from;
2272 int want_value;
2273 int suggest_reg;
2275 register rtx to_rtx = 0;
2276 rtx result;
2278 /* Don't crash if the lhs of the assignment was erroneous. */
2280 if (TREE_CODE (to) == ERROR_MARK)
2282 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2283 return want_value ? result : NULL_RTX;
2286 if (output_bytecode)
2288 tree dest_innermost;
2290 bc_expand_expr (from);
2291 bc_emit_instruction (dup);
2293 dest_innermost = bc_expand_address (to);
2295 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2296 take care of it here. */
2298 bc_store_memory (TREE_TYPE (to), dest_innermost);
2299 return NULL;
2302 /* Assignment of a structure component needs special treatment
2303 if the structure component's rtx is not simply a MEM.
2304 Assignment of an array element at a constant index
2305 has the same problem. */
2307 if (TREE_CODE (to) == COMPONENT_REF
2308 || TREE_CODE (to) == BIT_FIELD_REF
2309 || (TREE_CODE (to) == ARRAY_REF
2310 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2311 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2313 enum machine_mode mode1;
2314 int bitsize;
2315 int bitpos;
2316 tree offset;
2317 int unsignedp;
2318 int volatilep = 0;
2319 tree tem;
2321 push_temp_slots ();
2322 tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2323 &mode1, &unsignedp, &volatilep);
2325 /* If we are going to use store_bit_field and extract_bit_field,
2326 make sure to_rtx will be safe for multiple use. */
2328 if (mode1 == VOIDmode && want_value)
2329 tem = stabilize_reference (tem);
2331 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2332 if (offset != 0)
2334 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2336 if (GET_CODE (to_rtx) != MEM)
2337 abort ();
2338 to_rtx = change_address (to_rtx, VOIDmode,
2339 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2340 force_reg (Pmode, offset_rtx)));
2342 if (volatilep)
2344 if (GET_CODE (to_rtx) == MEM)
2345 MEM_VOLATILE_P (to_rtx) = 1;
2346 #if 0 /* This was turned off because, when a field is volatile
2347 in an object which is not volatile, the object may be in a register,
2348 and then we would abort over here. */
2349 else
2350 abort ();
2351 #endif
2354 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2355 (want_value
2356 /* Spurious cast makes HPUX compiler happy. */
2357 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2358 : VOIDmode),
2359 unsignedp,
2360 /* Required alignment of containing datum. */
2361 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2362 int_size_in_bytes (TREE_TYPE (tem)));
2363 preserve_temp_slots (result);
2364 free_temp_slots ();
2365 pop_temp_slots ();
2367 /* If the value is meaningful, convert RESULT to the proper mode.
2368 Otherwise, return nothing. */
2369 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2370 TYPE_MODE (TREE_TYPE (from)),
2371 result,
2372 TREE_UNSIGNED (TREE_TYPE (to)))
2373 : NULL_RTX);
2376 /* If the rhs is a function call and its value is not an aggregate,
2377 call the function before we start to compute the lhs.
2378 This is needed for correct code for cases such as
2379 val = setjmp (buf) on machines where reference to val
2380 requires loading up part of an address in a separate insn. */
2381 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from))
2383 rtx value;
2385 push_temp_slots ();
2386 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2387 if (to_rtx == 0)
2388 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2389 emit_move_insn (to_rtx, value);
2390 preserve_temp_slots (to_rtx);
2391 free_temp_slots ();
2392 pop_temp_slots ();
2393 return want_value ? to_rtx : NULL_RTX;
2396 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2397 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2399 if (to_rtx == 0)
2400 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2402 /* Don't move directly into a return register. */
2403 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2405 rtx temp;
2407 push_temp_slots ();
2408 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2409 emit_move_insn (to_rtx, temp);
2410 preserve_temp_slots (to_rtx);
2411 free_temp_slots ();
2412 pop_temp_slots ();
2413 return want_value ? to_rtx : NULL_RTX;
2416 /* In case we are returning the contents of an object which overlaps
2417 the place the value is being stored, use a safe function when copying
2418 a value through a pointer into a structure value return block. */
2419 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2420 && current_function_returns_struct
2421 && !current_function_returns_pcc_struct)
2423 rtx from_rtx, size;
2425 push_temp_slots ();
2426 size = expr_size (from);
2427 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2429 #ifdef TARGET_MEM_FUNCTIONS
2430 emit_library_call (memcpy_libfunc, 0,
2431 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2432 XEXP (from_rtx, 0), Pmode,
2433 convert_to_mode (TYPE_MODE (sizetype),
2434 size, TREE_UNSIGNED (sizetype)),
2435 TYPE_MODE (sizetype));
2436 #else
2437 emit_library_call (bcopy_libfunc, 0,
2438 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2439 XEXP (to_rtx, 0), Pmode,
2440 convert_to_mode (TYPE_MODE (sizetype),
2441 size, TREE_UNSIGNED (sizetype)),
2442 TYPE_MODE (sizetype));
2443 #endif
2445 preserve_temp_slots (to_rtx);
2446 free_temp_slots ();
2447 pop_temp_slots ();
2448 return want_value ? to_rtx : NULL_RTX;
2451 /* Compute FROM and store the value in the rtx we got. */
2453 push_temp_slots ();
2454 result = store_expr (from, to_rtx, want_value);
2455 preserve_temp_slots (result);
2456 free_temp_slots ();
2457 pop_temp_slots ();
2458 return want_value ? result : NULL_RTX;
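/* A sketch of a typical call, with `i_decl', `j_ref' and `type'
   standing in for the front end's tree nodes: expanding `i = j + 1'
   purely for its side effect would be

     expand_assignment (i_decl, build (PLUS_EXPR, type, j_ref,
                                       integer_one_node), 0, 0);

   where WANT_VALUE of 0 makes the return value NULL_RTX.  */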
2461 /* Generate code for computing expression EXP,
2462 and storing the value into TARGET.
2463 TARGET may contain a QUEUED rtx.
2465 If WANT_VALUE is nonzero, return a copy of the value
2466 not in TARGET, so that we can be sure to use the proper
2467 value in a containing expression even if TARGET has something
2468 else stored in it. If possible, we copy the value through a pseudo
2469 and return that pseudo. Or, if the value is constant, we try to
2470 return the constant. In some cases, we return a pseudo
2471 copied *from* TARGET.
2473 If the mode is BLKmode then we may return TARGET itself.
2474 It turns out that in BLKmode it doesn't cause a problem,
2475 because C has no operators that could combine two different
2476 assignments into the same BLKmode object with different values
2477 with no sequence point. Will other languages need this to
2478 be more thorough?
2480 If WANT_VALUE is 0, we return NULL, to make sure
2481 to catch quickly any cases where the caller uses the value
2482 and fails to set WANT_VALUE. */
2484 rtx
2485 store_expr (exp, target, want_value)
2486 register tree exp;
2487 register rtx target;
2488 int want_value;
2490 register rtx temp;
2491 int dont_return_target = 0;
2493 if (TREE_CODE (exp) == COMPOUND_EXPR)
2495 /* Perform first part of compound expression, then assign from second
2496 part. */
2497 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2498 emit_queue ();
2499 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
2501 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2503 /* For conditional expression, get safe form of the target. Then
2504 test the condition, doing the appropriate assignment on either
2505 side. This avoids the creation of unnecessary temporaries.
2506 For non-BLKmode, it is more efficient not to do this. */
2508 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2510 emit_queue ();
2511 target = protect_from_queue (target, 1);
2513 NO_DEFER_POP;
2514 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2515 store_expr (TREE_OPERAND (exp, 1), target, 0);
2516 emit_queue ();
2517 emit_jump_insn (gen_jump (lab2));
2518 emit_barrier ();
2519 emit_label (lab1);
2520 store_expr (TREE_OPERAND (exp, 2), target, 0);
2521 emit_queue ();
2522 emit_label (lab2);
2523 OK_DEFER_POP;
2524 return want_value ? target : NULL_RTX;
2526 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
2527 && GET_MODE (target) != BLKmode)
2528 /* If target is in memory and caller wants value in a register instead,
2529 arrange that. Pass TARGET as target for expand_expr so that,
2530 if EXP is another assignment, WANT_VALUE will be nonzero for it.
2531 We know expand_expr will not use the target in that case.
2532 Don't do this if TARGET is volatile because we are supposed
2533 to write it and then read it. */
2535 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2536 GET_MODE (target), 0);
2537 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2538 temp = copy_to_reg (temp);
2539 dont_return_target = 1;
2541 else if (queued_subexp_p (target))
2542 /* If target contains a postincrement, let's not risk
2543 using it as the place to generate the rhs. */
2545 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2547 /* Expand EXP into a new pseudo. */
2548 temp = gen_reg_rtx (GET_MODE (target));
2549 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2551 else
2552 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2554 /* If target is volatile, ANSI requires accessing the value
2555 *from* the target, if it is accessed. So make that happen.
2556 In no case return the target itself. */
2557 if (! MEM_VOLATILE_P (target) && want_value)
2558 dont_return_target = 1;
2560 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2561 /* If this is a scalar in a register that is stored in a wider mode
2562 than the declared mode, compute the result into its declared mode
2563 and then convert to the wider mode. Our value is the computed
2564 expression. */
2566 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2567 convert_move (SUBREG_REG (target), temp,
2568 SUBREG_PROMOTED_UNSIGNED_P (target));
2569 return want_value ? temp : NULL_RTX;
2571 else
2573 temp = expand_expr (exp, target, GET_MODE (target), 0);
2574 /* DO return TARGET if it's a specified hardware register.
2575 expand_return relies on this.
2576 If TARGET is a volatile mem ref, either return TARGET
2577 or return a reg copied *from* TARGET; ANSI requires this.
2579 Otherwise, if TEMP is not TARGET, return TEMP
2580 if it is constant (for efficiency),
2581 or if we really want the correct value. */
2582 if (!(target && GET_CODE (target) == REG
2583 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2584 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
2585 && temp != target
2586 && (CONSTANT_P (temp) || want_value))
2587 dont_return_target = 1;
2590 /* If value was not generated in the target, store it there.
2591 Convert the value to TARGET's type first if necessary. */
2593 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2595 target = protect_from_queue (target, 1);
2596 if (GET_MODE (temp) != GET_MODE (target)
2597 && GET_MODE (temp) != VOIDmode)
2599 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2600 if (dont_return_target)
2602 /* In this case, we will return TEMP,
2603 so make sure it has the proper mode.
2604 But don't forget to store the value into TARGET. */
2605 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2606 emit_move_insn (target, temp);
2608 else
2609 convert_move (target, temp, unsignedp);
2612 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2614 /* Handle copying a string constant into an array.
2615 The string constant may be shorter than the array.
2616 So copy just the string's actual length, and clear the rest. */
2617 rtx size;
2619 /* Get the size of the data type of the string,
2620 which is actually the size of the target. */
2621 size = expr_size (exp);
2622 if (GET_CODE (size) == CONST_INT
2623 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2624 emit_block_move (target, temp, size,
2625 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2626 else
2628 /* Compute the size of the data to copy from the string. */
2629 tree copy_size
2630 = size_binop (MIN_EXPR,
2631 size_binop (CEIL_DIV_EXPR,
2632 TYPE_SIZE (TREE_TYPE (exp)),
2633 size_int (BITS_PER_UNIT)),
2634 convert (sizetype,
2635 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
2636 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2637 VOIDmode, 0);
2638 rtx label = 0;
2640 /* Copy that much. */
2641 emit_block_move (target, temp, copy_size_rtx,
2642 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2644 /* Figure out how much is left in TARGET
2645 that we have to clear. */
2646 if (GET_CODE (copy_size_rtx) == CONST_INT)
2648 temp = plus_constant (XEXP (target, 0),
2649 TREE_STRING_LENGTH (exp));
2650 size = plus_constant (size,
2651 - TREE_STRING_LENGTH (exp));
2653 else
2655 enum machine_mode size_mode = Pmode;
2657 temp = force_reg (Pmode, XEXP (target, 0));
2658 temp = expand_binop (size_mode, add_optab, temp,
2659 copy_size_rtx, NULL_RTX, 0,
2660 OPTAB_LIB_WIDEN);
2662 size = expand_binop (size_mode, sub_optab, size,
2663 copy_size_rtx, NULL_RTX, 0,
2664 OPTAB_LIB_WIDEN);
2666 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2667 GET_MODE (size), 0, 0);
2668 label = gen_label_rtx ();
2669 emit_jump_insn (gen_blt (label));
2672 if (size != const0_rtx)
2674 #ifdef TARGET_MEM_FUNCTIONS
2675 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
2676 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2677 #else
2678 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2679 temp, Pmode, size, Pmode);
2680 #endif
2682 if (label)
2683 emit_label (label);
2686 else if (GET_MODE (temp) == BLKmode)
2687 emit_block_move (target, temp, expr_size (exp),
2688 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2689 else
2690 emit_move_insn (target, temp);
2693 if (dont_return_target && GET_CODE (temp) != MEM)
2694 return temp;
2695 if (want_value && GET_MODE (target) != BLKmode)
2696 return copy_to_reg (target);
2697 if (want_value)
2698 return target;
2699 return NULL_RTX;
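/* For example, in the STRING_CST path above, `char buf[8] = "hi";'
   block-moves the 3 string bytes (counting the terminating null) into
   BUF and then clears the remaining 5 bytes with the memset or bzero
   library call, so every byte of BUF ends up well defined.  */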
2702 /* Store the value of constructor EXP into the rtx TARGET.
2703 TARGET is either a REG or a MEM. */
2705 static void
2706 store_constructor (exp, target)
2707 tree exp;
2708 rtx target;
2710 tree type = TREE_TYPE (exp);
2712 /* We know our target cannot conflict, since safe_from_p has been called. */
2713 #if 0
2714 /* Don't try copying piece by piece into a hard register
2715 since that is vulnerable to being clobbered by EXP.
2716 Instead, construct in a pseudo register and then copy it all. */
2717 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2719 rtx temp = gen_reg_rtx (GET_MODE (target));
2720 store_constructor (exp, temp);
2721 emit_move_insn (target, temp);
2722 return;
2724 #endif
2726 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
2727 || TREE_CODE (type) == QUAL_UNION_TYPE)
2729 register tree elt;
2731 /* Inform later passes that the whole union value is dead. */
2732 if (TREE_CODE (type) == UNION_TYPE
2733 || TREE_CODE (type) == QUAL_UNION_TYPE)
2734 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2736 /* If we are building a static constructor into a register,
2737 set the initial value as zero so we can fold the value into
2738 a constant. */
2739 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2740 emit_move_insn (target, const0_rtx);
2742 /* If the constructor has fewer fields than the structure,
2743 clear the whole structure first. */
2744 else if (list_length (CONSTRUCTOR_ELTS (exp))
2745 != list_length (TYPE_FIELDS (type)))
2746 clear_storage (target, int_size_in_bytes (type));
2747 else
2748 /* Inform later passes that the old value is dead. */
2749 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2751 /* Store each element of the constructor into
2752 the corresponding field of TARGET. */
2754 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2756 register tree field = TREE_PURPOSE (elt);
2757 register enum machine_mode mode;
2758 int bitsize;
2759 int bitpos;
2760 int unsignedp;
2762 /* Just ignore missing fields.
2763 We cleared the whole structure, above,
2764 if any fields are missing. */
2765 if (field == 0)
2766 continue;
2768 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2769 unsignedp = TREE_UNSIGNED (field);
2770 mode = DECL_MODE (field);
2771 if (DECL_BIT_FIELD (field))
2772 mode = VOIDmode;
2774 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2775 /* ??? This case remains to be written. */
2776 abort ();
2778 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2780 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2781 /* The alignment of TARGET is
2782 at least what its type requires. */
2783 VOIDmode, 0,
2784 TYPE_ALIGN (type) / BITS_PER_UNIT,
2785 int_size_in_bytes (type));
2788 else if (TREE_CODE (type) == ARRAY_TYPE)
2790 register tree elt;
2791 register int i;
2792 tree domain = TYPE_DOMAIN (type);
2793 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2794 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2795 tree elttype = TREE_TYPE (type);
2797 /* If the constructor has fewer fields than the structure,
2798 clear the whole structure first. Similarly if this is a
2799 static constructor of a non-BLKmode object.
2801 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2802 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2803 clear_storage (target, int_size_in_bytes (type));
2804 else
2805 /* Inform later passes that the old value is dead. */
2806 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2808 /* Store each element of the constructor into
2809 the corresponding element of TARGET, determined
2810 by counting the elements. */
2811 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2812 elt;
2813 elt = TREE_CHAIN (elt), i++)
2815 register enum machine_mode mode;
2816 int bitsize;
2817 int bitpos;
2818 int unsignedp;
2819 tree index = TREE_PURPOSE (elt);
2820 rtx xtarget = target;
2822 mode = TYPE_MODE (elttype);
2823 bitsize = GET_MODE_BITSIZE (mode);
2824 unsignedp = TREE_UNSIGNED (elttype);
2826 if (index != 0 && TREE_CODE (index) != INTEGER_CST)
2828 /* We don't currently allow variable indices in a
2829 C initializer, but let's try here to support them. */
2830 rtx pos_rtx, addr, xtarget;
2831 tree position;
2833 position = size_binop (MULT_EXPR, index, TYPE_SIZE (elttype));
2834 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
2835 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
2836 xtarget = change_address (target, mode, addr);
2837 store_expr (TREE_VALUE (elt), xtarget, 0);
2839 else
2841 if (index != 0)
2842 bitpos = (TREE_INT_CST_LOW (index)
2843 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2844 else
2845 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2847 store_field (xtarget, bitsize, bitpos, mode, TREE_VALUE (elt),
2848 /* The alignment of TARGET is
2849 at least what its type requires. */
2850 VOIDmode, 0,
2851 TYPE_ALIGN (type) / BITS_PER_UNIT,
2852 int_size_in_bytes (type));
2857 else
2858 abort ();
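/* For example, given `struct { int a, b; } s = { 1 };', the
   constructor lists fewer elements than the type has fields, so the
   record case above first clears all of S with clear_storage and then
   stores 1 into field A via store_field, leaving B zero.  */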
2861 /* Store the value of EXP (an expression tree)
2862 into a subfield of TARGET which has mode MODE and occupies
2863 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2864 If MODE is VOIDmode, it means that we are storing into a bit-field.
2866 If VALUE_MODE is VOIDmode, return nothing in particular.
2867 UNSIGNEDP is not used in this case.
2869 Otherwise, return an rtx for the value stored. This rtx
2870 has mode VALUE_MODE if that is convenient to do.
2871 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2873 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2874 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
2876 static rtx
2877 store_field (target, bitsize, bitpos, mode, exp, value_mode,
2878 unsignedp, align, total_size)
2879 rtx target;
2880 int bitsize, bitpos;
2881 enum machine_mode mode;
2882 tree exp;
2883 enum machine_mode value_mode;
2884 int unsignedp;
2885 int align;
2886 int total_size;
2888 HOST_WIDE_INT width_mask = 0;
2890 if (bitsize < HOST_BITS_PER_WIDE_INT)
2891 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
2893 /* If we are storing into an unaligned field of an aligned union that is
2894 in a register, we may have the mode of TARGET being an integer mode but
2895 MODE == BLKmode. In that case, get an aligned object whose size and
2896 alignment are the same as TARGET and store TARGET into it (we can avoid
2897 the store if the field being stored is the entire width of TARGET). Then
2898 call ourselves recursively to store the field into a BLKmode version of
2899 that object. Finally, load from the object into TARGET. This is not
2900 very efficient in general, but should only be slightly more expensive
2901 than the otherwise-required unaligned accesses. Perhaps this can be
2902 cleaned up later. */
2904 if (mode == BLKmode
2905 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2907 rtx object = assign_stack_temp (GET_MODE (target),
2908 GET_MODE_SIZE (GET_MODE (target)), 0);
2909 rtx blk_object = copy_rtx (object);
2911 PUT_MODE (blk_object, BLKmode);
2913 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2914 emit_move_insn (object, target);
2916 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2917 align, total_size);
2919 emit_move_insn (target, object);
2921 return target;
2924 /* If the structure is in a register or if the component
2925 is a bit field, we cannot use addressing to access it.
2926 Use bit-field techniques or SUBREG to store in it. */
2928 if (mode == VOIDmode
2929 || (mode != BLKmode && ! direct_store[(int) mode])
2930 || GET_CODE (target) == REG
2931 || GET_CODE (target) == SUBREG
2932 /* If the field isn't aligned enough to store as an ordinary memref,
2933 store it as a bit field. */
2934 || (STRICT_ALIGNMENT
2935 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
2936 || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
2938 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2939 /* Store the value in the bitfield. */
2940 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2941 if (value_mode != VOIDmode)
2943 /* The caller wants an rtx for the value. */
2944 /* If possible, avoid refetching from the bitfield itself. */
2945 if (width_mask != 0
2946 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2948 tree count;
2949 enum machine_mode tmode;
2951 if (unsignedp)
2952 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
2953 tmode = GET_MODE (temp);
2954 if (tmode == VOIDmode)
2955 tmode = value_mode;
2956 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
2957 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
2958 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
2960 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2961 NULL_RTX, value_mode, 0, align,
2962 total_size);
2964 return const0_rtx;
2966 else
2968 rtx addr = XEXP (target, 0);
2969 rtx to_rtx;
2971 /* If a value is wanted, it must be the lhs;
2972 so make the address stable for multiple use. */
2974 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2975 && ! CONSTANT_ADDRESS_P (addr)
2976 /* A frame-pointer reference is already stable. */
2977 && ! (GET_CODE (addr) == PLUS
2978 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2979 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2980 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2981 addr = copy_to_reg (addr);
2983 /* Now build a reference to just the desired component. */
2985 to_rtx = change_address (target, mode,
2986 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2987 MEM_IN_STRUCT_P (to_rtx) = 1;
2989 return store_expr (exp, to_rtx, value_mode != VOIDmode);
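/* A sketch of a bit-field store, mirroring the calls made from
   expand_assignment and store_constructor, with `type' standing for
   the containing structure's type: storing EXP into a 3-bit field at
   bit position 17 of an in-memory structure would be

     store_field (target, 3, 17, VOIDmode, exp, VOIDmode, 0,
                  TYPE_ALIGN (type) / BITS_PER_UNIT,
                  int_size_in_bytes (type));

   MODE of VOIDmode marks a bit-field and VALUE_MODE of VOIDmode says
   no rtx result is wanted.  */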
2993 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2994 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2995 ARRAY_REFs and find the ultimate containing object, which we return.
2997 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2998 bit position, and *PUNSIGNEDP to the signedness of the field.
2999 If the position of the field is variable, we store a tree
3000 giving the variable offset (in units) in *POFFSET.
3001 This offset is in addition to the bit position.
3002 If the position is not variable, we store 0 in *POFFSET.
3004 If any of the extraction expressions is volatile,
3005 we store 1 in *PVOLATILEP. Otherwise we don't change that.
3007 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
3008 is a mode that can be used to access the field. In that case, *PBITSIZE
3009 is redundant.
3011 If the field describes a variable-sized object, *PMODE is set to
3012 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
3013 this case, but the address of the object can be found. */
3015 tree
3016 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
3017 punsignedp, pvolatilep)
3018 tree exp;
3019 int *pbitsize;
3020 int *pbitpos;
3021 tree *poffset;
3022 enum machine_mode *pmode;
3023 int *punsignedp;
3024 int *pvolatilep;
3026 tree size_tree = 0;
3027 enum machine_mode mode = VOIDmode;
3028 tree offset = integer_zero_node;
3030 if (TREE_CODE (exp) == COMPONENT_REF)
3032 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
3033 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
3034 mode = DECL_MODE (TREE_OPERAND (exp, 1));
3035 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
3037 else if (TREE_CODE (exp) == BIT_FIELD_REF)
3039 size_tree = TREE_OPERAND (exp, 1);
3040 *punsignedp = TREE_UNSIGNED (exp);
3042 else
3044 mode = TYPE_MODE (TREE_TYPE (exp));
3045 *pbitsize = GET_MODE_BITSIZE (mode);
3046 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3049 if (size_tree)
3051 if (TREE_CODE (size_tree) != INTEGER_CST)
3052 mode = BLKmode, *pbitsize = -1;
3053 else
3054 *pbitsize = TREE_INT_CST_LOW (size_tree);
3057 /* Compute cumulative bit-offset for nested component-refs and array-refs,
3058 and find the ultimate containing object. */
3060 *pbitpos = 0;
3062 while (1)
3064 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3066 tree pos = (TREE_CODE (exp) == COMPONENT_REF
3067 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
3068 : TREE_OPERAND (exp, 2));
3070 /* If this field hasn't been filled in yet, don't go
3071 past it. This should only happen when folding expressions
3072 made during type construction. */
3073 if (pos == 0)
3074 break;
3076 if (TREE_CODE (pos) == PLUS_EXPR)
3078 tree constant, var;
3079 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
3081 constant = TREE_OPERAND (pos, 0);
3082 var = TREE_OPERAND (pos, 1);
3084 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3086 constant = TREE_OPERAND (pos, 1);
3087 var = TREE_OPERAND (pos, 0);
3089 else
3090 abort ();
3092 *pbitpos += TREE_INT_CST_LOW (constant);
3093 offset = size_binop (PLUS_EXPR, offset,
3094 size_binop (FLOOR_DIV_EXPR, var,
3095 size_int (BITS_PER_UNIT)));
3097 else if (TREE_CODE (pos) == INTEGER_CST)
3098 *pbitpos += TREE_INT_CST_LOW (pos);
3099 else
3101 /* Assume here that the offset is a multiple of a unit.
3102 If not, there should be an explicitly added constant. */
3103 offset = size_binop (PLUS_EXPR, offset,
3104 size_binop (FLOOR_DIV_EXPR, pos,
3105 size_int (BITS_PER_UNIT)));
3109 else if (TREE_CODE (exp) == ARRAY_REF)
3111 /* This code is based on the code in case ARRAY_REF in expand_expr
3112 below. We assume here that the size of an array element is
3113 always an integral multiple of BITS_PER_UNIT. */
3115 tree index = TREE_OPERAND (exp, 1);
3116 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
3117 tree low_bound
3118 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3119 tree index_type = TREE_TYPE (index);
3121 if (! integer_zerop (low_bound))
3122 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3124 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3126 index = convert (type_for_size (POINTER_SIZE, 0), index);
3127 index_type = TREE_TYPE (index);
3130 index = fold (build (MULT_EXPR, index_type, index,
3131 TYPE_SIZE (TREE_TYPE (exp))));
3133 if (TREE_CODE (index) == INTEGER_CST
3134 && TREE_INT_CST_HIGH (index) == 0)
3135 *pbitpos += TREE_INT_CST_LOW (index);
3136 else
3137 offset = size_binop (PLUS_EXPR, offset,
3138 size_binop (FLOOR_DIV_EXPR, index,
3139 size_int (BITS_PER_UNIT)));
3141 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
3142 && ! ((TREE_CODE (exp) == NOP_EXPR
3143 || TREE_CODE (exp) == CONVERT_EXPR)
3144 && (TYPE_MODE (TREE_TYPE (exp))
3145 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3146 break;
3148 /* If any reference in the chain is volatile, the effect is volatile. */
3149 if (TREE_THIS_VOLATILE (exp))
3150 *pvolatilep = 1;
3151 exp = TREE_OPERAND (exp, 0);
3154 /* If this was a bit-field, see if there is a mode that allows direct
3155 access in case EXP is in memory. */
3156 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
3158 mode = mode_for_size (*pbitsize, MODE_INT, 0);
3159 if (mode == BLKmode)
3160 mode = VOIDmode;
3163 if (integer_zerop (offset))
3164 offset = 0;
3166 *pmode = mode;
3167 *poffset = offset;
3168 #if 0
3169 /* We aren't finished fixing the callers to really handle nonzero offset. */
3170 if (offset != 0)
3171 abort ();
3172 #endif
3174 return exp;
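/* For example, for a COMPONENT_REF naming a 3-bit bit-field at bit 17
   of record R, the walk above returns R and sets *PBITSIZE = 3,
   *PBITPOS = 17 and *POFFSET = 0; the bit position is not a multiple
   of the size, so no direct-access mode is tried and *PMODE stays
   VOIDmode, marking a bit-field access.  */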
3177 /* Given an rtx VALUE that may contain additions and multiplications,
3178 return an equivalent value that just refers to a register or memory.
3179 This is done by generating instructions to perform the arithmetic
3180 and returning a pseudo-register containing the value.
3182 The returned value may be a REG, SUBREG, MEM or constant. */
3184 rtx
3185 force_operand (value, target)
3186 rtx value, target;
3188 register optab binoptab = 0;
3189 /* Use a temporary to force order of execution of calls to
3190 `force_operand'. */
3191 rtx tmp;
3192 register rtx op2;
3193 /* Use subtarget as the target for operand 0 of a binary operation. */
3194 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3196 if (GET_CODE (value) == PLUS)
3197 binoptab = add_optab;
3198 else if (GET_CODE (value) == MINUS)
3199 binoptab = sub_optab;
3200 else if (GET_CODE (value) == MULT)
3202 op2 = XEXP (value, 1);
3203 if (!CONSTANT_P (op2)
3204 && !(GET_CODE (op2) == REG && op2 != subtarget))
3205 subtarget = 0;
3206 tmp = force_operand (XEXP (value, 0), subtarget);
3207 return expand_mult (GET_MODE (value), tmp,
3208 force_operand (op2, NULL_RTX),
3209 target, 0);
3212 if (binoptab)
3214 op2 = XEXP (value, 1);
3215 if (!CONSTANT_P (op2)
3216 && !(GET_CODE (op2) == REG && op2 != subtarget))
3217 subtarget = 0;
3218 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
3220 binoptab = add_optab;
3221 op2 = negate_rtx (GET_MODE (value), op2);
3224 /* Check for an addition with OP2 a constant integer and our first
3225 operand a PLUS of a virtual register and something else. In that
3226 case, we want to emit the sum of the virtual register and the
3227 constant first and then add the other value. This allows virtual
3228 register instantiation to simply modify the constant rather than
3229 creating another one around this addition. */
3230 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3231 && GET_CODE (XEXP (value, 0)) == PLUS
3232 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3233 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3234 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3236 rtx temp = expand_binop (GET_MODE (value), binoptab,
3237 XEXP (XEXP (value, 0), 0), op2,
3238 subtarget, 0, OPTAB_LIB_WIDEN);
3239 return expand_binop (GET_MODE (value), binoptab, temp,
3240 force_operand (XEXP (XEXP (value, 0), 1), 0),
3241 target, 0, OPTAB_LIB_WIDEN);
3244 tmp = force_operand (XEXP (value, 0), subtarget);
3245 return expand_binop (GET_MODE (value), binoptab, tmp,
3246 force_operand (op2, NULL_RTX),
3247 target, 0, OPTAB_LIB_WIDEN);
3248 /* We give UNSIGNEDP = 0 to expand_binop
3249 because the only operations we are expanding here are signed ones. */
3251 return value;
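/* A minimal sketch: given VALUE = (plus (reg 100) (const_int 4)),

     rtx r = force_operand (value, NULL_RTX);

   emits the addition through expand_binop and returns an rtx, normally
   a pseudo register, holding the sum, ready for use as an operand or
   as a memory address.  */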
3254 /* Subroutine of expand_expr:
3255 save the non-copied parts (LIST) of an expr (LHS), and return a list
3256 which can restore these values to their previous values,
3257 should something modify their storage. */
3259 static tree
3260 save_noncopied_parts (lhs, list)
3261 tree lhs;
3262 tree list;
3264 tree tail;
3265 tree parts = 0;
3267 for (tail = list; tail; tail = TREE_CHAIN (tail))
3268 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3269 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3270 else
3272 tree part = TREE_VALUE (tail);
3273 tree part_type = TREE_TYPE (part);
3274 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3275 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3276 int_size_in_bytes (part_type), 0);
3277 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3278 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3279 parts = tree_cons (to_be_saved,
3280 build (RTL_EXPR, part_type, NULL_TREE,
3281 (tree) target),
3282 parts);
3283 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3285 return parts;
3288 /* Subroutine of expand_expr:
3289 record the non-copied parts (LIST) of an expr (LHS), and return a list
3290 which specifies the initial values of these parts. */
3292 static tree
3293 init_noncopied_parts (lhs, list)
3294 tree lhs;
3295 tree list;
3297 tree tail;
3298 tree parts = 0;
3300 for (tail = list; tail; tail = TREE_CHAIN (tail))
3301 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3302 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3303 else
3305 tree part = TREE_VALUE (tail);
3306 tree part_type = TREE_TYPE (part);
3307 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3308 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3310 return parts;
3313 /* Subroutine of expand_expr: return nonzero iff there is no way that
3314 EXP can reference X, which is being modified. */
3316 static int
3317 safe_from_p (x, exp)
3318 rtx x;
3319 tree exp;
3321 rtx exp_rtl = 0;
3322 int i, nops;
3324 if (x == 0)
3325 return 1;
3327 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3328 find the underlying pseudo. */
3329 if (GET_CODE (x) == SUBREG)
3331 x = SUBREG_REG (x);
3332 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3333 return 0;
3336 /* If X is a location in the outgoing argument area, it is always safe. */
3337 if (GET_CODE (x) == MEM
3338 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3339 || (GET_CODE (XEXP (x, 0)) == PLUS
3340 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3341 return 1;
3343 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3345 case 'd':
3346 exp_rtl = DECL_RTL (exp);
3347 break;
3349 case 'c':
3350 return 1;
3352 case 'x':
3353 if (TREE_CODE (exp) == TREE_LIST)
3354 return ((TREE_VALUE (exp) == 0
3355 || safe_from_p (x, TREE_VALUE (exp)))
3356 && (TREE_CHAIN (exp) == 0
3357 || safe_from_p (x, TREE_CHAIN (exp))));
3358 else
3359 return 0;
3361 case '1':
3362 return safe_from_p (x, TREE_OPERAND (exp, 0));
3364 case '2':
3365 case '<':
3366 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3367 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3369 case 'e':
3370 case 'r':
3371 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3372 the expression. If it is set, we conflict iff we are that rtx or
3373 both are in memory. Otherwise, we check all operands of the
3374 expression recursively. */
3376 switch (TREE_CODE (exp))
3378 case ADDR_EXPR:
3379 return (staticp (TREE_OPERAND (exp, 0))
3380 || safe_from_p (x, TREE_OPERAND (exp, 0)));
3382 case INDIRECT_REF:
3383 if (GET_CODE (x) == MEM)
3384 return 0;
3385 break;
3387 case CALL_EXPR:
3388 exp_rtl = CALL_EXPR_RTL (exp);
3389 if (exp_rtl == 0)
3391 /* Assume that the call will clobber all hard registers and
3392 all of memory. */
3393 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3394 || GET_CODE (x) == MEM)
3395 return 0;
3398 break;
3400 case RTL_EXPR:
3401 exp_rtl = RTL_EXPR_RTL (exp);
3402 if (exp_rtl == 0)
3403 /* We don't know what this can modify. */
3404 return 0;
3406 break;
3408 case WITH_CLEANUP_EXPR:
3409 exp_rtl = RTL_EXPR_RTL (exp);
3410 break;
3412 case SAVE_EXPR:
3413 exp_rtl = SAVE_EXPR_RTL (exp);
3414 break;
3416 case BIND_EXPR:
3417 /* The only operand we look at is operand 1. The rest aren't
3418 part of the expression. */
3419 return safe_from_p (x, TREE_OPERAND (exp, 1));
3421 case METHOD_CALL_EXPR:
3422 /* This takes an rtx argument, but shouldn't appear here. */
3423 abort ();
3426 /* If we have an rtx, we do not need to scan our operands. */
3427 if (exp_rtl)
3428 break;
3430 nops = tree_code_length[(int) TREE_CODE (exp)];
3431 for (i = 0; i < nops; i++)
3432 if (TREE_OPERAND (exp, i) != 0
3433 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3434 return 0;
3437 /* If we have an rtl, find any enclosed object. Then see if we conflict
3438 with it. */
3439 if (exp_rtl)
3441 if (GET_CODE (exp_rtl) == SUBREG)
3443 exp_rtl = SUBREG_REG (exp_rtl);
3444 if (GET_CODE (exp_rtl) == REG
3445 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3446 return 0;
3449 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3450 are memory and EXP is not readonly. */
3451 return ! (rtx_equal_p (x, exp_rtl)
3452 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3453 && ! TREE_READONLY (exp)));
3456 /* If we reach here, it is safe. */
3457 return 1;
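/* For example, if X is a pseudo register and EXP is `a + 1' where A is
   an automatic variable living in memory, the '2' case recurses into
   both operands; A's DECL_RTL is a MEM, which neither equals X nor is
   in memory together with X, so the result is 1 and X may safely be
   modified while EXP is expanded.  */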
3460 /* Subroutine of expand_expr: return nonzero iff EXP is an
3461 expression whose type is statically determinable. */
3463 static int
3464 fixed_type_p (exp)
3465 tree exp;
3467 if (TREE_CODE (exp) == PARM_DECL
3468 || TREE_CODE (exp) == VAR_DECL
3469 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3470 || TREE_CODE (exp) == COMPONENT_REF
3471 || TREE_CODE (exp) == ARRAY_REF)
3472 return 1;
3473 return 0;
3476 /* expand_expr: generate code for computing expression EXP.
3477 An rtx for the computed value is returned. The value is never null.
3478 In the case of a void EXP, const0_rtx is returned.
3480 The value may be stored in TARGET if TARGET is nonzero.
3481 TARGET is just a suggestion; callers must assume that
3482 the rtx returned may not be the same as TARGET.
3484 If TARGET is CONST0_RTX, it means that the value will be ignored.
3486 If TMODE is not VOIDmode, it suggests generating the
3487 result in mode TMODE. But this is done only when convenient.
3488 Otherwise, TMODE is ignored and the value is generated in its natural mode.
3489 TMODE is just a suggestion; callers must assume that
3490 the rtx returned may not have mode TMODE.
3492 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3493 with a constant address even if that address is not normally legitimate.
3494 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3496 If MODIFIER is EXPAND_SUM then when EXP is an addition
3497 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3498 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3499 products as above, or REG or MEM, or constant.
3500 Ordinarily in such cases we would output mul or add instructions
3501 and then return a pseudo reg containing the sum.
3503 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3504 it also marks a label as absolutely required (it can't be dead).
3505 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3506 This is used for outputting expressions used in initializers. */
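/* For example, with MODIFIER = EXPAND_SUM, expanding the address
   arithmetic `p + 4' may return the bare sum

     (plus (reg 100) (const_int 4))

   without emitting an add insn, letting the caller fold it into a
   memory address; with a modifier of 0 a pseudo register holding the
   sum would normally be produced instead.  */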
3508 rtx
3509 expand_expr (exp, target, tmode, modifier)
3510 register tree exp;
3511 rtx target;
3512 enum machine_mode tmode;
3513 enum expand_modifier modifier;
3515 register rtx op0, op1, temp;
3516 tree type = TREE_TYPE (exp);
3517 int unsignedp = TREE_UNSIGNED (type);
3518 register enum machine_mode mode = TYPE_MODE (type);
3519 register enum tree_code code = TREE_CODE (exp);
3520 optab this_optab;
3521 /* Use subtarget as the target for operand 0 of a binary operation. */
3522 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3523 rtx original_target = target;
3524 /* Maybe defer this until sure not doing bytecode? */
3525 int ignore = (target == const0_rtx
3526 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
3527 || code == CONVERT_EXPR || code == REFERENCE_EXPR
3528 || code == COND_EXPR)
3529 && TREE_CODE (type) == VOID_TYPE));
3530 tree context;
3533 if (output_bytecode)
3535 bc_expand_expr (exp);
3536 return NULL;
3539 /* Don't use hard regs as subtargets, because the combiner
3540 can only handle pseudo regs. */
3541 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3542 subtarget = 0;
3543 /* Avoid subtargets inside loops,
3544 since they hide some invariant expressions. */
3545 if (preserve_subexpressions_p ())
3546 subtarget = 0;
3548 /* If we are going to ignore this result, we need only do something
3549 if there is a side-effect somewhere in the expression. If there
3550 is, short-circuit the most common cases here. */
3552 if (ignore)
3554 if (! TREE_SIDE_EFFECTS (exp))
3555 return const0_rtx;
3557 /* Ensure we reference a volatile object even if value is ignored. */
3558 if (TREE_THIS_VOLATILE (exp)
3559 && TREE_CODE (exp) != FUNCTION_DECL
3560 && mode != VOIDmode && mode != BLKmode)
3562 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
3563 if (GET_CODE (temp) == MEM)
3564 temp = copy_to_reg (temp);
3565 return const0_rtx;
3568 if (TREE_CODE_CLASS (code) == '1')
3569 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3570 VOIDmode, modifier);
3571 else if (TREE_CODE_CLASS (code) == '2'
3572 || TREE_CODE_CLASS (code) == '<')
3574 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3575 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
3576 return const0_rtx;
3578 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
3579 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
3580 /* If the second operand has no side effects, just evaluate
3581 the first. */
3582 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3583 VOIDmode, modifier);
3585 target = 0, original_target = 0;
3588 /* If we will do cse, generate all results into pseudo registers
3589 since 1) that allows cse to find more things
3590 and 2) otherwise cse could produce an insn the machine
3591 cannot support. */
3593 if (! cse_not_expected && mode != BLKmode && target
3594 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3595 target = subtarget;
3597 switch (code)
3599 case LABEL_DECL:
3601 tree function = decl_function_context (exp);
3602 /* Handle using a label in a containing function. */
3603 if (function != current_function_decl && function != 0)
3605 struct function *p = find_function_data (function);
3606 /* Allocate in the memory associated with the function
3607 that the label is in. */
3608 push_obstacks (p->function_obstack,
3609 p->function_maybepermanent_obstack);
3611 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3612 label_rtx (exp), p->forced_labels);
3613 pop_obstacks ();
3615 else if (modifier == EXPAND_INITIALIZER)
3616 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3617 label_rtx (exp), forced_labels);
3618 temp = gen_rtx (MEM, FUNCTION_MODE,
3619 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3620 if (function != current_function_decl && function != 0)
3621 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3622 return temp;
3625 case PARM_DECL:
3626 if (DECL_RTL (exp) == 0)
3628 error_with_decl (exp, "prior parameter's size depends on `%s'");
3629 return CONST0_RTX (mode);
3632 case FUNCTION_DECL:
3633 case VAR_DECL:
3634 case RESULT_DECL:
3635 if (DECL_RTL (exp) == 0)
3636 abort ();
3637 /* Ensure variable marked as used even if it doesn't go through
3638 a parser. If it hasn't been used yet, write out an external
3639 definition. */
3640 if (! TREE_USED (exp))
3642 assemble_external (exp);
3643 TREE_USED (exp) = 1;
3646 /* Handle variables inherited from containing functions. */
3647 context = decl_function_context (exp);
3649 /* We treat inline_function_decl as an alias for the current function
3650 because that is the inline function whose vars, types, etc.
3651 are being merged into the current function.
3652 See expand_inline_function. */
3653 if (context != 0 && context != current_function_decl
3654 && context != inline_function_decl
3655 /* If var is static, we don't need a static chain to access it. */
3656 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3657 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3659 rtx addr;
3661 /* Mark as non-local and addressable. */
3662 DECL_NONLOCAL (exp) = 1;
3663 mark_addressable (exp);
3664 if (GET_CODE (DECL_RTL (exp)) != MEM)
3665 abort ();
3666 addr = XEXP (DECL_RTL (exp), 0);
3667 if (GET_CODE (addr) == MEM)
3668 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3669 else
3670 addr = fix_lexical_addr (addr, exp);
3671 return change_address (DECL_RTL (exp), mode, addr);
3674 /* This is the case of an array whose size is to be determined
3675 from its initializer, while the initializer is still being parsed.
3676 See expand_decl. */
3677 if (GET_CODE (DECL_RTL (exp)) == MEM
3678 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3679 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3680 XEXP (DECL_RTL (exp), 0));
3681 if (GET_CODE (DECL_RTL (exp)) == MEM
3682 && modifier != EXPAND_CONST_ADDRESS
3683 && modifier != EXPAND_SUM
3684 && modifier != EXPAND_INITIALIZER)
3686 /* DECL_RTL probably contains a constant address.
3687 On RISC machines where a constant address isn't valid,
3688 make some insns to get that address into a register. */
3689 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3690 || (flag_force_addr
3691 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3692 return change_address (DECL_RTL (exp), VOIDmode,
3693 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3696 /* If the mode of DECL_RTL does not match that of the decl, it
3697 must be a promoted value. We return a SUBREG of the wanted mode,
3698 but mark it so that we know that it was already extended. */
3700 if (GET_CODE (DECL_RTL (exp)) == REG
3701 && GET_MODE (DECL_RTL (exp)) != mode)
3703 enum machine_mode decl_mode = DECL_MODE (exp);
3705 /* Get the signedness used for this variable. Ensure we get the
3706 same mode we got when the variable was declared. */
3708 PROMOTE_MODE (decl_mode, unsignedp, type);
3710 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3711 abort ();
3713 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3714 SUBREG_PROMOTED_VAR_P (temp) = 1;
3715 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3716 return temp;
3719 return DECL_RTL (exp);
3721 case INTEGER_CST:
3722 return immed_double_const (TREE_INT_CST_LOW (exp),
3723 TREE_INT_CST_HIGH (exp),
3724 mode);
3726 case CONST_DECL:
3727 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3729 case REAL_CST:
3730 /* If optimized, generate immediate CONST_DOUBLE
3731 which will be turned into memory by reload if necessary.
3733 We used to force a register so that loop.c could see it. But
3734 this does not allow gen_* patterns to perform optimizations with
3735 the constants. It also produces two insns in cases like "x = 1.0;".
3736 On most machines, floating-point constants are not permitted in
3737 many insns, so we'd end up copying it to a register in any case.
3739 Now, we do the copying in expand_binop, if appropriate. */
3740 return immed_real_const (exp);
3742 case COMPLEX_CST:
3743 case STRING_CST:
3744 if (! TREE_CST_RTL (exp))
3745 output_constant_def (exp);
3747 /* TREE_CST_RTL probably contains a constant address.
3748 On RISC machines where a constant address isn't valid,
3749 make some insns to get that address into a register. */
3750 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3751 && modifier != EXPAND_CONST_ADDRESS
3752 && modifier != EXPAND_INITIALIZER
3753 && modifier != EXPAND_SUM
3754 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3755 return change_address (TREE_CST_RTL (exp), VOIDmode,
3756 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3757 return TREE_CST_RTL (exp);
3759 case SAVE_EXPR:
3760 context = decl_function_context (exp);
3761 /* We treat inline_function_decl as an alias for the current function
3762 because that is the inline function whose vars, types, etc.
3763 are being merged into the current function.
3764 See expand_inline_function. */
3765 if (context == current_function_decl || context == inline_function_decl)
3766 context = 0;
3768 /* If this is non-local, handle it. */
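/* The SAVE_EXPR belongs to an enclosing function, so its value must
   live in memory where it can be addressed non-locally; force it out
   of any pseudo register before fixing up the address.  */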
3769 if (context)
3771 temp = SAVE_EXPR_RTL (exp);
3772 if (temp && GET_CODE (temp) == REG)
3774 put_var_into_stack (exp);
3775 temp = SAVE_EXPR_RTL (exp);
3777 if (temp == 0 || GET_CODE (temp) != MEM)
3778 abort ();
3779 return change_address (temp, mode,
3780 fix_lexical_addr (XEXP (temp, 0), exp));
3782 if (SAVE_EXPR_RTL (exp) == 0)
3784 if (mode == BLKmode)
3786 temp
3787 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3788 MEM_IN_STRUCT_P (temp)
3789 = (TREE_CODE (type) == RECORD_TYPE
3790 || TREE_CODE (type) == UNION_TYPE
3791 || TREE_CODE (type) == QUAL_UNION_TYPE
3792 || TREE_CODE (type) == ARRAY_TYPE);
3794 else
3796 enum machine_mode var_mode = mode;
3798 if (TREE_CODE (type) == INTEGER_TYPE
3799 || TREE_CODE (type) == ENUMERAL_TYPE
3800 || TREE_CODE (type) == BOOLEAN_TYPE
3801 || TREE_CODE (type) == CHAR_TYPE
3802 || TREE_CODE (type) == REAL_TYPE
3803 || TREE_CODE (type) == POINTER_TYPE
3804 || TREE_CODE (type) == OFFSET_TYPE)
3806 PROMOTE_MODE (var_mode, unsignedp, type);
3809 temp = gen_reg_rtx (var_mode);
3812 SAVE_EXPR_RTL (exp) = temp;
3813 if (!optimize && GET_CODE (temp) == REG)
3814 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3815 save_expr_regs);
3817 /* If the mode of TEMP does not match that of the expression, it
3818 must be a promoted value. We pass store_expr a SUBREG of the
3819 wanted mode but mark it so that we know that it was already
3820 extended. Note that `unsignedp' was modified above in
3821 this case. */
3823 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
3825 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3826 SUBREG_PROMOTED_VAR_P (temp) = 1;
3827 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3830 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3833 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
3834 must be a promoted value. We return a SUBREG of the wanted mode,
3835 but mark it so that we know that it was already extended. Note
3836 that `unsignedp' was modified above in this case. */
3838 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
3839 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
3841 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3842 SUBREG_PROMOTED_VAR_P (temp) = 1;
3843 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3844 return temp;
3847 return SAVE_EXPR_RTL (exp);
3849 case EXIT_EXPR:
3850 expand_exit_loop_if_false (NULL_PTR,
3851 invert_truthvalue (TREE_OPERAND (exp, 0)));
3852 return const0_rtx;
3854 case LOOP_EXPR:
3855 push_temp_slots ();
3856 expand_start_loop (1);
3857 expand_expr_stmt (TREE_OPERAND (exp, 0));
3858 expand_end_loop ();
3859 pop_temp_slots ();
3861 return const0_rtx;
3863 case BIND_EXPR:
3865 tree vars = TREE_OPERAND (exp, 0);
3866 int vars_need_expansion = 0;
3868 /* Need to open a binding contour here because
3869 if there are any cleanups they must be contained here. */
3870 expand_start_bindings (0);
3872 /* Mark the corresponding BLOCK for output in its proper place. */
3873 if (TREE_OPERAND (exp, 2) != 0
3874 && ! TREE_USED (TREE_OPERAND (exp, 2)))
3875 insert_block (TREE_OPERAND (exp, 2));
3877 /* If VARS have not yet been expanded, expand them now. */
3878 while (vars)
3880 if (DECL_RTL (vars) == 0)
3882 vars_need_expansion = 1;
3883 expand_decl (vars);
3885 expand_decl_init (vars);
3886 vars = TREE_CHAIN (vars);
3889 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3891 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3893 return temp;
3896 case RTL_EXPR:
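/* The saved sequence may be emitted only once; RTL_EXPR_SEQUENCE is
   set to const0_rtx below to mark this RTL_EXPR as already expanded,
   and finding that mark here again is an error.  */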
3897 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3898 abort ();
3899 emit_insns (RTL_EXPR_SEQUENCE (exp));
3900 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3901 return RTL_EXPR_RTL (exp);
3903 case CONSTRUCTOR:
3904 /* If we don't need the result, just ensure we evaluate any
3905 subexpressions. */
3906 if (ignore)
3908 tree elt;
3909 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3910 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3911 return const0_rtx;
3913 /* All elts simple constants => refer to a constant in memory. But
3914 if this is a non-BLKmode mode, let it store a field at a time
3915 since that should make a CONST_INT or CONST_DOUBLE when we
3916 fold. If we are making an initializer and all operands are
3917 constant, put it in memory as well. */
3918 else if ((TREE_STATIC (exp)
3919 && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3920 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
3922 rtx constructor = output_constant_def (exp);
3923 if (modifier != EXPAND_CONST_ADDRESS
3924 && modifier != EXPAND_INITIALIZER
3925 && modifier != EXPAND_SUM
3926 && !memory_address_p (GET_MODE (constructor),
3927 XEXP (constructor, 0)))
3928 constructor = change_address (constructor, VOIDmode,
3929 XEXP (constructor, 0));
3930 return constructor;
3933 else
3935 if (target == 0 || ! safe_from_p (target, exp))
3937 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3938 target = gen_reg_rtx (mode);
3939 else
3941 enum tree_code c = TREE_CODE (type);
3942 target
3943 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3944 if (c == RECORD_TYPE || c == UNION_TYPE
3945 || c == QUAL_UNION_TYPE || c == ARRAY_TYPE)
3946 MEM_IN_STRUCT_P (target) = 1;
3949 store_constructor (exp, target);
3950 return target;
3953 case INDIRECT_REF:
3955 tree exp1 = TREE_OPERAND (exp, 0);
3956 tree exp2;
3958 /* A SAVE_EXPR as the address in an INDIRECT_REF is generated
3959 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3960 This code has the same general effect as simply doing
3961 expand_expr on the save expr, except that the expression PTR
3962 is computed for use as a memory address. This means different
3963 code, suitable for indexing, may be generated. */
3964 if (TREE_CODE (exp1) == SAVE_EXPR
3965 && SAVE_EXPR_RTL (exp1) == 0
3966 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3967 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3968 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3970 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3971 VOIDmode, EXPAND_SUM);
3972 op0 = memory_address (mode, temp);
3973 op0 = copy_all_regs (op0);
3974 SAVE_EXPR_RTL (exp1) = op0;
3976 else
3978 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
3979 op0 = memory_address (mode, op0);
3982 temp = gen_rtx (MEM, mode, op0);
3983 /* If address was computed by addition,
3984 mark this as an element of an aggregate. */
3985 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3986 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3987 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3988 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3989 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3990 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3991 || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE
3992 || (TREE_CODE (exp1) == ADDR_EXPR
3993 && (exp2 = TREE_OPERAND (exp1, 0))
3994 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3995 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3996 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE
3997 || TREE_CODE (TREE_TYPE (exp2)) == QUAL_UNION_TYPE)))
3998 MEM_IN_STRUCT_P (temp) = 1;
3999 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
4000 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4001 a location is accessed through a pointer to const does not mean
4002 that the value there can never change. */
4003 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4004 #endif
4005 return temp;
4008 case ARRAY_REF:
4009 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
4010 abort ();
4013 tree array = TREE_OPERAND (exp, 0);
4014 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
4015 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4016 tree index = TREE_OPERAND (exp, 1);
4017 tree index_type = TREE_TYPE (index);
4018 int i;
4020 /* Optimize the special case of a zero lower bound.
4022 We convert the low_bound to sizetype to avoid some problems
4023 with constant folding. (E.g. suppose the lower bound is 1,
4024 and its mode is QI. Without the conversion, (ARRAY
4025 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4026 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4028 But sizetype isn't quite right either (especially if
4029 the lowbound is negative). FIXME */
4031 if (! integer_zerop (low_bound))
4032 index = fold (build (MINUS_EXPR, index_type, index,
4033 convert (sizetype, low_bound)));
4035 if (TREE_CODE (index) != INTEGER_CST
4036 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4038 /* Nonconstant array index or nonconstant element size.
4039 Generate the tree for *(&array+index) and expand that,
4040 except do it in a language-independent way
4041 and don't complain about non-lvalue arrays.
4042 `mark_addressable' should already have been called
4043 for any array for which this case will be reached. */
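/* For instance, A[I] with a nonconstant I is expanded below as
   *(&A + I * sizeof (element)), built out of ordinary tree nodes.  */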
4045 /* Don't forget the const or volatile flag from the array
4046 element. */
4047 tree variant_type = build_type_variant (type,
4048 TREE_READONLY (exp),
4049 TREE_THIS_VOLATILE (exp));
4050 tree array_adr = build1 (ADDR_EXPR,
4051 build_pointer_type (variant_type), array);
4052 tree elt;
4054 /* Convert the integer argument to a type the same size as a
4055 pointer so the multiply won't overflow spuriously. */
4056 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
4057 index = convert (type_for_size (POINTER_SIZE, 0), index);
4059 /* Don't think the address has side effects
4060 just because the array does.
4061 (In some cases the address might have side effects,
4062 and we fail to record that fact here. However, it should not
4063 matter, since expand_expr should not care.) */
4064 TREE_SIDE_EFFECTS (array_adr) = 0;
4066 elt = build1 (INDIRECT_REF, type,
4067 fold (build (PLUS_EXPR,
4068 TYPE_POINTER_TO (variant_type),
4069 array_adr,
4070 fold (build (MULT_EXPR,
4071 TYPE_POINTER_TO (variant_type),
4072 index,
4073 size_in_bytes (type))))));
4075 /* Volatility, etc., of new expression is same as old
4076 expression. */
4077 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
4078 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
4079 TREE_READONLY (elt) = TREE_READONLY (exp);
4081 return expand_expr (elt, target, tmode, modifier);
4084 /* Fold an expression like: "foo"[2].
4085 This is not done in fold so it won't happen inside &. */
4087 if (TREE_CODE (array) == STRING_CST
4088 && TREE_CODE (index) == INTEGER_CST
4089 && !TREE_INT_CST_HIGH (index)
4090 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array))
4092 if (TREE_TYPE (TREE_TYPE (array)) == integer_type_node)
4094 exp = build_int_2 (((int *)TREE_STRING_POINTER (array))[i], 0);
4095 TREE_TYPE (exp) = integer_type_node;
4096 return expand_expr (exp, target, tmode, modifier);
4098 if (TREE_TYPE (TREE_TYPE (array)) == char_type_node)
4100 exp = build_int_2 (TREE_STRING_POINTER (array)[i], 0);
4101 TREE_TYPE (exp) = integer_type_node;
4102 return expand_expr (convert (TREE_TYPE (TREE_TYPE (array)),
4103 exp),
4104 target, tmode, modifier);
4108 /* If this is a constant index into a constant array,
4109 just get the value from the array. Handle both the cases when
4110 we have an explicit constructor and when our operand is a variable
4111 that was declared const. */
4113 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
4115 if (TREE_CODE (index) == INTEGER_CST
4116 && TREE_INT_CST_HIGH (index) == 0)
4118 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
4120 i = TREE_INT_CST_LOW (index);
4121 while (elem && i--)
4122 elem = TREE_CHAIN (elem);
4123 if (elem)
4124 return expand_expr (fold (TREE_VALUE (elem)), target,
4125 tmode, modifier);
4129 else if (optimize >= 1
4130 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
4131 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
4132 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
4134 if (TREE_CODE (index) == INTEGER_CST
4135 && TREE_INT_CST_HIGH (index) == 0)
4137 tree init = DECL_INITIAL (array);
4139 i = TREE_INT_CST_LOW (index);
4140 if (TREE_CODE (init) == CONSTRUCTOR)
4142 tree elem = CONSTRUCTOR_ELTS (init);
4144 while (elem
4145 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
4146 elem = TREE_CHAIN (elem);
4147 if (elem)
4148 return expand_expr (fold (TREE_VALUE (elem)), target,
4149 tmode, modifier);
4151 else if (TREE_CODE (init) == STRING_CST
4152 && i < TREE_STRING_LENGTH (init))
4154 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
4155 return convert_to_mode (mode, temp, 0);
4161 /* Treat array-ref with constant index as a component-ref. */
4163 case COMPONENT_REF:
4164 case BIT_FIELD_REF:
4165 /* If the operand is a CONSTRUCTOR, we can just extract the
4166 appropriate field if it is present. */
4167 if (code != ARRAY_REF
4168 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
4170 tree elt;
4172 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
4173 elt = TREE_CHAIN (elt))
4174 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
4175 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
4179 enum machine_mode mode1;
4180 int bitsize;
4181 int bitpos;
4182 tree offset;
4183 int volatilep = 0;
4184 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
4185 &mode1, &unsignedp, &volatilep);
4187 /* If we got back the original object, something is wrong. Perhaps
4188 we are evaluating an expression too early. In any event, don't
4189 infinitely recurse. */
4190 if (tem == exp)
4191 abort ();
4193 /* In some cases, we will be offsetting OP0's address by a constant.
4194 So get it as a sum, if possible. If we will be using it
4195 directly in an insn, we validate it. */
4196 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
4198 /* If this is a constant, put it into a register if it is a
4199 legitimate constant and memory if it isn't. */
4200 if (CONSTANT_P (op0))
4202 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
4203 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
4204 op0 = force_reg (mode, op0);
4205 else
4206 op0 = validize_mem (force_const_mem (mode, op0));
4209 if (offset != 0)
4211 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4213 if (GET_CODE (op0) != MEM)
4214 abort ();
4215 op0 = change_address (op0, VOIDmode,
4216 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
4217 force_reg (Pmode, offset_rtx)));
4220 /* Don't forget about volatility even if this is a bitfield. */
4221 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
4223 op0 = copy_rtx (op0);
4224 MEM_VOLATILE_P (op0) = 1;
4227 /* In cases where an aligned union has an unaligned object
4228 as a field, we might be extracting a BLKmode value from
4229 an integer-mode (e.g., SImode) object. Handle this case
4230 by doing the extract into an object as wide as the field
4231 (which we know to be the width of a basic mode), then
4232 storing into memory, and changing the mode to BLKmode. */
4233 if (mode1 == VOIDmode
4234 || (mode1 != BLKmode && ! direct_load[(int) mode1]
4235 && modifier != EXPAND_CONST_ADDRESS
4236 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4237 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
4238 /* If the field isn't aligned enough to fetch as a memref,
4239 fetch it as a bit field. */
4240 || (STRICT_ALIGNMENT
4241 && TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
4242 || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4244 enum machine_mode ext_mode = mode;
4246 if (ext_mode == BLKmode)
4247 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
4249 if (ext_mode == BLKmode)
4250 abort ();
4252 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
4253 unsignedp, target, ext_mode, ext_mode,
4254 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
4255 int_size_in_bytes (TREE_TYPE (tem)));
4256 if (mode == BLKmode)
4258 rtx new = assign_stack_temp (ext_mode,
4259 bitsize / BITS_PER_UNIT, 0);
4261 emit_move_insn (new, op0);
4262 op0 = copy_rtx (new);
4263 PUT_MODE (op0, BLKmode);
4264 MEM_IN_STRUCT_P (op0) = 1;
4267 return op0;
4270 /* Get a reference to just this component. */
4271 if (modifier == EXPAND_CONST_ADDRESS
4272 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4273 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4274 (bitpos / BITS_PER_UNIT)));
4275 else
4276 op0 = change_address (op0, mode1,
4277 plus_constant (XEXP (op0, 0),
4278 (bitpos / BITS_PER_UNIT)));
4279 MEM_IN_STRUCT_P (op0) = 1;
4280 MEM_VOLATILE_P (op0) |= volatilep;
4281 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
4282 return op0;
4283 if (target == 0)
4284 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4285 convert_move (target, op0, unsignedp);
4286 return target;
4289 case OFFSET_REF:
4291 tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
4292 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
4293 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
4294 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4295 MEM_IN_STRUCT_P (temp) = 1;
4296 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
4297 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4298 a location is accessed through a pointer to const does not mean
4299 that the value there can never change. */
4300 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4301 #endif
4302 return temp;
4305 /* Intended for a reference to a buffer of a file-object in Pascal.
4306 But it's not certain that a special tree code will really be
4307 necessary for these. INDIRECT_REF might work for them. */
4308 case BUFFER_REF:
4309 abort ();
4311 /* IN_EXPR: Inlined Pascal set IN expression.
4313 Algorithm:
4314 rlo = set_low - (set_low%bits_per_word);
4315 the_word = set [ (index - rlo)/bits_per_word ];
4316 bit_index = index % bits_per_word;
4317 bitmask = 1 << bit_index;
4318 return !!(the_word & bitmask); */
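/* Note that the expansion below actually works a storage unit at a
   time (BITS_PER_UNIT), not a full word as the sketch above says.  */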
4319 case IN_EXPR:
4320 preexpand_calls (exp);
4322 tree set = TREE_OPERAND (exp, 0);
4323 tree index = TREE_OPERAND (exp, 1);
4324 tree set_type = TREE_TYPE (set);
4326 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4327 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4329 rtx index_val;
4330 rtx lo_r;
4331 rtx hi_r;
4332 rtx rlow;
4333 rtx diff, quo, rem, addr, bit, result;
4334 rtx setval, setaddr;
4335 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4337 if (target == 0)
4338 target = gen_reg_rtx (mode);
4340 /* If domain is empty, answer is no. */
4341 if (tree_int_cst_lt (set_high_bound, set_low_bound))
4342 return const0_rtx;
4344 index_val = expand_expr (index, 0, VOIDmode, 0);
4345 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4346 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4347 setval = expand_expr (set, 0, VOIDmode, 0);
4348 setaddr = XEXP (setval, 0);
4350 /* Compare index against bounds, if they are constant. */
4351 if (GET_CODE (index_val) == CONST_INT
4352 && GET_CODE (lo_r) == CONST_INT
4353 && INTVAL (index_val) < INTVAL (lo_r))
4354 return const0_rtx;
4356 if (GET_CODE (index_val) == CONST_INT
4357 && GET_CODE (hi_r) == CONST_INT
4358 && INTVAL (hi_r) < INTVAL (index_val))
4359 return const0_rtx;
4361 /* If we get here, we have to generate the code for both cases
4362 (in range and out of range). */
4364 op0 = gen_label_rtx ();
4365 op1 = gen_label_rtx ();
4367 if (! (GET_CODE (index_val) == CONST_INT
4368 && GET_CODE (lo_r) == CONST_INT))
4370 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4371 GET_MODE (index_val), 0, 0);
4372 emit_jump_insn (gen_blt (op1));
4375 if (! (GET_CODE (index_val) == CONST_INT
4376 && GET_CODE (hi_r) == CONST_INT))
4378 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4379 GET_MODE (index_val), 0, 0);
4380 emit_jump_insn (gen_bgt (op1));
4383 /* Calculate the element number of bit zero in the first word
4384 of the set. */
4385 if (GET_CODE (lo_r) == CONST_INT)
4386 rlow = GEN_INT (INTVAL (lo_r)
4387 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
4388 else
4389 rlow = expand_binop (index_mode, and_optab, lo_r,
4390 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
4391 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4393 diff = expand_binop (index_mode, sub_optab,
4394 index_val, rlow, NULL_RTX, 0, OPTAB_LIB_WIDEN);
4396 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4397 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4398 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4399 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4400 addr = memory_address (byte_mode,
4401 expand_binop (index_mode, add_optab,
4402 diff, setaddr, NULL_RTX, 0,
4403 OPTAB_LIB_WIDEN));
4404 /* Extract the bit we want to examine. */
4405 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4406 gen_rtx (MEM, byte_mode, addr),
4407 make_tree (TREE_TYPE (index), rem),
4408 NULL_RTX, 1);
4409 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4410 GET_MODE (target) == byte_mode ? target : 0,
4411 1, OPTAB_LIB_WIDEN);
4413 if (result != target)
4414 convert_move (target, result, 1);
4416 /* Output the code to handle the out-of-range case. */
4417 emit_jump (op0);
4418 emit_label (op1);
4419 emit_move_insn (target, const0_rtx);
4420 emit_label (op0);
4421 return target;
4424 case WITH_CLEANUP_EXPR:
4425 if (RTL_EXPR_RTL (exp) == 0)
4427 RTL_EXPR_RTL (exp)
4428 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4429 cleanups_this_call
4430 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4431 /* That's it for this cleanup. */
4432 TREE_OPERAND (exp, 2) = 0;
4434 return RTL_EXPR_RTL (exp);
4436 case CALL_EXPR:
4437 /* Check for a built-in function. */
4438 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4439 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4440 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4441 return expand_builtin (exp, target, subtarget, tmode, ignore);
4442 /* If this call was expanded already by preexpand_calls,
4443 just return the result we got. */
4444 if (CALL_EXPR_RTL (exp) != 0)
4445 return CALL_EXPR_RTL (exp);
4446 return expand_call (exp, target, ignore);
4448 case NON_LVALUE_EXPR:
4449 case NOP_EXPR:
4450 case CONVERT_EXPR:
4451 case REFERENCE_EXPR:
4452 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4453 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
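/* A conversion to a union type is done by storing the operand into
   a union object and returning the entire union; this is presumably
   what the cast-to-union extension generates.  */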
4454 if (TREE_CODE (type) == UNION_TYPE)
4456 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4457 if (target == 0)
4459 if (mode == BLKmode)
4461 if (TYPE_SIZE (type) == 0
4462 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4463 abort ();
4464 target = assign_stack_temp (BLKmode,
4465 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4466 + BITS_PER_UNIT - 1)
4467 / BITS_PER_UNIT, 0);
4469 else
4470 target = gen_reg_rtx (mode);
4472 if (GET_CODE (target) == MEM)
4473 /* Store data into beginning of memory target. */
4474 store_expr (TREE_OPERAND (exp, 0),
4475 change_address (target, TYPE_MODE (valtype), 0), 0);
4477 else if (GET_CODE (target) == REG)
4478 /* Store this field into a union of the proper type. */
4479 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4480 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4481 VOIDmode, 0, 1,
4482 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4483 else
4484 abort ();
4486 /* Return the entire union. */
4487 return target;
4489 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4490 if (GET_MODE (op0) == mode)
4491 return op0;
4492 /* If arg is a constant integer being extended from a narrower mode,
4493 we must really truncate to get the extended bits right. Otherwise
4494 (unsigned long) (unsigned char) ("\377"[0])
4495 would come out as ffffffff. */
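/* Concretely: "\377"[0] is (char) -1 where plain char is signed, but
   as an unsigned char its value is 255, so the widened constant must
   be 0xff rather than sign bits all the way up.  */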
4496 if (GET_MODE (op0) == VOIDmode
4497 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4498 < GET_MODE_BITSIZE (mode)))
4500 /* MODE must be narrower than HOST_BITS_PER_WIDE_INT. */
4501 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4503 if (width < HOST_BITS_PER_WIDE_INT)
4505 HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
4506 : CONST_DOUBLE_LOW (op0));
4507 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4508 || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
4509 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4510 else
4511 val |= ~(((HOST_WIDE_INT) 1 << width) - 1);
4513 op0 = GEN_INT (val);
4515 else
4517 op0 = (simplify_unary_operation
4518 ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4519 ? ZERO_EXTEND : SIGN_EXTEND),
4520 mode, op0,
4521 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));
4522 if (op0 == 0)
4523 abort ();
4526 if (GET_MODE (op0) == VOIDmode)
4527 return op0;
4528 if (modifier == EXPAND_INITIALIZER)
4529 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4530 if (flag_force_mem && GET_CODE (op0) == MEM)
4531 op0 = copy_to_reg (op0);
4533 if (target == 0)
4534 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4535 else
4536 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4537 return target;
4539 case PLUS_EXPR:
4540 /* We come here from MINUS_EXPR when the second operand is a constant. */
4541 plus_expr:
4542 this_optab = add_optab;
4544 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4545 something else, make sure we add the register to the constant and
4546 then to the other thing. This case can occur during strength
4547 reduction and doing it this way will produce better code if the
4548 frame pointer or argument pointer is eliminated.
4550 fold-const.c will ensure that the constant is always in the inner
4551 PLUS_EXPR, so the only case we need to do anything about is if
4552 sp, ap, or fp is our second argument, in which case we must swap
4553 the innermost first argument and our second argument. */
4555 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4556 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4557 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4558 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4559 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4560 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4562 tree t = TREE_OPERAND (exp, 1);
4564 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4565 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4568 /* If the result is to be Pmode and we are adding an integer to
4569 something, we might be forming a constant. So try to use
4570 plus_constant. If it produces a sum and we can't accept it,
4571 use force_operand. This allows P = &ARR[const] to generate
4572 efficient code on machines where a SYMBOL_REF is not a valid
4573 address.
4575 If this is an EXPAND_SUM call, always return the sum. */
4576 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4577 || mode == Pmode)
4579 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4580 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4581 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
4583 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4584 EXPAND_SUM);
4585 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4586 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4587 op1 = force_operand (op1, target);
4588 return op1;
4591 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4592 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
4593 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
4595 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4596 EXPAND_SUM);
4597 if (! CONSTANT_P (op0))
4599 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4600 VOIDmode, modifier);
4601 /* Don't go to both_summands if modifier
4602 says it's not right to return a PLUS. */
4603 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4604 goto binop2;
4605 goto both_summands;
4607 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4608 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4609 op0 = force_operand (op0, target);
4610 return op0;
4614 /* No sense saving up arithmetic to be done
4615 if it's all in the wrong mode to form part of an address.
4616 And force_operand won't know whether to sign-extend or
4617 zero-extend. */
4618 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4619 || mode != Pmode)
4620 goto binop;
4622 preexpand_calls (exp);
4623 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4624 subtarget = 0;
4626 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
4627 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4629 both_summands:
4630 /* Make sure any term that's a sum with a constant comes last. */
4631 if (GET_CODE (op0) == PLUS
4632 && CONSTANT_P (XEXP (op0, 1)))
4634 temp = op0;
4635 op0 = op1;
4636 op1 = temp;
4638 /* If adding to a sum including a constant,
4639 associate it to put the constant outside. */
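/* E.g., given (X + 4) + (Y + 8), the 4 is pulled out of the first
   summand, the two constants fold to 12, and the result is the sum
   of the variable parts plus 12, with the constant outermost where
   an address can absorb it.  */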
4640 if (GET_CODE (op1) == PLUS
4641 && CONSTANT_P (XEXP (op1, 1)))
4643 rtx constant_term = const0_rtx;
4645 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4646 if (temp != 0)
4647 op0 = temp;
4648 /* Ensure that MULT comes first if there is one. */
4649 else if (GET_CODE (op0) == MULT)
4650 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
4651 else
4652 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4654 /* Let's also eliminate constants from op0 if possible. */
4655 op0 = eliminate_constant_term (op0, &constant_term);
4657 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4658 their sum should be a constant. Form it into OP1, since the
4659 result we want will then be OP0 + OP1. */
4661 temp = simplify_binary_operation (PLUS, mode, constant_term,
4662 XEXP (op1, 1));
4663 if (temp != 0)
4664 op1 = temp;
4665 else
4666 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4669 /* Put a constant term last and put a multiplication first. */
4670 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4671 temp = op1, op1 = op0, op0 = temp;
4673 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4674 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4676 case MINUS_EXPR:
4677 /* Handle difference of two symbolic constants,
4678 for the sake of an initializer. */
4679 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4680 && really_constant_p (TREE_OPERAND (exp, 0))
4681 && really_constant_p (TREE_OPERAND (exp, 1)))
4683 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4684 VOIDmode, modifier);
4685 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4686 VOIDmode, modifier);
4687 return gen_rtx (MINUS, mode, op0, op1);
4689 /* Convert A - const to A + (-const). */
4690 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4692 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4693 fold (build1 (NEGATE_EXPR, type,
4694 TREE_OPERAND (exp, 1))));
4695 goto plus_expr;
4697 this_optab = sub_optab;
4698 goto binop;
4700 case MULT_EXPR:
4701 preexpand_calls (exp);
4702 /* If first operand is constant, swap them.
4703 Thus the following special case checks need only
4704 check the second operand. */
4705 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4707 register tree t1 = TREE_OPERAND (exp, 0);
4708 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4709 TREE_OPERAND (exp, 1) = t1;
4712 /* Attempt to return something suitable for generating an
4713 indexed address, for machines that support that. */
4715 if (modifier == EXPAND_SUM && mode == Pmode
4716 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4717 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4719 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4721 /* Apply distributive law if OP0 is x+c. */
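/* That is, (X + C1) * C2 becomes X*C2 + C1*C2, keeping the result in
   the sum form that indexed addressing can use.  */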
4722 if (GET_CODE (op0) == PLUS
4723 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4724 return gen_rtx (PLUS, mode,
4725 gen_rtx (MULT, mode, XEXP (op0, 0),
4726 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4727 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4728 * INTVAL (XEXP (op0, 1))));
4730 if (GET_CODE (op0) != REG)
4731 op0 = force_operand (op0, NULL_RTX);
4732 if (GET_CODE (op0) != REG)
4733 op0 = copy_to_mode_reg (mode, op0);
4735 return gen_rtx (MULT, mode, op0,
4736 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4739 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4740 subtarget = 0;
4742 /* Check for multiplying things that have been extended
4743 from a narrower type. If this machine supports multiplying
4744 in that narrower type with a result in the desired type,
4745 do it that way, and avoid the explicit type-conversion. */
4746 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4747 && TREE_CODE (type) == INTEGER_TYPE
4748 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4749 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4750 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4751 && int_fits_type_p (TREE_OPERAND (exp, 1),
4752 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4753 /* Don't use a widening multiply if a shift will do. */
4754 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4755 > HOST_BITS_PER_WIDE_INT)
4756 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4757 ||
4758 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4759 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4760 ==
4761 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4762 /* If both operands are extended, they must either both
4763 be zero-extended or both be sign-extended. */
4764 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4765 ==
4766 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4768 enum machine_mode innermode
4769 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4770 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4771 ? umul_widen_optab : smul_widen_optab);
4772 if (mode == GET_MODE_WIDER_MODE (innermode)
4773 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4775 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4776 NULL_RTX, VOIDmode, 0);
4777 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4778 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4779 VOIDmode, 0);
4780 else
4781 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4782 NULL_RTX, VOIDmode, 0);
4783 goto binop2;
4786 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4787 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4788 return expand_mult (mode, op0, op1, target, unsignedp);
4790 case TRUNC_DIV_EXPR:
4791 case FLOOR_DIV_EXPR:
4792 case CEIL_DIV_EXPR:
4793 case ROUND_DIV_EXPR:
4794 case EXACT_DIV_EXPR:
4795 preexpand_calls (exp);
4796 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4797 subtarget = 0;
4798 /* Possible optimization: compute the dividend with EXPAND_SUM;
4799 then, if the divisor is constant, we can optimize the case
4800 where some terms of the dividend have coefficients divisible by it. */
4801 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4802 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4803 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4805 case RDIV_EXPR:
4806 this_optab = flodiv_optab;
4807 goto binop;
4809 case TRUNC_MOD_EXPR:
4810 case FLOOR_MOD_EXPR:
4811 case CEIL_MOD_EXPR:
4812 case ROUND_MOD_EXPR:
4813 preexpand_calls (exp);
4814 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4815 subtarget = 0;
4816 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4817 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4818 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4820 case FIX_ROUND_EXPR:
4821 case FIX_FLOOR_EXPR:
4822 case FIX_CEIL_EXPR:
4823 abort (); /* Not used for C. */
4825 case FIX_TRUNC_EXPR:
4826 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4827 if (target == 0)
4828 target = gen_reg_rtx (mode);
4829 expand_fix (target, op0, unsignedp);
4830 return target;
4832 case FLOAT_EXPR:
4833 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4834 if (target == 0)
4835 target = gen_reg_rtx (mode);
4836 /* expand_float can't figure out what to do if FROM has VOIDmode.
4837 So give it the correct mode. With -O, cse will optimize this. */
4838 if (GET_MODE (op0) == VOIDmode)
4839 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4840 op0);
4841 expand_float (target, op0,
4842 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4843 return target;
4845 case NEGATE_EXPR:
4846 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4847 temp = expand_unop (mode, neg_optab, op0, target, 0);
4848 if (temp == 0)
4849 abort ();
4850 return temp;
4852 case ABS_EXPR:
4853 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4855 /* Handle complex values specially. */
4857 enum machine_mode opmode
4858 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4860 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
4861 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
4862 return expand_complex_abs (opmode, op0, target, unsignedp);
4865 /* Unsigned abs is simply the operand. Testing here means we don't
4866 risk generating incorrect code below. */
4867 if (TREE_UNSIGNED (type))
4868 return op0;
4870 /* First try to do it with a special abs instruction. */
4871 temp = expand_unop (mode, abs_optab, op0, target, 0);
4872 if (temp != 0)
4873 return temp;
4875 /* If this machine has expensive jumps, we can do integer absolute
4876 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4877 where W is the width of MODE. */
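/* For example, with W == 32 and X == -5: X >> 31 is -1, -1 ^ -5 is 4,
   and 4 - (-1) is 5.  For X >= 0 the shift yields 0 and the whole
   expression reduces to X.  */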
4879 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4881 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4882 size_int (GET_MODE_BITSIZE (mode) - 1),
4883 NULL_RTX, 0);
4885 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4886 OPTAB_LIB_WIDEN);
4887 if (temp != 0)
4888 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4889 OPTAB_LIB_WIDEN);
4891 if (temp != 0)
4892 return temp;
4895 /* If that does not win, use conditional jump and negate. */
4896 target = original_target;
4897 temp = gen_label_rtx ();
4898 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4899 || (GET_CODE (target) == REG
4900 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4901 target = gen_reg_rtx (mode);
4902 emit_move_insn (target, op0);
4903 emit_cmp_insn (target,
4904 expand_expr (convert (type, integer_zero_node),
4905 NULL_RTX, VOIDmode, 0),
4906 GE, NULL_RTX, mode, 0, 0);
4907 NO_DEFER_POP;
4908 emit_jump_insn (gen_bge (temp));
4909 op0 = expand_unop (mode, neg_optab, target, target, 0);
4910 if (op0 != target)
4911 emit_move_insn (target, op0);
4912 emit_label (temp);
4913 OK_DEFER_POP;
4914 return target;
4916 case MAX_EXPR:
4917 case MIN_EXPR:
4918 target = original_target;
4919 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4920 || (GET_CODE (target) == REG
4921 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4922 target = gen_reg_rtx (mode);
4923 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4924 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4926 /* First try to do it with a special MIN or MAX instruction.
4927 If that does not win, use a conditional jump to select the proper
4928 value. */
4929 this_optab = (TREE_UNSIGNED (type)
4930 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4931 : (code == MIN_EXPR ? smin_optab : smax_optab));
4933 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4934 OPTAB_WIDEN);
4935 if (temp != 0)
4936 return temp;
4938 if (target != op0)
4939 emit_move_insn (target, op0);
4940 op0 = gen_label_rtx ();
4941 /* If this mode is an integer too wide to compare properly,
4942 compare word by word. Rely on cse to optimize constant cases. */
4943 if (GET_MODE_CLASS (mode) == MODE_INT
4944 && !can_compare_p (mode))
4946 if (code == MAX_EXPR)
4947 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), target, op1, NULL, op0);
4948 else
4949 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), op1, target, NULL, op0);
4950 emit_move_insn (target, op1);
4952 else
4954 if (code == MAX_EXPR)
4955 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4956 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
4957 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
4958 else
4959 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4960 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
4961 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
4962 if (temp == const0_rtx)
4963 emit_move_insn (target, op1);
4964 else if (temp != const_true_rtx)
4966 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4967 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4968 else
4969 abort ();
4970 emit_move_insn (target, op1);
4973 emit_label (op0);
4974 return target;
4976 /* ??? Can optimize when the operand of this is a bitwise operation,
4977 by using a different bitwise operation. */
4978 case BIT_NOT_EXPR:
4979 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4980 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4981 if (temp == 0)
4982 abort ();
4983 return temp;
4985 case FFS_EXPR:
4986 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4987 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4988 if (temp == 0)
4989 abort ();
4990 return temp;
4992 /* ??? Can optimize bitwise operations with one arg constant.
4993 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4994 and (a bitwise1 b) bitwise2 b (etc)
4995 but that is probably not worth while. */
4997 /* BIT_AND_EXPR is for bitwise anding.
4998 TRUTH_AND_EXPR is for anding two boolean values
4999 when we want in all cases to compute both of them.
5000 In general it is fastest to do TRUTH_AND_EXPR by
5001 computing both operands as actual zero-or-1 values
5002 and then bitwise anding. In cases where there cannot
5003 be any side effects, better code would be made by
5004 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
5005 but the question is how to recognize those cases. */
5007 case TRUTH_AND_EXPR:
5008 case BIT_AND_EXPR:
5009 this_optab = and_optab;
5010 goto binop;
5012 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
5013 case TRUTH_OR_EXPR:
5014 case BIT_IOR_EXPR:
5015 this_optab = ior_optab;
5016 goto binop;
5018 case TRUTH_XOR_EXPR:
5019 case BIT_XOR_EXPR:
5020 this_optab = xor_optab;
5021 goto binop;
5023 case LSHIFT_EXPR:
5024 case RSHIFT_EXPR:
5025 case LROTATE_EXPR:
5026 case RROTATE_EXPR:
5027 preexpand_calls (exp);
5028 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5029 subtarget = 0;
5030 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5031 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
5032 unsignedp);
5034 /* Could determine the answer when only additive constants differ.
5035 Also, the addition of one can be handled by changing the condition. */
5036 case LT_EXPR:
5037 case LE_EXPR:
5038 case GT_EXPR:
5039 case GE_EXPR:
5040 case EQ_EXPR:
5041 case NE_EXPR:
5042 preexpand_calls (exp);
5043 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
5044 if (temp != 0)
5045 return temp;
5046 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
5047 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
5048 && original_target
5049 && GET_CODE (original_target) == REG
5050 && (GET_MODE (original_target)
5051 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5053 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
5054 if (temp != original_target)
5055 temp = copy_to_reg (temp);
5056 op1 = gen_label_rtx ();
5057 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
5058 GET_MODE (temp), unsignedp, 0);
5059 emit_jump_insn (gen_beq (op1));
5060 emit_move_insn (temp, const1_rtx);
5061 emit_label (op1);
5062 return temp;
5064 /* If no set-flag instruction, must generate a conditional
5065 store into a temporary variable. Drop through
5066 and handle this like && and ||. */
5068 case TRUTH_ANDIF_EXPR:
5069 case TRUTH_ORIF_EXPR:
5070 if (! ignore
5071 && (target == 0 || ! safe_from_p (target, exp)
5072 /* Make sure we don't have a hard reg (such as function's return
5073 value) live across basic blocks, if not optimizing. */
5074 || (!optimize && GET_CODE (target) == REG
5075 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
5076 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
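/* Materialize the truth value: clear the target, then jump past the
   store of 1 when the expression turns out to be false.  */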
5078 if (target)
5079 emit_clr_insn (target);
5081 op1 = gen_label_rtx ();
5082 jumpifnot (exp, op1);
5084 if (target)
5085 emit_0_to_1_insn (target);
5087 emit_label (op1);
5088 return ignore ? const0_rtx : target;
5090 case TRUTH_NOT_EXPR:
5091 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5092 /* The parser is careful to generate TRUTH_NOT_EXPR
5093 only with operands that are always zero or one. */
5094 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
5095 target, 1, OPTAB_LIB_WIDEN);
5096 if (temp == 0)
5097 abort ();
5098 return temp;
5100 case COMPOUND_EXPR:
5101 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5102 emit_queue ();
5103 return expand_expr (TREE_OPERAND (exp, 1),
5104 (ignore ? const0_rtx : target),
5105 VOIDmode, 0);
5107 case COND_EXPR:
5109 /* Note that COND_EXPRs whose type is a structure or union
5110 are required to be constructed to contain assignments to
5111 a temporary variable, so that we can evaluate them here
5112 for side effect only. If type is void, we must do likewise. */
5114 /* If an arm of the branch requires a cleanup,
5115 only that cleanup is performed. */
5117 tree singleton = 0;
5118 tree binary_op = 0, unary_op = 0;
5119 tree old_cleanups = cleanups_this_call;
5120 cleanups_this_call = 0;
5122 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
5123 convert it to our mode, if necessary. */
5124 if (integer_onep (TREE_OPERAND (exp, 1))
5125 && integer_zerop (TREE_OPERAND (exp, 2))
5126 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5128 if (ignore)
5130 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5131 modifier);
5132 return const0_rtx;
5135 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
5136 if (GET_MODE (op0) == mode)
5137 return op0;
5138 if (target == 0)
5139 target = gen_reg_rtx (mode);
5140 convert_move (target, op0, unsignedp);
5141 return target;
5144 /* If we are not to produce a result, we have no target. Otherwise,
5145 if a target was specified use it; it will not be used as an
5146 intermediate target unless it is safe. If no target, use a
5147 temporary. */
5149 if (ignore)
5150 temp = 0;
5151 else if (original_target
5152 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
5153 temp = original_target;
5154 else if (mode == BLKmode)
5156 if (TYPE_SIZE (type) == 0
5157 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5158 abort ();
5160 temp = assign_stack_temp (BLKmode,
5161 (TREE_INT_CST_LOW (TYPE_SIZE (type))
5162 + BITS_PER_UNIT - 1)
5163 / BITS_PER_UNIT, 0);
5164 MEM_IN_STRUCT_P (temp)
5165 = (TREE_CODE (type) == RECORD_TYPE
5166 || TREE_CODE (type) == UNION_TYPE
5167 || TREE_CODE (type) == QUAL_UNION_TYPE
5168 || TREE_CODE (type) == ARRAY_TYPE);
5170 else
5171 temp = gen_reg_rtx (mode);
5173 /* Check for X ? A + B : A. If we have this, we can copy
5174 A to the output and conditionally add B. Similarly for unary
5175 operations. Don't do this if X has side-effects because
5176 those side effects might affect A or B and the "?" operation is
5177 a sequence point in ANSI. (We test for side effects later.) */
5179 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
5180 && operand_equal_p (TREE_OPERAND (exp, 2),
5181 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5182 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
5183 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
5184 && operand_equal_p (TREE_OPERAND (exp, 1),
5185 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5186 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
5187 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
5188 && operand_equal_p (TREE_OPERAND (exp, 2),
5189 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5190 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
5191 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
5192 && operand_equal_p (TREE_OPERAND (exp, 1),
5193 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5194 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
5196 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
5197 operation, do this as A + (X != 0). Similarly for other simple
5198 binary operators. */
5199 if (temp && singleton && binary_op
5200 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5201 && (TREE_CODE (binary_op) == PLUS_EXPR
5202 || TREE_CODE (binary_op) == MINUS_EXPR
5203 || TREE_CODE (binary_op) == BIT_IOR_EXPR
5204 || TREE_CODE (binary_op) == BIT_XOR_EXPR
5205 || TREE_CODE (binary_op) == BIT_AND_EXPR)
5206 && integer_onep (TREE_OPERAND (binary_op, 1))
5207 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5209 rtx result;
5210 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
5211 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
5212 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
5213 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
5214 : and_optab);
5216 /* If we had X ? A : A + 1, do this as A + (X == 0).
5218 We have to invert the truth value here and then put it
5219 back later if do_store_flag fails. We cannot simply copy
5220 TREE_OPERAND (exp, 0) to another variable and modify that
5221 because invert_truthvalue can modify the tree pointed to
5222 by its argument. */
5223 if (singleton == TREE_OPERAND (exp, 1))
5224 TREE_OPERAND (exp, 0)
5225 = invert_truthvalue (TREE_OPERAND (exp, 0));
5227 result = do_store_flag (TREE_OPERAND (exp, 0),
5228 (safe_from_p (temp, singleton)
5229 ? temp : NULL_RTX),
5230 mode, BRANCH_COST <= 1);
5232 if (result)
5234 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
5235 return expand_binop (mode, boptab, op1, result, temp,
5236 unsignedp, OPTAB_LIB_WIDEN);
5238 else if (singleton == TREE_OPERAND (exp, 1))
5239 TREE_OPERAND (exp, 0)
5240 = invert_truthvalue (TREE_OPERAND (exp, 0));
5243 NO_DEFER_POP;
5244 op0 = gen_label_rtx ();
5246 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
5248 if (temp != 0)
5250 /* If the target conflicts with the other operand of the
5251 binary op, we can't use it. Also, we can't use the target
5252 if it is a hard register, because evaluating the condition
5253 might clobber it. */
5254 if ((binary_op
5255 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
5256 || (GET_CODE (temp) == REG
5257 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
5258 temp = gen_reg_rtx (mode);
5259 store_expr (singleton, temp, 0);
5261 else
5262 expand_expr (singleton,
5263 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5264 if (cleanups_this_call)
5266 sorry ("aggregate value in COND_EXPR");
5267 cleanups_this_call = 0;
5269 if (singleton == TREE_OPERAND (exp, 1))
5270 jumpif (TREE_OPERAND (exp, 0), op0);
5271 else
5272 jumpifnot (TREE_OPERAND (exp, 0), op0);
5274 if (binary_op && temp == 0)
5275 /* Just touch the other operand. */
5276 expand_expr (TREE_OPERAND (binary_op, 1),
5277 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5278 else if (binary_op)
5279 store_expr (build (TREE_CODE (binary_op), type,
5280 make_tree (type, temp),
5281 TREE_OPERAND (binary_op, 1)),
5282 temp, 0);
5283 else
5284 store_expr (build1 (TREE_CODE (unary_op), type,
5285 make_tree (type, temp)),
5286 temp, 0);
5287 op1 = op0;
5289 #if 0
5290 /* This is now done in jump.c and is better done there because it
5291 produces shorter register lifetimes. */
5293 /* Check for both possibilities, either constants or variables
5294 in registers (but not the same as the target!). If so, can
5295 save branches by assigning one, branching, and assigning the
5296 other. */
5297 else if (temp && GET_MODE (temp) != BLKmode
5298 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5299 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5300 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5301 && DECL_RTL (TREE_OPERAND (exp, 1))
5302 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5303 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5304 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5305 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5306 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5307 && DECL_RTL (TREE_OPERAND (exp, 2))
5308 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5309 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
5311 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5312 temp = gen_reg_rtx (mode);
5313 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5314 jumpifnot (TREE_OPERAND (exp, 0), op0);
5315 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5316 op1 = op0;
5318 #endif
5319 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5320 comparison operator. If we have one of these cases, set the
5321 output to A, branch on A (cse will merge these two references),
5322 then set the output to FOO. */
5323 else if (temp
5324 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5325 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5326 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5327 TREE_OPERAND (exp, 1), 0)
5328 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5329 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5331 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5332 temp = gen_reg_rtx (mode);
5333 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5334 jumpif (TREE_OPERAND (exp, 0), op0);
5335 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5336 op1 = op0;
5338 else if (temp
5339 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5340 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5341 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5342 TREE_OPERAND (exp, 2), 0)
5343 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5344 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5346 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5347 temp = gen_reg_rtx (mode);
5348 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5349 jumpifnot (TREE_OPERAND (exp, 0), op0);
5350 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5351 op1 = op0;
5353 else
5355 op1 = gen_label_rtx ();
5356 jumpifnot (TREE_OPERAND (exp, 0), op0);
5357 if (temp != 0)
5358 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5359 else
5360 expand_expr (TREE_OPERAND (exp, 1),
5361 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5362 if (cleanups_this_call)
5364 sorry ("aggregate value in COND_EXPR");
5365 cleanups_this_call = 0;
5368 emit_queue ();
5369 emit_jump_insn (gen_jump (op1));
5370 emit_barrier ();
5371 emit_label (op0);
5372 if (temp != 0)
5373 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5374 else
5375 expand_expr (TREE_OPERAND (exp, 2),
5376 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5379 if (cleanups_this_call)
5381 sorry ("aggregate value in COND_EXPR");
5382 cleanups_this_call = 0;
5385 emit_queue ();
5386 emit_label (op1);
5387 OK_DEFER_POP;
5388 cleanups_this_call = old_cleanups;
5389 return temp;
5392 case TARGET_EXPR:
5394 /* Something needs to be initialized, but we didn't know
5395 where that thing was when building the tree. For example,
5396 it could be the return value of a function, or a parameter
5397 to a function which is laid down in the stack, or a temporary
5398 variable which must be passed by reference.
5400 We guarantee that the expression will either be constructed
5401 or copied into our original target. */
5403 tree slot = TREE_OPERAND (exp, 0);
5404 tree exp1;
5406 if (TREE_CODE (slot) != VAR_DECL)
5407 abort ();
5409 if (target == 0)
5411 if (DECL_RTL (slot) != 0)
5413 target = DECL_RTL (slot);
5414 /* We have already expanded the slot, so don't do
5415 it again. (mrs) */
5416 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5417 return target;
5419 else
5421 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5422 /* All temp slots at this level must not conflict. */
5423 preserve_temp_slots (target);
5424 DECL_RTL (slot) = target;
5427 #if 0
5428 /* I bet this needs to be done, and I bet that it needs to
5429 be above, inside the else clause. The reason is
5430 simple: how else is it going to get cleaned up? (mrs)
5432 The reason it probably did not work before, and was
5433 commented out, is that this was re-expanding already
5434 expanded target_exprs (target == 0 and DECL_RTL (slot)
5435 != 0), also cleaning them up many times as well. :-( */
5437 /* Since SLOT is not known to the called function
5438 to belong to its stack frame, we must build an explicit
5439 cleanup. This case occurs when we must build up a reference
5440 to pass the reference as an argument. In this case,
5441 it is very likely that such a reference need not be
5442 built here. */
5444 if (TREE_OPERAND (exp, 2) == 0)
5445 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5446 if (TREE_OPERAND (exp, 2))
5447 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5448 cleanups_this_call);
5449 #endif
5451 else
5453 /* This case does occur when expanding a parameter which
5454 needs to be constructed on the stack. The target
5455 is the actual stack address that we want to initialize.
5456 The function we call will perform the cleanup in this case. */
5458 /* If we have already assigned it space, use that space,
5459 not the target that we were passed, as our target
5460 parameter is only a hint. */
5461 if (DECL_RTL (slot) != 0)
5463 target = DECL_RTL (slot);
5464 /* If we have already expanded the slot, don't do
5465 it again. (mrs) */
5466 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5467 return target;
5470 DECL_RTL (slot) = target;
5473 exp1 = TREE_OPERAND (exp, 1);
5474 /* Mark it as expanded. */
5475 TREE_OPERAND (exp, 1) = NULL_TREE;
5477 return expand_expr (exp1, target, tmode, modifier);
5480 case INIT_EXPR:
5482 tree lhs = TREE_OPERAND (exp, 0);
5483 tree rhs = TREE_OPERAND (exp, 1);
5484 tree noncopied_parts = 0;
5485 tree lhs_type = TREE_TYPE (lhs);
5487 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5488 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5489 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5490 TYPE_NONCOPIED_PARTS (lhs_type));
5491 while (noncopied_parts != 0)
5493 expand_assignment (TREE_VALUE (noncopied_parts),
5494 TREE_PURPOSE (noncopied_parts), 0, 0);
5495 noncopied_parts = TREE_CHAIN (noncopied_parts);
5497 return temp;
5500 case MODIFY_EXPR:
5502 /* If lhs is complex, expand calls in rhs before computing it.
5503 That's so we don't compute a pointer and save it over a call.
5504 If lhs is simple, compute it first so we can give it as a
5505 target if the rhs is just a call. This avoids an extra temp and copy
5506 and prevents a partial subsumption that makes bad code.
5507 Actually we could treat component_ref's of vars like vars. */
5509 tree lhs = TREE_OPERAND (exp, 0);
5510 tree rhs = TREE_OPERAND (exp, 1);
5511 tree noncopied_parts = 0;
5512 tree lhs_type = TREE_TYPE (lhs);
5514 temp = 0;
5516 if (TREE_CODE (lhs) != VAR_DECL
5517 && TREE_CODE (lhs) != RESULT_DECL
5518 && TREE_CODE (lhs) != PARM_DECL)
5519 preexpand_calls (exp);
5521 /* Check for |= or &= of a bitfield of size 1 into another bitfield
5522 of size 1. In this case (unless we need the result of the
5523 assignment) we can do this more efficiently with a
5524 test followed by an assignment, if necessary.
5526 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5527 things change so we do, this code should be enhanced to
5528 support it. */
5529 if (ignore
5530 && TREE_CODE (lhs) == COMPONENT_REF
5531 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5532 || TREE_CODE (rhs) == BIT_AND_EXPR)
5533 && TREE_OPERAND (rhs, 0) == lhs
5534 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5535 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5536 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5538 rtx label = gen_label_rtx ();
5540 do_jump (TREE_OPERAND (rhs, 1),
5541 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5542 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5543 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5544 (TREE_CODE (rhs) == BIT_IOR_EXPR
5545 ? integer_one_node
5546 : integer_zero_node)),
5547 0, 0);
5548 do_pending_stack_adjust ();
5549 emit_label (label);
5550 return const0_rtx;
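/* A sketch of the transformation just emitted, assuming S has
   one-bit bitfields A and B (hypothetical user code): with the
   result unused, `s.a |= s.b' is expanded as if written  */
#if 0
	    if (s.b)
	      s.a = 1;
	    /* and `s.a &= s.b' likewise as `if (! s.b) s.a = 0;'.  */
#endif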
5553 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5554 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5555 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5556 TYPE_NONCOPIED_PARTS (lhs_type));
5558 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5559 while (noncopied_parts != 0)
5561 expand_assignment (TREE_PURPOSE (noncopied_parts),
5562 TREE_VALUE (noncopied_parts), 0, 0);
5563 noncopied_parts = TREE_CHAIN (noncopied_parts);
5565 return temp;
5568 case PREINCREMENT_EXPR:
5569 case PREDECREMENT_EXPR:
5570 return expand_increment (exp, 0);
5572 case POSTINCREMENT_EXPR:
5573 case POSTDECREMENT_EXPR:
5574 /* Faster to treat as pre-increment if result is not used. */
5575 return expand_increment (exp, ! ignore);
5577 case ADDR_EXPR:
5578 /* Are we taking the address of a nested function? */
5579 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5580 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5582 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5583 op0 = force_operand (op0, target);
5585 else
5587 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
5588 (modifier == EXPAND_INITIALIZER
5589 ? modifier : EXPAND_CONST_ADDRESS));
5591 /* We would like the object in memory. If it is a constant,
5592 we can have it be statically allocated into memory. For
5593 a non-constant (REG or SUBREG), we need to allocate some
5594 memory and store the value into it. */
5596 if (CONSTANT_P (op0))
5597 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5598 op0);
5600 /* These cases happen in Fortran. Is that legitimate?
5601 Should Fortran work in another way?
5602 Do they happen in C? */
5603 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5604 || GET_CODE (op0) == CONCAT)
5606 /* If this object is in a register, it must not
5607 be BLKmode. */
5608 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
5609 enum machine_mode inner_mode = TYPE_MODE (inner_type);
5610 rtx memloc
5611 = assign_stack_temp (inner_mode,
5612 int_size_in_bytes (inner_type), 1);
5614 emit_move_insn (memloc, op0);
5615 op0 = memloc;
5618 if (GET_CODE (op0) != MEM)
5619 abort ();
5621 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5622 return XEXP (op0, 0);
5623 op0 = force_operand (XEXP (op0, 0), target);
5625 if (flag_force_addr && GET_CODE (op0) != REG)
5626 return force_reg (Pmode, op0);
5627 return op0;
5629 case ENTRY_VALUE_EXPR:
5630 abort ();
5632 /* COMPLEX type for Extended Pascal & Fortran */
5633 case COMPLEX_EXPR:
5635 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5637 rtx prev;
5639 /* Get the rtx code of the operands. */
5640 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5641 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5643 if (! target)
5644 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5646 prev = get_last_insn ();
5648 /* Tell flow that the whole of the destination is being set. */
5649 if (GET_CODE (target) == REG)
5650 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5652 /* Move the real (op0) and imaginary (op1) parts to their location. */
5653 emit_move_insn (gen_realpart (mode, target), op0);
5654 emit_move_insn (gen_imagpart (mode, target), op1);
5656 /* Complex construction should appear as a single unit. */
5657 group_insns (prev);
5659 return target;
5662 case REALPART_EXPR:
5663 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5664 return gen_realpart (mode, op0);
5666 case IMAGPART_EXPR:
5667 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5668 return gen_imagpart (mode, op0);
5670 case CONJ_EXPR:
5672 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5673 rtx imag_t;
5674 rtx prev;
5676 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5678 if (! target)
5679 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5681 prev = get_last_insn ();
5683 /* Tell flow that the whole of the destination is being set. */
5684 if (GET_CODE (target) == REG)
5685 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5687 /* Store the realpart and the negated imagpart to target. */
5688 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
5690 imag_t = gen_imagpart (mode, target);
5691 temp = expand_unop (mode, neg_optab,
5692 gen_imagpart (mode, op0), imag_t, 0);
5693 if (temp != imag_t)
5694 emit_move_insn (imag_t, temp);
5696 /* Conjugate should appear as a single unit. */
5697 group_insns (prev);
5699 return target;
5702 case ERROR_MARK:
5703 op0 = CONST0_RTX (tmode);
5704 if (op0 != 0)
5705 return op0;
5706 return const0_rtx;
5708 default:
5709 return (*lang_expand_expr) (exp, target, tmode, modifier);
5712 /* Here to do an ordinary binary operator, generating an instruction
5713 from the optab already placed in `this_optab'. */
5714 binop:
5715 preexpand_calls (exp);
5716 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5717 subtarget = 0;
5718 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5719 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5720 binop2:
5721 temp = expand_binop (mode, this_optab, op0, op1, target,
5722 unsignedp, OPTAB_LIB_WIDEN);
5723 if (temp == 0)
5724 abort ();
5725 return temp;
5729 /* Emit bytecode to evaluate the given expression EXP to the stack. */
5730 void
5731 bc_expand_expr (exp)
5732 tree exp;
5734 enum tree_code code;
5735 tree type, arg0;
5736 rtx r;
5737 struct binary_operator *binoptab;
5738 struct unary_operator *unoptab;
5739 struct increment_operator *incroptab;
5740 struct bc_label *lab, *lab1;
5741 enum bytecode_opcode opcode;
5744 code = TREE_CODE (exp);
5746 switch (code)
5748 case PARM_DECL:
5750 if (DECL_RTL (exp) == 0)
5752 error_with_decl (exp, "prior parameter's size depends on `%s'");
5753 return;
5756 bc_load_parmaddr (DECL_RTL (exp));
5757 bc_load_memory (TREE_TYPE (exp), exp);
5759 return;
5761 case VAR_DECL:
5763 if (DECL_RTL (exp) == 0)
5764 abort ();
5766 #if 0
5767 if (BYTECODE_LABEL (DECL_RTL (exp)))
5768 bc_load_externaddr (DECL_RTL (exp));
5769 else
5770 bc_load_localaddr (DECL_RTL (exp));
5771 #endif
5772 if (TREE_PUBLIC (exp))
5773 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
5774 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
5775 else
5776 bc_load_localaddr (DECL_RTL (exp));
5778 bc_load_memory (TREE_TYPE (exp), exp);
5779 return;
5781 case INTEGER_CST:
5783 #ifdef DEBUG_PRINT_CODE
5784 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
5785 #endif
5786 bc_emit_instruction (mode_to_const_map[DECL_BIT_FIELD (exp)
5787 ? SImode
5788 : TYPE_MODE (TREE_TYPE (exp))],
5789 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
5790 return;
5792 case REAL_CST:
5794 #if 0
5795 #ifdef DEBUG_PRINT_CODE
5796 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
5797 #endif
5798 /* FIX THIS: find a better way to pass real_cst's. -bson */
5799 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
5800 (double) TREE_REAL_CST (exp));
5801 #else
5802 abort ();
5803 #endif
5805 return;
5807 case CALL_EXPR:
5809 /* We build a call description vector describing the type of
5810 the return value and of the arguments; this call vector,
5811 together with a pointer to a location for the return value
5812 and the base of the argument list, is passed to the low
5813 level machine dependent call subroutine, which is responsible
5814 for putting the arguments wherever real functions expect
5815 them, as well as getting the return value back. */
5817 tree calldesc = 0, arg;
5818 int nargs = 0, i;
5819 rtx retval;
5821 /* Push the evaluated args on the evaluation stack in reverse
5822 order. Also make an entry for each arg in the calldesc
5823 vector while we're at it. */
5825 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
5827 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
5829 ++nargs;
5830 bc_expand_expr (TREE_VALUE (arg));
5832 calldesc = tree_cons ((tree) 0,
5833 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
5834 calldesc);
5835 calldesc = tree_cons ((tree) 0,
5836 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
5837 calldesc);
5840 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
5842 /* Allocate a location for the return value and push its
5843 address on the evaluation stack. Also make an entry
5844 at the front of the calldesc for the return value type. */
5846 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
5847 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
5848 bc_load_localaddr (retval);
5850 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
5851 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
5853 /* Prepend the argument count. */
5854 calldesc = tree_cons ((tree) 0,
5855 build_int_2 (nargs, 0),
5856 calldesc);
5858 /* Push the address of the call description vector on the stack. */
5859 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
5860 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
5861 build_index_type (build_int_2 (nargs * 2, 0)));
5862 r = output_constant_def (calldesc);
5863 bc_load_externaddr (r);
5865 /* Push the address of the function to be called. */
5866 bc_expand_expr (TREE_OPERAND (exp, 0));
5868 /* Call the function, popping its address and the calldesc vector
5869 address off the evaluation stack in the process. */
5870 bc_emit_instruction (call);
5872 /* Pop the arguments off the stack. */
5873 bc_adjust_stack (nargs);
5875 /* Load the return value onto the stack. */
5876 bc_load_localaddr (retval);
5877 bc_load_memory (type, TREE_OPERAND (exp, 0));
5879 return;
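/* For example (a sketch, assuming 4-byte ints and 8-byte doubles):
   for a call `f (a, b)' where f is `int f (int, double)', the
   calldesc built above lays out as

       { 2,                     number of arguments
         int code,    4,        return value type and size
         int code,    4,        first argument
         double code, 8 }       second argument

   with the type codes supplied by bc_runtime_type_code.  */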
5881 case SAVE_EXPR:
5883 if (!SAVE_EXPR_RTL (exp))
5885 /* First time around: copy to a local variable. */
5886 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
5887 TYPE_ALIGN (TREE_TYPE(exp)));
5888 bc_expand_expr (TREE_OPERAND (exp, 0));
5889 bc_emit_instruction (dup);
5891 bc_load_localaddr (SAVE_EXPR_RTL (exp));
5892 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
5894 else
5896 /* Subsequent references: use the saved copy. */
5897 bc_load_localaddr (SAVE_EXPR_RTL (exp));
5898 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
5900 return;
5902 #if 0
5903 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
5904 how are they handled instead? */
5905 case LET_STMT:
5907 TREE_USED (exp) = 1;
5908 bc_expand_expr (STMT_BODY (exp));
5909 return;
5910 #endif
5912 case NOP_EXPR:
5913 case CONVERT_EXPR:
5915 bc_expand_expr (TREE_OPERAND (exp, 0));
5916 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
5917 return;
5919 case MODIFY_EXPR:
5921 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
5922 return;
5924 case ADDR_EXPR:
5926 bc_expand_address (TREE_OPERAND (exp, 0));
5927 return;
5929 case INDIRECT_REF:
5931 bc_expand_expr (TREE_OPERAND (exp, 0));
5932 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
5933 return;
5935 case ARRAY_REF:
5937 bc_expand_expr (bc_canonicalize_array_ref (exp));
5938 return;
5940 case COMPONENT_REF:
5942 bc_expand_component_address (exp);
5944 /* If we have a bitfield, generate a proper load */
5945 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
5946 return;
5948 case COMPOUND_EXPR:
5950 bc_expand_expr (TREE_OPERAND (exp, 0));
5951 bc_emit_instruction (drop);
5952 bc_expand_expr (TREE_OPERAND (exp, 1));
5953 return;
5955 case COND_EXPR:
5957 bc_expand_expr (TREE_OPERAND (exp, 0));
5958 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
5959 lab = bc_get_bytecode_label ();
5960 bc_emit_bytecode (xjumpifnot);
5961 bc_emit_bytecode_labelref (lab);
5963 #ifdef DEBUG_PRINT_CODE
5964 fputc ('\n', stderr);
5965 #endif
5966 bc_expand_expr (TREE_OPERAND (exp, 1));
5967 lab1 = bc_get_bytecode_label ();
5968 bc_emit_bytecode (jump);
5969 bc_emit_bytecode_labelref (lab1);
5971 #ifdef DEBUG_PRINT_CODE
5972 fputc ('\n', stderr);
5973 #endif
5975 bc_emit_bytecode_labeldef (lab);
5976 bc_expand_expr (TREE_OPERAND (exp, 2));
5977 bc_emit_bytecode_labeldef (lab1);
5978 return;
5980 case TRUTH_ANDIF_EXPR:
5982 opcode = xjumpifnot;
5983 goto andorif;
5985 case TRUTH_ORIF_EXPR:
5987 opcode = xjumpif;
5988 goto andorif;
5990 case PLUS_EXPR:
5992 binoptab = optab_plus_expr;
5993 goto binop;
5995 case MINUS_EXPR:
5997 binoptab = optab_minus_expr;
5998 goto binop;
6000 case MULT_EXPR:
6002 binoptab = optab_mult_expr;
6003 goto binop;
6005 case TRUNC_DIV_EXPR:
6006 case FLOOR_DIV_EXPR:
6007 case CEIL_DIV_EXPR:
6008 case ROUND_DIV_EXPR:
6009 case EXACT_DIV_EXPR:
6011 binoptab = optab_trunc_div_expr;
6012 goto binop;
6014 case TRUNC_MOD_EXPR:
6015 case FLOOR_MOD_EXPR:
6016 case CEIL_MOD_EXPR:
6017 case ROUND_MOD_EXPR:
6019 binoptab = optab_trunc_mod_expr;
6020 goto binop;
6022 case FIX_ROUND_EXPR:
6023 case FIX_FLOOR_EXPR:
6024 case FIX_CEIL_EXPR:
6025 abort (); /* Not used for C. */
6027 case FIX_TRUNC_EXPR:
6028 case FLOAT_EXPR:
6029 case MAX_EXPR:
6030 case MIN_EXPR:
6031 case FFS_EXPR:
6032 case LROTATE_EXPR:
6033 case RROTATE_EXPR:
6034 abort (); /* FIXME */
6036 case RDIV_EXPR:
6038 binoptab = optab_rdiv_expr;
6039 goto binop;
6041 case BIT_AND_EXPR:
6043 binoptab = optab_bit_and_expr;
6044 goto binop;
6046 case BIT_IOR_EXPR:
6048 binoptab = optab_bit_ior_expr;
6049 goto binop;
6051 case BIT_XOR_EXPR:
6053 binoptab = optab_bit_xor_expr;
6054 goto binop;
6056 case LSHIFT_EXPR:
6058 binoptab = optab_lshift_expr;
6059 goto binop;
6061 case RSHIFT_EXPR:
6063 binoptab = optab_rshift_expr;
6064 goto binop;
6066 case TRUTH_AND_EXPR:
6068 binoptab = optab_truth_and_expr;
6069 goto binop;
6071 case TRUTH_OR_EXPR:
6073 binoptab = optab_truth_or_expr;
6074 goto binop;
6076 case LT_EXPR:
6078 binoptab = optab_lt_expr;
6079 goto binop;
6081 case LE_EXPR:
6083 binoptab = optab_le_expr;
6084 goto binop;
6086 case GE_EXPR:
6088 binoptab = optab_ge_expr;
6089 goto binop;
6091 case GT_EXPR:
6093 binoptab = optab_gt_expr;
6094 goto binop;
6096 case EQ_EXPR:
6098 binoptab = optab_eq_expr;
6099 goto binop;
6101 case NE_EXPR:
6103 binoptab = optab_ne_expr;
6104 goto binop;
6106 case NEGATE_EXPR:
6108 unoptab = optab_negate_expr;
6109 goto unop;
6111 case BIT_NOT_EXPR:
6113 unoptab = optab_bit_not_expr;
6114 goto unop;
6116 case TRUTH_NOT_EXPR:
6118 unoptab = optab_truth_not_expr;
6119 goto unop;
6121 case PREDECREMENT_EXPR:
6123 incroptab = optab_predecrement_expr;
6124 goto increment;
6126 case PREINCREMENT_EXPR:
6128 incroptab = optab_preincrement_expr;
6129 goto increment;
6131 case POSTDECREMENT_EXPR:
6133 incroptab = optab_postdecrement_expr;
6134 goto increment;
6136 case POSTINCREMENT_EXPR:
6138 incroptab = optab_postincrement_expr;
6139 goto increment;
6141 case CONSTRUCTOR:
6143 bc_expand_constructor (exp);
6144 return;
6146 case ERROR_MARK:
6147 case RTL_EXPR:
6149 return;
6151 case BIND_EXPR:
6153 tree vars = TREE_OPERAND (exp, 0);
6154 int vars_need_expansion = 0;
6156 /* Need to open a binding contour here because
6157 if there are any cleanups they must be contained here. */
6158 expand_start_bindings (0);
6160 /* Mark the corresponding BLOCK for output. */
6161 if (TREE_OPERAND (exp, 2) != 0)
6162 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
6164 /* If VARS have not yet been expanded, expand them now. */
6165 while (vars)
6167 if (DECL_RTL (vars) == 0)
6169 vars_need_expansion = 1;
6170 bc_expand_decl (vars, 0);
6172 bc_expand_decl_init (vars);
6173 vars = TREE_CHAIN (vars);
6176 bc_expand_expr (TREE_OPERAND (exp, 1));
6178 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6180 return;
6184 abort ();
6186 binop:
6188 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
6189 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
6190 return;
6193 unop:
6195 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6196 return;
6199 andorif:
6201 bc_expand_expr (TREE_OPERAND (exp, 0));
6202 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6203 lab = bc_get_bytecode_label ();
6205 bc_emit_instruction (dup);
6206 bc_emit_bytecode (opcode);
6207 bc_emit_bytecode_labelref (lab);
6209 #ifdef DEBUG_PRINT_CODE
6210 fputc ('\n', stderr);
6211 #endif
6213 bc_emit_instruction (drop);
6215 bc_expand_expr (TREE_OPERAND (exp, 1));
6216 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
6217 bc_emit_bytecode_labeldef (lab);
6218 return;
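/* For `a && b' the sequence above is, schematically:

       <a>  truth-convert  dup  xjumpifnot L  drop  <b>  truth-convert  L:

   The dup leaves A's truth value on the stack as the result when the
   jump is taken; otherwise it is dropped and B's truth value remains.
   `a || b' is identical except that xjumpif is used.  */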
6221 increment:
6223 type = TREE_TYPE (TREE_OPERAND (exp, 0));
6225 /* Push the quantum. */
6226 bc_expand_expr (TREE_OPERAND (exp, 1));
6228 /* Convert it to the lvalue's type. */
6229 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
6231 /* Push the address of the lvalue. */
6232 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
6234 /* Perform the actual increment. */
6235 bc_expand_increment (incroptab, type);
6236 return;
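/* E.g. for `++x' (a sketch): push 1, convert it to X's type, push
   &x, and bc_expand_increment consumes both, leaving the appropriate
   (old or new) value of X on the stack.  */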
6239 /* Return the alignment in bits of EXP, a pointer valued expression.
6240 But don't return more than MAX_ALIGN no matter what.
6241 The alignment returned is, by default, the alignment of the thing that
6242 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
6244 Otherwise, look at the expression to see if we can do better, i.e., if the
6245 expression is actually pointing at an object whose alignment is tighter. */
6247 static int
6248 get_pointer_alignment (exp, max_align)
6249 tree exp;
6250 unsigned max_align;
6252 unsigned align, inner;
6254 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6255 return 0;
6257 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6258 align = MIN (align, max_align);
6260 while (1)
6262 switch (TREE_CODE (exp))
6264 case NOP_EXPR:
6265 case CONVERT_EXPR:
6266 case NON_LVALUE_EXPR:
6267 exp = TREE_OPERAND (exp, 0);
6268 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6269 return align;
6270 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6271 inner = MIN (inner, max_align);
6272 align = MAX (align, inner);
6273 break;
6275 case PLUS_EXPR:
6276 /* If sum of pointer + int, restrict our maximum alignment to that
6277 imposed by the integer. If not, we can't do any better than
6278 ALIGN. */
6279 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
6280 return align;
6282 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
6283 & (max_align - 1))
6284 != 0)
6285 max_align >>= 1;
6287 exp = TREE_OPERAND (exp, 0);
6288 break;
6290 case ADDR_EXPR:
6291 /* See what we are pointing at and look at its alignment. */
6292 exp = TREE_OPERAND (exp, 0);
6293 if (TREE_CODE (exp) == FUNCTION_DECL)
6294 align = MAX (align, FUNCTION_BOUNDARY);
6295 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
6296 align = MAX (align, DECL_ALIGN (exp));
6297 #ifdef CONSTANT_ALIGNMENT
6298 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
6299 align = CONSTANT_ALIGNMENT (exp, align);
6300 #endif
6301 return MIN (align, max_align);
6303 default:
6304 return align;
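/* A worked example, assuming BITS_PER_UNIT == 8: for a PLUS_EXPR
   adding the byte offset 2 to a pointer, the loop above shrinks
   MAX_ALIGN until (2 * 8) & (max_align - 1) == 0, i.e. to 16 bits,
   so the sum can never be claimed more than 16-bit aligned no matter
   how well aligned the base pointer is.  */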
6309 /* Return the tree node and offset if a given argument corresponds to
6310 a string constant. */
6312 static tree
6313 string_constant (arg, ptr_offset)
6314 tree arg;
6315 tree *ptr_offset;
6317 STRIP_NOPS (arg);
6319 if (TREE_CODE (arg) == ADDR_EXPR
6320 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
6322 *ptr_offset = integer_zero_node;
6323 return TREE_OPERAND (arg, 0);
6325 else if (TREE_CODE (arg) == PLUS_EXPR)
6327 tree arg0 = TREE_OPERAND (arg, 0);
6328 tree arg1 = TREE_OPERAND (arg, 1);
6330 STRIP_NOPS (arg0);
6331 STRIP_NOPS (arg1);
6333 if (TREE_CODE (arg0) == ADDR_EXPR
6334 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
6336 *ptr_offset = arg1;
6337 return TREE_OPERAND (arg0, 0);
6339 else if (TREE_CODE (arg1) == ADDR_EXPR
6340 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
6342 *ptr_offset = arg0;
6343 return TREE_OPERAND (arg1, 0);
6347 return 0;
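/* E.g. (a sketch): for the tree of `"abc" + i' this returns the
   STRING_CST "abc" with *PTR_OFFSET set to I; for plain `"abc"'
   (an ADDR_EXPR of the STRING_CST) the offset is integer_zero_node;
   anything else yields 0.  */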
6350 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
6351 way, because the string could contain a zero byte in the middle.
6352 TREE_STRING_LENGTH is the size of the character array, not the string.
6354 Unfortunately, string_constant can't access the values of const char
6355 arrays with initializers, so neither can we do so here. */
6357 static tree
6358 c_strlen (src)
6359 tree src;
6361 tree offset_node;
6362 int offset, max;
6363 char *ptr;
6365 src = string_constant (src, &offset_node);
6366 if (src == 0)
6367 return 0;
6368 max = TREE_STRING_LENGTH (src);
6369 ptr = TREE_STRING_POINTER (src);
6370 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
6372 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
6373 compute the offset to the following null if we don't know where to
6374 start searching for it. */
6375 int i;
6376 for (i = 0; i < max; i++)
6377 if (ptr[i] == 0)
6378 return 0;
6379 /* We don't know the starting offset, but we do know that the string
6380 has no internal zero bytes. We can assume that the offset falls
6381 within the bounds of the string; otherwise, the programmer deserves
6382 what he gets. Subtract the offset from the length of the string,
6383 and return that. */
6384 /* This would perhaps not be valid if we were dealing with named
6385 arrays in addition to literal string constants. */
6386 return size_binop (MINUS_EXPR, size_int (max), offset_node);
6389 /* We have a known offset into the string. Start searching there for
6390 a null character. */
6391 if (offset_node == 0)
6392 offset = 0;
6393 else
6395 /* Did we get a long long offset? If so, punt. */
6396 if (TREE_INT_CST_HIGH (offset_node) != 0)
6397 return 0;
6398 offset = TREE_INT_CST_LOW (offset_node);
6400 /* If the offset is known to be out of bounds, warn, and call strlen at
6401 runtime. */
6402 if (offset < 0 || offset > max)
6404 warning ("offset outside bounds of constant string");
6405 return 0;
6407 /* Use strlen to search for the first zero byte. Since any strings
6408 constructed with build_string will have nulls appended, we win even
6409 if we get handed something like (char[4])"abcd".
6411 Since OFFSET is our starting index into the string, no further
6412 calculation is needed. */
6413 return size_int (strlen (ptr + offset));
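/* For example (a sketch): for strlen ("hello" + 2) this finds the
   STRING_CST with constant offset 2 and folds the result to
   size_int (3) at compile time.  With a non-constant offset it gives
   up (returns 0) whenever the array contains any zero byte, which
   for an ordinary literal includes the terminating null.  */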
6416 /* Expand an expression EXP that calls a built-in function,
6417 with result going to TARGET if that's convenient
6418 (and in mode MODE if that's convenient).
6419 SUBTARGET may be used as the target for computing one of EXP's operands.
6420 IGNORE is nonzero if the value is to be ignored. */
6422 static rtx
6423 expand_builtin (exp, target, subtarget, mode, ignore)
6424 tree exp;
6425 rtx target;
6426 rtx subtarget;
6427 enum machine_mode mode;
6428 int ignore;
6430 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6431 tree arglist = TREE_OPERAND (exp, 1);
6432 rtx op0;
6433 rtx lab1, insns;
6434 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
6435 optab builtin_optab;
6437 switch (DECL_FUNCTION_CODE (fndecl))
6439 case BUILT_IN_ABS:
6440 case BUILT_IN_LABS:
6441 case BUILT_IN_FABS:
6442 /* build_function_call changes these into ABS_EXPR. */
6443 abort ();
6445 case BUILT_IN_SIN:
6446 case BUILT_IN_COS:
6447 case BUILT_IN_FSQRT:
6448 /* If not optimizing, call the library function. */
6449 if (! optimize)
6450 break;
6452 if (arglist == 0
6453 /* Arg could be wrong type if user redeclared this fcn wrong. */
6454 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
6455 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
6457 /* Stabilize and compute the argument. */
6458 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
6459 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
6461 exp = copy_node (exp);
6462 arglist = copy_node (arglist);
6463 TREE_OPERAND (exp, 1) = arglist;
6464 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
6466 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6468 /* Make a suitable register to place result in. */
6469 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6471 emit_queue ();
6472 start_sequence ();
6474 switch (DECL_FUNCTION_CODE (fndecl))
6476 case BUILT_IN_SIN:
6477 builtin_optab = sin_optab; break;
6478 case BUILT_IN_COS:
6479 builtin_optab = cos_optab; break;
6480 case BUILT_IN_FSQRT:
6481 builtin_optab = sqrt_optab; break;
6482 default:
6483 abort ();
6486 /* Compute into TARGET.
6487 Set TARGET to wherever the result comes back. */
6488 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6489 builtin_optab, op0, target, 0);
6491 /* If we were unable to expand via the builtin, stop the
6492 sequence (without outputting the insns) and break, causing
6493 a call to the library function. */
6494 if (target == 0)
6496 end_sequence ();
6497 break;
6500 /* Check the results by default. But if flag_fast_math is turned on,
6501 then assume sqrt will always be called with valid arguments. */
6503 if (! flag_fast_math)
6505 /* Don't define the builtin FP instructions
6506 if your machine is not IEEE. */
6507 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
6508 abort ();
6510 lab1 = gen_label_rtx ();
6512 /* Test the result; if it is NaN, set errno=EDOM because
6513 the argument was not in the domain. */
6514 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
6515 emit_jump_insn (gen_beq (lab1));
6517 #if TARGET_EDOM
6519 #ifdef GEN_ERRNO_RTX
6520 rtx errno_rtx = GEN_ERRNO_RTX;
6521 #else
6522 rtx errno_rtx
6523 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
6524 #endif
6526 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
6528 #else
6529 /* We can't set errno=EDOM directly; let the library call do it.
6530 Pop the arguments right away in case the call gets deleted. */
6531 NO_DEFER_POP;
6532 expand_call (exp, target, 0);
6533 OK_DEFER_POP;
6534 #endif
6536 emit_label (lab1);
6539 /* Output the entire sequence. */
6540 insns = get_insns ();
6541 end_sequence ();
6542 emit_insns (insns);
6544 return target;
6546 /* __builtin_apply_args returns block of memory allocated on
6547 the stack into which is stored the arg pointer, structure
6548 value address, static chain, and all the registers that might
6549 possibly be used in performing a function call. The code is
6550 moved to the start of the function so the incoming values are
6551 saved. */
6552 case BUILT_IN_APPLY_ARGS:
6553 /* Don't do __builtin_apply_args more than once in a function.
6554 Save the result of the first call and reuse it. */
6555 if (apply_args_value != 0)
6556 return apply_args_value;
6558 /* When this function is called, it means that registers must be
6559 saved on entry to this function. So we migrate the
6560 call to the first insn of this function. */
6561 rtx temp;
6562 rtx seq;
6564 start_sequence ();
6565 temp = expand_builtin_apply_args ();
6566 seq = get_insns ();
6567 end_sequence ();
6569 apply_args_value = temp;
6571 /* Put the sequence after the NOTE that starts the function.
6572 If this is inside a SEQUENCE, make the outer-level insn
6573 chain current, so the code is placed at the start of the
6574 function. */
6575 push_topmost_sequence ();
6576 emit_insns_before (seq, NEXT_INSN (get_insns ()));
6577 pop_topmost_sequence ();
6578 return temp;
6581 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6582 FUNCTION with a copy of the parameters described by
6583 ARGUMENTS, and ARGSIZE. It returns a block of memory
6584 allocated on the stack into which is stored all the registers
6585 that might possibly be used for returning the result of a
6586 function. ARGUMENTS is the value returned by
6587 __builtin_apply_args. ARGSIZE is the number of bytes of
6588 arguments that must be copied. ??? How should this value be
6589 computed? We'll also need a safe worst case value for varargs
6590 functions. */
6591 case BUILT_IN_APPLY:
6592 if (arglist == 0
6593 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6594 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6595 || TREE_CHAIN (arglist) == 0
6596 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6597 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6598 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6599 return const0_rtx;
6600 else
6602 int i;
6603 tree t;
6604 rtx ops[3];
6606 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
6607 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
6609 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6612 /* __builtin_return (RESULT) causes the function to return the
6613 value described by RESULT. RESULT is address of the block of
6614 memory returned by __builtin_apply. */
6615 case BUILT_IN_RETURN:
6616 if (arglist
6617 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6618 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
6619 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
6620 NULL_RTX, VOIDmode, 0));
6621 return const0_rtx;
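/* A sketch of how the three builtins above combine in hypothetical
   user code to forward a call, assuming 64 bytes covers the pushed
   arguments (REAL_FUNCTION is a placeholder):  */
#if 0
void *forward ()
{
  void *args = __builtin_apply_args ();
  void *result = __builtin_apply ((void (*) ()) real_function, args, 64);
  __builtin_return (result);
}
#endif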
6623 case BUILT_IN_SAVEREGS:
6624 /* Don't do __builtin_saveregs more than once in a function.
6625 Save the result of the first call and reuse it. */
6626 if (saveregs_value != 0)
6627 return saveregs_value;
6629 /* When this function is called, it means that registers must be
6630 saved on entry to this function. So we migrate the
6631 call to the first insn of this function. */
6632 rtx temp;
6633 rtx seq;
6634 rtx valreg, saved_valreg;
6636 /* Now really call the function. `expand_call' does not call
6637 expand_builtin, so there is no danger of infinite recursion here. */
6638 start_sequence ();
6640 #ifdef EXPAND_BUILTIN_SAVEREGS
6641 /* Do whatever the machine needs done in this case. */
6642 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
6643 #else
6644 /* The register where the function returns its value
6645 is likely to have something else in it, such as an argument.
6646 So preserve that register around the call. */
6647 if (value_mode != VOIDmode)
6649 valreg = hard_libcall_value (value_mode);
6650 saved_valreg = gen_reg_rtx (value_mode);
6651 emit_move_insn (saved_valreg, valreg);
6654 /* Generate the call, putting the value in a pseudo. */
6655 temp = expand_call (exp, target, ignore);
6657 if (value_mode != VOIDmode)
6658 emit_move_insn (valreg, saved_valreg);
6659 #endif
6661 seq = get_insns ();
6662 end_sequence ();
6664 saveregs_value = temp;
6666 /* Put the sequence after the NOTE that starts the function.
6667 If this is inside a SEQUENCE, make the outer-level insn
6668 chain current, so the code is placed at the start of the
6669 function. */
6670 push_topmost_sequence ();
6671 emit_insns_before (seq, NEXT_INSN (get_insns ()));
6672 pop_topmost_sequence ();
6673 return temp;
6676 /* __builtin_args_info (N) returns word N of the arg space info
6677 for the current function. The number and meanings of words
6678 is controlled by the definition of CUMULATIVE_ARGS. */
6679 case BUILT_IN_ARGS_INFO:
6681 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
6682 int i;
6683 int *word_ptr = (int *) &current_function_args_info;
6684 tree type, elts, result;
6686 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
6687 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
6688 __FILE__, __LINE__);
6690 if (arglist != 0)
6692 tree arg = TREE_VALUE (arglist);
6693 if (TREE_CODE (arg) != INTEGER_CST)
6694 error ("argument of `__builtin_args_info' must be constant");
6695 else
6697 int wordnum = TREE_INT_CST_LOW (arg);
6699 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
6700 error ("argument of `__builtin_args_info' out of range");
6701 else
6702 return GEN_INT (word_ptr[wordnum]);
6705 else
6706 error ("missing argument in `__builtin_args_info'");
6708 return const0_rtx;
6710 #if 0
6711 for (elts = 0, i = 0; i < nwords; i++)
6712 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
6714 type = build_array_type (integer_type_node,
6715 build_index_type (build_int_2 (nwords, 0)));
6716 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
6717 TREE_CONSTANT (result) = 1;
6718 TREE_STATIC (result) = 1;
6719 result = build (INDIRECT_REF, build_pointer_type (type), result);
6720 TREE_CONSTANT (result) = 1;
6721 return expand_expr (result, NULL_RTX, VOIDmode, 0);
6722 #endif
6725 /* Return the address of the first anonymous stack arg. */
6726 case BUILT_IN_NEXT_ARG:
6728 tree fntype = TREE_TYPE (current_function_decl);
6729 if (!(TYPE_ARG_TYPES (fntype) != 0
6730 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
6731 != void_type_node)))
6733 error ("`va_start' used in function with fixed args");
6734 return const0_rtx;
6738 return expand_binop (Pmode, add_optab,
6739 current_function_internal_arg_pointer,
6740 current_function_arg_offset_rtx,
6741 NULL_RTX, 0, OPTAB_LIB_WIDEN);
6743 case BUILT_IN_CLASSIFY_TYPE:
6744 if (arglist != 0)
6746 tree type = TREE_TYPE (TREE_VALUE (arglist));
6747 enum tree_code code = TREE_CODE (type);
6748 if (code == VOID_TYPE)
6749 return GEN_INT (void_type_class);
6750 if (code == INTEGER_TYPE)
6751 return GEN_INT (integer_type_class);
6752 if (code == CHAR_TYPE)
6753 return GEN_INT (char_type_class);
6754 if (code == ENUMERAL_TYPE)
6755 return GEN_INT (enumeral_type_class);
6756 if (code == BOOLEAN_TYPE)
6757 return GEN_INT (boolean_type_class);
6758 if (code == POINTER_TYPE)
6759 return GEN_INT (pointer_type_class);
6760 if (code == REFERENCE_TYPE)
6761 return GEN_INT (reference_type_class);
6762 if (code == OFFSET_TYPE)
6763 return GEN_INT (offset_type_class);
6764 if (code == REAL_TYPE)
6765 return GEN_INT (real_type_class);
6766 if (code == COMPLEX_TYPE)
6767 return GEN_INT (complex_type_class);
6768 if (code == FUNCTION_TYPE)
6769 return GEN_INT (function_type_class);
6770 if (code == METHOD_TYPE)
6771 return GEN_INT (method_type_class);
6772 if (code == RECORD_TYPE)
6773 return GEN_INT (record_type_class);
6774 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
6775 return GEN_INT (union_type_class);
6776 if (code == ARRAY_TYPE)
6777 return GEN_INT (array_type_class);
6778 if (code == STRING_TYPE)
6779 return GEN_INT (string_type_class);
6780 if (code == SET_TYPE)
6781 return GEN_INT (set_type_class);
6782 if (code == FILE_TYPE)
6783 return GEN_INT (file_type_class);
6784 if (code == LANG_TYPE)
6785 return GEN_INT (lang_type_class);
6787 return GEN_INT (no_type_class);
6789 case BUILT_IN_CONSTANT_P:
6790 if (arglist == 0)
6791 return const0_rtx;
6792 else
6793 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
6794 ? const1_rtx : const0_rtx);
6796 case BUILT_IN_FRAME_ADDRESS:
6797 /* The argument must be a nonnegative integer constant.
6798 It counts the number of frames to scan up the stack.
6799 The value is the address of that frame. */
6800 case BUILT_IN_RETURN_ADDRESS:
6801 /* The argument must be a nonnegative integer constant.
6802 It counts the number of frames to scan up the stack.
6803 The value is the return address saved in that frame. */
6804 if (arglist == 0)
6805 /* Warning about missing arg was already issued. */
6806 return const0_rtx;
6807 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
6809 error ("invalid arg to `__builtin_return_address'");
6810 return const0_rtx;
6812 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
6814 error ("invalid arg to `__builtin_return_address'");
6815 return const0_rtx;
6817 else
6819 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
6820 rtx tem = frame_pointer_rtx;
6821 int i;
6823 /* Some machines need special handling before we can access arbitrary
6824 frames. For example, on the sparc, we must first flush all
6825 register windows to the stack. */
6826 #ifdef SETUP_FRAME_ADDRESSES
6827 SETUP_FRAME_ADDRESSES ();
6828 #endif
6830 /* On the sparc, the return address is not in the frame, it is
6831 in a register. There is no way to access it off of the current
6832 frame pointer, but it can be accessed off the previous frame
6833 pointer by reading the value from the register window save
6834 area. */
6835 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
6836 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
6837 count--;
6838 #endif
6840 /* Scan back COUNT frames to the specified frame. */
6841 for (i = 0; i < count; i++)
6843 /* Assume the dynamic chain pointer is in the word that
6844 the frame address points to, unless otherwise specified. */
6845 #ifdef DYNAMIC_CHAIN_ADDRESS
6846 tem = DYNAMIC_CHAIN_ADDRESS (tem);
6847 #endif
6848 tem = memory_address (Pmode, tem);
6849 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
6852 /* For __builtin_frame_address, return what we've got. */
6853 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
6854 return tem;
6856 /* For __builtin_return_address,
6857 get the return address from that frame. */
6858 #ifdef RETURN_ADDR_RTX
6859 return RETURN_ADDR_RTX (count, tem);
6860 #else
6861 tem = memory_address (Pmode,
6862 plus_constant (tem, GET_MODE_SIZE (Pmode)));
6863 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
6864 #endif
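/* For example (hypothetical user code; COUNT must be constant):  */
#if 0
void *ra = __builtin_return_address (0);  /* this function's return address */
void *fp = __builtin_frame_address (1);   /* the caller's frame, reached by
					     one step along the dynamic chain */
#endif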
6867 case BUILT_IN_ALLOCA:
6868 if (arglist == 0
6869 /* Arg could be non-integer if user redeclared this fcn wrong. */
6870 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6871 return const0_rtx;
6872 current_function_calls_alloca = 1;
6873 /* Compute the argument. */
6874 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
6876 /* Allocate the desired space. */
6877 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
6879 /* Record the new stack level for nonlocal gotos. */
6880 if (nonlocal_goto_handler_slot != 0)
6881 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
6882 return target;
6884 case BUILT_IN_FFS:
6885 /* If not optimizing, call the library function. */
6886 if (!optimize)
6887 break;
6889 if (arglist == 0
6890 /* Arg could be non-integer if user redeclared this fcn wrong. */
6891 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6892 return const0_rtx;
6894 /* Compute the argument. */
6895 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6896 /* Compute ffs, into TARGET if possible.
6897 Set TARGET to wherever the result comes back. */
6898 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6899 ffs_optab, op0, target, 1);
6900 if (target == 0)
6901 abort ();
6902 return target;
6904 case BUILT_IN_STRLEN:
6905 /* If not optimizing, call the library function. */
6906 if (!optimize)
6907 break;
6909 if (arglist == 0
6910 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6911 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
6912 return const0_rtx;
6913 else
6915 tree src = TREE_VALUE (arglist);
6916 tree len = c_strlen (src);
6918 int align
6919 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6921 rtx result, src_rtx, char_rtx;
6922 enum machine_mode insn_mode = value_mode, char_mode;
6923 enum insn_code icode;
6925 /* If the length is known, just return it. */
6926 if (len != 0)
6927 return expand_expr (len, target, mode, 0);
6929 /* If SRC is not a pointer type, don't do this operation inline. */
6930 if (align == 0)
6931 break;
6933 /* Call a function if we can't compute strlen in the right mode. */
6935 while (insn_mode != VOIDmode)
6937 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
6938 if (icode != CODE_FOR_nothing)
6939 break;
6941 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
6943 if (insn_mode == VOIDmode)
6944 break;
6946 /* Make a place to write the result of the instruction. */
6947 result = target;
6948 if (! (result != 0
6949 && GET_CODE (result) == REG
6950 && GET_MODE (result) == insn_mode
6951 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6952 result = gen_reg_rtx (insn_mode);
6954 /* Make sure the operands are acceptable to the predicates. */
6956 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
6957 result = gen_reg_rtx (insn_mode);
6959 src_rtx = memory_address (BLKmode,
6960 expand_expr (src, NULL_RTX, Pmode,
6961 EXPAND_NORMAL));
6962 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
6963 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
6965 char_rtx = const0_rtx;
6966 char_mode = insn_operand_mode[(int)icode][2];
6967 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
6968 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
6970 emit_insn (GEN_FCN (icode) (result,
6971 gen_rtx (MEM, BLKmode, src_rtx),
6972 char_rtx, GEN_INT (align)));
6974 /* Return the value in the proper mode for this function. */
6975 if (GET_MODE (result) == value_mode)
6976 return result;
6977 else if (target != 0)
6979 convert_move (target, result, 0);
6980 return target;
6982 else
6983 return convert_to_mode (value_mode, result, 0);
6986 case BUILT_IN_STRCPY:
6987 /* If not optimizing, call the library function. */
6988 if (!optimize)
6989 break;
6991 if (arglist == 0
6992 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6993 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6994 || TREE_CHAIN (arglist) == 0
6995 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6996 return const0_rtx;
6997 else
6999 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
7001 if (len == 0)
7002 break;
7004 len = size_binop (PLUS_EXPR, len, integer_one_node);
7006 chainon (arglist, build_tree_list (NULL_TREE, len));
7009 /* Falls through. */
7010 case BUILT_IN_MEMCPY:
7011 /* If not optimizing, call the library function. */
7012 if (!optimize)
7013 break;
7015 if (arglist == 0
7016 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7017 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7018 || TREE_CHAIN (arglist) == 0
7019 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7020 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7021 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7022 return const0_rtx;
7023 else
7025 tree dest = TREE_VALUE (arglist);
7026 tree src = TREE_VALUE (TREE_CHAIN (arglist));
7027 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7029 int src_align
7030 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7031 int dest_align
7032 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7033 rtx dest_rtx, dest_mem, src_mem;
7035 /* If either SRC or DEST is not a pointer type, don't do
7036 this operation in-line. */
7037 if (src_align == 0 || dest_align == 0)
7039 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
7040 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7041 break;
7044 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
7045 dest_mem = gen_rtx (MEM, BLKmode,
7046 memory_address (BLKmode, dest_rtx));
7047 src_mem = gen_rtx (MEM, BLKmode,
7048 memory_address (BLKmode,
7049 expand_expr (src, NULL_RTX,
7050 Pmode,
7051 EXPAND_NORMAL)));
7053 /* Copy word part most expediently. */
7054 emit_block_move (dest_mem, src_mem,
7055 expand_expr (len, NULL_RTX, VOIDmode, 0),
7056 MIN (src_align, dest_align));
7057 return dest_rtx;
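/* For example (a sketch): `strcpy (buf, "hi")' arrives here with the
   constant length 2 + 1 appended by the BUILT_IN_STRCPY case, so it
   is emitted as the equivalent of `memcpy (buf, "hi", 3)' through
   emit_block_move; when the source length is not known at compile
   time, the library strcpy is called instead.  */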
7060 /* These comparison functions need an instruction that returns an actual
7061 index. An ordinary compare that just sets the condition codes
7062 is not enough. */
7063 #ifdef HAVE_cmpstrsi
7064 case BUILT_IN_STRCMP:
7065 /* If not optimizing, call the library function. */
7066 if (!optimize)
7067 break;
7069 if (arglist == 0
7070 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7071 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7072 || TREE_CHAIN (arglist) == 0
7073 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7074 return const0_rtx;
7075 else if (!HAVE_cmpstrsi)
7076 break;
7078 tree arg1 = TREE_VALUE (arglist);
7079 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7080 tree offset;
7081 tree len, len2;
7083 len = c_strlen (arg1);
7084 if (len)
7085 len = size_binop (PLUS_EXPR, integer_one_node, len);
7086 len2 = c_strlen (arg2);
7087 if (len2)
7088 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
7090 /* If we don't have a constant length for the first, use the length
7091 of the second, if we know it. We don't require a constant for
7092 this case; some cost analysis could be done if both are available
7093 but neither is constant. For now, assume they're equally cheap.
7095 If both strings have constant lengths, use the smaller. This
7096 could arise if optimization results in strcmp being called with
7097 two fixed strings, or if the code was machine-generated. We should
7098 add some code to the `memcmp' handler below to deal with such
7099 situations, someday. */
7100 if (!len || TREE_CODE (len) != INTEGER_CST)
7102 if (len2)
7103 len = len2;
7104 else if (len == 0)
7105 break;
7107 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
7109 if (tree_int_cst_lt (len2, len))
7110 len = len2;
7113 chainon (arglist, build_tree_list (NULL_TREE, len));
7116 /* Falls through. */
7117 case BUILT_IN_MEMCMP:
7118 /* If not optimizing, call the library function. */
7119 if (!optimize)
7120 break;
7122 if (arglist == 0
7123 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7124 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7125 || TREE_CHAIN (arglist) == 0
7126 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7127 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7128 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7129 return const0_rtx;
7130 else if (!HAVE_cmpstrsi)
7131 break;
7133 tree arg1 = TREE_VALUE (arglist);
7134 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7135 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7136 rtx result;
7138 int arg1_align
7139 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7140 int arg2_align
7141 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7142 enum machine_mode insn_mode
7143 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
7145 /* If either arg is not a pointer type, call the library function. */
7146 if (arg1_align == 0 || arg2_align == 0)
7148 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
7149 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7150 break;
7153 /* Make a place to write the result of the instruction. */
7154 result = target;
7155 if (! (result != 0
7156 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
7157 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7158 result = gen_reg_rtx (insn_mode);
7160 emit_insn (gen_cmpstrsi (result,
7161 gen_rtx (MEM, BLKmode,
7162 expand_expr (arg1, NULL_RTX, Pmode,
7163 EXPAND_NORMAL)),
7164 gen_rtx (MEM, BLKmode,
7165 expand_expr (arg2, NULL_RTX, Pmode,
7166 EXPAND_NORMAL)),
7167 expand_expr (len, NULL_RTX, VOIDmode, 0),
7168 GEN_INT (MIN (arg1_align, arg2_align))));
7170 /* Return the value in the proper mode for this function. */
7171 mode = TYPE_MODE (TREE_TYPE (exp));
7172 if (GET_MODE (result) == mode)
7173 return result;
7174 else if (target != 0)
7176 convert_move (target, result, 0);
7177 return target;
7179 else
7180 return convert_to_mode (mode, result, 0);
7182 #else
7183 case BUILT_IN_STRCMP:
7184 case BUILT_IN_MEMCMP:
7185 break;
7186 #endif
7188 default: /* just do library call, if unknown builtin */
7189 error ("built-in function `%s' not currently supported",
7190 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
7193 /* The switch statement above can drop through to cause the function
7194 to be called normally. */
7196 return expand_call (exp, target, ignore);
7199 /* Built-in functions to perform an untyped call and return. */
7201 /* For each register that may be used for calling a function, this
7202 gives a mode used to copy the register's value. VOIDmode indicates
7203 the register is not used for calling a function. If the machine
7204 has register windows, this gives only the outbound registers.
7205 INCOMING_REGNO gives the corresponding inbound register. */
7206 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
7208 /* For each register that may be used for returning values, this gives
7209 a mode used to copy the register's value. VOIDmode indicates the
7210 register is not used for returning values. If the machine has
7211 register windows, this gives only the outbound registers.
7212 INCOMING_REGNO gives the corresponding inbound register. */
7213 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
7215 /* For each register that may be used for calling a function, this
7216 gives the offset of that register into the block returned by
7217 __builtin_apply_args. 0 indicates that the register is not
7218 used for calling a function. */
7219 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
7221 /* Return the offset of register REGNO into the block returned by
7222 __builtin_apply_args. This is not declared static, since it is
7223 needed in objc-act.c. */
7225 int
7226 apply_args_register_offset (regno)
7227 int regno;
7229 apply_args_size ();
7231 /* Arguments are always put in outgoing registers (in the argument
7232 block) if that makes sense. */
7233 #ifdef OUTGOING_REGNO
7234 regno = OUTGOING_REGNO(regno);
7235 #endif
7236 return apply_args_reg_offset[regno];
7239 /* Return the size required for the block returned by __builtin_apply_args,
7240 and initialize apply_args_mode. */
7242 static int
7243 apply_args_size ()
7245 static int size = -1;
7246 int align, regno;
7247 enum machine_mode mode;
7249 /* The values computed by this function never change. */
7250 if (size < 0)
7252 /* The first value is the incoming arg-pointer. */
7253 size = GET_MODE_SIZE (Pmode);
7255 /* The second value is the structure value address unless this is
7256 passed as an "invisible" first argument. */
7257 if (struct_value_rtx)
7258 size += GET_MODE_SIZE (Pmode);
7260 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7261 if (FUNCTION_ARG_REGNO_P (regno))
7263 /* Search for the proper mode for copying this register's
7264 value. I'm not sure this is right, but it works so far. */
7265 enum machine_mode best_mode = VOIDmode;
7267 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
7268 mode != VOIDmode;
7269 mode = GET_MODE_WIDER_MODE (mode))
7270 if (HARD_REGNO_MODE_OK (regno, mode)
7271 && HARD_REGNO_NREGS (regno, mode) == 1)
7272 best_mode = mode;
7274 if (best_mode == VOIDmode)
7275 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
7276 mode != VOIDmode;
7277 mode = GET_MODE_WIDER_MODE (mode))
7278 if (HARD_REGNO_MODE_OK (regno, mode)
7279 && (mov_optab->handlers[(int) mode].insn_code
7280 != CODE_FOR_nothing))
7281 best_mode = mode;
7283 mode = best_mode;
7284 if (mode == VOIDmode)
7285 abort ();
7287 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7288 if (size % align != 0)
7289 size = CEIL (size, align) * align;
7290 apply_args_reg_offset[regno] = size;
7291 size += GET_MODE_SIZE (mode);
7292 apply_args_mode[regno] = mode;
7294 else
7296 apply_args_mode[regno] = VOIDmode;
7297 apply_args_reg_offset[regno] = 0;
7300 return size;
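/* A worked example of the rounding above: with 4 bytes already used
   and a register mode needing 8-byte alignment, size % align != 0,
   so size becomes CEIL (4, 8) * 8 == 8 before the register's slot is
   assigned; every slot thus starts at its mode's natural alignment.  */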
7303 /* Return the size required for the block returned by __builtin_apply,
7304 and initialize apply_result_mode. */
7306 static int
7307 apply_result_size ()
7309 static int size = -1;
7310 int align, regno;
7311 enum machine_mode mode;
7313 /* The values computed by this function never change. */
7314 if (size < 0)
7316 size = 0;
7318 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7319 if (FUNCTION_VALUE_REGNO_P (regno))
7321 /* Search for the proper mode for copying this register's
7322 value. I'm not sure this is right, but it works so far. */
7323 enum machine_mode best_mode = VOIDmode;
7325 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
7326 mode != TImode;
7327 mode = GET_MODE_WIDER_MODE (mode))
7328 if (HARD_REGNO_MODE_OK (regno, mode))
7329 best_mode = mode;
7331 if (best_mode == VOIDmode)
7332 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
7333 mode != VOIDmode;
7334 mode = GET_MODE_WIDER_MODE (mode))
7335 if (HARD_REGNO_MODE_OK (regno, mode)
7336 && (mov_optab->handlers[(int) mode].insn_code
7337 != CODE_FOR_nothing))
7338 best_mode = mode;
7340 mode = best_mode;
7341 if (mode == VOIDmode)
7342 abort ();
7344 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7345 if (size % align != 0)
7346 size = CEIL (size, align) * align;
7347 size += GET_MODE_SIZE (mode);
7348 apply_result_mode[regno] = mode;
7350 else
7351 apply_result_mode[regno] = VOIDmode;
7353 /* Allow targets that use untyped_call and untyped_return to override
7354 the size so that machine-specific information can be stored here. */
7355 #ifdef APPLY_RESULT_SIZE
7356 size = APPLY_RESULT_SIZE;
7357 #endif
7359 return size;
7362 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
7363 /* Create a vector describing the result block RESULT. If SAVEP is true,
7364 the result block is used to save the values; otherwise it is used to
7365 restore the values. */
7367 static rtx
7368 result_vector (savep, result)
7369 int savep;
7370 rtx result;
7372 int regno, size, align, nelts;
7373 enum machine_mode mode;
7374 rtx reg, mem;
7375 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
7377 size = nelts = 0;
7378 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7379 if ((mode = apply_result_mode[regno]) != VOIDmode)
7381 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7382 if (size % align != 0)
7383 size = CEIL (size, align) * align;
7384 reg = gen_rtx (REG, mode, savep ? INCOMING_REGNO (regno) : regno);
7385 mem = change_address (result, mode,
7386 plus_constant (XEXP (result, 0), size));
7387 savevec[nelts++] = (savep
7388 ? gen_rtx (SET, VOIDmode, mem, reg)
7389 : gen_rtx (SET, VOIDmode, reg, mem));
7390 size += GET_MODE_SIZE (mode);
7392 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
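/* For example (illustrative, assuming a single word-sized value
   register N), the vector built above for SAVEP nonzero looks like

	(parallel [(set (mem:SI RESULT) (reg:SI N))])

   and with SAVEP zero the SETs run the other way, restoring the
   registers from the block.  */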
7394 #endif /* HAVE_untyped_call or HAVE_untyped_return */
7396 /* Save the state required to perform an untyped call with the same
7397 arguments as were passed to the current function. */
7399 static rtx
7400 expand_builtin_apply_args ()
7402 rtx registers;
7403 int size, align, regno;
7404 enum machine_mode mode;
7406 /* Create a block where the arg-pointer, structure value address,
7407 and argument registers can be saved. */
7408 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
7410 /* Walk past the arg-pointer and structure value address. */
7411 size = GET_MODE_SIZE (Pmode);
7412 if (struct_value_rtx)
7413 size += GET_MODE_SIZE (Pmode);
7415 /* Save each register used in calling a function to the block. */
7416 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7417 if ((mode = apply_args_mode[regno]) != VOIDmode)
7419 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7420 if (size % align != 0)
7421 size = CEIL (size, align) * align;
7422 emit_move_insn (change_address (registers, mode,
7423 plus_constant (XEXP (registers, 0),
7424 size)),
7425 gen_rtx (REG, mode, INCOMING_REGNO (regno)));
7426 size += GET_MODE_SIZE (mode);
7429 /* Save the arg pointer to the block. */
7430 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
7431 copy_to_reg (virtual_incoming_args_rtx));
7432 size = GET_MODE_SIZE (Pmode);
7434 /* Save the structure value address unless this is passed as an
7435 "invisible" first argument. */
7436 if (struct_value_incoming_rtx)
7438 emit_move_insn (change_address (registers, Pmode,
7439 plus_constant (XEXP (registers, 0),
7440 size)),
7441 copy_to_reg (struct_value_incoming_rtx));
7442 size += GET_MODE_SIZE (Pmode);
7445 /* Return the address of the block. */
7446 return copy_addr_to_reg (XEXP (registers, 0));
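/* Sketch of the source-level idiom these builtins support (not code
   from this file; the function name and the size argument are
   hypothetical):

	void *args = __builtin_apply_args ();
	void *result = __builtin_apply ((void (*) ()) fn, args, 64);
	__builtin_return (result);

   where 64 is a caller-supplied bound on the argument block size.  */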
7449 /* Perform an untyped call and save the state required to perform an
7450 untyped return of whatever value was returned by the given function. */
7452 static rtx
7453 expand_builtin_apply (function, arguments, argsize)
7454 rtx function, arguments, argsize;
7456 int size, align, regno;
7457 enum machine_mode mode;
7458 rtx incoming_args, result, reg, dest, call_insn;
7459 rtx old_stack_level = 0;
7460 rtx use_insns = 0;
7462 /* Create a block where the return registers can be saved. */
7463 result = assign_stack_local (BLKmode, apply_result_size (), -1);
7465 /* ??? The argsize value should be adjusted here. */
7467 /* Fetch the arg pointer from the ARGUMENTS block. */
7468 incoming_args = gen_reg_rtx (Pmode);
7469 emit_move_insn (incoming_args,
7470 gen_rtx (MEM, Pmode, arguments));
7471 #ifndef STACK_GROWS_DOWNWARD
7472 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
7473 incoming_args, 0, OPTAB_LIB_WIDEN);
7474 #endif
7476 /* Perform postincrements before actually calling the function. */
7477 emit_queue ();
7479 /* Push a new argument block and copy the arguments. */
7480 do_pending_stack_adjust ();
7481 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
7483 /* Push a block of memory onto the stack to store the memory arguments.
7484 Save the address in a register, and copy the memory arguments. ??? I
7486 haven't figured out how the calling convention macros affect this,
7486 but it's likely that the source and/or destination addresses in
7487 the block copy will need updating in machine specific ways. */
7488 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
7489 emit_block_move (gen_rtx (MEM, BLKmode, dest),
7490 gen_rtx (MEM, BLKmode, incoming_args),
7491 argsize,
7492 PARM_BOUNDARY / BITS_PER_UNIT);
7494 /* Refer to the argument block. */
7495 apply_args_size ();
7496 arguments = gen_rtx (MEM, BLKmode, arguments);
7498 /* Walk past the arg-pointer and structure value address. */
7499 size = GET_MODE_SIZE (Pmode);
7500 if (struct_value_rtx)
7501 size += GET_MODE_SIZE (Pmode);
7503 /* Restore each of the registers previously saved. Make USE insns
7504 for each of these registers for use in making the call. */
7505 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7506 if ((mode = apply_args_mode[regno]) != VOIDmode)
7508 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7509 if (size % align != 0)
7510 size = CEIL (size, align) * align;
7511 reg = gen_rtx (REG, mode, regno);
7512 emit_move_insn (reg,
7513 change_address (arguments, mode,
7514 plus_constant (XEXP (arguments, 0),
7515 size)));
7517 push_to_sequence (use_insns);
7518 emit_insn (gen_rtx (USE, VOIDmode, reg));
7519 use_insns = get_insns ();
7520 end_sequence ();
7521 size += GET_MODE_SIZE (mode);
7524 /* Restore the structure value address unless this is passed as an
7525 "invisible" first argument. */
7526 size = GET_MODE_SIZE (Pmode);
7527 if (struct_value_rtx)
7529 rtx value = gen_reg_rtx (Pmode);
7530 emit_move_insn (value,
7531 change_address (arguments, Pmode,
7532 plus_constant (XEXP (arguments, 0),
7533 size)));
7534 emit_move_insn (struct_value_rtx, value);
7535 if (GET_CODE (struct_value_rtx) == REG)
7537 push_to_sequence (use_insns);
7538 emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
7539 use_insns = get_insns ();
7540 end_sequence ();
7542 size += GET_MODE_SIZE (Pmode);
7545 /* All arguments and registers used for the call are set up by now! */
7546 function = prepare_call_address (function, NULL_TREE, &use_insns);
7548 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
7549 and we don't want to load it into a register as an optimization,
7550 because prepare_call_address already did it if it should be done. */
7551 if (GET_CODE (function) != SYMBOL_REF)
7552 function = memory_address (FUNCTION_MODE, function);
7554 /* Generate the actual call instruction and save the return value. */
7555 #ifdef HAVE_untyped_call
7556 if (HAVE_untyped_call)
7557 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
7558 result, result_vector (1, result)));
7559 else
7560 #endif
7561 #ifdef HAVE_call_value
7562 if (HAVE_call_value)
7564 rtx valreg = 0;
7566 /* Locate the unique return register. It is not possible to
7567 express a call that sets more than one return register using
7568 call_value; use untyped_call for that. In fact, untyped_call
7569 only needs to save the return registers in the given block. */
7570 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7571 if ((mode = apply_result_mode[regno]) != VOIDmode)
7573 if (valreg)
7574 abort (); /* HAVE_untyped_call required. */
7575 valreg = gen_rtx (REG, mode, regno);
7578 emit_call_insn (gen_call_value (valreg,
7579 gen_rtx (MEM, FUNCTION_MODE, function),
7580 const0_rtx, NULL_RTX, const0_rtx));
7582 emit_move_insn (change_address (result, GET_MODE (valreg),
7583 XEXP (result, 0)),
7584 valreg);
7586 else
7587 #endif
7588 abort ();
7590 /* Find the CALL insn we just emitted and write the USE insns before it. */
7591 for (call_insn = get_last_insn ();
7592 call_insn && GET_CODE (call_insn) != CALL_INSN;
7593 call_insn = PREV_INSN (call_insn))
7596 if (! call_insn)
7597 abort ();
7599 /* Put the USE insns before the CALL. */
7600 emit_insns_before (use_insns, call_insn);
7602 /* Restore the stack. */
7603 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
7605 /* Return the address of the result block. */
7606 return copy_addr_to_reg (XEXP (result, 0));
7609 /* Perform an untyped return. */
7611 static void
7612 expand_builtin_return (result)
7613 rtx result;
7615 int size, align, regno;
7616 enum machine_mode mode;
7617 rtx reg;
7618 rtx use_insns = 0;
7620 apply_result_size ();
7621 result = gen_rtx (MEM, BLKmode, result);
7623 #ifdef HAVE_untyped_return
7624 if (HAVE_untyped_return)
7626 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
7627 emit_barrier ();
7628 return;
7630 #endif
7632 /* Restore the return value and note that each value is used. */
7633 size = 0;
7634 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7635 if ((mode = apply_result_mode[regno]) != VOIDmode)
7637 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7638 if (size % align != 0)
7639 size = CEIL (size, align) * align;
7640 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
7641 emit_move_insn (reg,
7642 change_address (result, mode,
7643 plus_constant (XEXP (result, 0),
7644 size)));
7646 push_to_sequence (use_insns);
7647 emit_insn (gen_rtx (USE, VOIDmode, reg));
7648 use_insns = get_insns ();
7649 end_sequence ();
7650 size += GET_MODE_SIZE (mode);
7653 /* Put the USE insns before the return. */
7654 emit_insns (use_insns);
7656 /* Return whatever values were restored by jumping directly to the end
7657 of the function. */
7658 expand_null_return ();
7661 /* Expand code for a post- or pre- increment or decrement
7662 and return the RTX for the result.
7663 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
7665 static rtx
7666 expand_increment (exp, post)
7667 register tree exp;
7668 int post;
7670 register rtx op0, op1;
7671 register rtx temp, value;
7672 register tree incremented = TREE_OPERAND (exp, 0);
7673 optab this_optab = add_optab;
7674 int icode;
7675 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7676 int op0_is_copy = 0;
7677 int single_insn = 0;
7678 /* 1 means we can't store into OP0 directly,
7679 because it is a subreg narrower than a word,
7680 and we don't dare clobber the rest of the word. */
7681 int bad_subreg = 0;
7683 if (output_bytecode)
7685 bc_expand_expr (exp);
7686 return NULL_RTX;
7689 /* Stabilize any component ref that might need to be
7690 evaluated more than once below. */
7691 if (!post
7692 || TREE_CODE (incremented) == BIT_FIELD_REF
7693 || (TREE_CODE (incremented) == COMPONENT_REF
7694 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
7695 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
7696 incremented = stabilize_reference (incremented);
7697 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
7698 ones into save exprs so that they don't accidentally get evaluated
7699 more than once by the code below. */
7700 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
7701 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
7702 incremented = save_expr (incremented);
7704 /* Compute the operands as RTX.
7705 Note whether OP0 is the actual lvalue or a copy of it:
7706 I believe it is a copy iff it is a register or subreg
7707 and insns were generated in computing it. */
7709 temp = get_last_insn ();
7710 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
7712 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
7713 in place but instead must do sign- or zero-extension during assignment,
7714 so we copy it into a new register and let the code below use it as
7715 a copy.
7717 Note that we can safely modify this SUBREG since it is known not to be
7718 shared (it was made by the expand_expr call above). */
7720 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
7721 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
7722 else if (GET_CODE (op0) == SUBREG
7723 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
7724 bad_subreg = 1;
7726 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
7727 && temp != get_last_insn ());
7728 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7730 /* Decide whether incrementing or decrementing. */
7731 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
7732 || TREE_CODE (exp) == PREDECREMENT_EXPR)
7733 this_optab = sub_optab;
7735 /* Convert decrement by a constant into a negative increment. */
7736 if (this_optab == sub_optab
7737 && GET_CODE (op1) == CONST_INT)
7739 op1 = GEN_INT (- INTVAL (op1));
7740 this_optab = add_optab;
7743 /* For a preincrement, see if we can do this with a single instruction. */
7744 if (!post)
7746 icode = (int) this_optab->handlers[(int) mode].insn_code;
7747 if (icode != (int) CODE_FOR_nothing
7748 /* Make sure that OP0 is valid for operands 0 and 1
7749 of the insn we want to queue. */
7750 && (*insn_operand_predicate[icode][0]) (op0, mode)
7751 && (*insn_operand_predicate[icode][1]) (op0, mode)
7752 && (*insn_operand_predicate[icode][2]) (op1, mode))
7753 single_insn = 1;
7756 /* If OP0 is not the actual lvalue, but rather a copy in a register,
7757 then we cannot just increment OP0. We must therefore contrive to
7758 increment the original value. Then, for postincrement, we can return
7759 OP0 since it is a copy of the old value. For preincrement, expand here
7760 unless we can do it with a single insn.
7762 Likewise if storing directly into OP0 would clobber high bits
7763 we need to preserve (bad_subreg). */
7764 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
7766 /* This is the easiest way to increment the value wherever it is.
7767 Problems with multiple evaluation of INCREMENTED are prevented
7768 because either (1) it is a component_ref or preincrement,
7769 in which case it was stabilized above, or (2) it is an array_ref
7770 with constant index in an array in a register, which is
7771 safe to reevaluate. */
7772 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
7773 || TREE_CODE (exp) == PREDECREMENT_EXPR)
7774 ? MINUS_EXPR : PLUS_EXPR),
7775 TREE_TYPE (exp),
7776 incremented,
7777 TREE_OPERAND (exp, 1));
7778 temp = expand_assignment (incremented, newexp, ! post, 0);
7779 return post ? op0 : temp;
7782 if (post)
7784 /* We have a true reference to the value in OP0.
7785 If there is an insn to add or subtract in this mode, queue it.
7786 Queueing the increment insn avoids the register shuffling
7787 that often results if we must increment now and first save
7788 the old value for subsequent use. */
7790 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
7791 op0 = stabilize (op0);
7792 #endif
7794 icode = (int) this_optab->handlers[(int) mode].insn_code;
7795 if (icode != (int) CODE_FOR_nothing
7796 /* Make sure that OP0 is valid for operands 0 and 1
7797 of the insn we want to queue. */
7798 && (*insn_operand_predicate[icode][0]) (op0, mode)
7799 && (*insn_operand_predicate[icode][1]) (op0, mode))
7801 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
7802 op1 = force_reg (mode, op1);
7804 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
7808 /* Preincrement, or we can't increment with one simple insn. */
7809 if (post)
7810 /* Save a copy of the value before inc or dec, to return it later. */
7811 temp = value = copy_to_reg (op0);
7812 else
7813 /* Arrange to return the incremented value. */
7814 /* Copy the rtx because expand_binop will protect from the queue,
7815 and the results of that would be invalid for us to return
7816 if our caller does emit_queue before using our result. */
7817 temp = copy_rtx (value = op0);
7819 /* Increment however we can. */
7820 op1 = expand_binop (mode, this_optab, value, op1, op0,
7821 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
7822 /* Make sure the value is stored into OP0. */
7823 if (op1 != op0)
7824 emit_move_insn (op0, op1);
7826 return temp;
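/* Illustrative summary: for `i++' whose value is used, the code above
   either enqueues the add insn and returns OP0 (the old value), or
   copies the old value into TEMP first, roughly

	temp = i;	/+ saved old value, returned +/
	i = i + 1;

   while for `++i' the incremented lvalue itself is returned.  */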
7829 /* Expand all function calls contained within EXP, innermost ones first.
7830 But don't look within expressions that have sequence points.
7831 For each CALL_EXPR, record the rtx for its value
7832 in the CALL_EXPR_RTL field. */
7834 static void
7835 preexpand_calls (exp)
7836 tree exp;
7838 register int nops, i;
7839 int type = TREE_CODE_CLASS (TREE_CODE (exp));
7841 if (! do_preexpand_calls)
7842 return;
7844 /* Only expressions and references can contain calls. */
7846 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
7847 return;
7849 switch (TREE_CODE (exp))
7851 case CALL_EXPR:
7852 /* Do nothing if already expanded. */
7853 if (CALL_EXPR_RTL (exp) != 0)
7854 return;
7856 /* Do nothing to built-in functions. */
7857 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
7858 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
7859 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7860 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
7861 return;
7863 case COMPOUND_EXPR:
7864 case COND_EXPR:
7865 case TRUTH_ANDIF_EXPR:
7866 case TRUTH_ORIF_EXPR:
7867 /* If we find one of these, then we can be sure
7868 the adjust will be done for it (since it makes jumps).
7869 Do it now, so that if this is inside an argument
7870 of a function, we don't get the stack adjustment
7871 after some other args have already been pushed. */
7872 do_pending_stack_adjust ();
7873 return;
7875 case BLOCK:
7876 case RTL_EXPR:
7877 case WITH_CLEANUP_EXPR:
7878 return;
7880 case SAVE_EXPR:
7881 if (SAVE_EXPR_RTL (exp) != 0)
7882 return;
7885 nops = tree_code_length[(int) TREE_CODE (exp)];
7886 for (i = 0; i < nops; i++)
7887 if (TREE_OPERAND (exp, i) != 0)
7889 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
7890 if (type == 'e' || type == '<' || type == '1' || type == '2'
7891 || type == 'r')
7892 preexpand_calls (TREE_OPERAND (exp, i));
7896 /* At the start of a function, record that we have no previously-pushed
7897 arguments waiting to be popped. */
7899 void
7900 init_pending_stack_adjust ()
7902 pending_stack_adjust = 0;
7905 /* When exiting from function, if safe, clear out any pending stack adjust
7906 so the adjustment won't get done. */
7908 void
7909 clear_pending_stack_adjust ()
7911 #ifdef EXIT_IGNORE_STACK
7912 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
7913 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
7914 && ! flag_inline_functions)
7915 pending_stack_adjust = 0;
7916 #endif
7919 /* Pop any previously-pushed arguments that have not been popped yet. */
7921 void
7922 do_pending_stack_adjust ()
7924 if (inhibit_defer_pop == 0)
7926 if (pending_stack_adjust != 0)
7927 adjust_stack (GEN_INT (pending_stack_adjust));
7928 pending_stack_adjust = 0;
7932 /* Expand all cleanups up to OLD_CLEANUPS.
7933 Needed here, and also for language-dependent calls. */
7935 void
7936 expand_cleanups_to (old_cleanups)
7937 tree old_cleanups;
7939 while (cleanups_this_call != old_cleanups)
7941 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
7942 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
7946 /* Expand conditional expressions. */
7948 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
7949 LABEL is an rtx of code CODE_LABEL, in this function and all the
7950 functions here. */
7952 void
7953 jumpifnot (exp, label)
7954 tree exp;
7955 rtx label;
7957 do_jump (exp, label, NULL_RTX);
7960 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
7962 void
7963 jumpif (exp, label)
7964 tree exp;
7965 rtx label;
7967 do_jump (exp, NULL_RTX, label);
7970 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
7971 the result is zero, or IF_TRUE_LABEL if the result is one.
7972 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
7973 meaning fall through in that case.
7975 do_jump always does any pending stack adjust except when it does not
7976 actually perform a jump. An example where there is no jump
7977 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
7979 This function is responsible for optimizing cases such as
7980 &&, || and comparison operators in EXP. */
7982 void
7983 do_jump (exp, if_false_label, if_true_label)
7984 tree exp;
7985 rtx if_false_label, if_true_label;
7987 register enum tree_code code = TREE_CODE (exp);
7988 /* Some cases need to create a label to jump to
7989 in order to properly fall through.
7990 These cases set DROP_THROUGH_LABEL nonzero. */
7991 rtx drop_through_label = 0;
7992 rtx temp;
7993 rtx comparison = 0;
7994 int i;
7995 tree type;
7997 emit_queue ();
7999 switch (code)
8001 case ERROR_MARK:
8002 break;
8004 case INTEGER_CST:
8005 temp = integer_zerop (exp) ? if_false_label : if_true_label;
8006 if (temp)
8007 emit_jump (temp);
8008 break;
8010 #if 0
8011 /* This is not true with #pragma weak */
8012 case ADDR_EXPR:
8013 /* The address of something can never be zero. */
8014 if (if_true_label)
8015 emit_jump (if_true_label);
8016 break;
8017 #endif
8019 case NOP_EXPR:
8020 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
8021 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
8022 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
8023 goto normal;
8024 case CONVERT_EXPR:
8025 /* If we are narrowing the operand, we have to do the compare in the
8026 narrower mode. */
8027 if ((TYPE_PRECISION (TREE_TYPE (exp))
8028 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8029 goto normal;
8030 case NON_LVALUE_EXPR:
8031 case REFERENCE_EXPR:
8032 case ABS_EXPR:
8033 case NEGATE_EXPR:
8034 case LROTATE_EXPR:
8035 case RROTATE_EXPR:
8036 /* These cannot change zero->non-zero or vice versa. */
8037 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8038 break;
8040 #if 0
8041 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
8042 a test and can be longer if the test is eliminated. */
8043 case PLUS_EXPR:
8044 /* Reduce to minus. */
8045 exp = build (MINUS_EXPR, TREE_TYPE (exp),
8046 TREE_OPERAND (exp, 0),
8047 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
8048 TREE_OPERAND (exp, 1))));
8049 /* Process as MINUS. */
8050 #endif
8052 case MINUS_EXPR:
8053 /* Non-zero iff operands of minus differ. */
8054 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
8055 TREE_OPERAND (exp, 0),
8056 TREE_OPERAND (exp, 1)),
8057 NE, NE);
8058 break;
8060 case BIT_AND_EXPR:
8061 /* If we are AND'ing with a small constant, do this comparison in the
8062 smallest type that fits. If the machine doesn't have comparisons
8063 that small, it will be converted back to the wider comparison.
8064 This helps if we are testing the sign bit of a narrower object.
8065 combine can't do this for us because it can't know whether a
8066 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
8068 if (! SLOW_BYTE_ACCESS
8069 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8070 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
8071 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
8072 && (type = type_for_size (i + 1, 1)) != 0
8073 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8074 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8075 != CODE_FOR_nothing))
8077 do_jump (convert (type, exp), if_false_label, if_true_label);
8078 break;
8080 goto normal;
8082 case TRUTH_NOT_EXPR:
8083 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8084 break;
8086 case TRUTH_ANDIF_EXPR:
8087 if (if_false_label == 0)
8088 if_false_label = drop_through_label = gen_label_rtx ();
8089 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
8090 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8091 break;
8093 case TRUTH_ORIF_EXPR:
8094 if (if_true_label == 0)
8095 if_true_label = drop_through_label = gen_label_rtx ();
8096 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
8097 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8098 break;
8100 case COMPOUND_EXPR:
8101 push_temp_slots ();
8102 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8103 free_temp_slots ();
8104 pop_temp_slots ();
8105 emit_queue ();
8106 do_pending_stack_adjust ();
8107 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8108 break;
8110 case COMPONENT_REF:
8111 case BIT_FIELD_REF:
8112 case ARRAY_REF:
8114 int bitsize, bitpos, unsignedp;
8115 enum machine_mode mode;
8116 tree type;
8117 tree offset;
8118 int volatilep = 0;
8120 /* Get description of this reference. We don't actually care
8121 about the underlying object here. */
8122 get_inner_reference (exp, &bitsize, &bitpos, &offset,
8123 &mode, &unsignedp, &volatilep);
8125 type = type_for_size (bitsize, unsignedp);
8126 if (! SLOW_BYTE_ACCESS
8127 && type != 0 && bitsize >= 0
8128 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8129 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8130 != CODE_FOR_nothing))
8132 do_jump (convert (type, exp), if_false_label, if_true_label);
8133 break;
8135 goto normal;
8138 case COND_EXPR:
8139 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
8140 if (integer_onep (TREE_OPERAND (exp, 1))
8141 && integer_zerop (TREE_OPERAND (exp, 2)))
8142 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8144 else if (integer_zerop (TREE_OPERAND (exp, 1))
8145 && integer_onep (TREE_OPERAND (exp, 2)))
8146 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8148 else
8150 register rtx label1 = gen_label_rtx ();
8151 drop_through_label = gen_label_rtx ();
8152 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
8153 /* Now the THEN-expression. */
8154 do_jump (TREE_OPERAND (exp, 1),
8155 if_false_label ? if_false_label : drop_through_label,
8156 if_true_label ? if_true_label : drop_through_label);
8157 /* In case the do_jump just above never jumps. */
8158 do_pending_stack_adjust ();
8159 emit_label (label1);
8160 /* Now the ELSE-expression. */
8161 do_jump (TREE_OPERAND (exp, 2),
8162 if_false_label ? if_false_label : drop_through_label,
8163 if_true_label ? if_true_label : drop_through_label);
8165 break;
8167 case EQ_EXPR:
8168 if (integer_zerop (TREE_OPERAND (exp, 1)))
8169 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8170 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8171 == MODE_INT)
8173 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8174 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
8175 else
8176 comparison = compare (exp, EQ, EQ);
8177 break;
8179 case NE_EXPR:
8180 if (integer_zerop (TREE_OPERAND (exp, 1)))
8181 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8182 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8183 == MODE_INT)
8185 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8186 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
8187 else
8188 comparison = compare (exp, NE, NE);
8189 break;
8191 case LT_EXPR:
8192 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8193 == MODE_INT)
8194 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8195 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
8196 else
8197 comparison = compare (exp, LT, LTU);
8198 break;
8200 case LE_EXPR:
8201 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8202 == MODE_INT)
8203 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8204 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
8205 else
8206 comparison = compare (exp, LE, LEU);
8207 break;
8209 case GT_EXPR:
8210 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8211 == MODE_INT)
8212 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8213 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
8214 else
8215 comparison = compare (exp, GT, GTU);
8216 break;
8218 case GE_EXPR:
8219 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8220 == MODE_INT)
8221 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8222 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
8223 else
8224 comparison = compare (exp, GE, GEU);
8225 break;
8227 default:
8228 normal:
8229 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
8230 #if 0
8231 /* This is not needed any more and causes poor code since it causes
8232 comparisons and tests from non-SI objects to have different code
8233 sequences. */
8234 /* Copy to register to avoid generating bad insns by cse
8235 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
8236 if (!cse_not_expected && GET_CODE (temp) == MEM)
8237 temp = copy_to_reg (temp);
8238 #endif
8239 do_pending_stack_adjust ();
8240 if (GET_CODE (temp) == CONST_INT)
8241 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
8242 else if (GET_CODE (temp) == LABEL_REF)
8243 comparison = const_true_rtx;
8244 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
8245 && !can_compare_p (GET_MODE (temp)))
8246 /* Note swapping the labels gives us not-equal. */
8247 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
8248 else if (GET_MODE (temp) != VOIDmode)
8249 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
8250 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
8251 GET_MODE (temp), NULL_RTX, 0);
8252 else
8253 abort ();
8256 /* Do any postincrements in the expression that was tested. */
8257 emit_queue ();
8259 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
8260 straight into a conditional jump instruction as the jump condition.
8261 Otherwise, all the work has been done already. */
8263 if (comparison == const_true_rtx)
8265 if (if_true_label)
8266 emit_jump (if_true_label);
8268 else if (comparison == const0_rtx)
8270 if (if_false_label)
8271 emit_jump (if_false_label);
8273 else if (comparison)
8274 do_jump_for_compare (comparison, if_false_label, if_true_label);
8276 if (drop_through_label)
8278 /* If do_jump produces code that might be jumped around,
8279 do any stack adjusts from that code, before the place
8280 where control merges in. */
8281 do_pending_stack_adjust ();
8282 emit_label (drop_through_label);
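/* Illustration (not emitted verbatim): for `if (a && b)' the
   TRUTH_ANDIF_EXPR case above produces

	<evaluate a; jump to FALSE if zero>
	<evaluate b; jump to FALSE if zero>
	...TRUE code...

   creating a drop-through label when the caller supplied no FALSE
   label.  */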
8286 /* Given a comparison expression EXP for values too wide to be compared
8287 with one insn, test the comparison and jump to the appropriate label.
8288 The code of EXP is ignored; we always test GT if SWAP is 0,
8289 and LT if SWAP is 1. */
8291 static void
8292 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
8293 tree exp;
8294 int swap;
8295 rtx if_false_label, if_true_label;
8297 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
8298 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
8299 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
8300 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
8301 rtx drop_through_label = 0;
8302 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
8303 int i;
8305 if (! if_true_label || ! if_false_label)
8306 drop_through_label = gen_label_rtx ();
8307 if (! if_true_label)
8308 if_true_label = drop_through_label;
8309 if (! if_false_label)
8310 if_false_label = drop_through_label;
8312 /* Compare a word at a time, high order first. */
8313 for (i = 0; i < nwords; i++)
8315 rtx comp;
8316 rtx op0_word, op1_word;
8318 if (WORDS_BIG_ENDIAN)
8320 op0_word = operand_subword_force (op0, i, mode);
8321 op1_word = operand_subword_force (op1, i, mode);
8323 else
8325 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
8326 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
8329 /* All but the high-order word must be compared as unsigned. */
8330 comp = compare_from_rtx (op0_word, op1_word,
8331 (unsignedp || i > 0) ? GTU : GT,
8332 unsignedp, word_mode, NULL_RTX, 0);
8333 if (comp == const_true_rtx)
8334 emit_jump (if_true_label);
8335 else if (comp != const0_rtx)
8336 do_jump_for_compare (comp, NULL_RTX, if_true_label);
8338 /* Consider lower words only if these are equal. */
8339 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
8340 NULL_RTX, 0);
8341 if (comp == const_true_rtx)
8342 emit_jump (if_false_label);
8343 else if (comp != const0_rtx)
8344 do_jump_for_compare (comp, NULL_RTX, if_false_label);
8347 if (if_false_label)
8348 emit_jump (if_false_label);
8349 if (drop_through_label)
8350 emit_label (drop_through_label);
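/* Sketch of the sequence built above for a signed two-word `a > b'
   (e.g. DImode on a 32-bit target; high-order word tested first):

	if (a.high > b.high)	goto if_true;	/+ signed compare +/
	if (a.high != b.high)	goto if_false;
	if (a.low >u b.low)	goto if_true;	/+ unsigned compare +/
	goto if_false;  */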
8353 /* Compare OP0 with OP1, word at a time, in mode MODE.
8354 UNSIGNEDP says to do unsigned comparison.
8355 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
8357 static void
8358 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
8359 enum machine_mode mode;
8360 int unsignedp;
8361 rtx op0, op1;
8362 rtx if_false_label, if_true_label;
8364 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
8365 rtx drop_through_label = 0;
8366 int i;
8368 if (! if_true_label || ! if_false_label)
8369 drop_through_label = gen_label_rtx ();
8370 if (! if_true_label)
8371 if_true_label = drop_through_label;
8372 if (! if_false_label)
8373 if_false_label = drop_through_label;
8375 /* Compare a word at a time, high order first. */
8376 for (i = 0; i < nwords; i++)
8378 rtx comp;
8379 rtx op0_word, op1_word;
8381 if (WORDS_BIG_ENDIAN)
8383 op0_word = operand_subword_force (op0, i, mode);
8384 op1_word = operand_subword_force (op1, i, mode);
8386 else
8388 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
8389 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
8392 /* All but the high-order word must be compared as unsigned. */
8393 comp = compare_from_rtx (op0_word, op1_word,
8394 (unsignedp || i > 0) ? GTU : GT,
8395 unsignedp, word_mode, NULL_RTX, 0);
8396 if (comp == const_true_rtx)
8397 emit_jump (if_true_label);
8398 else if (comp != const0_rtx)
8399 do_jump_for_compare (comp, NULL_RTX, if_true_label);
8401 /* Consider lower words only if these are equal. */
8402 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
8403 NULL_RTX, 0);
8404 if (comp == const_true_rtx)
8405 emit_jump (if_false_label);
8406 else if (comp != const0_rtx)
8407 do_jump_for_compare (comp, NULL_RTX, if_false_label);
8410 if (if_false_label)
8411 emit_jump (if_false_label);
8412 if (drop_through_label)
8413 emit_label (drop_through_label);
8416 /* Given an EQ_EXPR expression EXP for values too wide to be compared
8417 with one insn, test the comparison and jump to the appropriate label. */
8419 static void
8420 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
8421 tree exp;
8422 rtx if_false_label, if_true_label;
8424 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8425 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8426 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
8427 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
8428 int i;
8429 rtx drop_through_label = 0;
8431 if (! if_false_label)
8432 drop_through_label = if_false_label = gen_label_rtx ();
8434 for (i = 0; i < nwords; i++)
8436 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
8437 operand_subword_force (op1, i, mode),
8438 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
8439 word_mode, NULL_RTX, 0);
8440 if (comp == const_true_rtx)
8441 emit_jump (if_false_label);
8442 else if (comp != const0_rtx)
8443 do_jump_for_compare (comp, if_false_label, NULL_RTX);
8446 if (if_true_label)
8447 emit_jump (if_true_label);
8448 if (drop_through_label)
8449 emit_label (drop_through_label);
8452 /* Jump according to whether OP0 is 0.
8453 We assume that OP0 has an integer mode that is too wide
8454 for the available compare insns. */
8456 static void
8457 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
8458 rtx op0;
8459 rtx if_false_label, if_true_label;
8461 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
8462 int i;
8463 rtx drop_through_label = 0;
8465 if (! if_false_label)
8466 drop_through_label = if_false_label = gen_label_rtx ();
8468 for (i = 0; i < nwords; i++)
8470 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
8471 GET_MODE (op0)),
8472 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
8473 if (comp == const_true_rtx)
8474 emit_jump (if_false_label);
8475 else if (comp != const0_rtx)
8476 do_jump_for_compare (comp, if_false_label, NULL_RTX);
8479 if (if_true_label)
8480 emit_jump (if_true_label);
8481 if (drop_through_label)
8482 emit_label (drop_through_label);
8485 /* Given a comparison expression in rtl form, output conditional branches to
8486 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
8488 static void
8489 do_jump_for_compare (comparison, if_false_label, if_true_label)
8490 rtx comparison, if_false_label, if_true_label;
8492 if (if_true_label)
8494 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
8495 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
8496 else
8497 abort ();
8499 if (if_false_label)
8500 emit_jump (if_false_label);
8502 else if (if_false_label)
8504 rtx insn;
8505 rtx prev = PREV_INSN (get_last_insn ());
8506 rtx branch = 0;
8508 /* Output the branch with the opposite condition. Then try to invert
8509 what is generated. If more than one insn is a branch, or if the
8510 branch is not the last insn written, abort. If we can't invert
8511 the branch, make a true label, redirect this jump to that,
8512 emit a jump to the false label and define the true label. */
8514 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
8515 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
8516 else
8517 abort ();
8519 /* Here we get the insn before what was just emitted.
8520 On some machines, emitting the branch can discard
8521 the previous compare insn and emit a replacement. */
8522 if (prev == 0)
8523 /* If there's only one preceding insn... */
8524 insn = get_insns ();
8525 else
8526 insn = NEXT_INSN (prev);
8528 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
8529 if (GET_CODE (insn) == JUMP_INSN)
8531 if (branch)
8532 abort ();
8533 branch = insn;
8536 if (branch != get_last_insn ())
8537 abort ();
8539 if (! invert_jump (branch, if_false_label))
8541 if_true_label = gen_label_rtx ();
8542 redirect_jump (branch, if_true_label);
8543 emit_jump (if_false_label);
8544 emit_label (if_true_label);
8549 /* Generate code for a comparison expression EXP
8550 (including code to compute the values to be compared)
8551 and set (CC0) according to the result.
8552 SIGNED_CODE should be the rtx operation for this comparison for
8553 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
8555 We force a stack adjustment unless there are currently
8556 things pushed on the stack that aren't yet used. */
8558 static rtx
8559 compare (exp, signed_code, unsigned_code)
8560 register tree exp;
8561 enum rtx_code signed_code, unsigned_code;
8563 register rtx op0
8564 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8565 register rtx op1
8566 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8567 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
8568 register enum machine_mode mode = TYPE_MODE (type);
8569 int unsignedp = TREE_UNSIGNED (type);
8570 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
8572 return compare_from_rtx (op0, op1, code, unsignedp, mode,
8573 ((mode == BLKmode)
8574 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
8575 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
8578 /* Like compare but expects the values to compare as two rtx's.
8579 The decision as to signed or unsigned comparison must be made by the caller.
8581 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
8582 compared.
8584 If ALIGN is non-zero, it is the alignment of this type; if zero, the
8585 size of MODE should be used. */
8588 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
8589 register rtx op0, op1;
8590 enum rtx_code code;
8591 int unsignedp;
8592 enum machine_mode mode;
8593 rtx size;
8594 int align;
8596 rtx tem;
8598 /* If one operand is constant, make it the second one. Only do this
8599 if the other operand is not constant as well. */
8601 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
8602 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
8604 tem = op0;
8605 op0 = op1;
8606 op1 = tem;
8607 code = swap_condition (code);
8610 if (flag_force_mem)
8612 op0 = force_not_mem (op0);
8613 op1 = force_not_mem (op1);
8616 do_pending_stack_adjust ();
8618 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
8619 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
8620 return tem;
8622 #if 0
8623 /* There's no need to do this now that combine.c can eliminate lots of
8624 sign extensions. This can be less efficient in certain cases on other
8625 machines. */
8627 /* If this is a signed equality comparison, we can do it as an
8628 unsigned comparison since zero-extension is cheaper than sign
8629 extension and comparisons with zero are done as unsigned. This is
8630 the case even on machines that can do fast sign extension, since
8631 zero-extension is easier to combine with other operations than
8632 sign-extension is. If we are comparing against a constant, we must
8633 convert it to what it would look like unsigned. */
8634 if ((code == EQ || code == NE) && ! unsignedp
8635 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
8637 if (GET_CODE (op1) == CONST_INT
8638 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
8639 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
8640 unsignedp = 1;
8642 #endif
8644 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
8646 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
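/* The rtx returned above is a condition such as

	(gt (cc0) (const_int 0))

   suitable for substitution straight into a conditional branch; the
   compare itself was just emitted by emit_cmp_insn.  */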
8649 /* Generate code to calculate EXP using a store-flag instruction
8650 and return an rtx for the result. EXP is either a comparison
8651 or a TRUTH_NOT_EXPR whose operand is a comparison.
8653 If TARGET is nonzero, store the result there if convenient.
8655 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
8656 cheap.
8658 Return zero if there is no suitable set-flag instruction
8659 available on this machine.
8661 Once expand_expr has been called on the arguments of the comparison,
8662 we are committed to doing the store flag, since it is not safe to
8663 re-evaluate the expression. We emit the store-flag insn by calling
8664 emit_store_flag, but only expand the arguments if we have a reason
8665 to believe that emit_store_flag will be successful. If we think that
8666 it will succeed but it doesn't, we have to simulate the store-flag with a
8667 set/jump/set sequence. */
8669 static rtx
8670 do_store_flag (exp, target, mode, only_cheap)
8671 tree exp;
8672 rtx target;
8673 enum machine_mode mode;
8674 int only_cheap;
8676 enum rtx_code code;
8677 tree arg0, arg1, type;
8678 tree tem;
8679 enum machine_mode operand_mode;
8680 int invert = 0;
8681 int unsignedp;
8682 rtx op0, op1;
8683 enum insn_code icode;
8684 rtx subtarget = target;
8685 rtx result, label, pattern, jump_pat;
8687 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8688 result at the end. We can't simply invert the test since it would
8689 have already been inverted if it were valid. This case occurs for
8690 some floating-point comparisons. */
8692 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8693 invert = 1, exp = TREE_OPERAND (exp, 0);
8695 arg0 = TREE_OPERAND (exp, 0);
8696 arg1 = TREE_OPERAND (exp, 1);
8697 type = TREE_TYPE (arg0);
8698 operand_mode = TYPE_MODE (type);
8699 unsignedp = TREE_UNSIGNED (type);
8701 /* We won't bother with BLKmode store-flag operations because it would mean
8702 passing a lot of information to emit_store_flag. */
8703 if (operand_mode == BLKmode)
8704 return 0;
8706 STRIP_NOPS (arg0);
8707 STRIP_NOPS (arg1);
8709 /* Get the rtx comparison code to use. We know that EXP is a comparison
8710 operation of some type. Some comparisons against 1 and -1 can be
8711 converted to comparisons with zero. Do so here so that the tests
8712 below will be aware that we have a comparison with zero. These
8713 tests will not catch constants in the first operand, but constants
8714 are rarely passed as the first operand. */
8716 switch (TREE_CODE (exp))
8718 case EQ_EXPR:
8719 code = EQ;
8720 break;
8721 case NE_EXPR:
8722 code = NE;
8723 break;
8724 case LT_EXPR:
8725 if (integer_onep (arg1))
8726 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8727 else
8728 code = unsignedp ? LTU : LT;
8729 break;
8730 case LE_EXPR:
8731 if (! unsignedp && integer_all_onesp (arg1))
8732 arg1 = integer_zero_node, code = LT;
8733 else
8734 code = unsignedp ? LEU : LE;
8735 break;
8736 case GT_EXPR:
8737 if (! unsignedp && integer_all_onesp (arg1))
8738 arg1 = integer_zero_node, code = GE;
8739 else
8740 code = unsignedp ? GTU : GT;
8741 break;
8742 case GE_EXPR:
8743 if (integer_onep (arg1))
8744 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8745 else
8746 code = unsignedp ? GEU : GE;
8747 break;
8748 default:
8749 abort ();
8752 /* Put a constant second. */
8753 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
8755 tem = arg0; arg0 = arg1; arg1 = tem;
8756 code = swap_condition (code);
8759 /* If this is an equality or inequality test of a single bit, we can
8760 do this by shifting the bit being tested to the low-order bit and
8761 masking the result with the constant 1. If the condition was EQ,
8762 we xor it with 1. This does not require an scc insn and is faster
8763 than an scc insn even if we have it. */
8765 if ((code == NE || code == EQ)
8766 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
8767 && integer_pow2p (TREE_OPERAND (arg0, 1))
8768 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
8770 tree inner = TREE_OPERAND (arg0, 0);
8771 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
8772 NULL_RTX, VOIDmode, 0)));
8773 int ops_unsignedp;
8775 /* If INNER is a right shift of a constant and it plus BITNUM does
8776 not overflow, adjust BITNUM and INNER. */
8778 if (TREE_CODE (inner) == RSHIFT_EXPR
8779 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
8780 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
8781 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
8782 < TYPE_PRECISION (type)))
8784 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
8785 inner = TREE_OPERAND (inner, 0);
8788 /* If we are going to be able to omit the AND below, we must do our
8789 operations as unsigned. If we must use the AND, we have a choice.
8790 Normally unsigned is faster, but for some machines signed is. */
8791 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
8792 #ifdef LOAD_EXTEND_OP
8793 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
8794 #else
8795 : 1
8796 #endif
8797 );
8799 if (subtarget == 0 || GET_CODE (subtarget) != REG
8800 || GET_MODE (subtarget) != operand_mode
8801 || ! safe_from_p (subtarget, inner))
8802 subtarget = 0;
8804 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
8806 if (bitnum != 0)
8807 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
8808 size_int (bitnum), target, ops_unsignedp);
8810 if (GET_MODE (op0) != mode)
8811 op0 = convert_to_mode (mode, op0, ops_unsignedp);
8813 if ((code == EQ && ! invert) || (code == NE && invert))
8814 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target,
8815 ops_unsignedp, OPTAB_LIB_WIDEN);
8817 /* Put the AND last so it can combine with more things. */
8818 if (bitnum != TYPE_PRECISION (type) - 1)
8819 op0 = expand_and (op0, const1_rtx, target);
8821 return op0;
8824 /* Now see if we are likely to be able to do this. Return if not. */
8825 if (! can_compare_p (operand_mode))
8826 return 0;
8827 icode = setcc_gen_code[(int) code];
8828 if (icode == CODE_FOR_nothing
8829 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
8831 /* We can only do this if it is one of the special cases that
8832 can be handled without an scc insn. */
8833 if ((code == LT && integer_zerop (arg1))
8834 || (! only_cheap && code == GE && integer_zerop (arg1)))
8835 ;
8836 else if (BRANCH_COST >= 0
8837 && ! only_cheap && (code == NE || code == EQ)
8838 && TREE_CODE (type) != REAL_TYPE
8839 && ((abs_optab->handlers[(int) operand_mode].insn_code
8840 != CODE_FOR_nothing)
8841 || (ffs_optab->handlers[(int) operand_mode].insn_code
8842 != CODE_FOR_nothing)))
8843 ;
8844 else
8845 return 0;
8848 preexpand_calls (exp);
8849 if (subtarget == 0 || GET_CODE (subtarget) != REG
8850 || GET_MODE (subtarget) != operand_mode
8851 || ! safe_from_p (subtarget, arg1))
8852 subtarget = 0;
8854 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
8855 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
8857 if (target == 0)
8858 target = gen_reg_rtx (mode);
8860 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
8861 because, if the emit_store_flag does anything it will succeed and
8862 OP0 and OP1 will not be used subsequently. */
8864 result = emit_store_flag (target, code,
8865 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
8866 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
8867 operand_mode, unsignedp, 1);
8869 if (result)
8871 if (invert)
8872 result = expand_binop (mode, xor_optab, result, const1_rtx,
8873 result, 0, OPTAB_LIB_WIDEN);
8874 return result;
8877 /* If this failed, we have to do this with set/compare/jump/set code. */
8878 if (target == 0 || GET_CODE (target) != REG
8879 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
8880 target = gen_reg_rtx (GET_MODE (target));
8882 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
8883 result = compare_from_rtx (op0, op1, code, unsignedp,
8884 operand_mode, NULL_RTX, 0);
8885 if (GET_CODE (result) == CONST_INT)
8886 return (((result == const0_rtx && ! invert)
8887 || (result != const0_rtx && invert))
8888 ? const0_rtx : const1_rtx);
8890 label = gen_label_rtx ();
8891 if (bcc_gen_fctn[(int) code] == 0)
8892 abort ();
8894 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
8895 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
8896 emit_label (label);
8898 return target;
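/* Example of the single-bit fast path earlier in this function
   (illustrative): for `(x & 8) != 0' it emits roughly

	t = x >> 3;
	result = t & 1;

   and for `(x & 8) == 0' additionally `result ^= 1', avoiding an scc
   insn entirely.  */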
8901 /* Generate a tablejump instruction (used for switch statements). */
8903 #ifdef HAVE_tablejump
8905 /* INDEX is the value being switched on, with the lowest value
8906 in the table already subtracted.
8907 MODE is its expected mode (needed if INDEX is constant).
8908 RANGE is the length of the jump table.
8909 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
8911 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
8912 index value is out of range. */
8914 void
8915 do_tablejump (index, mode, range, table_label, default_label)
8916 rtx index, range, table_label, default_label;
8917 enum machine_mode mode;
8919 register rtx temp, vector;
8921 /* Do an unsigned comparison (in the proper mode) between the index
8922 expression and the value which represents the length of the range.
8923 Since we just finished subtracting the lower bound of the range
8924 from the index expression, this comparison allows us to simultaneously
8925 check that the original index expression value is both greater than
8926 or equal to the minimum value of the range and less than or equal to
8927 the maximum value of the range. */
8929 emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 1, 0);
8930 emit_jump_insn (gen_bltu (default_label));
8932 /* If index is in range, it must fit in Pmode.
8933 Convert to Pmode so we can index with it. */
8934 if (mode != Pmode)
8935 index = convert_to_mode (Pmode, index, 1);
8937 /* Don't let a MEM slip thru, because then INDEX that comes
8938 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
8939 and break_out_memory_refs will go to work on it and mess it up. */
8940 #ifdef PIC_CASE_VECTOR_ADDRESS
8941 if (flag_pic && GET_CODE (index) != REG)
8942 index = copy_to_mode_reg (Pmode, index);
8943 #endif
8945 /* If flag_force_addr were to affect this address
8946 it could interfere with the tricky assumptions made
8947 about addresses that contain label-refs,
8948 which may be valid only very near the tablejump itself. */
8949 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
8950 GET_MODE_SIZE, because this indicates how large insns are. The other
8951 uses should all be Pmode, because they are addresses. This code
8952 could fail if addresses and insns are not the same size. */
8953 index = gen_rtx (PLUS, Pmode,
8954 gen_rtx (MULT, Pmode, index,
8955 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
8956 gen_rtx (LABEL_REF, Pmode, table_label));
8957 #ifdef PIC_CASE_VECTOR_ADDRESS
8958 if (flag_pic)
8959 index = PIC_CASE_VECTOR_ADDRESS (index);
8960 else
8961 #endif
8962 index = memory_address_noforce (CASE_VECTOR_MODE, index);
8963 temp = gen_reg_rtx (CASE_VECTOR_MODE);
8964 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
8965 RTX_UNCHANGING_P (vector) = 1;
8966 convert_move (temp, vector, 0);
8968 emit_jump_insn (gen_tablejump (temp, table_label));
8970 #ifndef CASE_VECTOR_PC_RELATIVE
8971 /* If we are generating PIC code or if the table is PC-relative, the
8972 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
8973 if (! flag_pic)
8974 emit_barrier ();
8975 #endif
8978 #endif /* HAVE_tablejump */
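/* Illustration: with a 4-byte CASE_VECTOR_MODE the dispatch address
   formed above is

	(plus (mult index 4) (label_ref table_label))

   i.e. table_label + index * 4, through which the tablejump insn
   branches.  */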
8981 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
8982 to that value is on the top of the stack. The resulting type is TYPE, and
8983 the source declaration is DECL. */
8985 void
8986 bc_load_memory (type, decl)
8987 tree type, decl;
8989 enum bytecode_opcode opcode;
8992 /* Bit fields are special. We only know about signed and
8993 unsigned ints, and enums. The latter are treated as
8994 signed integers. */
8996 if (DECL_BIT_FIELD (decl))
8997 if (TREE_CODE (type) == ENUMERAL_TYPE
8998 || TREE_CODE (type) == INTEGER_TYPE)
8999 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
9000 else
9001 abort ();
9002 else
9003 /* See corresponding comment in bc_store_memory(). */
9004 if (TYPE_MODE (type) == BLKmode
9005 || TYPE_MODE (type) == VOIDmode)
9006 return;
9007 else
9008 opcode = mode_to_load_map [TYPE_MODE (type)];
9010 if (opcode == neverneverland)
9011 abort ();
9013 bc_emit_bytecode (opcode);
9015 #ifdef DEBUG_PRINT_CODE
9016 fputc ('\n', stderr);
9017 #endif
9021 /* Store the contents of the second stack slot to the address in the
9022 top stack slot. DECL is the declaration of the destination and is used
9023 to determine whether we're dealing with a bitfield. */
9025 void
9026 bc_store_memory (type, decl)
9027 tree type, decl;
9029 enum bytecode_opcode opcode;
9032 if (DECL_BIT_FIELD (decl))
9034 if (TREE_CODE (type) == ENUMERAL_TYPE
9035 || TREE_CODE (type) == INTEGER_TYPE)
9036 opcode = sstoreBI;
9037 else
9038 abort ();
9040 else
9041 if (TYPE_MODE (type) == BLKmode)
9043 /* Copy structure. This expands to a block copy instruction, storeBLK.
9044 In addition to the arguments expected by the other store instructions,
9045 it also expects a type size (SImode) on top of the stack, which is the
9046 structure size in size units (usually bytes). The first two arguments
9047 are already on the stack, so we just put the size on level 1. In some
9048 other languages the size may be variable; this is why we don't encode
9049 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
9051 bc_expand_expr (TYPE_SIZE (type));
9052 opcode = storeBLK;
9054 else
9055 opcode = mode_to_store_map [TYPE_MODE (type)];
9057 if (opcode == neverneverland)
9058 abort ();
9060 bc_emit_bytecode (opcode);
9062 #ifdef DEBUG_PRINT_CODE
9063 fputc ('\n', stderr);
9064 #endif
9068 /* Allocate local stack space sufficient to hold a value of the given
9069 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
9070 integral power of 2. A special case is locals of type VOID, which
9071 have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
9072 remapped into the corresponding attribute of SI. */
9075 bc_allocate_local (size, alignment)
9076 int size, alignment;
9078 rtx retval;
9079 int byte_alignment;
9081 if (size < 0)
9082 abort ();
9084 /* Normalize size and alignment */
9085 if (!size)
9086 size = UNITS_PER_WORD;
9088 if (alignment < BITS_PER_UNIT)
9089 byte_alignment = 1 << (INT_ALIGN - 1);
9090 else
9091 /* Align */
9092 byte_alignment = alignment / BITS_PER_UNIT;
9094 if (local_vars_size & (byte_alignment - 1))
9095 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
9097 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
9098 local_vars_size += size;
9100 return retval;
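/* Worked example (illustrative): with local_vars_size == 5, a request
   for a 4-byte local aligned to 32 bits first rounds local_vars_size
   up to 8, hands out offset 8, and leaves local_vars_size == 12.  */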
/* Allocate variable-sized local array.  Variable-sized arrays are
   actually pointers to the address in memory where they are stored.  */

rtx
bc_allocate_variable_array (size)
     tree size;
{
  rtx retval;
  const int ptralign = (1 << (PTR_ALIGN - 1));

  /* Align pointer.  The mask must be the alignment minus one, as in
     bc_allocate_local () above.  */
  if (local_vars_size & (ptralign - 1))
    local_vars_size += ptralign - (local_vars_size & (ptralign - 1));

  /* Note down local space needed: pointer to block; also return
     dummy rtx  */

  retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
  local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
  return retval;
}
/* Push the machine address for the given external variable offset.  */

void
bc_load_externaddr (externaddr)
     rtx externaddr;
{
  bc_emit_bytecode (constP);
  bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
                         BYTECODE_BC_LABEL (externaddr)->offset);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
/* Return a copy of string S in memory obtained from xmalloc.  */

static char *
bc_strdup (s)
     char *s;
{
  char *new = (char *) xmalloc ((strlen (s) + 1) * sizeof *s);
  strcpy (new, s);
  return new;
}
/* Like bc_load_externaddr (), but expects an IDENTIFIER.  */

void
bc_load_externaddr_id (id, offset)
     tree id;
     int offset;
{
  if (!IDENTIFIER_POINTER (id))
    abort ();

  bc_emit_bytecode (constP);
  bc_emit_code_labelref (bc_strdup (IDENTIFIER_POINTER (id)), offset);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
/* Push the machine address for the given local variable offset.  */

void
bc_load_localaddr (localaddr)
     rtx localaddr;
{
  bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
}
/* Push the machine address for the given parameter offset.
   NOTE: offset is in bits.  */

void
bc_load_parmaddr (parmaddr)
     rtx parmaddr;
{
  bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
                              / BITS_PER_UNIT));
}
/* Convert a[i] into *(a + i).  */
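/* For example, with 4-byte ints (the size is illustrative only),
   "a[i]" becomes "*(a + i * 4)": the index is first widened to
   pointer precision, then scaled by the element size, and fold ()
   collapses the arithmetic when the index is constant.  */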
tree
bc_canonicalize_array_ref (exp)
     tree exp;
{
  tree type = TREE_TYPE (exp);
  tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
                           TREE_OPERAND (exp, 0));
  tree index = TREE_OPERAND (exp, 1);

  /* Convert the integer argument to a type the same size as a pointer
     so the multiply won't overflow spuriously.  */

  if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
    index = convert (type_for_size (POINTER_SIZE, 0), index);

  /* The array address isn't volatile even if the array is.
     (Of course this isn't terribly relevant since the bytecode
     translator treats nearly everything as volatile anyway.)  */
  TREE_THIS_VOLATILE (array_adr) = 0;

  return build1 (INDIRECT_REF, type,
                 fold (build (PLUS_EXPR,
                              TYPE_POINTER_TO (type),
                              array_adr,
                              fold (build (MULT_EXPR,
                                           TYPE_POINTER_TO (type),
                                           index,
                                           size_in_bytes (type))))));
}
/* Load the address of the component referenced by the given
   COMPONENT_REF expression.

   Returns innermost lvalue.  */

tree
bc_expand_component_address (exp)
     tree exp;
{
  tree tem, chain;
  enum machine_mode mode;
  int bitpos = 0;
  HOST_WIDE_INT SIval;

  tem = TREE_OPERAND (exp, 1);
  mode = DECL_MODE (tem);

  /* Compute cumulative bit offset for nested component refs
     and array refs, and find the ultimate containing object.  */
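  /* E.g. for s.a.b the loop below walks from the outer COMPONENT_REF
     down to s, summing the DECL_FIELD_BITPOS of b and of a; the total
     is then applied as a single byte offset (or pushed as offset/size
     for a bit field) once the address of s is on the stack.  */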
  for (tem = exp;; tem = TREE_OPERAND (tem, 0))
    {
      if (TREE_CODE (tem) == COMPONENT_REF)
        bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
      else
        if (TREE_CODE (tem) == ARRAY_REF
            && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
            && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
          bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
                     * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
                     /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
        else
          break;
    }

  bc_expand_expr (tem);

  /* For bitfields also push their offset and size  */
  if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
    bc_push_offset_and_size (bitpos,
                             TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 1)))
                             /* * DECL_SIZE_UNIT */);
  else
    if ((SIval = bitpos / BITS_PER_UNIT))
      bc_emit_instruction (addconstPSI, SIval);

  return (TREE_OPERAND (exp, 1));
}
/* Emit code to push two SI constants.  */

void
bc_push_offset_and_size (offset, size)
     HOST_WIDE_INT offset, size;
{
  bc_emit_instruction (constSI, offset);
  bc_emit_instruction (constSI, size);
}
/* Emit byte code to push the address of the given lvalue expression to
   the stack.  If it's a bit field, we also push offset and size info.

   Returns innermost component, which allows us to determine not only
   its type, but also whether it's a bitfield.  */

tree
bc_expand_address (exp)
     tree exp;
{
  /* Safeguard */
  if (!exp || TREE_CODE (exp) == ERROR_MARK)
    return (exp);

  switch (TREE_CODE (exp))
    {
    case ARRAY_REF:

      return (bc_expand_address (bc_canonicalize_array_ref (exp)));

    case COMPONENT_REF:

      return (bc_expand_component_address (exp));

    case INDIRECT_REF:

      bc_expand_expr (TREE_OPERAND (exp, 0));

      /* For variable-sized types: retrieve pointer.  Sometimes the
         TYPE_SIZE tree is NULL.  Is this a bug or a feature?  Let's
         also make sure we have an operand, just in case...  */

      if (TREE_OPERAND (exp, 0)
          && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
        bc_emit_instruction (loadP);

      /* If packed, also return offset and size */
      if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
        bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
                                 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));

      return (TREE_OPERAND (exp, 0));

    case FUNCTION_DECL:

      bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
                             BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
      break;

    case PARM_DECL:

      bc_load_parmaddr (DECL_RTL (exp));

      /* For variable-sized types: retrieve pointer */
      if (TYPE_SIZE (TREE_TYPE (exp))
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
        bc_emit_instruction (loadP);

      /* If packed, also return offset and size */
      if (DECL_BIT_FIELD (exp))
        bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
                                 TREE_INT_CST_LOW (DECL_SIZE (exp)));

      break;

    case RESULT_DECL:

      bc_emit_instruction (returnP);
      break;

    case VAR_DECL:

#if 0
      if (BYTECODE_LABEL (DECL_RTL (exp)))
        bc_load_externaddr (DECL_RTL (exp));
#endif

      if (DECL_EXTERNAL (exp))
        bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
                               (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
      else
        bc_load_localaddr (DECL_RTL (exp));

      /* For variable-sized types: retrieve pointer */
      if (TYPE_SIZE (TREE_TYPE (exp))
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
        bc_emit_instruction (loadP);

      /* If packed, also return offset and size */
      if (DECL_BIT_FIELD (exp))
        bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
                                 TREE_INT_CST_LOW (DECL_SIZE (exp)));

      break;

    case STRING_CST:
      {
        rtx r;

        bc_emit_bytecode (constP);
        r = output_constant_def (exp);
        bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);

#ifdef DEBUG_PRINT_CODE
        fputc ('\n', stderr);
#endif
      }
      break;

    default:

      abort ();
      break;
    }

  /* Most lvalues don't have components.  */
  return (exp);
}
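/* To illustrate bc_expand_address: a local variable reduces to a
   single localP instruction pushing its frame offset; an external
   variable to a constP referencing its label; and "p->x" arrives as
   a COMPONENT_REF whose inner INDIRECT_REF pushes p before the byte
   offset of x is added.  */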
/* Emit a type code to be used by the runtime support in handling
   parameter passing.  The type code consists of the machine mode
   plus the minimal alignment shifted left 8 bits.  */

tree
bc_runtime_type_code (type)
     tree type;
{
  int val;

  switch (TREE_CODE (type))
    {
    case VOID_TYPE:
    case INTEGER_TYPE:
    case REAL_TYPE:
    case COMPLEX_TYPE:
    case ENUMERAL_TYPE:
    case POINTER_TYPE:
    case RECORD_TYPE:

      val = TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
      break;

    case ERROR_MARK:

      val = 0;
      break;

    default:

      abort ();
    }
  return build_int_2 (val, 0);
}
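/* Worked example: a type whose machine mode has numeric value 8 (the
   mode numbering is target-generated, so 8 is illustrative only) and
   whose alignment is 32 bits encodes as 8 | (32 << 8) == 0x2008; the
   runtime can then recover the mode as val & 0xff and the alignment
   as val >> 8.  */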
/* Generate constructor label.  */

char *
bc_gen_constr_label ()
{
  static int label_counter;
  static char label[20];

  sprintf (label, "*LR%d", label_counter++);

  return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
}
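/* Note that bc_gen_constr_label () reuses a static buffer; this is
   safe only because obstack_copy0 duplicates the string into the
   permanent obstack before the next call can overwrite it.  */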
/* Evaluate constructor CONSTR and return pointer to it on level one.  We
   expand the constructor data as static data, and push a pointer to it.
   The pointer is put in the pointer table and is retrieved by a constP
   bytecode instruction.  We then loop and store each constructor member in
   the corresponding component.  Finally, we return the original pointer on
   the stack.  */

void
bc_expand_constructor (constr)
     tree constr;
{
  char *l;
  HOST_WIDE_INT ptroffs;
  rtx constr_rtx;

  /* Literal constructors are handled as constants, whereas
     non-literals are evaluated and stored element by element
     into the data segment.  */

  /* Allocate space in proper segment and push pointer to space on stack.  */

  l = bc_gen_constr_label ();

  if (TREE_CONSTANT (constr))
    {
      text_section ();

      bc_emit_const_labeldef (l);
      bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
    }
  else
    {
      data_section ();

      bc_emit_data_labeldef (l);
      bc_output_data_constructor (constr);
    }

  /* Add reference to pointer table and recall pointer to stack;
     this code is common for both types of constructors: literals
     and non-literals.  */

  ptroffs = bc_define_pointer (l);
  bc_emit_instruction (constP, ptroffs);

  /* This is all that has to be done if it's a literal.  */
  if (TREE_CONSTANT (constr))
    return;

  /* At this point, we have the pointer to the structure on top of the stack.
     Generate sequences of store_memory calls for the constructor.  */

  /* Constructor type is structure.  */
  if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
    {
      register tree elt;

      /* If the constructor has fewer fields than the structure,
         clear the whole structure first.  */

      if (list_length (CONSTRUCTOR_ELTS (constr))
          != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
        {
          bc_emit_instruction (dup);
          bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
          bc_emit_instruction (clearBLK);
        }

      /* Store each element of the constructor into the corresponding
         field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
        {
          register tree field = TREE_PURPOSE (elt);
          register enum machine_mode mode;
          int bitsize;
          int bitpos;
          int unsignedp;

          bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
          mode = DECL_MODE (field);
          unsignedp = TREE_UNSIGNED (field);

          bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));

          bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
                          /* The alignment of TARGET is
                             at least what its type requires.  */
                          VOIDmode, 0,
                          TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
                          int_size_in_bytes (TREE_TYPE (constr)));
        }
    }
  else
    /* Constructor type is array.  */
    if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
      {
        register tree elt;
        register int i;
        tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
        int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
        int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
        tree elttype = TREE_TYPE (TREE_TYPE (constr));

        /* If the constructor has fewer elements than the array,
           clear the whole array first.  */

        if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
          {
            bc_emit_instruction (dup);
            bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
            bc_emit_instruction (clearBLK);
          }

        /* Store each element of the constructor into the corresponding
           element of TARGET, determined by counting the elements.  */

        for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
             elt;
             elt = TREE_CHAIN (elt), i++)
          {
            register enum machine_mode mode;
            int bitsize;
            int bitpos;
            int unsignedp;

            mode = TYPE_MODE (elttype);
            bitsize = GET_MODE_BITSIZE (mode);
            unsignedp = TREE_UNSIGNED (elttype);

            bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
                      /* * TYPE_SIZE_UNIT (elttype) */ );

            bc_store_field (elt, bitsize, bitpos, mode,
                            TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
                            /* The alignment of TARGET is
                               at least what its type requires.  */
                            VOIDmode, 0,
                            TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
                            int_size_in_bytes (TREE_TYPE (constr)));
          }
      }
}
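/* Example of the non-literal path above: for a two-field structure
   initialized with only one element, the element count differs from
   the field count, so the pointer is dup'ed, the structure size is
   pushed, and clearBLK zeroes the whole object before the single
   given field is stored through bc_store_field ().  */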
/* Store the value of EXP (an expression tree) into member FIELD of
   structure at address on stack, which has type TYPE, mode MODE and
   occupies BITSIZE bits, starting BITPOS bits from the beginning of the
   structure.

   ALIGN is the alignment that TARGET is known to have, measured in bytes.
   TOTAL_SIZE is its size in bytes, or -1 if variable.  */

void
bc_store_field (field, bitsize, bitpos, mode, exp, type,
                value_mode, unsignedp, align, total_size)
     int bitsize, bitpos;
     enum machine_mode mode;
     tree field, exp, type;
     enum machine_mode value_mode;
     int unsignedp;
     int align;
     int total_size;
{
  /* Expand expression and copy pointer */
  bc_expand_expr (exp);
  bc_emit_instruction (over);
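  /* After the "over", the stack is, from the top: copy of the
     structure pointer, expanded value, original pointer -- i.e. the
     address/value layout bc_store_memory () expects, while the
     original pointer survives underneath for the caller's next
     field.  */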
  /* If the component is a bit field, we cannot use addressing to access
     it.  Use bit-field techniques to store in it.  */

  if (DECL_BIT_FIELD (field))
    {
      bc_store_bit_field (bitpos, bitsize, unsignedp);
      return;
    }
  else
    /* Not bit field */
    {
      HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;

      /* Advance pointer to the desired member */
      if (offset)
        bc_emit_instruction (addconstPSI, offset);

      /* Store */
      bc_store_memory (type, field);
    }
}
/* Store SI/SU in bitfield.  */

void
bc_store_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
  /* Push bitfield offset and size */
  bc_push_offset_and_size (offset, size);

  /* Store */
  bc_emit_instruction (sstoreBI);
}
/* Load SI/SU from bitfield.  */

void
bc_load_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
  /* Push bitfield offset and size */
  bc_push_offset_and_size (offset, size);

  /* Load: sign-extend if signed, else zero-extend */
  bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
}
/* Adjust interpreter stack by NLEVELS.  Positive means drop NLEVELS
   (adjust stack pointer upwards), negative means add that number of
   levels (adjust the stack pointer downwards).  Only positive values
   normally make sense.  */

void
bc_adjust_stack (nlevels)
     int nlevels;
{
  switch (nlevels)
    {
    case 0:
      break;

    case 2:
      bc_emit_instruction (drop);
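      /* Falls through: together with the drop emitted for case 1,
         two levels are dropped in all.  */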
    case 1:
      bc_emit_instruction (drop);
      break;

    default:

      bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
      stack_depth -= nlevels;
    }

#if defined (VALIDATE_STACK_FOR_BC)
  VALIDATE_STACK_FOR_BC ();
#endif
}