/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
#include "config.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"

#include "bytecode.h"
#include "bc-opcode.h"
#include "bc-typecd.h"
#include "bc-optab.h"
#include "bc-emit.h"
#define CEIL(x,y) (((x) + (y) - 1) / (y))

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
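
/* Worked example (illustrative): with STACK_BOUNDARY == 64 and
   BITS_PER_UNIT == 8, STACK_BYTES evaluates to 64 / 8 == 8, i.e. the
   stack is kept aligned to 8-byte units.  */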

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int len;
  int offset;
  int reverse;
};

/* Used to generate bytecodes: keep track of size of local variables,
   as well as depth of arithmetic stack.  (Notice that variables are
   stored on the machine's stack, not the arithmetic stack.)  */

extern int local_vars_size;
extern int stack_depth;
extern int max_stack_depth;
extern struct obstack permanent_obstack;

static rtx enqueue_insn		PROTO((rtx, rtx));
static int queued_subexp_p	PROTO((rtx));
static void init_queue		PROTO((void));
static void move_by_pieces	PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1	PROTO((rtx (*) (), enum machine_mode,
				       struct move_by_pieces *));
static void store_constructor	PROTO((tree, rtx));
static rtx store_field		PROTO((rtx, int, int, enum machine_mode, tree,
				       enum machine_mode, int, int, int));
static int get_inner_unaligned_p PROTO((tree));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p		PROTO((rtx, tree));
static int fixed_type_p		PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant	PROTO((tree, tree *));
static tree c_strlen		PROTO((tree));
static rtx expand_builtin	PROTO((tree, rtx, rtx,
				       enum machine_mode, int));
static int apply_args_size	PROTO((void));
static int apply_result_size	PROTO((void));
static rtx result_vector	PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply	PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment	PROTO((tree, int));
rtx bc_expand_increment		PROTO((struct increment_operator *, tree));
tree bc_runtime_type_code	PROTO((tree));
rtx bc_allocate_local		PROTO((int, int));
void bc_store_memory		PROTO((tree, tree));
tree bc_expand_component_address PROTO((tree));
tree bc_expand_address		PROTO((tree));
void bc_expand_constructor	PROTO((tree));
void bc_adjust_stack		PROTO((int));
tree bc_canonicalize_array_ref	PROTO((tree));
void bc_load_memory		PROTO((tree, tree));
void bc_load_externaddr		PROTO((rtx));
void bc_load_externaddr_id	PROTO((tree, int));
void bc_load_localaddr		PROTO((rtx));
void bc_load_parmaddr		PROTO((rtx));
static void preexpand_calls	PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare	PROTO((rtx, rtx, rtx));
static rtx compare		PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag	PROTO((tree, rtx, enum machine_mode, int));
static tree defer_cleanups_to	PROTO((tree));
extern void (*interim_eh_hook)	PROTO((tree));
extern tree get_set_constructor_words PROTO((tree, HOST_WIDE_INT*, int));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif

/* Maps used to convert modes to const, load, and store bytecodes.  */
enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];

/* Initialize maps used to convert modes to const, load, and store
   bytecodes.  */
void
bc_init_mode_to_opcode_maps ()
{
  int mode;

  for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
    mode_to_const_map[mode] =
      mode_to_load_map[mode] =
	mode_to_store_map[mode] = neverneverland;

#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
  mode_to_const_map[(int) SYM] = CONST; \
  mode_to_load_map[(int) SYM] = LOAD; \
  mode_to_store_map[(int) SYM] = STORE;

#include "modemap.def"
#undef DEF_MODEMAP
}

/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx (REG, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));

	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
	|| queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
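
/* Sketch of the queue lifecycle (illustrative, not from the original
   source).  For a post-increment of VAR, an expander does roughly:

     rtx q = enqueue_insn (var, gen_add2_insn (var, const1_rtx));
     ... build insns, passing Q through protect_from_queue (q, 0) ...
     emit_queue ();

   Before emit_queue runs, protect_from_queue returns VAR itself; once
   the queued increment has been emitted, it returns a temporary
   holding VAR's pre-increment value, as described above.  */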

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

#ifdef HAVE_extendqfhf2
      if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfsf2
      if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfdf2
      if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfxf2
      if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqftf2
      if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendhftqf2
      if (HAVE_extendhftqf2 && from_mode == HFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendhfsf2
      if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfdf2
      if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfxf2
      if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhftf2
      if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsfxf2
      if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddfxf2
      if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;
	    }
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion */			/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
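
/* Dispatch sketch (illustrative): widening QImode to DImode with
   BITS_PER_WORD == 32 takes the multiword branch above.  It first
   tries a direct extension insn via can_extend_p; failing that, it
   converts QImode to a full SImode word, stores that in the low word
   of TO, and fills the remaining word either with zero (unsigned) or
   with the sign obtained from a 31-bit arithmetic right shift of the
   low part.  */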

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
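
/* Worked example of the extension arithmetic above (illustrative):
   converting (const_int -1) from 8-bit QImode to SImode with UNSIGNEDP
   nonzero gives val = -1, width = 8, then

     val &= (1 << 8) - 1;	yielding 255

   and the sign-extension step is skipped, so (const_int 255) is
   returned.  With UNSIGNEDP zero, the high bits are refilled and the
   result is (const_int -1) again.  */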

/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}
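
/* Decomposition sketch (illustrative): on a 32-bit target with
   MOVE_MAX == 4 and ALIGN == 4, a LEN of 7 is copied as one SImode
   move (7 -> 3), one HImode move (3 -> 1), and one QImode move
   (1 -> 0); move_by_pieces_ninsns (7, 4) below counts the same three
   insns.  */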

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      from1 =
	(data->autinc_from
	 ? gen_rtx (MEM, mode, data->from_addr)
	 : change_address (data->from, mode,
			   plus_constant (data->from_addr, data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}

/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= GET_MODE_MASK (mode)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
			 XEXP (y, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#endif
    }
}
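
/* Operand layout assumed by the movstrM expanders tried above
   (mirroring the predicate indices checked in the loop): operand 0 is
   the destination X, operand 1 the source Y, operand 2 the byte count
   converted to MODE, and operand 3 the alignment as a CONST_INT.  */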

/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
  rtx pat, last;

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx (REG, word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}

/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx (EXPR_LIST, VOIDmode,
	       gen_rtx (USE, VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
}

/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */

void
clear_storage (object, size)
     rtx object;
     int size;
{
  if (GET_MODE (object) == BLKmode)
    {
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 0,
			 VOIDmode, 3,
			 XEXP (object, 0), Pmode, const0_rtx, Pmode,
			 GEN_INT (size), Pmode);
#else
      emit_library_call (bzero_libfunc, 0,
			 VOIDmode, 2,
			 XEXP (object, 0), Pmode,
			 GEN_INT (size), Pmode);
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
}

/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  return emit_move_insn_1 (x, y);
}

/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
						    * BITS_PER_UNIT),
						   (class == MODE_COMPLEX_INT
						    ? MODE_INT : MODE_FLOAT),
						   0))
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));
      rtx insns;

      /* If this is a stack, push the highpart first, so it
	 will be in the argument order.

	 In that case, change_address is used only to convert
	 the mode, not to change the address.  */
      if (stack)
	{
	  /* Note that the real part always precedes the imag part in memory
	     regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
#else
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
#endif
	}
      else
	{
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_realpart (submode, x), gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_imagpart (submode, x), gen_imagpart (submode, y)));
	}

      return get_last_insn ();
    }

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx insns;

#ifdef PUSH_ROUNDING

      /* If X is a push on the stack, do the push now and replace
	 X with a reference to the stack pointer.  */
      if (push_operand (x, GET_MODE (x)))
	{
	  anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
	  x = change_address (x, VOIDmode, stack_pointer_rtx);
	}
#endif

      for (i = 0;
	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	   i++)
	{
	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))
	    {
	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);
	    }
	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)
	    abort ();

	  last_insn = emit_move_insn (xpart, ypart);
	}

      return last_insn;
    }
  else
    abort ();
}
2005 /* Pushing data onto the stack. */
2007 /* Push a block of length SIZE (perhaps variable)
2008 and return an rtx to address the beginning of the block.
2009 Note that it is not possible for the value returned to be a QUEUED.
2010 The value may be virtual_outgoing_args_rtx.
2012 EXTRA is the number of bytes of padding to push in addition to SIZE.
2013 BELOW nonzero means this padding comes at low addresses;
2014 otherwise, the padding comes at high addresses. */
2017 push_block (size, extra, below)
2018 rtx size;
2019 int extra, below;
2021 register rtx temp;
2022 if (CONSTANT_P (size))
2023 anti_adjust_stack (plus_constant (size, extra));
2024 else if (GET_CODE (size) == REG && extra == 0)
2025 anti_adjust_stack (size);
2026 else
2028 rtx temp = copy_to_mode_reg (Pmode, size);
2029 if (extra != 0)
2030 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2031 temp, 0, OPTAB_LIB_WIDEN);
2032 anti_adjust_stack (temp);
2035 #ifdef STACK_GROWS_DOWNWARD
2036 temp = virtual_outgoing_args_rtx;
2037 if (extra != 0 && below)
2038 temp = plus_constant (temp, extra);
2039 #else
2040 if (GET_CODE (size) == CONST_INT)
2041 temp = plus_constant (virtual_outgoing_args_rtx,
2042 - INTVAL (size) - (below ? 0 : extra));
2043 else if (extra != 0 && !below)
2044 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2045 negate_rtx (Pmode, plus_constant (size, extra)));
2046 else
2047 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2048 negate_rtx (Pmode, size));
2049 #endif
2051 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
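/* Editor's note: a worked example with assumed values.  On a machine
   whose stack grows downward, push_block (GEN_INT (16), 4, 0) adjusts
   the stack pointer by 20 bytes and returns the address of the 16-byte
   block, with the 4 padding bytes above it; passing BELOW nonzero
   instead offsets the returned address by 4 so the padding sits at the
   low end.  */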
2054 rtx
2055 gen_push_operand ()
2057 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2060 /* Generate code to push X onto the stack, assuming it has mode MODE and
2061 type TYPE.
2062 MODE is redundant except when X is a CONST_INT (since they don't
2063 carry mode info).
2064 SIZE is an rtx for the size of data to be copied (in bytes),
2065 needed only if X is BLKmode.
2067 ALIGN (in bytes) is maximum alignment we can assume.
2069 If PARTIAL and REG are both nonzero, then copy that many of the first
2070 words of X into registers starting with REG, and push the rest of X.
2071 The amount of space pushed is decreased by PARTIAL words,
2072 rounded *down* to a multiple of PARM_BOUNDARY.
2073 REG must be a hard register in this case.
2074 If REG is zero but PARTIAL is not, take all other actions for an
2075 argument partially in registers, but do not actually load any
2076 registers.
2078 EXTRA is the amount in bytes of extra space to leave next to this arg.
2079 This is ignored if an argument block has already been allocated.
2081 On a machine that lacks real push insns, ARGS_ADDR is the address of
2082 the bottom of the argument block for this call. We use indexing off there
2083 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2084 argument block has not been preallocated.
2086 ARGS_SO_FAR is the size of args previously pushed for this call. */
2088 void
2089 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2090 args_addr, args_so_far)
2091 register rtx x;
2092 enum machine_mode mode;
2093 tree type;
2094 rtx size;
2095 int align;
2096 int partial;
2097 rtx reg;
2098 int extra;
2099 rtx args_addr;
2100 rtx args_so_far;
2102 rtx xinner;
2103 enum direction stack_direction
2104 #ifdef STACK_GROWS_DOWNWARD
2105 = downward;
2106 #else
2107 = upward;
2108 #endif
2110 /* Decide where to pad the argument: `downward' for below,
2111 `upward' for above, or `none' for no padding.
2112 Default is below for small data on big-endian machines; else above. */
2113 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2115 /* Invert direction if stack is post-update. */
2116 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2117 if (where_pad != none)
2118 where_pad = (where_pad == downward ? upward : downward);
2120 xinner = x = protect_from_queue (x, 0);
2122 if (mode == BLKmode)
2124 /* Copy a block into the stack, entirely or partially. */
2126 register rtx temp;
2127 int used = partial * UNITS_PER_WORD;
2128 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2129 int skip;
2131 if (size == 0)
2132 abort ();
2134 used -= offset;
2136 /* USED is now the # of bytes we need not copy to the stack
2137 because registers will take care of them. */
2139 if (partial != 0)
2140 xinner = change_address (xinner, BLKmode,
2141 plus_constant (XEXP (xinner, 0), used));
2143 /* If the partial register-part of the arg counts in its stack size,
2144 skip the part of stack space corresponding to the registers.
2145 Otherwise, start copying to the beginning of the stack space,
2146 by setting SKIP to 0. */
2147 #ifndef REG_PARM_STACK_SPACE
2148 skip = 0;
2149 #else
2150 skip = used;
2151 #endif
2153 #ifdef PUSH_ROUNDING
2154 /* Do it with several push insns if that doesn't take lots of insns
2155 and if there is no difficulty with push insns that skip bytes
2156 on the stack for alignment purposes. */
2157 if (args_addr == 0
2158 && GET_CODE (size) == CONST_INT
2159 && skip == 0
2160 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2161 < MOVE_RATIO)
2162 /* Here we avoid the case of a structure whose weak alignment
2163 forces many pushes of a small amount of data,
2164 where such small pushes do rounding that causes trouble. */
2165 && ((! SLOW_UNALIGNED_ACCESS)
2166 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2167 || PUSH_ROUNDING (align) == align)
2168 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2170 /* Push padding now if padding above and stack grows down,
2171 or if padding below and stack grows up.
2172 But if space already allocated, this has already been done. */
2173 if (extra && args_addr == 0
2174 && where_pad != none && where_pad != stack_direction)
2175 anti_adjust_stack (GEN_INT (extra));
2177 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2178 INTVAL (size) - used, align);
2180 else
2181 #endif /* PUSH_ROUNDING */
2183 /* Otherwise make space on the stack and copy the data
2184 to the address of that space. */
2186 /* Deduct words put into registers from the size we must copy. */
2187 if (partial != 0)
2189 if (GET_CODE (size) == CONST_INT)
2190 size = GEN_INT (INTVAL (size) - used);
2191 else
2192 size = expand_binop (GET_MODE (size), sub_optab, size,
2193 GEN_INT (used), NULL_RTX, 0,
2194 OPTAB_LIB_WIDEN);
2197 /* Get the address of the stack space.
2198 In this case, we do not deal with EXTRA separately.
2199 A single stack adjust will do. */
2200 if (! args_addr)
2202 temp = push_block (size, extra, where_pad == downward);
2203 extra = 0;
2205 else if (GET_CODE (args_so_far) == CONST_INT)
2206 temp = memory_address (BLKmode,
2207 plus_constant (args_addr,
2208 skip + INTVAL (args_so_far)));
2209 else
2210 temp = memory_address (BLKmode,
2211 plus_constant (gen_rtx (PLUS, Pmode,
2212 args_addr, args_so_far),
2213 skip));
2215 /* TEMP is the address of the block. Copy the data there. */
2216 if (GET_CODE (size) == CONST_INT
2217 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2218 < MOVE_RATIO))
2220 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2221 INTVAL (size), align);
2222 goto ret;
2224 /* Try the most limited insn first, because there's no point
2225 including more than one in the machine description unless
2226 the more limited one has some advantage. */
2227 #ifdef HAVE_movstrqi
2228 if (HAVE_movstrqi
2229 && GET_CODE (size) == CONST_INT
2230 && ((unsigned) INTVAL (size)
2231 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2233 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2234 xinner, size, GEN_INT (align));
2235 if (pat != 0)
2237 emit_insn (pat);
2238 goto ret;
2241 #endif
2242 #ifdef HAVE_movstrhi
2243 if (HAVE_movstrhi
2244 && GET_CODE (size) == CONST_INT
2245 && ((unsigned) INTVAL (size)
2246 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2248 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2249 xinner, size, GEN_INT (align));
2250 if (pat != 0)
2252 emit_insn (pat);
2253 goto ret;
2256 #endif
2257 #ifdef HAVE_movstrsi
2258 if (HAVE_movstrsi)
2260 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2261 xinner, size, GEN_INT (align));
2262 if (pat != 0)
2264 emit_insn (pat);
2265 goto ret;
2268 #endif
2269 #ifdef HAVE_movstrdi
2270 if (HAVE_movstrdi)
2272 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2273 xinner, size, GEN_INT (align));
2274 if (pat != 0)
2276 emit_insn (pat);
2277 goto ret;
2280 #endif
2282 #ifndef ACCUMULATE_OUTGOING_ARGS
2283 /* If the source is referenced relative to the stack pointer,
2284 copy it to another register to stabilize it. We do not need
2285 to do this if we know that we won't be changing sp. */
2287 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2288 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2289 temp = copy_to_reg (temp);
2290 #endif
2292 /* Make inhibit_defer_pop nonzero around the library call
2293 to force it to pop the bcopy-arguments right away. */
2294 NO_DEFER_POP;
2295 #ifdef TARGET_MEM_FUNCTIONS
2296 emit_library_call (memcpy_libfunc, 0,
2297 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2298 convert_to_mode (TYPE_MODE (sizetype),
2299 size, TREE_UNSIGNED (sizetype)),
2300 TYPE_MODE (sizetype));
2301 #else
2302 emit_library_call (bcopy_libfunc, 0,
2303 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2304 convert_to_mode (TYPE_MODE (sizetype),
2305 size, TREE_UNSIGNED (sizetype)),
2306 TYPE_MODE (sizetype));
2307 #endif
2308 OK_DEFER_POP;
2311 else if (partial > 0)
2313 /* Scalar partly in registers. */
2315 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2316 int i;
2317 int not_stack;
2318 /* # words of start of argument
2319 that we must make space for but need not store. */
2320 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2321 int args_offset = INTVAL (args_so_far);
2322 int skip;
2324 /* Push padding now if padding above and stack grows down,
2325 or if padding below and stack grows up.
2326 But if space already allocated, this has already been done. */
2327 if (extra && args_addr == 0
2328 && where_pad != none && where_pad != stack_direction)
2329 anti_adjust_stack (GEN_INT (extra));
2331 /* If we make space by pushing it, we might as well push
2332 the real data. Otherwise, we can leave OFFSET nonzero
2333 and leave the space uninitialized. */
2334 if (args_addr == 0)
2335 offset = 0;
2337 /* Now NOT_STACK gets the number of words that we don't need to
2338 allocate on the stack. */
2339 not_stack = partial - offset;
2341 /* If the partial register-part of the arg counts in its stack size,
2342 skip the part of stack space corresponding to the registers.
2343 Otherwise, start copying to the beginning of the stack space,
2344 by setting SKIP to 0. */
2345 #ifndef REG_PARM_STACK_SPACE
2346 skip = 0;
2347 #else
2348 skip = not_stack;
2349 #endif
2351 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2352 x = validize_mem (force_const_mem (mode, x));
2354 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2355 SUBREGs of such registers are not allowed. */
2356 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2357 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2358 x = copy_to_reg (x);
2360 /* Loop over all the words allocated on the stack for this arg. */
2361 /* We can do it by words, because any scalar bigger than a word
2362 has a size that is a multiple of a word. */
2363 #ifndef PUSH_ARGS_REVERSED
2364 for (i = not_stack; i < size; i++)
2365 #else
2366 for (i = size - 1; i >= not_stack; i--)
2367 #endif
2368 if (i >= not_stack + offset)
2369 emit_push_insn (operand_subword_force (x, i, mode),
2370 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2371 0, args_addr,
2372 GEN_INT (args_offset + ((i - not_stack + skip)
2373 * UNITS_PER_WORD)));
2375 else
2377 rtx addr;
2379 /* Push padding now if padding above and stack grows down,
2380 or if padding below and stack grows up.
2381 But if space already allocated, this has already been done. */
2382 if (extra && args_addr == 0
2383 && where_pad != none && where_pad != stack_direction)
2384 anti_adjust_stack (GEN_INT (extra));
2386 #ifdef PUSH_ROUNDING
2387 if (args_addr == 0)
2388 addr = gen_push_operand ();
2389 else
2390 #endif
2391 if (GET_CODE (args_so_far) == CONST_INT)
2392 addr
2393 = memory_address (mode,
2394 plus_constant (args_addr, INTVAL (args_so_far)));
2395 else
2396 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2397 args_so_far));
2399 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2402 ret:
2403 /* If part should go in registers, copy that part
2404 into the appropriate registers. Do this now, at the end,
2405 since mem-to-mem copies above may do function calls. */
2406 if (partial > 0 && reg != 0)
2407 move_block_to_reg (REGNO (reg), x, partial, mode);
2409 if (extra && args_addr == 0 && where_pad == stack_direction)
2410 anti_adjust_stack (GEN_INT (extra));
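/* Editor's note: a worked example of the scalar partial-in-register
   case above, with assumed numbers.  For a 4-word scalar with
   PARTIAL == 2, REG set, no preallocated argument block, and no
   REG_PARM_STACK_SPACE, both OFFSET and SKIP are 0 and NOT_STACK is 2:
   the loop pushes only words 2 and 3, and move_block_to_reg at `ret'
   then loads words 0 and 1 into REG and its successor.  */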
2413 /* Expand an assignment that stores the value of FROM into TO.
2414 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2415 (This may contain a QUEUED rtx;
2416 if the value is constant, this rtx is a constant.)
2417 Otherwise, the returned value is NULL_RTX.
2419 SUGGEST_REG is no longer actually used.
2420 It used to mean, copy the value through a register
2421 and return that register, if that is possible.
2422 We now use WANT_VALUE to decide whether to do this. */
2424 rtx
2425 expand_assignment (to, from, want_value, suggest_reg)
2426 tree to, from;
2427 int want_value;
2428 int suggest_reg;
2430 register rtx to_rtx = 0;
2431 rtx result;
2433 /* Don't crash if the lhs of the assignment was erroneous. */
2435 if (TREE_CODE (to) == ERROR_MARK)
2437 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2438 return want_value ? result : NULL_RTX;
2441 if (output_bytecode)
2443 tree dest_innermost;
2445 bc_expand_expr (from);
2446 bc_emit_instruction (duplicate);
2448 dest_innermost = bc_expand_address (to);
2450 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2451 take care of it here. */
2453 bc_store_memory (TREE_TYPE (to), dest_innermost);
2454 return NULL;
2457 /* Assignment of a structure component needs special treatment
2458 if the structure component's rtx is not simply a MEM.
2459 Assignment of an array element at a constant index, and assignment of
2460 an array element in an unaligned packed structure field, have the same
2461 problem. */
2463 if (TREE_CODE (to) == COMPONENT_REF
2464 || TREE_CODE (to) == BIT_FIELD_REF
2465 || (TREE_CODE (to) == ARRAY_REF
2466 && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2467 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
2468 || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
2470 enum machine_mode mode1;
2471 int bitsize;
2472 int bitpos;
2473 tree offset;
2474 int unsignedp;
2475 int volatilep = 0;
2476 tree tem;
2477 int alignment;
2479 push_temp_slots ();
2480 tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2481 &mode1, &unsignedp, &volatilep);
2483 /* If we are going to use store_bit_field and extract_bit_field,
2484 make sure to_rtx will be safe for multiple use. */
2486 if (mode1 == VOIDmode && want_value)
2487 tem = stabilize_reference (tem);
2489 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
2490 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2491 if (offset != 0)
2493 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2495 if (GET_CODE (to_rtx) != MEM)
2496 abort ();
2497 to_rtx = change_address (to_rtx, VOIDmode,
2498 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2499 force_reg (Pmode, offset_rtx)));
2500 /* If we have a variable offset, the known alignment
2501 is only that of the innermost structure containing the field.
2502 (Actually, we could sometimes do better by using the
2503 alignment of an element of the innermost array, but no need.) */
2504 if (TREE_CODE (to) == COMPONENT_REF
2505 || TREE_CODE (to) == BIT_FIELD_REF)
2506 alignment
2507 = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
2509 if (volatilep)
2511 if (GET_CODE (to_rtx) == MEM)
2512 MEM_VOLATILE_P (to_rtx) = 1;
2513 #if 0 /* This was turned off because, when a field is volatile
2514 in an object which is not volatile, the object may be in a register,
2515 and then we would abort over here. */
2516 else
2517 abort ();
2518 #endif
2521 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2522 (want_value
2523 /* Spurious cast makes HPUX compiler happy. */
2524 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2525 : VOIDmode),
2526 unsignedp,
2527 /* Required alignment of containing datum. */
2528 alignment,
2529 int_size_in_bytes (TREE_TYPE (tem)));
2530 preserve_temp_slots (result);
2531 free_temp_slots ();
2532 pop_temp_slots ();
2534 /* If the value is meaningful, convert RESULT to the proper mode.
2535 Otherwise, return nothing. */
2536 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2537 TYPE_MODE (TREE_TYPE (from)),
2538 result,
2539 TREE_UNSIGNED (TREE_TYPE (to)))
2540 : NULL_RTX);
2543 /* If the rhs is a function call and its value is not an aggregate,
2544 call the function before we start to compute the lhs.
2545 This is needed for correct code for cases such as
2546 val = setjmp (buf) on machines where reference to val
2547 requires loading up part of an address in a separate insn.
2549 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2550 a promoted variable where the zero- or sign-extension needs to be done.
2551 Handling this in the normal way is safe because no computation is done
2552 before the call. */
2553 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2554 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2556 rtx value;
2558 push_temp_slots ();
2559 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2560 if (to_rtx == 0)
2561 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2563 if (GET_MODE (to_rtx) == BLKmode)
2565 int align = MIN (TYPE_ALIGN (TREE_TYPE (from)), BITS_PER_WORD);
2566 emit_block_move (to_rtx, value, expr_size (from), align);
2568 else
2569 emit_move_insn (to_rtx, value);
2570 preserve_temp_slots (to_rtx);
2571 free_temp_slots ();
2572 pop_temp_slots ();
2573 return want_value ? to_rtx : NULL_RTX;
2576 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2577 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2579 if (to_rtx == 0)
2580 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2582 /* Don't move directly into a return register. */
2583 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2585 rtx temp;
2587 push_temp_slots ();
2588 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2589 emit_move_insn (to_rtx, temp);
2590 preserve_temp_slots (to_rtx);
2591 free_temp_slots ();
2592 pop_temp_slots ();
2593 return want_value ? to_rtx : NULL_RTX;
2596 /* In case we are returning the contents of an object which overlaps
2597 the place the value is being stored, use a safe function when copying
2598 a value through a pointer into a structure value return block. */
2599 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2600 && current_function_returns_struct
2601 && !current_function_returns_pcc_struct)
2603 rtx from_rtx, size;
2605 push_temp_slots ();
2606 size = expr_size (from);
2607 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2609 #ifdef TARGET_MEM_FUNCTIONS
2610 emit_library_call (memcpy_libfunc, 0,
2611 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2612 XEXP (from_rtx, 0), Pmode,
2613 convert_to_mode (TYPE_MODE (sizetype),
2614 size, TREE_UNSIGNED (sizetype)),
2615 TYPE_MODE (sizetype));
2616 #else
2617 emit_library_call (bcopy_libfunc, 0,
2618 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2619 XEXP (to_rtx, 0), Pmode,
2620 convert_to_mode (TYPE_MODE (sizetype),
2621 size, TREE_UNSIGNED (sizetype)),
2622 TYPE_MODE (sizetype));
2623 #endif
2625 preserve_temp_slots (to_rtx);
2626 free_temp_slots ();
2627 pop_temp_slots ();
2628 return want_value ? to_rtx : NULL_RTX;
2631 /* Compute FROM and store the value in the rtx we got. */
2633 push_temp_slots ();
2634 result = store_expr (from, to_rtx, want_value);
2635 preserve_temp_slots (result);
2636 free_temp_slots ();
2637 pop_temp_slots ();
2638 return want_value ? result : NULL_RTX;
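/* Editor's note: for instance, expanding `s.f = x' where `f' is a
   bit-field takes the COMPONENT_REF branch above: get_inner_reference
   locates the containing object and the bit position, and store_field
   emits the bit-field store.  A plain `v = x' falls through to the
   store_expr call just above.  */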
2641 /* Generate code for computing expression EXP,
2642 and storing the value into TARGET.
2643 TARGET may contain a QUEUED rtx.
2645 If WANT_VALUE is nonzero, return a copy of the value
2646 not in TARGET, so that we can be sure to use the proper
2647 value in a containing expression even if TARGET has something
2648 else stored in it. If possible, we copy the value through a pseudo
2649 and return that pseudo. Or, if the value is constant, we try to
2650 return the constant. In some cases, we return a pseudo
2651 copied *from* TARGET.
2653 If the mode is BLKmode then we may return TARGET itself.
2654 It turns out that in BLKmode it doesn't cause a problem,
2655 because C has no operators that could combine two different
2656 assignments into the same BLKmode object with different values
2657 with no sequence point. Will other languages need this to
2658 be more thorough?
2660 If WANT_VALUE is 0, we return NULL, to make sure
2661 to catch quickly any cases where the caller uses the value
2662 and fails to set WANT_VALUE. */
2664 rtx
2665 store_expr (exp, target, want_value)
2666 register tree exp;
2667 register rtx target;
2668 int want_value;
2670 register rtx temp;
2671 int dont_return_target = 0;
2673 if (TREE_CODE (exp) == COMPOUND_EXPR)
2675 /* Perform first part of compound expression, then assign from second
2676 part. */
2677 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2678 emit_queue ();
2679 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
2681 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2683 /* For conditional expression, get safe form of the target. Then
2684 test the condition, doing the appropriate assignment on either
2685 side. This avoids the creation of unnecessary temporaries.
2686 For non-BLKmode, it is more efficient not to do this. */
2688 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2690 emit_queue ();
2691 target = protect_from_queue (target, 1);
2693 NO_DEFER_POP;
2694 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2695 store_expr (TREE_OPERAND (exp, 1), target, 0);
2696 emit_queue ();
2697 emit_jump_insn (gen_jump (lab2));
2698 emit_barrier ();
2699 emit_label (lab1);
2700 store_expr (TREE_OPERAND (exp, 2), target, 0);
2701 emit_queue ();
2702 emit_label (lab2);
2703 OK_DEFER_POP;
2704 return want_value ? target : NULL_RTX;
2706 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
2707 && GET_MODE (target) != BLKmode)
2708 /* If target is in memory and caller wants value in a register instead,
2709 arrange that. Pass TARGET as target for expand_expr so that,
2710 if EXP is another assignment, WANT_VALUE will be nonzero for it.
2711 We know expand_expr will not use the target in that case.
2712 Don't do this if TARGET is volatile because we are supposed
2713 to write it and then read it. */
2715 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2716 GET_MODE (target), 0);
2717 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2718 temp = copy_to_reg (temp);
2719 dont_return_target = 1;
2721 else if (queued_subexp_p (target))
2722 /* If target contains a postincrement, let's not risk
2723 using it as the place to generate the rhs. */
2725 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2727 /* Expand EXP into a new pseudo. */
2728 temp = gen_reg_rtx (GET_MODE (target));
2729 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2731 else
2732 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2734 /* If target is volatile, ANSI requires accessing the value
2735 *from* the target, if it is accessed. So make that happen.
2736 In no case return the target itself. */
2737 if (! MEM_VOLATILE_P (target) && want_value)
2738 dont_return_target = 1;
2740 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2741 /* If this is a scalar in a register that is stored in a wider mode
2742 than the declared mode, compute the result into its declared mode
2743 and then convert to the wider mode. Our value is the computed
2744 expression. */
2746 /* If we don't want a value, we can do the conversion inside EXP,
2747 which will often result in some optimizations. */
2748 if (! want_value)
2749 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
2750 SUBREG_PROMOTED_UNSIGNED_P (target)),
2751 exp);
2753 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2755 /* If TEMP is a volatile MEM and we want a result value, make
2756 the access now so it gets done only once. */
2757 if (GET_CODE (temp) == MEM && MEM_VOLATILE_P (temp) && want_value)
2758 temp = copy_to_reg (temp);
2760 /* If TEMP is a VOIDmode constant, use convert_modes to make
2761 sure that we properly convert it. */
2762 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2763 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2764 TYPE_MODE (TREE_TYPE (exp)), temp,
2765 SUBREG_PROMOTED_UNSIGNED_P (target));
2767 convert_move (SUBREG_REG (target), temp,
2768 SUBREG_PROMOTED_UNSIGNED_P (target));
2769 return want_value ? temp : NULL_RTX;
2771 else
2773 temp = expand_expr (exp, target, GET_MODE (target), 0);
2774 /* Return TARGET if it's a specified hardware register.
2775 If TARGET is a volatile mem ref, either return TARGET
2776 or return a reg copied *from* TARGET; ANSI requires this.
2778 Otherwise, if TEMP is not TARGET, return TEMP
2779 if it is constant (for efficiency),
2780 or if we really want the correct value. */
2781 if (!(target && GET_CODE (target) == REG
2782 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2783 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
2784 && temp != target
2785 && (CONSTANT_P (temp) || want_value))
2786 dont_return_target = 1;
2789 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
2790 the same as that of TARGET, adjust the constant. This is needed, for
2791 example, in case it is a CONST_DOUBLE and we want only a word-sized
2792 value. */
2793 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
2794 && TREE_CODE (exp) != ERROR_MARK
2795 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2796 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
2797 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
2799 /* If value was not generated in the target, store it there.
2800 Convert the value to TARGET's type first if necessary. */
2802 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2804 target = protect_from_queue (target, 1);
2805 if (GET_MODE (temp) != GET_MODE (target)
2806 && GET_MODE (temp) != VOIDmode)
2808 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2809 if (dont_return_target)
2811 /* In this case, we will return TEMP,
2812 so make sure it has the proper mode.
2813 But don't forget to store the value into TARGET. */
2814 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2815 emit_move_insn (target, temp);
2817 else
2818 convert_move (target, temp, unsignedp);
2821 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2823 /* Handle copying a string constant into an array.
2824 The string constant may be shorter than the array.
2825 So copy just the string's actual length, and clear the rest. */
2826 rtx size;
2827 rtx addr;
2829 /* Get the size of the data type of the string,
2830 which is actually the size of the target. */
2831 size = expr_size (exp);
2832 if (GET_CODE (size) == CONST_INT
2833 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2834 emit_block_move (target, temp, size,
2835 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2836 else
2838 /* Compute the size of the data to copy from the string. */
2839 tree copy_size
2840 = size_binop (MIN_EXPR,
2841 make_tree (sizetype, size),
2842 convert (sizetype,
2843 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
2844 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2845 VOIDmode, 0);
2846 rtx label = 0;
2848 /* Copy that much. */
2849 emit_block_move (target, temp, copy_size_rtx,
2850 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2852 /* Figure out how much is left in TARGET
2853 that we have to clear. */
2854 if (GET_CODE (copy_size_rtx) == CONST_INT)
2856 addr = plus_constant (XEXP (target, 0),
2857 TREE_STRING_LENGTH (exp));
2858 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
2860 else
2862 enum machine_mode size_mode = Pmode;
2864 addr = force_reg (Pmode, XEXP (target, 0));
2865 addr = expand_binop (size_mode, add_optab, addr,
2866 copy_size_rtx, NULL_RTX, 0,
2867 OPTAB_LIB_WIDEN);
2869 size = expand_binop (size_mode, sub_optab, size,
2870 copy_size_rtx, NULL_RTX, 0,
2871 OPTAB_LIB_WIDEN);
2873 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2874 GET_MODE (size), 0, 0);
2875 label = gen_label_rtx ();
2876 emit_jump_insn (gen_blt (label));
2879 if (size != const0_rtx)
2881 #ifdef TARGET_MEM_FUNCTIONS
2882 emit_library_call (memset_libfunc, 0, VOIDmode, 3, addr,
2883 Pmode, const0_rtx, Pmode, size, Pmode);
2884 #else
2885 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2886 addr, Pmode, size, Pmode);
2887 #endif
2890 if (label)
2891 emit_label (label);
2894 else if (GET_MODE (temp) == BLKmode)
2895 emit_block_move (target, temp, expr_size (exp),
2896 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2897 else
2898 emit_move_insn (target, temp);
2901 /* If we don't want a value, return NULL_RTX. */
2902 if (! want_value)
2903 return NULL_RTX;
2905 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
2906 ??? The latter test doesn't seem to make sense. */
2907 else if (dont_return_target && GET_CODE (temp) != MEM)
2908 return temp;
2910 /* Return TARGET itself if it is a hard register. */
2911 else if (want_value && GET_MODE (target) != BLKmode
2912 && ! (GET_CODE (target) == REG
2913 && REGNO (target) < FIRST_PSEUDO_REGISTER))
2914 return copy_to_reg (target);
2916 else
2917 return target;
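/* Editor's note: the STRING_CST branch above is what expands an
   initialization such as `char buf[8] = "hi";' (sizes assumed for
   illustration): three bytes, counting the terminating null, are
   block-moved, and the remaining five are cleared through memset or
   bzero.  */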
2920 /* Store the value of constructor EXP into the rtx TARGET.
2921 TARGET is either a REG or a MEM. */
2923 static void
2924 store_constructor (exp, target)
2925 tree exp;
2926 rtx target;
2928 tree type = TREE_TYPE (exp);
2930 /* We know our target cannot conflict, since safe_from_p has been called. */
2931 #if 0
2932 /* Don't try copying piece by piece into a hard register
2933 since that is vulnerable to being clobbered by EXP.
2934 Instead, construct in a pseudo register and then copy it all. */
2935 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2937 rtx temp = gen_reg_rtx (GET_MODE (target));
2938 store_constructor (exp, temp);
2939 emit_move_insn (target, temp);
2940 return;
2942 #endif
2944 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
2945 || TREE_CODE (type) == QUAL_UNION_TYPE)
2947 register tree elt;
2949 /* Inform later passes that the whole union value is dead. */
2950 if (TREE_CODE (type) == UNION_TYPE
2951 || TREE_CODE (type) == QUAL_UNION_TYPE)
2952 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2954 /* If we are building a static constructor into a register,
2955 set the initial value to zero so we can fold the value into
2956 a constant. */
2957 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2958 emit_move_insn (target, const0_rtx);
2960 /* If the constructor has fewer fields than the structure,
2961 clear the whole structure first. */
2962 else if (list_length (CONSTRUCTOR_ELTS (exp))
2963 != list_length (TYPE_FIELDS (type)))
2964 clear_storage (target, int_size_in_bytes (type));
2965 else
2966 /* Inform later passes that the old value is dead. */
2967 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2969 /* Store each element of the constructor into
2970 the corresponding field of TARGET. */
2972 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2974 register tree field = TREE_PURPOSE (elt);
2975 register enum machine_mode mode;
2976 int bitsize;
2977 int bitpos = 0;
2978 int unsignedp;
2979 tree pos, constant = 0, offset = 0;
2980 rtx to_rtx = target;
2982 /* Just ignore missing fields.
2983 We cleared the whole structure, above,
2984 if any fields are missing. */
2985 if (field == 0)
2986 continue;
2988 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2989 unsignedp = TREE_UNSIGNED (field);
2990 mode = DECL_MODE (field);
2991 if (DECL_BIT_FIELD (field))
2992 mode = VOIDmode;
2994 pos = DECL_FIELD_BITPOS (field);
2995 if (TREE_CODE (pos) == INTEGER_CST)
2996 constant = pos;
2997 else if (TREE_CODE (pos) == PLUS_EXPR
2998 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2999 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3000 else
3001 offset = pos;
3003 if (constant)
3004 bitpos = TREE_INT_CST_LOW (constant);
3006 if (offset)
3008 rtx offset_rtx;
3010 if (contains_placeholder_p (offset))
3011 offset = build (WITH_RECORD_EXPR, sizetype,
3012 offset, exp);
3014 offset = size_binop (FLOOR_DIV_EXPR, offset,
3015 size_int (BITS_PER_UNIT));
3017 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3018 if (GET_CODE (to_rtx) != MEM)
3019 abort ();
3021 to_rtx
3022 = change_address (to_rtx, VOIDmode,
3023 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
3024 force_reg (Pmode, offset_rtx)));
3027 store_field (to_rtx, bitsize, bitpos, mode, TREE_VALUE (elt),
3028 /* The alignment of TARGET is
3029 at least what its type requires. */
3030 VOIDmode, 0,
3031 TYPE_ALIGN (type) / BITS_PER_UNIT,
3032 int_size_in_bytes (type));
3035 else if (TREE_CODE (type) == ARRAY_TYPE)
3037 register tree elt;
3038 register int i;
3039 tree domain = TYPE_DOMAIN (type);
3040 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3041 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3042 tree elttype = TREE_TYPE (type);
3044 /* If the constructor has fewer fields than the structure,
3045 clear the whole structure first. Similarly if this is a
3046 static constructor of a non-BLKmode object. */
3048 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
3049 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3050 clear_storage (target, int_size_in_bytes (type));
3051 else
3052 /* Inform later passes that the old value is dead. */
3053 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3055 /* Store each element of the constructor into
3056 the corresponding element of TARGET, determined
3057 by counting the elements. */
3058 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3059 elt;
3060 elt = TREE_CHAIN (elt), i++)
3062 register enum machine_mode mode;
3063 int bitsize;
3064 int bitpos;
3065 int unsignedp;
3066 tree index = TREE_PURPOSE (elt);
3067 rtx xtarget = target;
3069 mode = TYPE_MODE (elttype);
3070 bitsize = GET_MODE_BITSIZE (mode);
3071 unsignedp = TREE_UNSIGNED (elttype);
3073 if (index != 0 && TREE_CODE (index) != INTEGER_CST)
3075 /* We don't currently allow variable indices in a
3076 C initializer, but let's try here to support them. */
3077 rtx pos_rtx, addr, xtarget;
3078 tree position;
3080 position = size_binop (MULT_EXPR, index, TYPE_SIZE (elttype));
3081 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3082 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3083 xtarget = change_address (target, mode, addr);
3084 store_expr (TREE_VALUE (elt), xtarget, 0);
3086 else
3088 if (index != 0)
3089 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3090 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3091 else
3092 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3094 store_field (xtarget, bitsize, bitpos, mode, TREE_VALUE (elt),
3095 /* The alignment of TARGET is
3096 at least what its type requires. */
3097 VOIDmode, 0,
3098 TYPE_ALIGN (type) / BITS_PER_UNIT,
3099 int_size_in_bytes (type));
3103 /* Set-constructor assignments. */
3104 else if (TREE_CODE (type) == SET_TYPE)
3106 tree elt;
3107 rtx xtarget = XEXP (target, 0);
3108 int set_word_size = TYPE_ALIGN (type);
3109 int nbytes = int_size_in_bytes (type);
3110 int nwords;
3111 tree non_const_elements;
3112 int need_to_clear_first;
3113 tree domain = TYPE_DOMAIN (type);
3114 tree domain_min, domain_max, bitlength;
3116 /* The default implementation strategy is to extract the constant
3117 parts of the constructor, use that to initialize the target,
3118 and then "or" in whatever non-constant ranges we need in addition.
3120 If a large set is all zero or all ones, it is
3121 probably better to set it using memset (if available) or bzero.
3122 Also, if a large set has just a single range, it may also be
3123 better to first clear the whole set (using
3124 bzero/memset) and then set the bits we want. */
3126 /* Check for all zeros. */
3127 if (CONSTRUCTOR_ELTS (exp) == NULL_TREE)
3129 clear_storage (target, nbytes);
3130 return;
3133 if (nbytes < 0)
3134 abort ();
3136 nwords = (nbytes * BITS_PER_UNIT) / set_word_size;
3137 if (nwords == 0)
3138 nwords = 1;
3140 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3141 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3142 bitlength = size_binop (PLUS_EXPR,
3143 size_binop (MINUS_EXPR, domain_max, domain_min),
3144 size_one_node);
3146 /* Check for range all ones, or at most a single range.
3147 (This optimization is only a win for big sets.) */
3148 if (GET_MODE (target) == BLKmode && nbytes > 16
3149 && TREE_CHAIN (CONSTRUCTOR_ELTS (exp)) == NULL_TREE)
3151 need_to_clear_first = 1;
3152 non_const_elements = CONSTRUCTOR_ELTS (exp);
3154 else
3156 HOST_WIDE_INT *buffer
3157 = (HOST_WIDE_INT*) alloca (sizeof (HOST_WIDE_INT) * nwords);
3158 non_const_elements = get_set_constructor_words (exp, buffer, nwords);
3160 if (nbytes * BITS_PER_UNIT <= set_word_size)
3162 if (BITS_BIG_ENDIAN)
3163 buffer[0] >>= set_word_size - nbytes * BITS_PER_UNIT;
3164 emit_move_insn (target, GEN_INT (buffer[0]));
3166 else
3168 rtx addr = XEXP (target, 0);
3169 rtx to_rtx;
3170 register int i;
3171 enum machine_mode mode
3172 = mode_for_size (set_word_size, MODE_INT, 1);
3174 for (i = 0; i < nwords; i++)
3176 int offset = i * set_word_size / BITS_PER_UNIT;
3177 rtx datum = GEN_INT (buffer[i]);
3178 rtx to_rtx = change_address (target, mode,
3179 plus_constant (addr, offset));
3180 MEM_IN_STRUCT_P (to_rtx) = 1;
3181 emit_move_insn (to_rtx, datum);
3184 need_to_clear_first = 0;
3187 for (elt = non_const_elements; elt != NULL_TREE; elt = TREE_CHAIN (elt))
3189 /* start of range of element or NULL */
3190 tree startbit = TREE_PURPOSE (elt);
3191 /* end of range of element, or element value */
3192 tree endbit = TREE_VALUE (elt);
3193 HOST_WIDE_INT startb, endb;
3194 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
3196 bitlength_rtx = expand_expr (bitlength,
3197 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
3199 /* handle non-range tuple element like [ expr ] */
3200 if (startbit == NULL_TREE)
3202 startbit = save_expr (endbit);
3203 endbit = startbit;
3205 startbit = convert (sizetype, startbit);
3206 endbit = convert (sizetype, endbit);
3207 if (! integer_zerop (domain_min))
3209 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
3210 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
3212 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
3213 EXPAND_CONST_ADDRESS);
3214 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
3215 EXPAND_CONST_ADDRESS);
3217 if (REG_P (target))
3219 targetx = assign_stack_temp (GET_MODE (target),
3220 GET_MODE_SIZE (GET_MODE (target)),
3221 0);
3222 emit_move_insn (targetx, target);
3224 else if (GET_CODE (target) == MEM)
3225 targetx = target;
3226 else
3227 abort ();
3229 #ifdef TARGET_MEM_FUNCTIONS
3230 /* Optimization: If startbit and endbit are
3231 constants divisible by BITS_PER_UNIT,
3232 call memset instead. */
3233 if (TREE_CODE (startbit) == INTEGER_CST
3234 && TREE_CODE (endbit) == INTEGER_CST
3235 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
3236 && (endb = TREE_INT_CST_LOW (endbit)) % BITS_PER_UNIT == 0)
3239 if (need_to_clear_first
3240 && endb - startb != nbytes * BITS_PER_UNIT)
3241 clear_storage (target, nbytes);
3242 need_to_clear_first = 0;
3243 emit_library_call (memset_libfunc, 0,
3244 VOIDmode, 3,
3245 plus_constant (XEXP (targetx, 0), startb),
3246 Pmode,
3247 constm1_rtx, Pmode,
3248 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3249 Pmode);
3251 else
3252 #endif
3254 if (need_to_clear_first)
3256 clear_storage (target, nbytes);
3257 need_to_clear_first = 0;
3259 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
3260 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
3261 bitlength_rtx, TYPE_MODE (sizetype),
3262 startbit_rtx, TYPE_MODE (sizetype),
3263 endbit_rtx, TYPE_MODE (sizetype));
3265 if (REG_P (target))
3266 emit_move_insn (target, targetx);
3270 else
3271 abort ();
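/* Editor's note: as an example, `struct { int a, b; } s = { 1 };'
   takes the RECORD_TYPE path above: the constructor has fewer elements
   than the type has fields, so the whole object is cleared first and
   field `a' is then stored with store_field.  */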
3274 /* Store the value of EXP (an expression tree)
3275 into a subfield of TARGET which has mode MODE and occupies
3276 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3277 If MODE is VOIDmode, it means that we are storing into a bit-field.
3279 If VALUE_MODE is VOIDmode, return nothing in particular.
3280 UNSIGNEDP is not used in this case.
3282 Otherwise, return an rtx for the value stored. This rtx
3283 has mode VALUE_MODE if that is convenient to do.
3284 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3286 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3287 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
3289 static rtx
3290 store_field (target, bitsize, bitpos, mode, exp, value_mode,
3291 unsignedp, align, total_size)
3292 rtx target;
3293 int bitsize, bitpos;
3294 enum machine_mode mode;
3295 tree exp;
3296 enum machine_mode value_mode;
3297 int unsignedp;
3298 int align;
3299 int total_size;
3301 HOST_WIDE_INT width_mask = 0;
3303 if (bitsize < HOST_BITS_PER_WIDE_INT)
3304 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
3306 /* If we are storing into an unaligned field of an aligned union that is
3307 in a register, we may have the mode of TARGET being an integer mode but
3308 MODE == BLKmode. In that case, get an aligned object whose size and
3309 alignment are the same as TARGET and store TARGET into it (we can avoid
3310 the store if the field being stored is the entire width of TARGET). Then
3311 call ourselves recursively to store the field into a BLKmode version of
3312 that object. Finally, load from the object into TARGET. This is not
3313 very efficient in general, but should only be slightly more expensive
3314 than the otherwise-required unaligned accesses. Perhaps this can be
3315 cleaned up later. */
3317 if (mode == BLKmode
3318 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
3320 rtx object = assign_stack_temp (GET_MODE (target),
3321 GET_MODE_SIZE (GET_MODE (target)), 0);
3322 rtx blk_object = copy_rtx (object);
3324 MEM_IN_STRUCT_P (object) = 1;
3325 MEM_IN_STRUCT_P (blk_object) = 1;
3326 PUT_MODE (blk_object, BLKmode);
3328 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
3329 emit_move_insn (object, target);
3331 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
3332 align, total_size);
3334 /* Even though we aren't returning target, we need to
3335 give it the updated value. */
3336 emit_move_insn (target, object);
3338 return blk_object;
3341 /* If the structure is in a register or if the component
3342 is a bit field, we cannot use addressing to access it.
3343 Use bit-field techniques or SUBREG to store in it. */
3345 if (mode == VOIDmode
3346 || (mode != BLKmode && ! direct_store[(int) mode])
3347 || GET_CODE (target) == REG
3348 || GET_CODE (target) == SUBREG
3349 /* If the field isn't aligned enough to store as an ordinary memref,
3350 store it as a bit field. */
3351 || (SLOW_UNALIGNED_ACCESS
3352 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
3353 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
3355 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3357 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
3358 MODE. */
3359 if (mode != VOIDmode && mode != BLKmode
3360 && mode != TYPE_MODE (TREE_TYPE (exp)))
3361 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
3363 /* Store the value in the bitfield. */
3364 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
3365 if (value_mode != VOIDmode)
3367 /* The caller wants an rtx for the value. */
3368 /* If possible, avoid refetching from the bitfield itself. */
3369 if (width_mask != 0
3370 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
3372 tree count;
3373 enum machine_mode tmode;
3375 if (unsignedp)
3376 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
3377 tmode = GET_MODE (temp);
3378 if (tmode == VOIDmode)
3379 tmode = value_mode;
3380 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
3381 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
3382 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
3384 return extract_bit_field (target, bitsize, bitpos, unsignedp,
3385 NULL_RTX, value_mode, 0, align,
3386 total_size);
3388 return const0_rtx;
3390 else
3392 rtx addr = XEXP (target, 0);
3393 rtx to_rtx;
3395 /* If a value is wanted, it must be the lhs;
3396 so make the address stable for multiple use. */
3398 if (value_mode != VOIDmode && GET_CODE (addr) != REG
3399 && ! CONSTANT_ADDRESS_P (addr)
3400 /* A frame-pointer reference is already stable. */
3401 && ! (GET_CODE (addr) == PLUS
3402 && GET_CODE (XEXP (addr, 1)) == CONST_INT
3403 && (XEXP (addr, 0) == virtual_incoming_args_rtx
3404 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
3405 addr = copy_to_reg (addr);
3407 /* Now build a reference to just the desired component. */
3409 to_rtx = change_address (target, mode,
3410 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
3411 MEM_IN_STRUCT_P (to_rtx) = 1;
3413 return store_expr (exp, to_rtx, value_mode != VOIDmode);
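/* Editor's note: an assumed example of the bit-field path above.
   Storing into a 5-bit field at bit position 3 of a structure goes
   through store_bit_field; when a value is wanted, WIDTH_MASK is 0x1f
   and the result is recovered either by masking TEMP (unsigned case)
   or by a left/right shift pair that sign-extends it (signed case).  */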
3417 /* Return true if any object containing the innermost array is an unaligned
3418 packed structure field. */
3420 static int
3421 get_inner_unaligned_p (exp)
3422 tree exp;
3424 int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));
3426 while (1)
3428 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3430 if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
3431 < needed_alignment)
3432 return 1;
3434 else if (TREE_CODE (exp) != ARRAY_REF
3435 && TREE_CODE (exp) != NON_LVALUE_EXPR
3436 && ! ((TREE_CODE (exp) == NOP_EXPR
3437 || TREE_CODE (exp) == CONVERT_EXPR)
3438 && (TYPE_MODE (TREE_TYPE (exp))
3439 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3440 break;
3442 exp = TREE_OPERAND (exp, 0);
3445 return 0;
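/* Editor's note: for example, an ARRAY_REF into an array that is a
   field of a packed, byte-aligned structure makes this return 1, since
   the containing record's alignment falls below what the array's type
   requires.  */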
3448 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
3449 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
3450 ARRAY_REFs and find the ultimate containing object, which we return.
3452 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
3453 bit position, and *PUNSIGNEDP to the signedness of the field.
3454 If the position of the field is variable, we store a tree
3455 giving the variable offset (in units) in *POFFSET.
3456 This offset is in addition to the bit position.
3457 If the position is not variable, we store 0 in *POFFSET.
3459 If any of the extraction expressions is volatile,
3460 we store 1 in *PVOLATILEP. Otherwise we don't change that.
3462 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
3463 is a mode that can be used to access the field. In that case, *PBITSIZE
3464 is redundant.
3466 If the field describes a variable-sized object, *PMODE is set to
3467 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
3468 this case, but the address of the object can be found. */
3470 tree
3471 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
3472 punsignedp, pvolatilep)
3473 tree exp;
3474 int *pbitsize;
3475 int *pbitpos;
3476 tree *poffset;
3477 enum machine_mode *pmode;
3478 int *punsignedp;
3479 int *pvolatilep;
3481 tree orig_exp = exp;
3482 tree size_tree = 0;
3483 enum machine_mode mode = VOIDmode;
3484 tree offset = integer_zero_node;
3486 if (TREE_CODE (exp) == COMPONENT_REF)
3488 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
3489 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
3490 mode = DECL_MODE (TREE_OPERAND (exp, 1));
3491 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
3493 else if (TREE_CODE (exp) == BIT_FIELD_REF)
3495 size_tree = TREE_OPERAND (exp, 1);
3496 *punsignedp = TREE_UNSIGNED (exp);
3498 else
3500 mode = TYPE_MODE (TREE_TYPE (exp));
3501 *pbitsize = GET_MODE_BITSIZE (mode);
3502 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3505 if (size_tree)
3507 if (TREE_CODE (size_tree) != INTEGER_CST)
3508 mode = BLKmode, *pbitsize = -1;
3509 else
3510 *pbitsize = TREE_INT_CST_LOW (size_tree);
3513 /* Compute cumulative bit-offset for nested component-refs and array-refs,
3514 and find the ultimate containing object. */
3516 *pbitpos = 0;
3518 while (1)
3520 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3522 tree pos = (TREE_CODE (exp) == COMPONENT_REF
3523 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
3524 : TREE_OPERAND (exp, 2));
3525 tree constant = integer_zero_node, var = pos;
3527 /* If this field hasn't been filled in yet, don't go
3528 past it. This should only happen when folding expressions
3529 made during type construction. */
3530 if (pos == 0)
3531 break;
3533 /* Assume here that the offset is a multiple of a unit.
3534 If not, there should be an explicitly added constant. */
3535 if (TREE_CODE (pos) == PLUS_EXPR
3536 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3537 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
3538 else if (TREE_CODE (pos) == INTEGER_CST)
3539 constant = pos, var = integer_zero_node;
3541 *pbitpos += TREE_INT_CST_LOW (constant);
3543 if (var)
3544 offset = size_binop (PLUS_EXPR, offset,
3545 size_binop (EXACT_DIV_EXPR, var,
3546 size_int (BITS_PER_UNIT)));
3549 else if (TREE_CODE (exp) == ARRAY_REF)
3551 /* This code is based on the code in case ARRAY_REF in expand_expr
3552 below. We assume here that the size of an array element is
3553 always an integral multiple of BITS_PER_UNIT. */
3555 tree index = TREE_OPERAND (exp, 1);
3556 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
3557 tree low_bound
3558 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3559 tree index_type = TREE_TYPE (index);
3561 if (! integer_zerop (low_bound))
3562 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3564 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3566 index = convert (type_for_size (POINTER_SIZE, 0), index);
3567 index_type = TREE_TYPE (index);
3570 index = fold (build (MULT_EXPR, index_type, index,
3571 TYPE_SIZE (TREE_TYPE (exp))));
3573 if (TREE_CODE (index) == INTEGER_CST
3574 && TREE_INT_CST_HIGH (index) == 0)
3575 *pbitpos += TREE_INT_CST_LOW (index);
3576 else
3577 offset = size_binop (PLUS_EXPR, offset,
3578 size_binop (FLOOR_DIV_EXPR, index,
3579 size_int (BITS_PER_UNIT)));
3581 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
3582 && ! ((TREE_CODE (exp) == NOP_EXPR
3583 || TREE_CODE (exp) == CONVERT_EXPR)
3584 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3585 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
3586 != UNION_TYPE))
3587 && (TYPE_MODE (TREE_TYPE (exp))
3588 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3589 break;
3591 /* If any reference in the chain is volatile, the effect is volatile. */
3592 if (TREE_THIS_VOLATILE (exp))
3593 *pvolatilep = 1;
3594 exp = TREE_OPERAND (exp, 0);
3597 /* If this was a bit-field, see if there is a mode that allows direct
3598 access in case EXP is in memory. */
3599 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
3601 mode = mode_for_size (*pbitsize, MODE_INT, 0);
3602 if (mode == BLKmode)
3603 mode = VOIDmode;
3606 if (integer_zerop (offset))
3607 offset = 0;
3609 if (offset != 0 && contains_placeholder_p (offset))
3610 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
3612 *pmode = mode;
3613 *poffset = offset;
3614 return exp;
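/* Editor's note: a worked example with an assumed layout.  For
   `s.arr[i].f' the loop above visits COMPONENT_REF, ARRAY_REF, and
   COMPONENT_REF in turn: the constant bit positions of `arr' and `f'
   accumulate into *PBITPOS, the variable index contributes
   `i * sizeof (element)' bytes to *POFFSET through the FLOOR_DIV_EXPR
   by BITS_PER_UNIT, and the tree returned is `s' itself.  */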
3617 /* Given an rtx VALUE that may contain additions and multiplications,
3618 return an equivalent value that just refers to a register or memory.
3619 This is done by generating instructions to perform the arithmetic
3620 and returning a pseudo-register containing the value.
3622 The returned value may be a REG, SUBREG, MEM or constant. */
3624 rtx
3625 force_operand (value, target)
3626 rtx value, target;
3628 register optab binoptab = 0;
3629 /* Use a temporary to force order of execution of calls to
3630 `force_operand'. */
3631 rtx tmp;
3632 register rtx op2;
3633 /* Use subtarget as the target for operand 0 of a binary operation. */
3634 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3636 if (GET_CODE (value) == PLUS)
3637 binoptab = add_optab;
3638 else if (GET_CODE (value) == MINUS)
3639 binoptab = sub_optab;
3640 else if (GET_CODE (value) == MULT)
3642 op2 = XEXP (value, 1);
3643 if (!CONSTANT_P (op2)
3644 && !(GET_CODE (op2) == REG && op2 != subtarget))
3645 subtarget = 0;
3646 tmp = force_operand (XEXP (value, 0), subtarget);
3647 return expand_mult (GET_MODE (value), tmp,
3648 force_operand (op2, NULL_RTX),
3649 target, 0);
3652 if (binoptab)
3654 op2 = XEXP (value, 1);
3655 if (!CONSTANT_P (op2)
3656 && !(GET_CODE (op2) == REG && op2 != subtarget))
3657 subtarget = 0;
3658 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
3660 binoptab = add_optab;
3661 op2 = negate_rtx (GET_MODE (value), op2);
3664 /* Check for an addition with OP2 a constant integer and our first
3665 operand a PLUS of a virtual register and something else. In that
3666 case, we want to emit the sum of the virtual register and the
3667 constant first and then add the other value. This allows virtual
3668 register instantiation to simply modify the constant rather than
3669 creating another one around this addition. */
3670 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3671 && GET_CODE (XEXP (value, 0)) == PLUS
3672 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3673 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3674 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3676 rtx temp = expand_binop (GET_MODE (value), binoptab,
3677 XEXP (XEXP (value, 0), 0), op2,
3678 subtarget, 0, OPTAB_LIB_WIDEN);
3679 return expand_binop (GET_MODE (value), binoptab, temp,
3680 force_operand (XEXP (XEXP (value, 0), 1), 0),
3681 target, 0, OPTAB_LIB_WIDEN);
3684 tmp = force_operand (XEXP (value, 0), subtarget);
3685 return expand_binop (GET_MODE (value), binoptab, tmp,
3686 force_operand (op2, NULL_RTX),
3687 target, 0, OPTAB_LIB_WIDEN);
3688 /* We give UNSIGNEDP = 0 to expand_binop
3689 because the only operations we are expanding here are signed ones. */
3691 return value;
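/* Editor's note: the virtual-register special case above means that
   for a value such as (plus (plus virtual-stack-vars R) 8), with the
   offset assumed for illustration, the constant 8 is combined with the
   virtual register first and R is added afterwards, so that later
   virtual-register instantiation only has to rewrite one constant
   rather than emit a separate add.  */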
3694 /* Subroutine of expand_expr:
3695 save the non-copied parts (LIST) of an expr (LHS), and return a list
3696 which can restore these values to their previous values,
3697 should something modify their storage. */
3699 static tree
3700 save_noncopied_parts (lhs, list)
3701 tree lhs;
3702 tree list;
3704 tree tail;
3705 tree parts = 0;
3707 for (tail = list; tail; tail = TREE_CHAIN (tail))
3708 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3709 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3710 else
3712 tree part = TREE_VALUE (tail);
3713 tree part_type = TREE_TYPE (part);
3714 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3715 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3716 int_size_in_bytes (part_type), 0);
3717 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (part_type);
3718 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3719 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3720 parts = tree_cons (to_be_saved,
3721 build (RTL_EXPR, part_type, NULL_TREE,
3722 (tree) target),
3723 parts);
3724 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3726 return parts;
3729 /* Subroutine of expand_expr:
3730 record the non-copied parts (LIST) of an expr (LHS), and return a list
3731 which specifies the initial values of these parts. */
3733 static tree
3734 init_noncopied_parts (lhs, list)
3735 tree lhs;
3736 tree list;
3738 tree tail;
3739 tree parts = 0;
3741 for (tail = list; tail; tail = TREE_CHAIN (tail))
3742 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3743 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3744 else
3746 tree part = TREE_VALUE (tail);
3747 tree part_type = TREE_TYPE (part);
3748 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3749 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3751 return parts;
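/* Editor's note: concretely, for a noncopied-parts list naming a
   single field `p' (name assumed for illustration),
   save_noncopied_parts stores `lhs.p' into a fresh stack temporary
   wrapped in an RTL_EXPR so the old value can be restored, while
   init_noncopied_parts merely pairs each initial value with the
   COMPONENT_REF `lhs.p' it should initialize.  */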
3754 /* Subroutine of expand_expr: return nonzero iff there is no way that
3755 EXP can reference X, which is being modified. */
3757 static int
3758 safe_from_p (x, exp)
3759 rtx x;
3760 tree exp;
3762 rtx exp_rtl = 0;
3763 int i, nops;
3765 if (x == 0
3766 /* If EXP has varying size, we MUST use a target since we currently
3767 have no way of allocating temporaries of variable size. So we
3768 assume here that something at a higher level has prevented a
3769 clash. This is somewhat bogus, but the best we can do. */
3770 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
3771 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST))
3772 return 1;
3774 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3775 find the underlying pseudo. */
3776 if (GET_CODE (x) == SUBREG)
3778 x = SUBREG_REG (x);
3779 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3780 return 0;
3783 /* If X is a location in the outgoing argument area, it is always safe. */
3784 if (GET_CODE (x) == MEM
3785 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3786 || (GET_CODE (XEXP (x, 0)) == PLUS
3787 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3788 return 1;
3790 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3792 case 'd':
3793 exp_rtl = DECL_RTL (exp);
3794 break;
3796 case 'c':
3797 return 1;
3799 case 'x':
3800 if (TREE_CODE (exp) == TREE_LIST)
3801 return ((TREE_VALUE (exp) == 0
3802 || safe_from_p (x, TREE_VALUE (exp)))
3803 && (TREE_CHAIN (exp) == 0
3804 || safe_from_p (x, TREE_CHAIN (exp))));
3805 else
3806 return 0;
3808 case '1':
3809 return safe_from_p (x, TREE_OPERAND (exp, 0));
3811 case '2':
3812 case '<':
3813 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3814 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3816 case 'e':
3817 case 'r':
3818 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3819 the expression. If it is set, we conflict iff we are that rtx or
3820 both are in memory. Otherwise, we check all operands of the
3821 expression recursively. */
3823 switch (TREE_CODE (exp))
3825 case ADDR_EXPR:
3826 return (staticp (TREE_OPERAND (exp, 0))
3827 || safe_from_p (x, TREE_OPERAND (exp, 0)));
3829 case INDIRECT_REF:
3830 if (GET_CODE (x) == MEM)
3831 return 0;
3832 break;
3834 case CALL_EXPR:
3835 exp_rtl = CALL_EXPR_RTL (exp);
3836 if (exp_rtl == 0)
3838 /* Assume that the call will clobber all hard registers and
3839 all of memory. */
3840 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3841 || GET_CODE (x) == MEM)
3842 return 0;
3845 break;
3847 case RTL_EXPR:
3848 exp_rtl = RTL_EXPR_RTL (exp);
3849 if (exp_rtl == 0)
3850 /* We don't know what this can modify. */
3851 return 0;
3853 break;
3855 case WITH_CLEANUP_EXPR:
3856 exp_rtl = RTL_EXPR_RTL (exp);
3857 break;
3859 case CLEANUP_POINT_EXPR:
3860 return safe_from_p (x, TREE_OPERAND (exp, 0));
3862 case SAVE_EXPR:
3863 exp_rtl = SAVE_EXPR_RTL (exp);
3864 break;
3866 case BIND_EXPR:
3867 /* The only operand we look at is operand 1. The rest aren't
3868 part of the expression. */
3869 return safe_from_p (x, TREE_OPERAND (exp, 1));
3871 case METHOD_CALL_EXPR:
3872 /* This takes an rtx argument, but shouldn't appear here. */
3873 abort ();
3876 /* If we have an rtx, we do not need to scan our operands. */
3877 if (exp_rtl)
3878 break;
3880 nops = tree_code_length[(int) TREE_CODE (exp)];
3881 for (i = 0; i < nops; i++)
3882 if (TREE_OPERAND (exp, i) != 0
3883 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3884 return 0;
3887 /* If we have an rtl, find any enclosed object. Then see if we conflict
3888 with it. */
3889 if (exp_rtl)
3891 if (GET_CODE (exp_rtl) == SUBREG)
3893 exp_rtl = SUBREG_REG (exp_rtl);
3894 if (GET_CODE (exp_rtl) == REG
3895 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3896 return 0;
3899 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3900 are memory and EXP is not readonly. */
3901 return ! (rtx_equal_p (x, exp_rtl)
3902 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3903 && ! TREE_READONLY (exp)));
3906 /* If we reach here, it is safe. */
3907 return 1;
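/* Usage sketch, as in the CONSTRUCTOR case of expand_expr below:
   if (target == 0 || ! safe_from_p (target, exp))
   ... choose a fresh target ...
   i.e. EXP may be expanded into TARGET only when safe_from_p says
   EXP cannot reference TARGET's current contents. */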
3910 /* Subroutine of expand_expr: return nonzero iff EXP is an
3911 expression whose type is statically determinable. */
3913 static int
3914 fixed_type_p (exp)
3915 tree exp;
3917 if (TREE_CODE (exp) == PARM_DECL
3918 || TREE_CODE (exp) == VAR_DECL
3919 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3920 || TREE_CODE (exp) == COMPONENT_REF
3921 || TREE_CODE (exp) == ARRAY_REF)
3922 return 1;
3923 return 0;
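/* For illustration: the codes listed above always denote an object of
   exactly the declared type, whereas e.g. an INDIRECT_REF may really
   reference an object of some derived type, so its type is not
   considered statically determinable here. */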
3926 /* expand_expr: generate code for computing expression EXP.
3927 An rtx for the computed value is returned. The value is never null.
3928 In the case of a void EXP, const0_rtx is returned.
3930 The value may be stored in TARGET if TARGET is nonzero.
3931 TARGET is just a suggestion; callers must assume that
3932 the rtx returned may not be the same as TARGET.
3934 If TARGET is CONST0_RTX, it means that the value will be ignored.
3936 If TMODE is not VOIDmode, it suggests generating the
3937 result in mode TMODE. But this is done only when convenient.
3938 Otherwise, TMODE is ignored and the value is generated in its natural mode.
3939 TMODE is just a suggestion; callers must assume that
3940 the rtx returned may not have mode TMODE.
3942 Note that TARGET may have neither TMODE nor MODE. In that case, it
3943 probably will not be used.
3945 If MODIFIER is EXPAND_SUM then when EXP is an addition
3946 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3947 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3948 products as above, or REG or MEM, or constant.
3949 Ordinarily in such cases we would output mul or add instructions
3950 and then return a pseudo reg containing the sum.
3952 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3953 it also marks a label as absolutely required (it can't be dead).
3954 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3955 This is used for outputting expressions used in initializers.
3957 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3958 with a constant address even if that address is not normally legitimate.
3959 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
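/* A typical internal call, as used later in this file (hypothetical
   variable names):
   op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
   i.e. no preferred target, no preferred mode, and permission to
   return an unfinished sum suitable for address arithmetic. */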
3961 rtx
3962 expand_expr (exp, target, tmode, modifier)
3963 register tree exp;
3964 rtx target;
3965 enum machine_mode tmode;
3966 enum expand_modifier modifier;
3968 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
3969 This is static so it will be accessible to our recursive callees. */
3970 static tree placeholder_list = 0;
3971 register rtx op0, op1, temp;
3972 tree type = TREE_TYPE (exp);
3973 int unsignedp = TREE_UNSIGNED (type);
3974 register enum machine_mode mode = TYPE_MODE (type);
3975 register enum tree_code code = TREE_CODE (exp);
3976 optab this_optab;
3977 /* Use subtarget as the target for operand 0 of a binary operation. */
3978 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3979 rtx original_target = target;
3980 /* Maybe defer this until we are sure we are not generating bytecode? */
3981 int ignore = (target == const0_rtx
3982 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
3983 || code == CONVERT_EXPR || code == REFERENCE_EXPR
3984 || code == COND_EXPR)
3985 && TREE_CODE (type) == VOID_TYPE));
3986 tree context;
3989 if (output_bytecode && modifier != EXPAND_INITIALIZER)
3991 bc_expand_expr (exp);
3992 return NULL;
3995 /* Don't use hard regs as subtargets, because the combiner
3996 can only handle pseudo regs. */
3997 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3998 subtarget = 0;
3999 /* Avoid subtargets inside loops,
4000 since they hide some invariant expressions. */
4001 if (preserve_subexpressions_p ())
4002 subtarget = 0;
4004 /* If we are going to ignore this result, we need only do something
4005 if there is a side-effect somewhere in the expression. If there
4006 is, short-circuit the most common cases here. Note that we must
4007 not call expand_expr with anything but const0_rtx in case this
4008 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
4010 if (ignore)
4012 if (! TREE_SIDE_EFFECTS (exp))
4013 return const0_rtx;
4015 /* Ensure we reference a volatile object even if value is ignored. */
4016 if (TREE_THIS_VOLATILE (exp)
4017 && TREE_CODE (exp) != FUNCTION_DECL
4018 && mode != VOIDmode && mode != BLKmode)
4020 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
4021 if (GET_CODE (temp) == MEM)
4022 temp = copy_to_reg (temp);
4023 return const0_rtx;
4026 if (TREE_CODE_CLASS (code) == '1')
4027 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4028 VOIDmode, modifier);
4029 else if (TREE_CODE_CLASS (code) == '2'
4030 || TREE_CODE_CLASS (code) == '<')
4032 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4033 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
4034 return const0_rtx;
4036 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4037 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4038 /* If the second operand has no side effects, just evaluate
4039 the first. */
4040 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4041 VOIDmode, modifier);
4043 target = 0;
4046 /* If will do cse, generate all results into pseudo registers
4047 since 1) that allows cse to find more things
4048 and 2) otherwise cse could produce an insn the machine
4049 cannot support. */
4051 if (! cse_not_expected && mode != BLKmode && target
4052 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4053 target = subtarget;
4055 switch (code)
4057 case LABEL_DECL:
4059 tree function = decl_function_context (exp);
4060 /* Handle using a label in a containing function. */
4061 if (function != current_function_decl && function != 0)
4063 struct function *p = find_function_data (function);
4064 /* Allocate in the memory associated with the function
4065 that the label is in. */
4066 push_obstacks (p->function_obstack,
4067 p->function_maybepermanent_obstack);
4069 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4070 label_rtx (exp), p->forced_labels);
4071 pop_obstacks ();
4073 else if (modifier == EXPAND_INITIALIZER)
4074 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4075 label_rtx (exp), forced_labels);
4076 temp = gen_rtx (MEM, FUNCTION_MODE,
4077 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
4078 if (function != current_function_decl && function != 0)
4079 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4080 return temp;
4083 case PARM_DECL:
4084 if (DECL_RTL (exp) == 0)
4086 error_with_decl (exp, "prior parameter's size depends on `%s'");
4087 return CONST0_RTX (mode);
4090 /* ... fall through ... */
4092 case VAR_DECL:
4093 /* If a static var's type was incomplete when the decl was written,
4094 but the type is complete now, lay out the decl now. */
4095 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4096 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4098 push_obstacks_nochange ();
4099 end_temporary_allocation ();
4100 layout_decl (exp, 0);
4101 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
4102 pop_obstacks ();
4105 /* ... fall through ... */
4107 case FUNCTION_DECL:
4108 case RESULT_DECL:
4109 if (DECL_RTL (exp) == 0)
4110 abort ();
4112 /* Ensure the variable is marked as used even if it doesn't go
4113 through a parser. If it hasn't been used yet, write out an
4114 external definition. */
4115 if (! TREE_USED (exp))
4117 assemble_external (exp);
4118 TREE_USED (exp) = 1;
4121 /* Handle variables inherited from containing functions. */
4122 context = decl_function_context (exp);
4124 /* We treat inline_function_decl as an alias for the current function
4125 because that is the inline function whose vars, types, etc.
4126 are being merged into the current function.
4127 See expand_inline_function. */
4129 if (context != 0 && context != current_function_decl
4130 && context != inline_function_decl
4131 /* If var is static, we don't need a static chain to access it. */
4132 && ! (GET_CODE (DECL_RTL (exp)) == MEM
4133 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
4135 rtx addr;
4137 /* Mark as non-local and addressable. */
4138 DECL_NONLOCAL (exp) = 1;
4139 mark_addressable (exp);
4140 if (GET_CODE (DECL_RTL (exp)) != MEM)
4141 abort ();
4142 addr = XEXP (DECL_RTL (exp), 0);
4143 if (GET_CODE (addr) == MEM)
4144 addr = gen_rtx (MEM, Pmode,
4145 fix_lexical_addr (XEXP (addr, 0), exp));
4146 else
4147 addr = fix_lexical_addr (addr, exp);
4148 return change_address (DECL_RTL (exp), mode, addr);
4151 /* This is the case of an array whose size is to be determined
4152 from its initializer, while the initializer is still being parsed.
4153 See expand_decl. */
4155 if (GET_CODE (DECL_RTL (exp)) == MEM
4156 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
4157 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
4158 XEXP (DECL_RTL (exp), 0));
4160 /* If DECL_RTL is memory, we are in the normal case and either
4161 the address is not valid or it is not a register and -fforce-addr
4162 is specified, get the address into a register. */
4164 if (GET_CODE (DECL_RTL (exp)) == MEM
4165 && modifier != EXPAND_CONST_ADDRESS
4166 && modifier != EXPAND_SUM
4167 && modifier != EXPAND_INITIALIZER
4168 && (! memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
4169 || (flag_force_addr
4170 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
4171 return change_address (DECL_RTL (exp), VOIDmode,
4172 copy_rtx (XEXP (DECL_RTL (exp), 0)));
4174 /* If the mode of DECL_RTL does not match that of the decl, it
4175 must be a promoted value. We return a SUBREG of the wanted mode,
4176 but mark it so that we know that it was already extended. */
4178 if (GET_CODE (DECL_RTL (exp)) == REG
4179 && GET_MODE (DECL_RTL (exp)) != mode)
4181 /* Get the signedness used for this variable. Ensure we get the
4182 same mode we got when the variable was declared. */
4183 if (GET_MODE (DECL_RTL (exp))
4184 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
4185 abort ();
4187 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
4188 SUBREG_PROMOTED_VAR_P (temp) = 1;
4189 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4190 return temp;
4193 return DECL_RTL (exp);
4195 case INTEGER_CST:
4196 return immed_double_const (TREE_INT_CST_LOW (exp),
4197 TREE_INT_CST_HIGH (exp),
4198 mode);
4200 case CONST_DECL:
4201 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
4203 case REAL_CST:
4204 /* If optimized, generate immediate CONST_DOUBLE
4205 which will be turned into memory by reload if necessary.
4207 We used to force a register so that loop.c could see it. But
4208 this does not allow gen_* patterns to perform optimizations with
4209 the constants. It also produces two insns in cases like "x = 1.0;".
4210 On most machines, floating-point constants are not permitted in
4211 many insns, so we'd end up copying it to a register in any case.
4213 Now, we do the copying in expand_binop, if appropriate. */
4214 return immed_real_const (exp);
4216 case COMPLEX_CST:
4217 case STRING_CST:
4218 if (! TREE_CST_RTL (exp))
4219 output_constant_def (exp);
4221 /* TREE_CST_RTL probably contains a constant address.
4222 On RISC machines where a constant address isn't valid,
4223 make some insns to get that address into a register. */
4224 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
4225 && modifier != EXPAND_CONST_ADDRESS
4226 && modifier != EXPAND_INITIALIZER
4227 && modifier != EXPAND_SUM
4228 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
4229 || (flag_force_addr
4230 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
4231 return change_address (TREE_CST_RTL (exp), VOIDmode,
4232 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
4233 return TREE_CST_RTL (exp);
4235 case SAVE_EXPR:
4236 context = decl_function_context (exp);
4238 /* We treat inline_function_decl as an alias for the current function
4239 because that is the inline function whose vars, types, etc.
4240 are being merged into the current function.
4241 See expand_inline_function. */
4242 if (context == current_function_decl || context == inline_function_decl)
4243 context = 0;
4245 /* If this is non-local, handle it. */
4246 if (context)
4248 temp = SAVE_EXPR_RTL (exp);
4249 if (temp && GET_CODE (temp) == REG)
4251 put_var_into_stack (exp);
4252 temp = SAVE_EXPR_RTL (exp);
4254 if (temp == 0 || GET_CODE (temp) != MEM)
4255 abort ();
4256 return change_address (temp, mode,
4257 fix_lexical_addr (XEXP (temp, 0), exp));
4259 if (SAVE_EXPR_RTL (exp) == 0)
4261 if (mode == BLKmode)
4263 temp
4264 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4265 MEM_IN_STRUCT_P (temp) = AGGREGATE_TYPE_P (type);
4267 else
4268 temp = gen_reg_rtx (promote_mode (type, mode, &unsignedp, 0));
4270 SAVE_EXPR_RTL (exp) = temp;
4271 if (!optimize && GET_CODE (temp) == REG)
4272 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
4273 save_expr_regs);
4275 /* If the mode of TEMP does not match that of the expression, it
4276 must be a promoted value. We pass store_expr a SUBREG of the
4277 wanted mode but mark it so that we know that it was already
4278 extended. Note that `unsignedp' was modified above in
4279 this case. */
4281 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
4283 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
4284 SUBREG_PROMOTED_VAR_P (temp) = 1;
4285 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4288 store_expr (TREE_OPERAND (exp, 0), temp, 0);
4291 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
4292 must be a promoted value. We return a SUBREG of the wanted mode,
4293 but mark it so that we know that it was already extended. */
4295 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
4296 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
4298 /* Compute the signedness and make the proper SUBREG. */
4299 promote_mode (type, mode, &unsignedp, 0);
4300 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
4301 SUBREG_PROMOTED_VAR_P (temp) = 1;
4302 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4303 return temp;
4306 return SAVE_EXPR_RTL (exp);
4308 case PLACEHOLDER_EXPR:
4309 /* If there is an object on the head of the placeholder list,
4310 see if some object in its references is of type TYPE. For
4311 further information, see tree.def. */
4312 if (placeholder_list)
4314 tree object;
4315 tree old_list = placeholder_list;
4317 for (object = TREE_PURPOSE (placeholder_list);
4318 TREE_TYPE (object) != type
4319 && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
4320 || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
4321 || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
4322 || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
4323 object = TREE_OPERAND (object, 0))
4326 if (object && TREE_TYPE (object) == type)
4328 /* Expand this object skipping the list entries before
4329 it was found in case it is also a PLACEHOLDER_EXPR.
4330 In that case, we want to translate it using subsequent
4331 entries. */
4332 placeholder_list = TREE_CHAIN (placeholder_list);
4333 temp = expand_expr (object, original_target, tmode, modifier);
4334 placeholder_list = old_list;
4335 return temp;
4339 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
4340 abort ();
4342 case WITH_RECORD_EXPR:
4343 /* Put the object on the placeholder list, expand our first operand,
4344 and pop the list. */
4345 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
4346 placeholder_list);
4347 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
4348 tmode, modifier);
4349 placeholder_list = TREE_CHAIN (placeholder_list);
4350 return target;
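/* For illustration: if a field's size expression refers back to the
   containing record via a PLACEHOLDER_EXPR, it is expanded as
   (WITH_RECORD_EXPR size-expr object); the code above pushes OBJECT
   onto placeholder_list so the PLACEHOLDER_EXPR case can substitute a
   reference into OBJECT of the wanted type. See tree.def. */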
4352 case EXIT_EXPR:
4353 expand_exit_loop_if_false (NULL_PTR,
4354 invert_truthvalue (TREE_OPERAND (exp, 0)));
4355 return const0_rtx;
4357 case LOOP_EXPR:
4358 push_temp_slots ();
4359 expand_start_loop (1);
4360 expand_expr_stmt (TREE_OPERAND (exp, 0));
4361 expand_end_loop ();
4362 pop_temp_slots ();
4364 return const0_rtx;
4366 case BIND_EXPR:
4368 tree vars = TREE_OPERAND (exp, 0);
4369 int vars_need_expansion = 0;
4371 /* Need to open a binding contour here because
4372 if there are any cleanups, they must be contained here. */
4373 expand_start_bindings (0);
4375 /* Mark the corresponding BLOCK for output in its proper place. */
4376 if (TREE_OPERAND (exp, 2) != 0
4377 && ! TREE_USED (TREE_OPERAND (exp, 2)))
4378 insert_block (TREE_OPERAND (exp, 2));
4380 /* If VARS have not yet been expanded, expand them now. */
4381 while (vars)
4383 if (DECL_RTL (vars) == 0)
4385 vars_need_expansion = 1;
4386 expand_decl (vars);
4388 expand_decl_init (vars);
4389 vars = TREE_CHAIN (vars);
4392 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4394 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
4396 return temp;
4399 case RTL_EXPR:
4400 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
4401 abort ();
4402 emit_insns (RTL_EXPR_SEQUENCE (exp));
4403 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
4404 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
4405 free_temps_for_rtl_expr (exp);
4406 return RTL_EXPR_RTL (exp);
4408 case CONSTRUCTOR:
4409 /* If we don't need the result, just ensure we evaluate any
4410 subexpressions. */
4411 if (ignore)
4413 tree elt;
4414 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4415 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
4416 return const0_rtx;
4419 /* All elts simple constants => refer to a constant in memory. But
4420 if this is a non-BLKmode mode, let it store a field at a time
4421 since that should make a CONST_INT or CONST_DOUBLE when we
4422 fold. Likewise, if we have a target we can use, it is best to
4423 store directly into the target unless the type is large enough
4424 that memcpy will be used. If we are making an initializer and
4425 all operands are constant, put it in memory as well. */
4426 else if ((TREE_STATIC (exp)
4427 && ((mode == BLKmode
4428 && ! (target != 0 && safe_from_p (target, exp)))
4429 || TREE_ADDRESSABLE (exp)
4430 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
4431 && (move_by_pieces_ninsns
4432 (TREE_INT_CST_LOW (TYPE_SIZE (type)),
4433 TYPE_ALIGN (type))
4434 > MOVE_RATIO))))
4435 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
4437 rtx constructor = output_constant_def (exp);
4438 if (modifier != EXPAND_CONST_ADDRESS
4439 && modifier != EXPAND_INITIALIZER
4440 && modifier != EXPAND_SUM
4441 && (! memory_address_p (GET_MODE (constructor),
4442 XEXP (constructor, 0))
4443 || (flag_force_addr
4444 && GET_CODE (XEXP (constructor, 0)) != REG)))
4445 constructor = change_address (constructor, VOIDmode,
4446 XEXP (constructor, 0));
4447 return constructor;
4450 else
4452 if (target == 0 || ! safe_from_p (target, exp))
4454 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
4455 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4456 else
4458 target
4459 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4460 if (AGGREGATE_TYPE_P (type))
4461 MEM_IN_STRUCT_P (target) = 1;
4464 store_constructor (exp, target);
4465 return target;
4468 case INDIRECT_REF:
4470 tree exp1 = TREE_OPERAND (exp, 0);
4471 tree exp2;
4473 /* A SAVE_EXPR as the address in an INDIRECT_REF is generated
4474 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
4475 This code has the same general effect as simply doing
4476 expand_expr on the save expr, except that the expression PTR
4477 is computed for use as a memory address. This means different
4478 code, suitable for indexing, may be generated. */
4479 if (TREE_CODE (exp1) == SAVE_EXPR
4480 && SAVE_EXPR_RTL (exp1) == 0
4481 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
4482 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
4483 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
4485 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
4486 VOIDmode, EXPAND_SUM);
4487 op0 = memory_address (mode, temp);
4488 op0 = copy_all_regs (op0);
4489 SAVE_EXPR_RTL (exp1) = op0;
4491 else
4493 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
4494 op0 = memory_address (mode, op0);
4497 temp = gen_rtx (MEM, mode, op0);
4498 /* If address was computed by addition,
4499 mark this as an element of an aggregate. */
4500 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4501 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
4502 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
4503 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
4504 || (TREE_CODE (exp1) == ADDR_EXPR
4505 && (exp2 = TREE_OPERAND (exp1, 0))
4506 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
4507 MEM_IN_STRUCT_P (temp) = 1;
4508 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
4509 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4510 a location is accessed through a pointer to const does not mean
4511 that the value there can never change. */
4512 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4513 #endif
4514 return temp;
4517 case ARRAY_REF:
4518 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
4519 abort ();
4522 tree array = TREE_OPERAND (exp, 0);
4523 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
4524 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4525 tree index = TREE_OPERAND (exp, 1);
4526 tree index_type = TREE_TYPE (index);
4527 int i;
4529 if (TREE_CODE (low_bound) != INTEGER_CST
4530 && contains_placeholder_p (low_bound))
4531 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
4533 /* Optimize the special-case of a zero lower bound.
4535 We convert the low_bound to sizetype to avoid some problems
4536 with constant folding. (E.g. suppose the lower bound is 1,
4537 and its mode is QI. Without the conversion, (ARRAY
4538 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4539 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4541 But sizetype isn't quite right either (especially if
4542 the low bound is negative). FIXME */
4544 if (! integer_zerop (low_bound))
4545 index = fold (build (MINUS_EXPR, index_type, index,
4546 convert (sizetype, low_bound)));
4548 if ((TREE_CODE (index) != INTEGER_CST
4549 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4550 && (! SLOW_UNALIGNED_ACCESS || ! get_inner_unaligned_p (exp)))
4552 /* Nonconstant array index or nonconstant element size, and
4553 not an array in an unaligned (packed) structure field.
4554 Generate the tree for *(&array+index) and expand that,
4555 except do it in a language-independent way
4556 and don't complain about non-lvalue arrays.
4557 `mark_addressable' should already have been called
4558 for any array for which this case will be reached. */
4560 /* Don't forget the const or volatile flag from the array
4561 element. */
4562 tree variant_type = build_type_variant (type,
4563 TREE_READONLY (exp),
4564 TREE_THIS_VOLATILE (exp));
4565 tree array_adr = build1 (ADDR_EXPR,
4566 build_pointer_type (variant_type), array);
4567 tree elt;
4568 tree size = size_in_bytes (type);
4570 /* Convert the integer argument to a type the same size as a
4571 pointer so the multiply won't overflow spuriously. */
4572 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
4573 index = convert (type_for_size (POINTER_SIZE, 0), index);
4575 if (TREE_CODE (size) != INTEGER_CST
4576 && contains_placeholder_p (size))
4577 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
4579 /* Don't think the address has side effects
4580 just because the array does.
4581 (In some cases the address might have side effects,
4582 and we fail to record that fact here. However, it should not
4583 matter, since expand_expr should not care.) */
4584 TREE_SIDE_EFFECTS (array_adr) = 0;
4586 elt = build1 (INDIRECT_REF, type,
4587 fold (build (PLUS_EXPR,
4588 TYPE_POINTER_TO (variant_type),
4589 array_adr,
4590 fold (build (MULT_EXPR,
4591 TYPE_POINTER_TO (variant_type),
4592 index, size)))));
4594 /* Volatility, etc., of new expression is same as old
4595 expression. */
4596 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
4597 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
4598 TREE_READONLY (elt) = TREE_READONLY (exp);
4600 return expand_expr (elt, target, tmode, modifier);
4603 /* Fold an expression like: "foo"[2].
4604 This is not done in fold so it won't happen inside &.
4605 Don't fold if this is for wide characters since it's too
4606 difficult to do correctly and this is a very rare case. */
4608 if (TREE_CODE (array) == STRING_CST
4609 && TREE_CODE (index) == INTEGER_CST
4610 && !TREE_INT_CST_HIGH (index)
4611 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
4612 && GET_MODE_CLASS (mode) == MODE_INT
4613 && GET_MODE_SIZE (mode) == 1)
4614 return GEN_INT (TREE_STRING_POINTER (array)[i]);
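/* E.g. "foo"[2] expands directly to (const_int 111), the ASCII code
   for 'o'; no memory reference is generated. */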
4616 /* If this is a constant index into a constant array,
4617 just get the value from the array. Handle both the cases when
4618 we have an explicit constructor and when our operand is a variable
4619 that was declared const. */
4621 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
4623 if (TREE_CODE (index) == INTEGER_CST
4624 && TREE_INT_CST_HIGH (index) == 0)
4626 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
4628 i = TREE_INT_CST_LOW (index);
4629 while (elem && i--)
4630 elem = TREE_CHAIN (elem);
4631 if (elem)
4632 return expand_expr (fold (TREE_VALUE (elem)), target,
4633 tmode, modifier);
4637 else if (optimize >= 1
4638 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
4639 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
4640 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
4642 if (TREE_CODE (index) == INTEGER_CST
4643 && TREE_INT_CST_HIGH (index) == 0)
4645 tree init = DECL_INITIAL (array);
4647 i = TREE_INT_CST_LOW (index);
4648 if (TREE_CODE (init) == CONSTRUCTOR)
4650 tree elem = CONSTRUCTOR_ELTS (init);
4652 while (elem
4653 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
4654 elem = TREE_CHAIN (elem);
4655 if (elem)
4656 return expand_expr (fold (TREE_VALUE (elem)), target,
4657 tmode, modifier);
4659 else if (TREE_CODE (init) == STRING_CST
4660 && i < TREE_STRING_LENGTH (init))
4661 return GEN_INT (TREE_STRING_POINTER (init)[i]);
4666 /* Treat array-ref with constant index as a component-ref. */
4668 case COMPONENT_REF:
4669 case BIT_FIELD_REF:
4670 /* If the operand is a CONSTRUCTOR, we can just extract the
4671 appropriate field if it is present. Don't do this if we have
4672 already written the data since we want to refer to that copy
4673 and varasm.c assumes that's what we'll do. */
4674 if (code != ARRAY_REF
4675 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
4676 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
4678 tree elt;
4680 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
4681 elt = TREE_CHAIN (elt))
4682 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
4683 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
4687 enum machine_mode mode1;
4688 int bitsize;
4689 int bitpos;
4690 tree offset;
4691 int volatilep = 0;
4692 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
4693 &mode1, &unsignedp, &volatilep);
4694 int alignment;
4696 /* If we got back the original object, something is wrong. Perhaps
4697 we are evaluating an expression too early. In any event, don't
4698 infinitely recurse. */
4699 if (tem == exp)
4700 abort ();
4702 /* In some cases, we will be offsetting OP0's address by a constant.
4703 So get it as a sum, if possible. If we will be using it
4704 directly in an insn, we validate it. */
4705 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
4707 /* If this is a constant, put it into a register if it is a
4708 legitimate constant, and into memory if it isn't. */
4709 if (CONSTANT_P (op0))
4711 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
4712 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
4713 op0 = force_reg (mode, op0);
4714 else
4715 op0 = validize_mem (force_const_mem (mode, op0));
4718 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
4719 if (offset != 0)
4721 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4723 if (GET_CODE (op0) != MEM)
4724 abort ();
4725 op0 = change_address (op0, VOIDmode,
4726 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
4727 force_reg (Pmode, offset_rtx)));
4728 /* If we have a variable offset, the known alignment
4729 is only that of the innermost structure containing the field.
4730 (Actually, we could sometimes do better by using the
4731 size of an element of the innermost array, but no need.) */
4732 if (TREE_CODE (exp) == COMPONENT_REF
4733 || TREE_CODE (exp) == BIT_FIELD_REF)
4734 alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
4735 / BITS_PER_UNIT);
4738 /* Don't forget about volatility even if this is a bitfield. */
4739 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
4741 op0 = copy_rtx (op0);
4742 MEM_VOLATILE_P (op0) = 1;
4745 /* In cases where an aligned union has an unaligned object
4746 as a field, we might be extracting a BLKmode value from
4747 an integer-mode (e.g., SImode) object. Handle this case
4748 by doing the extract into an object as wide as the field
4749 (which we know to be the width of a basic mode), then
4750 storing into memory, and changing the mode to BLKmode. */
4751 if (mode1 == VOIDmode
4752 || (mode1 != BLKmode && ! direct_load[(int) mode1]
4753 && modifier != EXPAND_CONST_ADDRESS
4754 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4755 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
4756 /* If the field isn't aligned enough to fetch as a memref,
4757 fetch it as a bit field. */
4758 || (SLOW_UNALIGNED_ACCESS
4759 && TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
4760 || (SLOW_UNALIGNED_ACCESS
4761 && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4763 enum machine_mode ext_mode = mode;
4765 if (ext_mode == BLKmode)
4766 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
4768 if (ext_mode == BLKmode)
4769 abort ();
4771 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
4772 unsignedp, target, ext_mode, ext_mode,
4773 alignment,
4774 int_size_in_bytes (TREE_TYPE (tem)));
4775 if (mode == BLKmode)
4777 rtx new = assign_stack_temp (ext_mode,
4778 bitsize / BITS_PER_UNIT, 0);
4780 emit_move_insn (new, op0);
4781 op0 = copy_rtx (new);
4782 PUT_MODE (op0, BLKmode);
4783 MEM_IN_STRUCT_P (op0) = 1;
4786 return op0;
4789 /* Get a reference to just this component. */
4790 if (modifier == EXPAND_CONST_ADDRESS
4791 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4792 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4793 (bitpos / BITS_PER_UNIT)));
4794 else
4795 op0 = change_address (op0, mode1,
4796 plus_constant (XEXP (op0, 0),
4797 (bitpos / BITS_PER_UNIT)));
4798 MEM_IN_STRUCT_P (op0) = 1;
4799 MEM_VOLATILE_P (op0) |= volatilep;
4800 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
4801 return op0;
4802 if (target == 0)
4803 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4804 convert_move (target, op0, unsignedp);
4805 return target;
4808 case OFFSET_REF:
4810 tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
4811 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
4812 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
4813 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4814 MEM_IN_STRUCT_P (temp) = 1;
4815 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
4816 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4817 a location is accessed through a pointer to const does not mean
4818 that the value there can never change. */
4819 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4820 #endif
4821 return temp;
4824 /* Intended for a reference to a buffer of a file-object in Pascal.
4825 But it's not certain that a special tree code will really be
4826 necessary for these. INDIRECT_REF might work for them. */
4827 case BUFFER_REF:
4828 abort ();
4830 case IN_EXPR:
4832 /* Pascal set IN expression.
4834 Algorithm:
4835 rlo = set_low - (set_low%bits_per_word);
4836 the_word = set [ (index - rlo)/bits_per_word ];
4837 bit_index = index % bits_per_word;
4838 bitmask = 1 << bit_index;
4839 return !!(the_word & bitmask); */
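/* Worked example for the pseudo-code above (hypothetical numbers,
   with 8-bit units): set_low == 0 and index == 13 give rlo == 0,
   the_word == set[13/8] == set[1], bit_index == 13%8 == 5, and
   bitmask == 1<<5 == 0x20, i.e. we test bit 5 of byte 1. Note the
   code below actually works in BITS_PER_UNIT-sized chunks, not
   words. */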
4841 tree set = TREE_OPERAND (exp, 0);
4842 tree index = TREE_OPERAND (exp, 1);
4843 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
4844 tree set_type = TREE_TYPE (set);
4845 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4846 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4847 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
4848 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4849 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4850 rtx setval = expand_expr (set, 0, VOIDmode, 0);
4851 rtx setaddr = XEXP (setval, 0);
4852 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4853 rtx rlow;
4854 rtx diff, quo, rem, addr, bit, result;
4856 preexpand_calls (exp);
4858 /* If domain is empty, answer is no. Likewise if index is constant
4859 and out of bounds. */
4860 if ((TREE_CODE (set_high_bound) == INTEGER_CST
4861 && TREE_CODE (set_low_bound) == INTEGER_CST
4862 && tree_int_cst_lt (set_high_bound, set_low_bound)
4863 || (TREE_CODE (index) == INTEGER_CST
4864 && TREE_CODE (set_low_bound) == INTEGER_CST
4865 && tree_int_cst_lt (index, set_low_bound))
4866 || (TREE_CODE (set_high_bound) == INTEGER_CST
4867 && TREE_CODE (index) == INTEGER_CST
4868 && tree_int_cst_lt (set_high_bound, index))))
4869 return const0_rtx;
4871 if (target == 0)
4872 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4874 /* If we get here, we have to generate the code for both cases
4875 (in range and out of range). */
4877 op0 = gen_label_rtx ();
4878 op1 = gen_label_rtx ();
4880 if (! (GET_CODE (index_val) == CONST_INT
4881 && GET_CODE (lo_r) == CONST_INT))
4883 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4884 GET_MODE (index_val), iunsignedp, 0);
4885 emit_jump_insn (gen_blt (op1));
4888 if (! (GET_CODE (index_val) == CONST_INT
4889 && GET_CODE (hi_r) == CONST_INT))
4891 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4892 GET_MODE (index_val), iunsignedp, 0);
4893 emit_jump_insn (gen_bgt (op1));
4896 /* Calculate the element number of bit zero in the first word
4897 of the set. */
4898 if (GET_CODE (lo_r) == CONST_INT)
4899 rlow = GEN_INT (INTVAL (lo_r)
4900 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
4901 else
4902 rlow = expand_binop (index_mode, and_optab, lo_r,
4903 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
4904 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
4906 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
4907 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
4909 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4910 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
4911 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4912 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
4914 addr = memory_address (byte_mode,
4915 expand_binop (index_mode, add_optab, diff,
4916 setaddr, NULL_RTX, iunsignedp,
4917 OPTAB_LIB_WIDEN));
4919 /* Extract the bit we want to examine. */
4920 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4921 gen_rtx (MEM, byte_mode, addr),
4922 make_tree (TREE_TYPE (index), rem),
4923 NULL_RTX, 1);
4924 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4925 GET_MODE (target) == byte_mode ? target : 0,
4926 1, OPTAB_LIB_WIDEN);
4928 if (result != target)
4929 convert_move (target, result, 1);
4931 /* Output the code to handle the out-of-range case. */
4932 emit_jump (op0);
4933 emit_label (op1);
4934 emit_move_insn (target, const0_rtx);
4935 emit_label (op0);
4936 return target;
4939 case WITH_CLEANUP_EXPR:
4940 if (RTL_EXPR_RTL (exp) == 0)
4942 RTL_EXPR_RTL (exp)
4943 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4944 cleanups_this_call
4945 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4946 /* That's it for this cleanup. */
4947 TREE_OPERAND (exp, 2) = 0;
4948 (*interim_eh_hook) (NULL_TREE);
4950 return RTL_EXPR_RTL (exp);
4952 case CLEANUP_POINT_EXPR:
4954 extern int temp_slot_level;
4955 tree old_cleanups = cleanups_this_call;
4956 int old_temp_level = target_temp_slot_level;
4957 push_temp_slots ();
4958 target_temp_slot_level = temp_slot_level;
4959 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4960 expand_cleanups_to (old_cleanups);
4961 preserve_temp_slots (op0);
4962 free_temp_slots ();
4963 pop_temp_slots ();
4964 target_temp_slot_level = old_temp_level;
4966 return op0;
4968 case CALL_EXPR:
4969 /* Check for a built-in function. */
4970 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4971 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4972 == FUNCTION_DECL)
4973 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4974 return expand_builtin (exp, target, subtarget, tmode, ignore);
4976 /* If this call was expanded already by preexpand_calls,
4977 just return the result we got. */
4978 if (CALL_EXPR_RTL (exp) != 0)
4979 return CALL_EXPR_RTL (exp);
4981 return expand_call (exp, target, ignore);
4983 case NON_LVALUE_EXPR:
4984 case NOP_EXPR:
4985 case CONVERT_EXPR:
4986 case REFERENCE_EXPR:
4987 if (TREE_CODE (type) == UNION_TYPE)
4989 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4990 if (target == 0)
4992 if (mode == BLKmode)
4994 if (TYPE_SIZE (type) == 0
4995 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4996 abort ();
4997 target = assign_stack_temp (BLKmode,
4998 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4999 + BITS_PER_UNIT - 1)
5000 / BITS_PER_UNIT, 0);
5001 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (type);
5003 else
5004 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5007 if (GET_CODE (target) == MEM)
5008 /* Store data into beginning of memory target. */
5009 store_expr (TREE_OPERAND (exp, 0),
5010 change_address (target, TYPE_MODE (valtype), 0), 0);
5012 else if (GET_CODE (target) == REG)
5013 /* Store this field into a union of the proper type. */
5014 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
5015 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
5016 VOIDmode, 0, 1,
5017 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
5018 else
5019 abort ();
5021 /* Return the entire union. */
5022 return target;
5025 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5027 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
5028 modifier);
5030 /* If the signedness of the conversion differs and OP0 is
5031 a promoted SUBREG, clear that indication since we now
5032 have to do the proper extension. */
5033 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
5034 && GET_CODE (op0) == SUBREG)
5035 SUBREG_PROMOTED_VAR_P (op0) = 0;
5037 return op0;
5040 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
5041 if (GET_MODE (op0) == mode)
5042 return op0;
5044 /* If OP0 is a constant, just convert it into the proper mode. */
5045 if (CONSTANT_P (op0))
5046 return
5047 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5048 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5050 if (modifier == EXPAND_INITIALIZER)
5051 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
5053 if (flag_force_mem && GET_CODE (op0) == MEM)
5054 op0 = copy_to_reg (op0);
5056 if (target == 0)
5057 return
5058 convert_to_mode (mode, op0,
5059 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5060 else
5061 convert_move (target, op0,
5062 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5063 return target;
5065 case PLUS_EXPR:
5066 /* We come here from MINUS_EXPR when the second operand is a constant. */
5067 plus_expr:
5068 this_optab = add_optab;
5070 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
5071 something else, make sure we add the register to the constant and
5072 then to the other thing. This case can occur during strength
5073 reduction and doing it this way will produce better code if the
5074 frame pointer or argument pointer is eliminated.
5076 fold-const.c will ensure that the constant is always in the inner
5077 PLUS_EXPR, so the only case we need to do anything about is if
5078 sp, ap, or fp is our second argument, in which case we must swap
5079 the innermost first argument and our second argument. */
5081 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5082 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
5083 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
5084 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
5085 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
5086 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
5088 tree t = TREE_OPERAND (exp, 1);
5090 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5091 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
5094 /* If the result is to be Pmode and we are adding an integer to
5095 something, we might be forming a constant. So try to use
5096 plus_constant. If it produces a sum and we can't accept it,
5097 use force_operand. This allows P = &ARR[const] to generate
5098 efficient code on machines where a SYMBOL_REF is not a valid
5099 address.
5101 If this is an EXPAND_SUM call, always return the sum. */
5102 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
5103 || mode == Pmode)
5105 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
5106 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5107 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
5109 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
5110 EXPAND_SUM);
5111 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
5112 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5113 op1 = force_operand (op1, target);
5114 return op1;
5117 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5118 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
5119 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
5121 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
5122 EXPAND_SUM);
5123 if (! CONSTANT_P (op0))
5125 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5126 VOIDmode, modifier);
5127 /* Don't go to both_summands if modifier
5128 says it's not right to return a PLUS. */
5129 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5130 goto binop2;
5131 goto both_summands;
5133 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
5134 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5135 op0 = force_operand (op0, target);
5136 return op0;
5140 /* No sense saving up arithmetic to be done
5141 if it's all in the wrong mode to form part of an address.
5142 And force_operand won't know whether to sign-extend or
5143 zero-extend. */
5144 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5145 || mode != Pmode)
5146 goto binop;
5148 preexpand_calls (exp);
5149 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5150 subtarget = 0;
5152 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
5153 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
5155 both_summands:
5156 /* Make sure any term that's a sum with a constant comes last. */
5157 if (GET_CODE (op0) == PLUS
5158 && CONSTANT_P (XEXP (op0, 1)))
5160 temp = op0;
5161 op0 = op1;
5162 op1 = temp;
5164 /* If adding to a sum including a constant,
5165 associate it to put the constant outside. */
5166 if (GET_CODE (op1) == PLUS
5167 && CONSTANT_P (XEXP (op1, 1)))
5169 rtx constant_term = const0_rtx;
5171 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
5172 if (temp != 0)
5173 op0 = temp;
5174 /* Ensure that MULT comes first if there is one. */
5175 else if (GET_CODE (op0) == MULT)
5176 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
5177 else
5178 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
5180 /* Let's also eliminate constants from op0 if possible. */
5181 op0 = eliminate_constant_term (op0, &constant_term);
5183 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
5184 their sum should be a constant. Form it into OP1, since the
5185 result we want will then be OP0 + OP1. */
5187 temp = simplify_binary_operation (PLUS, mode, constant_term,
5188 XEXP (op1, 1));
5189 if (temp != 0)
5190 op1 = temp;
5191 else
5192 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
5195 /* Put a constant term last and put a multiplication first. */
5196 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
5197 temp = op1, op1 = op0, op0 = temp;
5199 temp = simplify_binary_operation (PLUS, mode, op0, op1);
5200 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
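/* For illustration (hypothetical operands): adding
   (plus (reg 100) (const_int 4)) and (const_int 6) under EXPAND_SUM
   reassociates above into (plus (reg 100) (const_int 10)) without
   emitting any add insn. */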
5202 case MINUS_EXPR:
5203 /* For initializers, we are allowed to return a MINUS of two
5204 symbolic constants. Here we handle all cases when both operands
5205 are constant. */
5206 /* Handle difference of two symbolic constants,
5207 for the sake of an initializer. */
5208 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5209 && really_constant_p (TREE_OPERAND (exp, 0))
5210 && really_constant_p (TREE_OPERAND (exp, 1)))
5212 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
5213 VOIDmode, modifier);
5214 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5215 VOIDmode, modifier);
5217 /* If the last operand is a CONST_INT, use plus_constant of
5218 the negated constant. Else make the MINUS. */
5219 if (GET_CODE (op1) == CONST_INT)
5220 return plus_constant (op0, - INTVAL (op1));
5221 else
5222 return gen_rtx (MINUS, mode, op0, op1);
5224 /* Convert A - const to A + (-const). */
5225 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
5227 tree negated = fold (build1 (NEGATE_EXPR, type,
5228 TREE_OPERAND (exp, 1)));
5230 /* Deal with the case where we can't negate the constant
5231 in TYPE. */
5232 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
5234 tree newtype = signed_type (type);
5235 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
5236 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
5237 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
5239 if (! TREE_OVERFLOW (newneg))
5240 return expand_expr (convert (type,
5241 build (PLUS_EXPR, newtype,
5242 newop0, newneg)),
5243 target, tmode, modifier);
5245 else
5247 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
5248 goto plus_expr;
5251 this_optab = sub_optab;
5252 goto binop;
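/* For illustration of the case above: "i - 5" is rewritten as
   "i + (-5)" and handled at plus_expr; for an unsigned type, where
   negating the constant would overflow, the rewrite is instead done
   in the corresponding signed type. */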
5254 case MULT_EXPR:
5255 preexpand_calls (exp);
5256 /* If first operand is constant, swap them.
5257 Thus the following special case checks need only
5258 check the second operand. */
5259 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5261 register tree t1 = TREE_OPERAND (exp, 0);
5262 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
5263 TREE_OPERAND (exp, 1) = t1;
5266 /* Attempt to return something suitable for generating an
5267 indexed address, for machines that support that. */
5269 if (modifier == EXPAND_SUM && mode == Pmode
5270 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5271 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
5273 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
5275 /* Apply distributive law if OP0 is x+c. */
5276 if (GET_CODE (op0) == PLUS
5277 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
5278 return gen_rtx (PLUS, mode,
5279 gen_rtx (MULT, mode, XEXP (op0, 0),
5280 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
5281 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
5282 * INTVAL (XEXP (op0, 1))));
5284 if (GET_CODE (op0) != REG)
5285 op0 = force_operand (op0, NULL_RTX);
5286 if (GET_CODE (op0) != REG)
5287 op0 = copy_to_mode_reg (mode, op0);
5289 return gen_rtx (MULT, mode, op0,
5290 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
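/* For illustration (hypothetical operands): under EXPAND_SUM, for
   (x + 4) * 3 with op0 == (plus (reg 100) (const_int 4)), the
   distributive law above yields
   (plus (mult (reg 100) (const_int 3)) (const_int 12)),
   a form directly usable in an indexed address. */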
5293 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5294 subtarget = 0;
5296 /* Check for multiplying things that have been extended
5297 from a narrower type. If this machine supports multiplying
5298 in that narrower type with a result in the desired type,
5299 do it that way, and avoid the explicit type-conversion. */
5300 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
5301 && TREE_CODE (type) == INTEGER_TYPE
5302 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5303 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
5304 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5305 && int_fits_type_p (TREE_OPERAND (exp, 1),
5306 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5307 /* Don't use a widening multiply if a shift will do. */
5308 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
5309 > HOST_BITS_PER_WIDE_INT)
5310 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
5311 ||
5312 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
5313 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
5314 ==
5315 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
5316 /* If both operands are extended, they must either both
5317 be zero-extended or both be sign-extended. */
5318 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
5319 ==
5320 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
5322 enum machine_mode innermode
5323 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
5324 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5325 ? umul_widen_optab : smul_widen_optab);
5326 if (mode == GET_MODE_WIDER_MODE (innermode)
5327 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
5329 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5330 NULL_RTX, VOIDmode, 0);
5331 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
5332 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5333 VOIDmode, 0);
5334 else
5335 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
5336 NULL_RTX, VOIDmode, 0);
5337 goto binop2;
5340 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5341 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5342 return expand_mult (mode, op0, op1, target, unsignedp);
5344 case TRUNC_DIV_EXPR:
5345 case FLOOR_DIV_EXPR:
5346 case CEIL_DIV_EXPR:
5347 case ROUND_DIV_EXPR:
5348 case EXACT_DIV_EXPR:
5349 preexpand_calls (exp);
5350 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5351 subtarget = 0;
5352 /* Possible optimization: compute the dividend with EXPAND_SUM;
5353 then, if the divisor is constant, we can optimize the case
5354 where some terms of the dividend have coefficients divisible by it. */
5355 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5356 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5357 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
5359 case RDIV_EXPR:
5360 this_optab = flodiv_optab;
5361 goto binop;
5363 case TRUNC_MOD_EXPR:
5364 case FLOOR_MOD_EXPR:
5365 case CEIL_MOD_EXPR:
5366 case ROUND_MOD_EXPR:
5367 preexpand_calls (exp);
5368 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5369 subtarget = 0;
5370 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5371 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5372 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
5374 case FIX_ROUND_EXPR:
5375 case FIX_FLOOR_EXPR:
5376 case FIX_CEIL_EXPR:
5377 abort (); /* Not used for C. */
5379 case FIX_TRUNC_EXPR:
5380 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5381 if (target == 0)
5382 target = gen_reg_rtx (mode);
5383 expand_fix (target, op0, unsignedp);
5384 return target;
5386 case FLOAT_EXPR:
5387 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5388 if (target == 0)
5389 target = gen_reg_rtx (mode);
5390 /* expand_float can't figure out what to do if FROM has VOIDmode.
5391 So give it the correct mode. With -O, cse will optimize this. */
5392 if (GET_MODE (op0) == VOIDmode)
5393 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5394 op0);
5395 expand_float (target, op0,
5396 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5397 return target;
5399 case NEGATE_EXPR:
5400 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5401 temp = expand_unop (mode, neg_optab, op0, target, 0);
5402 if (temp == 0)
5403 abort ();
5404 return temp;
5406 case ABS_EXPR:
5407 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5409 /* Handle complex values specially. */
5410 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
5411 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
5412 return expand_complex_abs (mode, op0, target, unsignedp);
5414 /* Unsigned abs is simply the operand. Testing here means we don't
5415 risk generating incorrect code below. */
5416 if (TREE_UNSIGNED (type))
5417 return op0;
5419 return expand_abs (mode, op0, target, unsignedp,
5420 safe_from_p (target, TREE_OPERAND (exp, 0)));
5422 case MAX_EXPR:
5423 case MIN_EXPR:
5424 target = original_target;
5425 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
5426 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
5427 || GET_MODE (target) != mode
5428 || (GET_CODE (target) == REG
5429 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5430 target = gen_reg_rtx (mode);
5431 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5432 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5434 /* First try to do it with a special MIN or MAX instruction.
5435 If that does not win, use a conditional jump to select the proper
5436 value. */
5437 this_optab = (TREE_UNSIGNED (type)
5438 ? (code == MIN_EXPR ? umin_optab : umax_optab)
5439 : (code == MIN_EXPR ? smin_optab : smax_optab));
5441 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
5442 OPTAB_WIDEN);
5443 if (temp != 0)
5444 return temp;
5446 /* At this point, a MEM target is no longer useful; we will get better
5447 code without it. */
5449 if (GET_CODE (target) == MEM)
5450 target = gen_reg_rtx (mode);
5452 if (target != op0)
5453 emit_move_insn (target, op0);
5455 op0 = gen_label_rtx ();
5457 /* If this mode is an integer too wide to compare properly,
5458 compare word by word. Rely on cse to optimize constant cases. */
5459 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
5461 if (code == MAX_EXPR)
5462 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
5463 target, op1, NULL_RTX, op0);
5464 else
5465 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
5466 op1, target, NULL_RTX, op0);
5467 emit_move_insn (target, op1);
5469 else
5471 if (code == MAX_EXPR)
5472 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5473 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
5474 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
5475 else
5476 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5477 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
5478 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
5479 if (temp == const0_rtx)
5480 emit_move_insn (target, op1);
5481 else if (temp != const_true_rtx)
5483 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
5484 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
5485 else
5486 abort ();
5487 emit_move_insn (target, op1);
5490 emit_label (op0);
5491 return target;
5493 case BIT_NOT_EXPR:
5494 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5495 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
5496 if (temp == 0)
5497 abort ();
5498 return temp;
5500 case FFS_EXPR:
5501 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5502 temp = expand_unop (mode, ffs_optab, op0, target, 1);
5503 if (temp == 0)
5504 abort ();
5505 return temp;
5507 /* ??? Can optimize bitwise operations with one arg constant.
5508 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
5509 and (a bitwise1 b) bitwise2 b (etc)
5510 but that is probably not worthwhile. */
5512 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
5513 boolean values when we want in all cases to compute both of them. In
5514 general it is fastest to do TRUTH_AND_EXPR by computing both operands
5515 as actual zero-or-1 values and then bitwise anding. In cases where
5516 there cannot be any side effects, better code would be made by
5517 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
5518 how to recognize those cases. */
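/* For example, with C input, `f () && g ()' must be expanded as
   TRUTH_ANDIF_EXPR so that g is not called when f returns 0, while
   something like `(a > 0) & (b > 0)' can use TRUTH_AND_EXPR: both
   comparisons are side-effect-free 0-or-1 values, so computing both
   and anding them avoids a branch.  */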
5520 case TRUTH_AND_EXPR:
5521 case BIT_AND_EXPR:
5522 this_optab = and_optab;
5523 goto binop;
5525 case TRUTH_OR_EXPR:
5526 case BIT_IOR_EXPR:
5527 this_optab = ior_optab;
5528 goto binop;
5530 case TRUTH_XOR_EXPR:
5531 case BIT_XOR_EXPR:
5532 this_optab = xor_optab;
5533 goto binop;
5535 case LSHIFT_EXPR:
5536 case RSHIFT_EXPR:
5537 case LROTATE_EXPR:
5538 case RROTATE_EXPR:
5539 preexpand_calls (exp);
5540 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5541 subtarget = 0;
5542 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5543 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
5544 unsignedp);
5546 /* Could determine the answer when only additive constants differ. Also,
5547 the addition of one can be handled by changing the condition. */
5548 case LT_EXPR:
5549 case LE_EXPR:
5550 case GT_EXPR:
5551 case GE_EXPR:
5552 case EQ_EXPR:
5553 case NE_EXPR:
5554 preexpand_calls (exp);
5555 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
5556 if (temp != 0)
5557 return temp;
5559 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
5560 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
5561 && original_target
5562 && GET_CODE (original_target) == REG
5563 && (GET_MODE (original_target)
5564 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5566 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
5567 VOIDmode, 0);
5569 if (temp != original_target)
5570 temp = copy_to_reg (temp);
5572 op1 = gen_label_rtx ();
5573 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
5574 GET_MODE (temp), unsignedp, 0);
5575 emit_jump_insn (gen_beq (op1));
5576 emit_move_insn (temp, const1_rtx);
5577 emit_label (op1);
5578 return temp;
5581 /* If no set-flag instruction, must generate a conditional
5582 store into a temporary variable. Drop through
5583 and handle this like && and ||. */
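/* The conditional store emitted below amounts to
     target = 0; if (! exp) goto lab; target = 1; lab:
   yielding the 0-or-1 value of EXP without a set-flag instruction.  */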
5585 case TRUTH_ANDIF_EXPR:
5586 case TRUTH_ORIF_EXPR:
5587 if (! ignore
5588 && (target == 0 || ! safe_from_p (target, exp)
5589 /* Make sure we don't have a hard reg (such as the function's return
5590 value) live across basic blocks, if not optimizing. */
5591 || (!optimize && GET_CODE (target) == REG
5592 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
5593 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5595 if (target)
5596 emit_clr_insn (target);
5598 op1 = gen_label_rtx ();
5599 jumpifnot (exp, op1);
5601 if (target)
5602 emit_0_to_1_insn (target);
5604 emit_label (op1);
5605 return ignore ? const0_rtx : target;
5607 case TRUTH_NOT_EXPR:
5608 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5609 /* The parser is careful to generate TRUTH_NOT_EXPR
5610 only with operands that are always zero or one. */
5611 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
5612 target, 1, OPTAB_LIB_WIDEN);
5613 if (temp == 0)
5614 abort ();
5615 return temp;
5617 case COMPOUND_EXPR:
5618 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5619 emit_queue ();
5620 return expand_expr (TREE_OPERAND (exp, 1),
5621 (ignore ? const0_rtx : target),
5622 VOIDmode, 0);
5624 case COND_EXPR:
5626 rtx flag = NULL_RTX;
5627 tree left_cleanups = NULL_TREE;
5628 tree right_cleanups = NULL_TREE;
5630 /* Used to save a pointer to the place to put the setting of
5631 the flag that indicates if this side of the conditional was
5632 taken. We backpatch the code, if we find out later that we
5633 have any conditional cleanups that need to be performed. */
5634 rtx dest_right_flag = NULL_RTX;
5635 rtx dest_left_flag = NULL_RTX;
5637 /* Note that COND_EXPRs whose type is a structure or union
5638 are required to be constructed to contain assignments to
5639 a temporary variable, so that we can evaluate them here
5640 for side effect only. If the type is void, we must do likewise. */
5642 /* If an arm of the branch requires a cleanup,
5643 only that cleanup is performed. */
5645 tree singleton = 0;
5646 tree binary_op = 0, unary_op = 0;
5647 tree old_cleanups = cleanups_this_call;
5649 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
5650 convert it to our mode, if necessary. */
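/* For example, `(x > y) ? 1 : 0' is equivalent to the comparison
   `x > y' itself, so the comparison is simply expanded as a 0-or-1
   value and converted to the wanted mode.  */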
5651 if (integer_onep (TREE_OPERAND (exp, 1))
5652 && integer_zerop (TREE_OPERAND (exp, 2))
5653 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5655 if (ignore)
5657 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5658 modifier);
5659 return const0_rtx;
5662 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
5663 if (GET_MODE (op0) == mode)
5664 return op0;
5666 if (target == 0)
5667 target = gen_reg_rtx (mode);
5668 convert_move (target, op0, unsignedp);
5669 return target;
5672 /* If we are not to produce a result, we have no target. Otherwise,
5673 if a target was specified use it; it will not be used as an
5674 intermediate target unless it is safe. If no target, use a
5675 temporary. */
5677 if (ignore)
5678 temp = 0;
5679 else if (original_target
5680 && safe_from_p (original_target, TREE_OPERAND (exp, 0))
5681 && GET_MODE (original_target) == mode
5682 && ! (GET_CODE (original_target) == MEM
5683 && MEM_VOLATILE_P (original_target)))
5684 temp = original_target;
5685 else if (mode == BLKmode)
5687 if (TYPE_SIZE (type) == 0
5688 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5689 abort ();
5691 temp = assign_stack_temp (BLKmode,
5692 (TREE_INT_CST_LOW (TYPE_SIZE (type))
5693 + BITS_PER_UNIT - 1)
5694 / BITS_PER_UNIT, 0);
5695 MEM_IN_STRUCT_P (temp) = AGGREGATE_TYPE_P (type);
5697 else
5698 temp = gen_reg_rtx (mode);
5700 /* Check for X ? A + B : A. If we have this, we can copy
5701 A to the output and conditionally add B. Similarly for unary
5702 operations. Don't do this if X has side-effects because
5703 those side effects might affect A or B and the "?" operation is
5704 a sequence point in ANSI. (We test for side effects later.) */
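/* For example, `x ? a + b : a' can be compiled roughly as
     temp = a; if (x) temp += b;
   evaluating A only once. The side-effect test matters because in
   something like `x++ ? a + x : a' the condition modifies an operand
   of the arms and must stay ordered before them.  */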
5706 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
5707 && operand_equal_p (TREE_OPERAND (exp, 2),
5708 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5709 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
5710 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
5711 && operand_equal_p (TREE_OPERAND (exp, 1),
5712 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5713 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
5714 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
5715 && operand_equal_p (TREE_OPERAND (exp, 2),
5716 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5717 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
5718 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
5719 && operand_equal_p (TREE_OPERAND (exp, 1),
5720 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5721 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
5723 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
5724 operation, do this as A + (X != 0). Similarly for other simple
5725 binary operators. */
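/* E.g. `x ? n + 1 : n' becomes `n + (x != 0)': one scc-style
   instruction and an add, with no branches at all.  */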
5726 if (temp && singleton && binary_op
5727 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5728 && (TREE_CODE (binary_op) == PLUS_EXPR
5729 || TREE_CODE (binary_op) == MINUS_EXPR
5730 || TREE_CODE (binary_op) == BIT_IOR_EXPR
5731 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
5732 && integer_onep (TREE_OPERAND (binary_op, 1))
5733 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5735 rtx result;
5736 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
5737 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
5738 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
5739 : xor_optab);
5741 /* If we had X ? A : A + 1, do this as A + (X == 0).
5743 We have to invert the truth value here and then put it
5744 back later if do_store_flag fails. We cannot simply copy
5745 TREE_OPERAND (exp, 0) to another variable and modify that
5746 because invert_truthvalue can modify the tree pointed to
5747 by its argument. */
5748 if (singleton == TREE_OPERAND (exp, 1))
5749 TREE_OPERAND (exp, 0)
5750 = invert_truthvalue (TREE_OPERAND (exp, 0));
5752 result = do_store_flag (TREE_OPERAND (exp, 0),
5753 (safe_from_p (temp, singleton)
5754 ? temp : NULL_RTX),
5755 mode, BRANCH_COST <= 1);
5757 if (result)
5759 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
5760 return expand_binop (mode, boptab, op1, result, temp,
5761 unsignedp, OPTAB_LIB_WIDEN);
5763 else if (singleton == TREE_OPERAND (exp, 1))
5764 TREE_OPERAND (exp, 0)
5765 = invert_truthvalue (TREE_OPERAND (exp, 0));
5768 NO_DEFER_POP;
5769 op0 = gen_label_rtx ();
5771 flag = gen_reg_rtx (word_mode);
5772 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
5774 if (temp != 0)
5776 /* If the target conflicts with the other operand of the
5777 binary op, we can't use it. Also, we can't use the target
5778 if it is a hard register, because evaluating the condition
5779 might clobber it. */
5780 if ((binary_op
5781 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
5782 || (GET_CODE (temp) == REG
5783 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
5784 temp = gen_reg_rtx (mode);
5785 store_expr (singleton, temp, 0);
5787 else
5788 expand_expr (singleton,
5789 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5790 dest_left_flag = get_last_insn ();
5791 if (singleton == TREE_OPERAND (exp, 1))
5792 jumpif (TREE_OPERAND (exp, 0), op0);
5793 else
5794 jumpifnot (TREE_OPERAND (exp, 0), op0);
5796 /* Allows cleanups up to here. */
5797 old_cleanups = cleanups_this_call;
5798 if (binary_op && temp == 0)
5799 /* Just touch the other operand. */
5800 expand_expr (TREE_OPERAND (binary_op, 1),
5801 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5802 else if (binary_op)
5803 store_expr (build (TREE_CODE (binary_op), type,
5804 make_tree (type, temp),
5805 TREE_OPERAND (binary_op, 1)),
5806 temp, 0);
5807 else
5808 store_expr (build1 (TREE_CODE (unary_op), type,
5809 make_tree (type, temp)),
5810 temp, 0);
5811 op1 = op0;
5812 dest_right_flag = get_last_insn ();
5814 #if 0
5815 /* This is now done in jump.c and is better done there because it
5816 produces shorter register lifetimes. */
5818 /* Check for both possibilities either constants or variables
5819 in registers (but not the same as the target!). If so, can
5820 save branches by assigning one, branching, and assigning the
5821 other. */
5822 else if (temp && GET_MODE (temp) != BLKmode
5823 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5824 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5825 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5826 && DECL_RTL (TREE_OPERAND (exp, 1))
5827 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5828 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5829 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5830 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5831 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5832 && DECL_RTL (TREE_OPERAND (exp, 2))
5833 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5834 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
5836 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5837 temp = gen_reg_rtx (mode);
5838 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5839 dest_left_flag = get_last_insn ();
5840 jumpifnot (TREE_OPERAND (exp, 0), op0);
5842 /* Allows cleanups up to here. */
5843 old_cleanups = cleanups_this_call;
5844 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5845 op1 = op0;
5846 dest_right_flag = get_last_insn ();
5848 #endif
5849 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5850 comparison operator. If we have one of these cases, set the
5851 output to A, branch on A (cse will merge these two references),
5852 then set the output to FOO. */
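/* For example, `x > 0 ? x : -x' (an open-coded abs) becomes
     temp = x; if (x > 0) goto done; temp = -x; done:
   so X itself is loaded only once.  */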
5853 else if (temp
5854 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5855 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5856 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5857 TREE_OPERAND (exp, 1), 0)
5858 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5859 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5861 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5862 temp = gen_reg_rtx (mode);
5863 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5864 dest_left_flag = get_last_insn ();
5865 jumpif (TREE_OPERAND (exp, 0), op0);
5867 /* Allows cleanups up to here. */
5868 old_cleanups = cleanups_this_call;
5869 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5870 op1 = op0;
5871 dest_right_flag = get_last_insn ();
5873 else if (temp
5874 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5875 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5876 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5877 TREE_OPERAND (exp, 2), 0)
5878 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5879 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5881 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5882 temp = gen_reg_rtx (mode);
5883 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5884 dest_left_flag = get_last_insn ();
5885 jumpifnot (TREE_OPERAND (exp, 0), op0);
5887 /* Allows cleanups up to here. */
5888 old_cleanups = cleanups_this_call;
5889 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5890 op1 = op0;
5891 dest_right_flag = get_last_insn ();
5893 else
5895 op1 = gen_label_rtx ();
5896 jumpifnot (TREE_OPERAND (exp, 0), op0);
5898 /* Allows cleanups up to here. */
5899 old_cleanups = cleanups_this_call;
5900 if (temp != 0)
5901 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5902 else
5903 expand_expr (TREE_OPERAND (exp, 1),
5904 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5905 dest_left_flag = get_last_insn ();
5907 /* Handle conditional cleanups, if any. */
5908 left_cleanups = defer_cleanups_to (old_cleanups);
5910 emit_queue ();
5911 emit_jump_insn (gen_jump (op1));
5912 emit_barrier ();
5913 emit_label (op0);
5914 if (temp != 0)
5915 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5916 else
5917 expand_expr (TREE_OPERAND (exp, 2),
5918 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5919 dest_right_flag = get_last_insn ();
5922 /* Handle conditional cleanups, if any. */
5923 right_cleanups = defer_cleanups_to (old_cleanups);
5925 emit_queue ();
5926 emit_label (op1);
5927 OK_DEFER_POP;
5929 /* Add back in, any conditional cleanups. */
5930 if (left_cleanups || right_cleanups)
5932 tree new_cleanups;
5933 tree cond;
5934 rtx last;
5936 /* Now that we know that a flag is needed, go back and add in the
5937 setting of the flag. */
5939 /* Do the left side flag. */
5940 last = get_last_insn ();
5941 /* Flag left cleanups as needed. */
5942 emit_move_insn (flag, const1_rtx);
5943 /* ??? deprecated, use sequences instead. */
5944 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
5946 /* Do the right side flag. */
5947 last = get_last_insn ();
5948 /* Flag right cleanups as needed. */
5949 emit_move_insn (flag, const0_rtx);
5950 /* ??? deprecated, use sequences instead. */
5951 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
5953 /* Convert FLAG, which is an rtx, into a tree. */
5954 cond = make_node (RTL_EXPR);
5955 TREE_TYPE (cond) = integer_type_node;
5956 RTL_EXPR_RTL (cond) = flag;
5957 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
5959 if (! left_cleanups)
5960 left_cleanups = integer_zero_node;
5961 if (! right_cleanups)
5962 right_cleanups = integer_zero_node;
5963 new_cleanups = build (COND_EXPR, void_type_node,
5964 truthvalue_conversion (cond),
5965 left_cleanups, right_cleanups);
5966 new_cleanups = fold (new_cleanups);
5968 /* Now add in the conditionalized cleanups. */
5969 cleanups_this_call
5970 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
5971 (*interim_eh_hook) (NULL_TREE);
5973 return temp;
5976 case TARGET_EXPR:
5978 int need_exception_region = 0;
5979 /* Something needs to be initialized, but we didn't know
5980 where that thing was when building the tree. For example,
5981 it could be the return value of a function, or a parameter
5982 to a function which is laid out in the stack, or a temporary
5983 variable which must be passed by reference.
5985 We guarantee that the expression will either be constructed
5986 or copied into our original target. */
5988 tree slot = TREE_OPERAND (exp, 0);
5989 tree exp1;
5990 rtx temp;
5992 if (TREE_CODE (slot) != VAR_DECL)
5993 abort ();
5995 if (target == 0)
5997 if (DECL_RTL (slot) != 0)
5999 target = DECL_RTL (slot);
6000 /* If we have already expanded the slot, don't do
6001 it again. (mrs) */
6002 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6003 return target;
6005 else
6007 target = assign_stack_temp (mode, int_size_in_bytes (type), 2);
6008 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (type);
6009 /* All temp slots at this level must not conflict. */
6010 preserve_temp_slots (target);
6011 DECL_RTL (slot) = target;
6013 /* Since SLOT is not known to the called function
6014 to belong to its stack frame, we must build an explicit
6015 cleanup. This case occurs when we must build up a reference
6016 to pass the reference as an argument. In this case,
6017 it is very likely that such a reference need not be
6018 built here. */
6020 if (TREE_OPERAND (exp, 2) == 0)
6021 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
6022 if (TREE_OPERAND (exp, 2))
6024 cleanups_this_call = tree_cons (NULL_TREE,
6025 TREE_OPERAND (exp, 2),
6026 cleanups_this_call);
6027 need_exception_region = 1;
6031 else
6033 /* This case does occur, when expanding a parameter which
6034 needs to be constructed on the stack. The target
6035 is the actual stack address that we want to initialize.
6036 The function we call will perform the cleanup in this case. */
6038 /* If we have already assigned it space, use that space,
6039 not the target we were passed in, as our target
6040 parameter is only a hint. */
6041 if (DECL_RTL (slot) != 0)
6043 target = DECL_RTL (slot);
6044 /* If we have already expanded the slot, don't do
6045 it again. (mrs) */
6046 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6047 return target;
6050 DECL_RTL (slot) = target;
6053 exp1 = TREE_OPERAND (exp, 1);
6054 /* Mark it as expanded. */
6055 TREE_OPERAND (exp, 1) = NULL_TREE;
6057 temp = expand_expr (exp1, target, tmode, modifier);
6059 if (need_exception_region)
6060 (*interim_eh_hook) (NULL_TREE);
6062 return temp;
6065 case INIT_EXPR:
6067 tree lhs = TREE_OPERAND (exp, 0);
6068 tree rhs = TREE_OPERAND (exp, 1);
6069 tree noncopied_parts = 0;
6070 tree lhs_type = TREE_TYPE (lhs);
6072 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6073 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
6074 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
6075 TYPE_NONCOPIED_PARTS (lhs_type));
6076 while (noncopied_parts != 0)
6078 expand_assignment (TREE_VALUE (noncopied_parts),
6079 TREE_PURPOSE (noncopied_parts), 0, 0);
6080 noncopied_parts = TREE_CHAIN (noncopied_parts);
6082 return temp;
6085 case MODIFY_EXPR:
6087 /* If lhs is complex, expand calls in rhs before computing it.
6088 That's so we don't compute a pointer and save it over a call.
6089 If lhs is simple, compute it first so we can give it as a
6090 target if the rhs is just a call. This avoids an extra temp and copy
6091 and that prevents a partial-subsumption which makes bad code.
6092 Actually we could treat component_ref's of vars like vars. */
6094 tree lhs = TREE_OPERAND (exp, 0);
6095 tree rhs = TREE_OPERAND (exp, 1);
6096 tree noncopied_parts = 0;
6097 tree lhs_type = TREE_TYPE (lhs);
6099 temp = 0;
6101 if (TREE_CODE (lhs) != VAR_DECL
6102 && TREE_CODE (lhs) != RESULT_DECL
6103 && TREE_CODE (lhs) != PARM_DECL)
6104 preexpand_calls (exp);
6106 /* Check for |= or &= of a bitfield of size one into another bitfield
6107 of size 1. In this case, (unless we need the result of the
6108 assignment) we can do this more efficiently with a
6109 test followed by an assignment, if necessary.
6111 ??? At this point, we can't get a BIT_FIELD_REF here. But if
6112 things change so we do, this code should be enhanced to
6113 support it. */
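/* For example, with 1-bit fields, `s.a |= s.b' can be compiled as
     if (s.b) s.a = 1;
   and `s.a &= s.b' as
     if (! s.b) s.a = 0;
   when the value of the assignment itself is not needed.  */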
6114 if (ignore
6115 && TREE_CODE (lhs) == COMPONENT_REF
6116 && (TREE_CODE (rhs) == BIT_IOR_EXPR
6117 || TREE_CODE (rhs) == BIT_AND_EXPR)
6118 && TREE_OPERAND (rhs, 0) == lhs
6119 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
6120 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
6121 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
6123 rtx label = gen_label_rtx ();
6125 do_jump (TREE_OPERAND (rhs, 1),
6126 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
6127 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
6128 expand_assignment (lhs, convert (TREE_TYPE (rhs),
6129 (TREE_CODE (rhs) == BIT_IOR_EXPR
6130 ? integer_one_node
6131 : integer_zero_node)),
6132 0, 0);
6133 do_pending_stack_adjust ();
6134 emit_label (label);
6135 return const0_rtx;
6138 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
6139 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
6140 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
6141 TYPE_NONCOPIED_PARTS (lhs_type));
6143 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6144 while (noncopied_parts != 0)
6146 expand_assignment (TREE_PURPOSE (noncopied_parts),
6147 TREE_VALUE (noncopied_parts), 0, 0);
6148 noncopied_parts = TREE_CHAIN (noncopied_parts);
6150 return temp;
6153 case PREINCREMENT_EXPR:
6154 case PREDECREMENT_EXPR:
6155 return expand_increment (exp, 0);
6157 case POSTINCREMENT_EXPR:
6158 case POSTDECREMENT_EXPR:
6159 /* Faster to treat as pre-increment if result is not used. */
6160 return expand_increment (exp, ! ignore);
6162 case ADDR_EXPR:
6163 /* If nonzero, TEMP will be set to the address of something that might
6164 be a MEM corresponding to a stack slot. */
6165 temp = 0;
6167 /* Are we taking the address of a nested function? */
6168 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
6169 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
6171 op0 = trampoline_address (TREE_OPERAND (exp, 0));
6172 op0 = force_operand (op0, target);
6174 /* If we are taking the address of something erroneous, just
6175 return a zero. */
6176 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
6177 return const0_rtx;
6178 else
6180 /* We make sure to pass const0_rtx down if we came in with
6181 ignore set, to avoid doing the cleanups twice. */
6182 op0 = expand_expr (TREE_OPERAND (exp, 0),
6183 ignore ? const0_rtx : NULL_RTX, VOIDmode,
6184 (modifier == EXPAND_INITIALIZER
6185 ? modifier : EXPAND_CONST_ADDRESS));
6187 /* If we are going to ignore the result, OP0 will have been set
6188 to const0_rtx, so just return it. Don't get confused and
6189 think we are taking the address of the constant. */
6190 if (ignore)
6191 return op0;
6193 /* We would like the object in memory. If it is a constant,
6194 we can have it be statically allocated into memory. For
6195 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
6196 memory and store the value into it. */
6198 if (CONSTANT_P (op0))
6199 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6200 op0);
6201 else if (GET_CODE (op0) == MEM)
6203 mark_temp_addr_taken (op0);
6204 temp = XEXP (op0, 0);
6207 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6208 || GET_CODE (op0) == CONCAT)
6210 /* If this object is in a register, it must not
6211 be BLKmode. */
6212 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
6213 enum machine_mode inner_mode = TYPE_MODE (inner_type);
6214 rtx memloc
6215 = assign_stack_temp (inner_mode,
6216 int_size_in_bytes (inner_type), 1);
6217 MEM_IN_STRUCT_P (memloc) = AGGREGATE_TYPE_P (inner_type);
6219 mark_temp_addr_taken (memloc);
6220 emit_move_insn (memloc, op0);
6221 op0 = memloc;
6224 if (GET_CODE (op0) != MEM)
6225 abort ();
6227 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6228 return XEXP (op0, 0);
6230 op0 = force_operand (XEXP (op0, 0), target);
6233 if (flag_force_addr && GET_CODE (op0) != REG)
6234 op0 = force_reg (Pmode, op0);
6236 if (GET_CODE (op0) == REG)
6237 mark_reg_pointer (op0);
6239 /* If we might have had a temp slot, add an equivalent address
6240 for it. */
6241 if (temp != 0)
6242 update_temp_slot_address (temp, op0);
6244 return op0;
6246 case ENTRY_VALUE_EXPR:
6247 abort ();
6249 /* COMPLEX type for Extended Pascal & Fortran */
6250 case COMPLEX_EXPR:
6252 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6253 rtx insns;
6255 /* Get the rtx code of the operands. */
6256 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6257 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
6259 if (! target)
6260 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6262 start_sequence ();
6264 /* Move the real (op0) and imaginary (op1) parts to their location. */
6265 emit_move_insn (gen_realpart (mode, target), op0);
6266 emit_move_insn (gen_imagpart (mode, target), op1);
6268 insns = get_insns ();
6269 end_sequence ();
6271 /* Complex construction should appear as a single unit. */
6272 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
6273 each with a separate pseudo as destination.
6274 It's not correct for flow to treat them as a unit. */
6275 if (GET_CODE (target) != CONCAT)
6276 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
6277 else
6278 emit_insns (insns);
6280 return target;
6283 case REALPART_EXPR:
6284 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6285 return gen_realpart (mode, op0);
6287 case IMAGPART_EXPR:
6288 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6289 return gen_imagpart (mode, op0);
6291 case CONJ_EXPR:
6293 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6294 rtx imag_t;
6295 rtx insns;
6297 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6299 if (! target)
6300 target = gen_reg_rtx (mode);
6302 start_sequence ();
6304 /* Store the realpart and the negated imagpart to target. */
6305 emit_move_insn (gen_realpart (partmode, target),
6306 gen_realpart (partmode, op0));
6308 imag_t = gen_imagpart (partmode, target);
6309 temp = expand_unop (partmode, neg_optab,
6310 gen_imagpart (partmode, op0), imag_t, 0);
6311 if (temp != imag_t)
6312 emit_move_insn (imag_t, temp);
6314 insns = get_insns ();
6315 end_sequence ();
6317 /* Conjugate should appear as a single unit.
6318 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6319 each with a separate pseudo as destination.
6320 It's not correct for flow to treat them as a unit. */
6321 if (GET_CODE (target) != CONCAT)
6322 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
6323 else
6324 emit_insns (insns);
6326 return target;
6329 case ERROR_MARK:
6330 op0 = CONST0_RTX (tmode);
6331 if (op0 != 0)
6332 return op0;
6333 return const0_rtx;
6335 default:
6336 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
6339 /* Here to do an ordinary binary operator, generating an instruction
6340 from the optab already placed in `this_optab'. */
6341 binop:
6342 preexpand_calls (exp);
6343 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6344 subtarget = 0;
6345 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6346 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6347 binop2:
6348 temp = expand_binop (mode, this_optab, op0, op1, target,
6349 unsignedp, OPTAB_LIB_WIDEN);
6350 if (temp == 0)
6351 abort ();
6352 return temp;
6356 /* Emit bytecode to evaluate the given expression EXP to the stack. */
6357 void
6358 bc_expand_expr (exp)
6359 tree exp;
6361 enum tree_code code;
6362 tree type, arg0;
6363 rtx r;
6364 struct binary_operator *binoptab;
6365 struct unary_operator *unoptab;
6366 struct increment_operator *incroptab;
6367 struct bc_label *lab, *lab1;
6368 enum bytecode_opcode opcode;
6371 code = TREE_CODE (exp);
6373 switch (code)
6375 case PARM_DECL:
6377 if (DECL_RTL (exp) == 0)
6379 error_with_decl (exp, "prior parameter's size depends on `%s'");
6380 return;
6383 bc_load_parmaddr (DECL_RTL (exp));
6384 bc_load_memory (TREE_TYPE (exp), exp);
6386 return;
6388 case VAR_DECL:
6390 if (DECL_RTL (exp) == 0)
6391 abort ();
6393 #if 0
6394 if (BYTECODE_LABEL (DECL_RTL (exp)))
6395 bc_load_externaddr (DECL_RTL (exp));
6396 else
6397 bc_load_localaddr (DECL_RTL (exp));
6398 #endif
6399 if (TREE_PUBLIC (exp))
6400 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
6401 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
6402 else
6403 bc_load_localaddr (DECL_RTL (exp));
6405 bc_load_memory (TREE_TYPE (exp), exp);
6406 return;
6408 case INTEGER_CST:
6410 #ifdef DEBUG_PRINT_CODE
6411 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
6412 #endif
6413 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
6414 ? SImode
6415 : TYPE_MODE (TREE_TYPE (exp)))],
6416 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
6417 return;
6419 case REAL_CST:
6421 #if 0
6422 #ifdef DEBUG_PRINT_CODE
6423 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
6424 #endif
6425 /* FIX THIS: find a better way to pass real_cst's. -bson */
6426 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
6427 (double) TREE_REAL_CST (exp));
6428 #else
6429 abort ();
6430 #endif
6432 return;
6434 case CALL_EXPR:
6436 /* We build a call description vector describing the type of
6437 the return value and of the arguments; this call vector,
6438 together with a pointer to a location for the return value
6439 and the base of the argument list, is passed to the low
6440 level machine dependent call subroutine, which is responsible
6441 for putting the arguments wherever real functions expect
6442 them, as well as getting the return value back. */
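/* Roughly, the vector built below is laid out as
     { nargs, return type code, return size,
       then a type code and size for each argument in order }
   (see the tree_cons calls that construct CALLDESC).  */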
6444 tree calldesc = 0, arg;
6445 int nargs = 0, i;
6446 rtx retval;
6448 /* Push the evaluated args on the evaluation stack in reverse
6449 order. Also make an entry for each arg in the calldesc
6450 vector while we're at it. */
6452 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6454 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
6456 ++nargs;
6457 bc_expand_expr (TREE_VALUE (arg));
6459 calldesc = tree_cons ((tree) 0,
6460 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
6461 calldesc);
6462 calldesc = tree_cons ((tree) 0,
6463 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
6464 calldesc);
6467 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6469 /* Allocate a location for the return value and push its
6470 address on the evaluation stack. Also make an entry
6471 at the front of the calldesc for the return value type. */
6473 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
6474 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
6475 bc_load_localaddr (retval);
6477 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
6478 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
6480 /* Prepend the argument count. */
6481 calldesc = tree_cons ((tree) 0,
6482 build_int_2 (nargs, 0),
6483 calldesc);
6485 /* Push the address of the call description vector on the stack. */
6486 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
6487 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
6488 build_index_type (build_int_2 (nargs * 2, 0)));
6489 r = output_constant_def (calldesc);
6490 bc_load_externaddr (r);
6492 /* Push the address of the function to be called. */
6493 bc_expand_expr (TREE_OPERAND (exp, 0));
6495 /* Call the function, popping its address and the calldesc vector
6496 address off the evaluation stack in the process. */
6497 bc_emit_instruction (call);
6499 /* Pop the arguments off the stack. */
6500 bc_adjust_stack (nargs);
6502 /* Load the return value onto the stack. */
6503 bc_load_localaddr (retval);
6504 bc_load_memory (type, TREE_OPERAND (exp, 0));
6506 return;
6508 case SAVE_EXPR:
6510 if (!SAVE_EXPR_RTL (exp))
6512 /* First time around: copy to local variable */
6513 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
6514 TYPE_ALIGN (TREE_TYPE (exp)));
6515 bc_expand_expr (TREE_OPERAND (exp, 0));
6516 bc_emit_instruction (duplicate);
6518 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6519 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6521 else
6523 /* Consecutive reference: use saved copy */
6524 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6525 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6527 return;
6529 #if 0
6530 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
6531 how are they handled instead? */
6532 case LET_STMT:
6534 TREE_USED (exp) = 1;
6535 bc_expand_expr (STMT_BODY (exp));
6536 return;
6537 #endif
6539 case NOP_EXPR:
6540 case CONVERT_EXPR:
6542 bc_expand_expr (TREE_OPERAND (exp, 0));
6543 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
6544 return;
6546 case MODIFY_EXPR:
6548 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
6549 return;
6551 case ADDR_EXPR:
6553 bc_expand_address (TREE_OPERAND (exp, 0));
6554 return;
6556 case INDIRECT_REF:
6558 bc_expand_expr (TREE_OPERAND (exp, 0));
6559 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6560 return;
6562 case ARRAY_REF:
6564 bc_expand_expr (bc_canonicalize_array_ref (exp));
6565 return;
6567 case COMPONENT_REF:
6569 bc_expand_component_address (exp);
6571 /* If we have a bitfield, generate a proper load */
6572 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
6573 return;
6575 case COMPOUND_EXPR:
6577 bc_expand_expr (TREE_OPERAND (exp, 0));
6578 bc_emit_instruction (drop);
6579 bc_expand_expr (TREE_OPERAND (exp, 1));
6580 return;
6582 case COND_EXPR:
6584 bc_expand_expr (TREE_OPERAND (exp, 0));
6585 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6586 lab = bc_get_bytecode_label ();
6587 bc_emit_bytecode (xjumpifnot);
6588 bc_emit_bytecode_labelref (lab);
6590 #ifdef DEBUG_PRINT_CODE
6591 fputc ('\n', stderr);
6592 #endif
6593 bc_expand_expr (TREE_OPERAND (exp, 1));
6594 lab1 = bc_get_bytecode_label ();
6595 bc_emit_bytecode (jump);
6596 bc_emit_bytecode_labelref (lab1);
6598 #ifdef DEBUG_PRINT_CODE
6599 fputc ('\n', stderr);
6600 #endif
6602 bc_emit_bytecode_labeldef (lab);
6603 bc_expand_expr (TREE_OPERAND (exp, 2));
6604 bc_emit_bytecode_labeldef (lab1);
6605 return;
6607 case TRUTH_ANDIF_EXPR:
6609 opcode = xjumpifnot;
6610 goto andorif;
6612 case TRUTH_ORIF_EXPR:
6614 opcode = xjumpif;
6615 goto andorif;
6617 case PLUS_EXPR:
6619 binoptab = optab_plus_expr;
6620 goto binop;
6622 case MINUS_EXPR:
6624 binoptab = optab_minus_expr;
6625 goto binop;
6627 case MULT_EXPR:
6629 binoptab = optab_mult_expr;
6630 goto binop;
6632 case TRUNC_DIV_EXPR:
6633 case FLOOR_DIV_EXPR:
6634 case CEIL_DIV_EXPR:
6635 case ROUND_DIV_EXPR:
6636 case EXACT_DIV_EXPR:
6638 binoptab = optab_trunc_div_expr;
6639 goto binop;
6641 case TRUNC_MOD_EXPR:
6642 case FLOOR_MOD_EXPR:
6643 case CEIL_MOD_EXPR:
6644 case ROUND_MOD_EXPR:
6646 binoptab = optab_trunc_mod_expr;
6647 goto binop;
6649 case FIX_ROUND_EXPR:
6650 case FIX_FLOOR_EXPR:
6651 case FIX_CEIL_EXPR:
6652 abort (); /* Not used for C. */
6654 case FIX_TRUNC_EXPR:
6655 case FLOAT_EXPR:
6656 case MAX_EXPR:
6657 case MIN_EXPR:
6658 case FFS_EXPR:
6659 case LROTATE_EXPR:
6660 case RROTATE_EXPR:
6661 abort (); /* FIXME */
6663 case RDIV_EXPR:
6665 binoptab = optab_rdiv_expr;
6666 goto binop;
6668 case BIT_AND_EXPR:
6670 binoptab = optab_bit_and_expr;
6671 goto binop;
6673 case BIT_IOR_EXPR:
6675 binoptab = optab_bit_ior_expr;
6676 goto binop;
6678 case BIT_XOR_EXPR:
6680 binoptab = optab_bit_xor_expr;
6681 goto binop;
6683 case LSHIFT_EXPR:
6685 binoptab = optab_lshift_expr;
6686 goto binop;
6688 case RSHIFT_EXPR:
6690 binoptab = optab_rshift_expr;
6691 goto binop;
6693 case TRUTH_AND_EXPR:
6695 binoptab = optab_truth_and_expr;
6696 goto binop;
6698 case TRUTH_OR_EXPR:
6700 binoptab = optab_truth_or_expr;
6701 goto binop;
6703 case LT_EXPR:
6705 binoptab = optab_lt_expr;
6706 goto binop;
6708 case LE_EXPR:
6710 binoptab = optab_le_expr;
6711 goto binop;
6713 case GE_EXPR:
6715 binoptab = optab_ge_expr;
6716 goto binop;
6718 case GT_EXPR:
6720 binoptab = optab_gt_expr;
6721 goto binop;
6723 case EQ_EXPR:
6725 binoptab = optab_eq_expr;
6726 goto binop;
6728 case NE_EXPR:
6730 binoptab = optab_ne_expr;
6731 goto binop;
6733 case NEGATE_EXPR:
6735 unoptab = optab_negate_expr;
6736 goto unop;
6738 case BIT_NOT_EXPR:
6740 unoptab = optab_bit_not_expr;
6741 goto unop;
6743 case TRUTH_NOT_EXPR:
6745 unoptab = optab_truth_not_expr;
6746 goto unop;
6748 case PREDECREMENT_EXPR:
6750 incroptab = optab_predecrement_expr;
6751 goto increment;
6753 case PREINCREMENT_EXPR:
6755 incroptab = optab_preincrement_expr;
6756 goto increment;
6758 case POSTDECREMENT_EXPR:
6760 incroptab = optab_postdecrement_expr;
6761 goto increment;
6763 case POSTINCREMENT_EXPR:
6765 incroptab = optab_postincrement_expr;
6766 goto increment;
6768 case CONSTRUCTOR:
6770 bc_expand_constructor (exp);
6771 return;
6773 case ERROR_MARK:
6774 case RTL_EXPR:
6776 return;
6778 case BIND_EXPR:
6780 tree vars = TREE_OPERAND (exp, 0);
6781 int vars_need_expansion = 0;
6783 /* Need to open a binding contour here because
6784 if there are any cleanups they must be contained here. */
6785 expand_start_bindings (0);
6787 /* Mark the corresponding BLOCK for output. */
6788 if (TREE_OPERAND (exp, 2) != 0)
6789 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
6791 /* If VARS have not yet been expanded, expand them now. */
6792 while (vars)
6794 if (DECL_RTL (vars) == 0)
6796 vars_need_expansion = 1;
6797 expand_decl (vars);
6799 expand_decl_init (vars);
6800 vars = TREE_CHAIN (vars);
6803 bc_expand_expr (TREE_OPERAND (exp, 1));
6805 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6807 return;
6811 abort ();
6813 binop:
6815 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
6816 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
6817 return;
6820 unop:
6822 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6823 return;
6826 andorif:
6828 bc_expand_expr (TREE_OPERAND (exp, 0));
6829 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6830 lab = bc_get_bytecode_label ();
6832 bc_emit_instruction (duplicate);
6833 bc_emit_bytecode (opcode);
6834 bc_emit_bytecode_labelref (lab);
6836 #ifdef DEBUG_PRINT_CODE
6837 fputc ('\n', stderr);
6838 #endif
6840 bc_emit_instruction (drop);
6842 bc_expand_expr (TREE_OPERAND (exp, 1));
6843 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
6844 bc_emit_bytecode_labeldef (lab);
6845 return;
6848 increment:
6850 type = TREE_TYPE (TREE_OPERAND (exp, 0));
6852 /* Push the quantum. */
6853 bc_expand_expr (TREE_OPERAND (exp, 1));
6855 /* Convert it to the lvalue's type. */
6856 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
6858 /* Push the address of the lvalue */
6859 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
6861 /* Perform actual increment */
6862 bc_expand_increment (incroptab, type);
6863 return;
6866 /* Return the alignment in bits of EXP, a pointer valued expression.
6867 But don't return more than MAX_ALIGN no matter what.
6868 The alignment returned is, by default, the alignment of the thing that
6869 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
6871 Otherwise, look at the expression to see if we can do better, i.e., if the
6872 expression is actually pointing at an object whose alignment is tighter. */
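/* For example, given `(char *) &d' where D is a double, the type
   alignment of `char' is typically only BITS_PER_UNIT, but looking
   through the conversion to the ADDR_EXPR of D recovers the full
   alignment of D itself.  */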
6874 static int
6875 get_pointer_alignment (exp, max_align)
6876 tree exp;
6877 unsigned max_align;
6879 unsigned align, inner;
6881 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6882 return 0;
6884 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6885 align = MIN (align, max_align);
6887 while (1)
6889 switch (TREE_CODE (exp))
6891 case NOP_EXPR:
6892 case CONVERT_EXPR:
6893 case NON_LVALUE_EXPR:
6894 exp = TREE_OPERAND (exp, 0);
6895 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6896 return align;
6897 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6898 align = MIN (inner, max_align);
6899 break;
6901 case PLUS_EXPR:
6902 /* If sum of pointer + int, restrict our maximum alignment to that
6903 imposed by the integer. If not, we can't do any better than
6904 ALIGN. */
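/* For instance, with BITS_PER_UNIT 8 and MAX_ALIGN 64, a constant
   byte offset of 2 contributes 16 bits, so the loop below narrows
   MAX_ALIGN to 16 before the pointer itself is examined.  */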
6905 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
6906 return align;
6908 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
6909 & (max_align - 1))
6910 != 0)
6911 max_align >>= 1;
6913 exp = TREE_OPERAND (exp, 0);
6914 break;
6916 case ADDR_EXPR:
6917 /* See what we are pointing at and look at its alignment. */
6918 exp = TREE_OPERAND (exp, 0);
6919 if (TREE_CODE (exp) == FUNCTION_DECL)
6920 align = FUNCTION_BOUNDARY;
6921 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
6922 align = DECL_ALIGN (exp);
6923 #ifdef CONSTANT_ALIGNMENT
6924 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
6925 align = CONSTANT_ALIGNMENT (exp, align);
6926 #endif
6927 return MIN (align, max_align);
6929 default:
6930 return align;
6935 /* Return the tree node and offset if a given argument corresponds to
6936 a string constant. */
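/* For example, `"hello" + 2' (the string having decayed to an
   ADDR_EXPR) yields the STRING_CST "hello" with *PTR_OFFSET set to
   the INTEGER_CST 2.  */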
6938 static tree
6939 string_constant (arg, ptr_offset)
6940 tree arg;
6941 tree *ptr_offset;
6943 STRIP_NOPS (arg);
6945 if (TREE_CODE (arg) == ADDR_EXPR
6946 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
6948 *ptr_offset = integer_zero_node;
6949 return TREE_OPERAND (arg, 0);
6951 else if (TREE_CODE (arg) == PLUS_EXPR)
6953 tree arg0 = TREE_OPERAND (arg, 0);
6954 tree arg1 = TREE_OPERAND (arg, 1);
6956 STRIP_NOPS (arg0);
6957 STRIP_NOPS (arg1);
6959 if (TREE_CODE (arg0) == ADDR_EXPR
6960 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
6962 *ptr_offset = arg1;
6963 return TREE_OPERAND (arg0, 0);
6965 else if (TREE_CODE (arg1) == ADDR_EXPR
6966 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
6968 *ptr_offset = arg0;
6969 return TREE_OPERAND (arg1, 0);
6973 return 0;
6976 /* Compute the length of a C string. TREE_STRING_LENGTH is not the
6977 answer, because the string could contain a zero byte in the middle;
6978 TREE_STRING_LENGTH is the size of the character array, not the string.
6980 Unfortunately, string_constant can't access the values of const char
6981 arrays with initializers, so neither can we here. */
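/* For example, for the literal "foo\0bar" TREE_STRING_LENGTH is 8
   (the size of the array, counting the appended null), while the
   length computed here for offset 0 is 3.  */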
6983 static tree
6984 c_strlen (src)
6985 tree src;
6987 tree offset_node;
6988 int offset, max;
6989 char *ptr;
6991 src = string_constant (src, &offset_node);
6992 if (src == 0)
6993 return 0;
6994 max = TREE_STRING_LENGTH (src);
6995 ptr = TREE_STRING_POINTER (src);
6996 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
6998 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
6999 compute the offset to the following null if we don't know where to
7000 start searching for it. */
7001 int i;
7002 for (i = 0; i < max; i++)
7003 if (ptr[i] == 0)
7004 return 0;
7005 /* We don't know the starting offset, but we do know that the string
7006 has no internal zero bytes. We can assume that the offset falls
7007 within the bounds of the string; otherwise, the programmer deserves
7008 what he gets. Subtract the offset from the length of the string,
7009 and return that. */
7010 /* This would perhaps not be valid if we were dealing with named
7011 arrays in addition to literal string constants. */
7012 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7015 /* We have a known offset into the string. Start searching there for
7016 a null character. */
7017 if (offset_node == 0)
7018 offset = 0;
7019 else
7021 /* Did we get a long long offset? If so, punt. */
7022 if (TREE_INT_CST_HIGH (offset_node) != 0)
7023 return 0;
7024 offset = TREE_INT_CST_LOW (offset_node);
7026 /* If the offset is known to be out of bounds, warn, and call strlen at
7027 runtime. */
7028 if (offset < 0 || offset > max)
7030 warning ("offset outside bounds of constant string");
7031 return 0;
7033 /* Use strlen to search for the first zero byte. Since any strings
7034 constructed with build_string will have nulls appended, we win even
7035 if we get handed something like (char[4])"abcd".
7037 Since OFFSET is our starting index into the string, no further
7038 calculation is needed. */
7039 return size_int (strlen (ptr + offset));
7042 /* Expand an expression EXP that calls a built-in function,
7043 with result going to TARGET if that's convenient
7044 (and in mode MODE if that's convenient).
7045 SUBTARGET may be used as the target for computing one of EXP's operands.
7046 IGNORE is nonzero if the value is to be ignored. */
7048 #define CALLED_AS_BUILT_IN(NODE) \
7049 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
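/* E.g. this is true for a call spelled `__builtin_memcpy' but false
   for the same function called as plain `memcpy'.  */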
7051 static rtx
7052 expand_builtin (exp, target, subtarget, mode, ignore)
7053 tree exp;
7054 rtx target;
7055 rtx subtarget;
7056 enum machine_mode mode;
7057 int ignore;
7059 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7060 tree arglist = TREE_OPERAND (exp, 1);
7061 rtx op0;
7062 rtx lab1, insns;
7063 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
7064 optab builtin_optab;
7066 switch (DECL_FUNCTION_CODE (fndecl))
7068 case BUILT_IN_ABS:
7069 case BUILT_IN_LABS:
7070 case BUILT_IN_FABS:
7071 /* build_function_call changes these into ABS_EXPR. */
7072 abort ();
7074 case BUILT_IN_SIN:
7075 case BUILT_IN_COS:
7076 /* Treat these like sqrt, but only if the user asks for them. */
7077 if (! flag_fast_math)
7078 break;
7079 case BUILT_IN_FSQRT:
7080 /* If not optimizing, call the library function. */
7081 if (! optimize)
7082 break;
7084 if (arglist == 0
7085 /* Arg could be wrong type if user redeclared this fcn wrong. */
7086 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
7087 break;
7089 /* Stabilize and compute the argument. */
7090 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
7091 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
7093 exp = copy_node (exp);
7094 arglist = copy_node (arglist);
7095 TREE_OPERAND (exp, 1) = arglist;
7096 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
7098 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7100 /* Make a suitable register to place result in. */
7101 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7103 emit_queue ();
7104 start_sequence ();
7106 switch (DECL_FUNCTION_CODE (fndecl))
7108 case BUILT_IN_SIN:
7109 builtin_optab = sin_optab; break;
7110 case BUILT_IN_COS:
7111 builtin_optab = cos_optab; break;
7112 case BUILT_IN_FSQRT:
7113 builtin_optab = sqrt_optab; break;
7114 default:
7115 abort ();
7118 /* Compute into TARGET.
7119 Set TARGET to wherever the result comes back. */
7120 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7121 builtin_optab, op0, target, 0);
7123 /* If we were unable to expand via the builtin, stop the
7124 sequence (without outputting the insns) and break, causing
7125 a call to the library function. */
7126 if (target == 0)
7128 end_sequence ();
7129 break;
7132 /* Check the results by default. But if flag_fast_math is turned on,
7133 then assume sqrt will always be called with valid arguments. */
7135 if (! flag_fast_math)
7137 /* Don't define the builtin FP instructions
7138 if your machine is not IEEE. */
7139 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
7140 abort ();
7142 lab1 = gen_label_rtx ();
7144 /* Test the result; if it is NaN, set errno=EDOM because
7145 the argument was not in the domain. */
7146 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
7147 emit_jump_insn (gen_beq (lab1));
7149 #ifdef TARGET_EDOM
7151 #ifdef GEN_ERRNO_RTX
7152 rtx errno_rtx = GEN_ERRNO_RTX;
7153 #else
7154 rtx errno_rtx
7155 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
7156 #endif
7158 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
7160 #else
7161 /* We can't set errno=EDOM directly; let the library call do it.
7162 Pop the arguments right away in case the call gets deleted. */
7163 NO_DEFER_POP;
7164 expand_call (exp, target, 0);
7165 OK_DEFER_POP;
7166 #endif
7168 emit_label (lab1);
7171 /* Output the entire sequence. */
7172 insns = get_insns ();
7173 end_sequence ();
7174 emit_insns (insns);
7176 return target;
7178 /* __builtin_apply_args returns block of memory allocated on
7179 the stack into which is stored the arg pointer, structure
7180 value address, static chain, and all the registers that might
7181 possibly be used in performing a function call. The code is
7182 moved to the start of the function so the incoming values are
7183 saved. */
7184 case BUILT_IN_APPLY_ARGS:
7185 /* Don't do __builtin_apply_args more than once in a function.
7186 Save the result of the first call and reuse it. */
7187 if (apply_args_value != 0)
7188 return apply_args_value;
7190 /* When this function is called, it means that registers must be
7191 saved on entry to this function. So we migrate the
7192 call to the first insn of this function. */
7193 rtx temp;
7194 rtx seq;
7196 start_sequence ();
7197 temp = expand_builtin_apply_args ();
7198 seq = get_insns ();
7199 end_sequence ();
7201 apply_args_value = temp;
7203 /* Put the sequence after the NOTE that starts the function.
7204 If this is inside a SEQUENCE, make the outer-level insn
7205 chain current, so the code is placed at the start of the
7206 function. */
7207 push_topmost_sequence ();
7208 emit_insns_before (seq, NEXT_INSN (get_insns ()));
7209 pop_topmost_sequence ();
7210 return temp;
7213 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7214 FUNCTION with a copy of the parameters described by
7215 ARGUMENTS, and ARGSIZE. It returns a block of memory
7216 allocated on the stack into which is stored all the registers
7217 that might possibly be used for returning the result of a
7218 function. ARGUMENTS is the value returned by
7219 __builtin_apply_args. ARGSIZE is the number of bytes of
7220 arguments that must be copied. ??? How should this value be
7221 computed? We'll also need a safe worst case value for varargs
7222 functions. */
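/* A typical use, as a sketch (FN and SIZE supplied by the caller):
     void *args = __builtin_apply_args ();
     ...
     __builtin_return (__builtin_apply (fn, args, size));  */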
7223 case BUILT_IN_APPLY:
7224 if (arglist == 0
7225 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7226 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7227 || TREE_CHAIN (arglist) == 0
7228 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7229 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7230 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7231 return const0_rtx;
7232 else
7234 int i;
7235 tree t;
7236 rtx ops[3];
7238 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
7239 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
7241 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7244 /* __builtin_return (RESULT) causes the function to return the
7245 value described by RESULT. RESULT is address of the block of
7246 memory returned by __builtin_apply. */
7247 case BUILT_IN_RETURN:
7248 if (arglist
7249 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7250 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
7251 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
7252 NULL_RTX, VOIDmode, 0));
7253 return const0_rtx;
7255 case BUILT_IN_SAVEREGS:
7256 /* Don't do __builtin_saveregs more than once in a function.
7257 Save the result of the first call and reuse it. */
7258 if (saveregs_value != 0)
7259 return saveregs_value;
7261 /* When this function is called, it means that registers must be
7262 saved on entry to this function. So we migrate the
7263 call to the first insn of this function. */
7264 rtx temp;
7265 rtx seq;
7267 /* Now really call the function. `expand_call' does not call
7268 expand_builtin, so there is no danger of infinite recursion here. */
7269 start_sequence ();
7271 #ifdef EXPAND_BUILTIN_SAVEREGS
7272 /* Do whatever the machine needs done in this case. */
7273 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
7274 #else
7275 /* The register where the function returns its value
7276 is likely to have something else in it, such as an argument.
7277 So preserve that register around the call. */
7279 if (value_mode != VOIDmode)
7281 rtx valreg = hard_libcall_value (value_mode);
7282 rtx saved_valreg = gen_reg_rtx (value_mode);
7284 emit_move_insn (saved_valreg, valreg);
7285 temp = expand_call (exp, target, ignore);
7286 emit_move_insn (valreg, saved_valreg);
7288 else
7289 /* Generate the call, putting the value in a pseudo. */
7290 temp = expand_call (exp, target, ignore);
7291 #endif
7293 seq = get_insns ();
7294 end_sequence ();
7296 saveregs_value = temp;
7298 /* Put the sequence after the NOTE that starts the function.
7299 If this is inside a SEQUENCE, make the outer-level insn
7300 chain current, so the code is placed at the start of the
7301 function. */
7302 push_topmost_sequence ();
7303 emit_insns_before (seq, NEXT_INSN (get_insns ()));
7304 pop_topmost_sequence ();
7305 return temp;
7308 /* __builtin_args_info (N) returns word N of the arg space info
7309 for the current function. The number and meanings of words
7310 is controlled by the definition of CUMULATIVE_ARGS. */
7311 case BUILT_IN_ARGS_INFO:
7313 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
7314 int i;
7315 int *word_ptr = (int *) &current_function_args_info;
7316 tree type, elts, result;
7318 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
7319 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
7320 __FILE__, __LINE__);
7322 if (arglist != 0)
7324 tree arg = TREE_VALUE (arglist);
7325 if (TREE_CODE (arg) != INTEGER_CST)
7326 error ("argument of `__builtin_args_info' must be constant");
7327 else
7329 int wordnum = TREE_INT_CST_LOW (arg);
7331 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
7332 error ("argument of `__builtin_args_info' out of range");
7333 else
7334 return GEN_INT (word_ptr[wordnum]);
7337 else
7338 error ("missing argument in `__builtin_args_info'");
7340 return const0_rtx;
7342 #if 0
7343 for (elts = 0, i = 0; i < nwords; i++)
7344 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
7346 type = build_array_type (integer_type_node,
7347 build_index_type (build_int_2 (nwords, 0)));
7348 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
7349 TREE_CONSTANT (result) = 1;
7350 TREE_STATIC (result) = 1;
7351 result = build (INDIRECT_REF, build_pointer_type (type), result);
7352 TREE_CONSTANT (result) = 1;
7353 return expand_expr (result, NULL_RTX, VOIDmode, 0);
7354 #endif
7357 /* Return the address of the first anonymous stack arg. */
7358 case BUILT_IN_NEXT_ARG:
7360 tree fntype = TREE_TYPE (current_function_decl);
7362 if ((TYPE_ARG_TYPES (fntype) == 0
7363 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
7364 == void_type_node))
7365 && ! current_function_varargs)
7367 error ("`va_start' used in function with fixed args");
7368 return const0_rtx;
7371 if (arglist)
7373 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
7374 tree arg = TREE_VALUE (arglist);
7376 /* Strip off all nops for the sake of the comparison. This
7377 is not quite the same as STRIP_NOPS. It does more. */
7378 while (TREE_CODE (arg) == NOP_EXPR
7379 || TREE_CODE (arg) == CONVERT_EXPR
7380 || TREE_CODE (arg) == NON_LVALUE_EXPR)
7381 arg = TREE_OPERAND (arg, 0);
7382 if (arg != last_parm)
7383 warning ("second parameter of `va_start' not last named argument");
7385 else
7386 /* Evidently an out of date version of <stdarg.h>; can't validate
7387 va_start's second argument, but can still work as intended. */
7388 warning ("`__builtin_next_arg' called without an argument");
7391 return expand_binop (Pmode, add_optab,
7392 current_function_internal_arg_pointer,
7393 current_function_arg_offset_rtx,
7394 NULL_RTX, 0, OPTAB_LIB_WIDEN);
7396 case BUILT_IN_CLASSIFY_TYPE:
7397 if (arglist != 0)
7399 tree type = TREE_TYPE (TREE_VALUE (arglist));
7400 enum tree_code code = TREE_CODE (type);
7401 if (code == VOID_TYPE)
7402 return GEN_INT (void_type_class);
7403 if (code == INTEGER_TYPE)
7404 return GEN_INT (integer_type_class);
7405 if (code == CHAR_TYPE)
7406 return GEN_INT (char_type_class);
7407 if (code == ENUMERAL_TYPE)
7408 return GEN_INT (enumeral_type_class);
7409 if (code == BOOLEAN_TYPE)
7410 return GEN_INT (boolean_type_class);
7411 if (code == POINTER_TYPE)
7412 return GEN_INT (pointer_type_class);
7413 if (code == REFERENCE_TYPE)
7414 return GEN_INT (reference_type_class);
7415 if (code == OFFSET_TYPE)
7416 return GEN_INT (offset_type_class);
7417 if (code == REAL_TYPE)
7418 return GEN_INT (real_type_class);
7419 if (code == COMPLEX_TYPE)
7420 return GEN_INT (complex_type_class);
7421 if (code == FUNCTION_TYPE)
7422 return GEN_INT (function_type_class);
7423 if (code == METHOD_TYPE)
7424 return GEN_INT (method_type_class);
7425 if (code == RECORD_TYPE)
7426 return GEN_INT (record_type_class);
7427 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
7428 return GEN_INT (union_type_class);
7429 if (code == ARRAY_TYPE)
7431 if (TYPE_STRING_FLAG (type))
7432 return GEN_INT (string_type_class);
7433 else
7434 return GEN_INT (array_type_class);
7436 if (code == SET_TYPE)
7437 return GEN_INT (set_type_class);
7438 if (code == FILE_TYPE)
7439 return GEN_INT (file_type_class);
7440 if (code == LANG_TYPE)
7441 return GEN_INT (lang_type_class);
7443 return GEN_INT (no_type_class);
7445 case BUILT_IN_CONSTANT_P:
7446 if (arglist == 0)
7447 return const0_rtx;
7448 else
7449 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
7450 ? const1_rtx : const0_rtx);
7452 case BUILT_IN_FRAME_ADDRESS:
7453 /* The argument must be a nonnegative integer constant.
7454 It counts the number of frames to scan up the stack.
7455 The value is the address of that frame. */
7456 case BUILT_IN_RETURN_ADDRESS:
7457 /* The argument must be a nonnegative integer constant.
7458 It counts the number of frames to scan up the stack.
7459 The value is the return address saved in that frame. */
7460 if (arglist == 0)
7461 /* Warning about missing arg was already issued. */
7462 return const0_rtx;
7463 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
7465 error ("invalid arg to `__builtin_return_address'");
7466 return const0_rtx;
7468 else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
7470 error ("invalid arg to `__builtin_return_address'");
7471 return const0_rtx;
7473 else
7475 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
7476 rtx tem = frame_pointer_rtx;
7477 int i;
7479 /* Some machines need special handling before we can access arbitrary
7480 frames. For example, on the sparc, we must first flush all
7481 register windows to the stack. */
7482 #ifdef SETUP_FRAME_ADDRESSES
7483 SETUP_FRAME_ADDRESSES ();
7484 #endif
7486 /* On the sparc, the return address is not in the frame, it is
7487 in a register. There is no way to access it off of the current
7488 frame pointer, but it can be accessed off the previous frame
7489 pointer by reading the value from the register window save
7490 area. */
7491 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7492 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
7493 count--;
7494 #endif
7496 /* Scan back COUNT frames to the specified frame. */
7497 for (i = 0; i < count; i++)
7499 /* Assume the dynamic chain pointer is in the word that
7500 the frame address points to, unless otherwise specified. */
7501 #ifdef DYNAMIC_CHAIN_ADDRESS
7502 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7503 #endif
7504 tem = memory_address (Pmode, tem);
7505 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7508 /* For __builtin_frame_address, return what we've got. */
7509 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
7510 return tem;
7512 /* For __builtin_return_address,
7513 get the return address from that frame. */
7514 #ifdef RETURN_ADDR_RTX
7515 return RETURN_ADDR_RTX (count, tem);
7516 #else
7517 tem = memory_address (Pmode,
7518 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7519 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
7520 #endif
7523 case BUILT_IN_ALLOCA:
7524 if (arglist == 0
7525 /* Arg could be non-integer if user redeclared this fcn wrong. */
7526 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7527 break;
7529 /* Compute the argument. */
7530 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
7532 /* Allocate the desired space. */
7533 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
7535 case BUILT_IN_FFS:
7536 /* If not optimizing, call the library function. */
7537 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7538 break;
7540 if (arglist == 0
7541 /* Arg could be non-integer if user redeclared this fcn wrong. */
7542 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7543 break;
7545 /* Compute the argument. */
7546 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7547 /* Compute ffs, into TARGET if possible.
7548 Set TARGET to wherever the result comes back. */
7549 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7550 ffs_optab, op0, target, 1);
7551 if (target == 0)
7552 abort ();
7553 return target;
7555 case BUILT_IN_STRLEN:
7556 /* If not optimizing, call the library function. */
7557 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7558 break;
7560 if (arglist == 0
7561 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7562 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
7563 break;
7564 else
7566 tree src = TREE_VALUE (arglist);
7567 tree len = c_strlen (src);
7569 int align
7570 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7572 rtx result, src_rtx, char_rtx;
7573 enum machine_mode insn_mode = value_mode, char_mode;
7574 enum insn_code icode;
7576 /* If the length is known, just return it. */
7577 if (len != 0)
7578 return expand_expr (len, target, mode, 0);
7580 /* If SRC is not a pointer type, don't do this operation inline. */
7581 if (align == 0)
7582 break;
7584 /* Call a function if we can't compute strlen in the right mode. */
7586 while (insn_mode != VOIDmode)
7588 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
7589 if (icode != CODE_FOR_nothing)
7590 break;
7592 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
7594 if (insn_mode == VOIDmode)
7595 break;
7597 /* Make a place to write the result of the instruction. */
7598 result = target;
7599 if (! (result != 0
7600 && GET_CODE (result) == REG
7601 && GET_MODE (result) == insn_mode
7602 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7603 result = gen_reg_rtx (insn_mode);
7605 /* Make sure the operands are acceptable to the predicates. */
7607 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
7608 result = gen_reg_rtx (insn_mode);
7610 src_rtx = memory_address (BLKmode,
7611 expand_expr (src, NULL_RTX, Pmode,
7612 EXPAND_NORMAL));
7613 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
7614 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
7616 char_rtx = const0_rtx;
7617 char_mode = insn_operand_mode[(int)icode][2];
7618 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
7619 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
7621 emit_insn (GEN_FCN (icode) (result,
7622 gen_rtx (MEM, BLKmode, src_rtx),
7623 char_rtx, GEN_INT (align)));
7625 /* Return the value in the proper mode for this function. */
7626 if (GET_MODE (result) == value_mode)
7627 return result;
7628 else if (target != 0)
7630 convert_move (target, result, 0);
7631 return target;
7633 else
7634 return convert_to_mode (value_mode, result, 0);
7637 case BUILT_IN_STRCPY:
7638 /* If not optimizing, call the library function. */
7639 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7640 break;
7642 if (arglist == 0
7643 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7644 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7645 || TREE_CHAIN (arglist) == 0
7646 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7647 break;
7648 else
7650 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
7652 if (len == 0)
7653 break;
7655 len = size_binop (PLUS_EXPR, len, integer_one_node);
7657 chainon (arglist, build_tree_list (NULL_TREE, len));
7660 /* Falls through into the BUILT_IN_MEMCPY case. */
7661 case BUILT_IN_MEMCPY:
7662 /* If not optimizing, call the library function. */
7663 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7664 break;
7666 if (arglist == 0
7667 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7668 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7669 || TREE_CHAIN (arglist) == 0
7670 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7671 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7672 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7673 break;
7674 else
7676 tree dest = TREE_VALUE (arglist);
7677 tree src = TREE_VALUE (TREE_CHAIN (arglist));
7678 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7680 int src_align
7681 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7682 int dest_align
7683 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7684 rtx dest_rtx, dest_mem, src_mem;
7686 /* If either SRC or DEST is not a pointer type, don't do
7687 this operation in-line. */
7688 if (src_align == 0 || dest_align == 0)
7690 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
7691 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7692 break;
7695 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
7696 dest_mem = gen_rtx (MEM, BLKmode,
7697 memory_address (BLKmode, dest_rtx));
7698 src_mem = gen_rtx (MEM, BLKmode,
7699 memory_address (BLKmode,
7700 expand_expr (src, NULL_RTX,
7701 Pmode,
7702 EXPAND_NORMAL)));
7704 /* Copy word part most expediently. */
7705 emit_block_move (dest_mem, src_mem,
7706 expand_expr (len, NULL_RTX, VOIDmode, 0),
7707 MIN (src_align, dest_align));
7708 return dest_rtx;
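/* An illustrative sketch of what the code above produces: a call
   __builtin_memcpy (dst, src, 32) with word-aligned operands reaches
   emit_block_move with alignment MIN (src_align, dest_align), which
   typically lets a movstr-style pattern or a word-at-a-time loop do
   the copy; and via the fall-through from BUILT_IN_STRCPY, a call
   strcpy (dst, "abc") whose source length is known to c_strlen is
   handled the same way with a length argument of 4.  */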
7711 /* These comparison functions need an instruction that returns an actual
7712 index. An ordinary compare that just sets the condition codes
7713 is not enough. */
7714 #ifdef HAVE_cmpstrsi
7715 case BUILT_IN_STRCMP:
7716 /* If not optimizing, call the library function. */
7717 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7718 break;
7720 if (arglist == 0
7721 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7722 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7723 || TREE_CHAIN (arglist) == 0
7724 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7725 break;
7726 else if (!HAVE_cmpstrsi)
7727 break;
7729 tree arg1 = TREE_VALUE (arglist);
7730 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7731 tree offset;
7732 tree len, len2;
7734 len = c_strlen (arg1);
7735 if (len)
7736 len = size_binop (PLUS_EXPR, integer_one_node, len);
7737 len2 = c_strlen (arg2);
7738 if (len2)
7739 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
7741 /* If we don't have a constant length for the first, use the length
7742 of the second, if we know it. We don't require a constant for
7743 this case; some cost analysis could be done if both are available
7744 but neither is constant. For now, assume they're equally cheap.
7746 If both strings have constant lengths, use the smaller. This
7747 could arise if optimization results in strcmp being called with
7748 two fixed strings, or if the code was machine-generated. We should
7749 add some code to the `memcmp' handler below to deal with such
7750 situations, someday. */
7751 if (!len || TREE_CODE (len) != INTEGER_CST)
7753 if (len2)
7754 len = len2;
7755 else if (len == 0)
7756 break;
7758 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
7760 if (tree_int_cst_lt (len2, len))
7761 len = len2;
7764 chainon (arglist, build_tree_list (NULL_TREE, len));
7767 /* Falls through into the BUILT_IN_MEMCMP case. */
7768 case BUILT_IN_MEMCMP:
7769 /* If not optimizing, call the library function. */
7770 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7771 break;
7773 if (arglist == 0
7774 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7775 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7776 || TREE_CHAIN (arglist) == 0
7777 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7778 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7779 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7780 break;
7781 else if (!HAVE_cmpstrsi)
7782 break;
7784 tree arg1 = TREE_VALUE (arglist);
7785 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7786 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7787 rtx result;
7789 int arg1_align
7790 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7791 int arg2_align
7792 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7793 enum machine_mode insn_mode
7794 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
7796 /* If either argument isn't a pointer, call the library function. */
7797 if (arg1_align == 0 || arg2_align == 0)
7799 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
7800 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7801 break;
7804 /* Make a place to write the result of the instruction. */
7805 result = target;
7806 if (! (result != 0
7807 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
7808 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7809 result = gen_reg_rtx (insn_mode);
7811 emit_insn (gen_cmpstrsi (result,
7812 gen_rtx (MEM, BLKmode,
7813 expand_expr (arg1, NULL_RTX, Pmode,
7814 EXPAND_NORMAL)),
7815 gen_rtx (MEM, BLKmode,
7816 expand_expr (arg2, NULL_RTX, Pmode,
7817 EXPAND_NORMAL)),
7818 expand_expr (len, NULL_RTX, VOIDmode, 0),
7819 GEN_INT (MIN (arg1_align, arg2_align))));
7821 /* Return the value in the proper mode for this function. */
7822 mode = TYPE_MODE (TREE_TYPE (exp));
7823 if (GET_MODE (result) == mode)
7824 return result;
7825 else if (target != 0)
7827 convert_move (target, result, 0);
7828 return target;
7830 else
7831 return convert_to_mode (mode, result, 0);
7833 #else
7834 case BUILT_IN_STRCMP:
7835 case BUILT_IN_MEMCMP:
7836 break;
7837 #endif
7839 default: /* just do library call, if unknown builtin */
7840 error ("built-in function `%s' not currently supported",
7841 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
7844 /* The switch statement above can drop through to cause the function
7845 to be called normally. */
7847 return expand_call (exp, target, ignore);
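/* An illustrative example of the two paths through expand_builtin:
   for

       size_t n = __builtin_strlen ("hello");

   the BUILT_IN_STRLEN case finds the length via c_strlen and expands
   to the constant 5 with no call at all, while a strlen of a run-time
   string on a machine with no strlen pattern hits a `break' and falls
   out to the expand_call just above, producing an ordinary library
   call.  */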
7850 /* Built-in functions to perform an untyped call and return. */
7852 /* For each register that may be used for calling a function, this
7853 gives a mode used to copy the register's value. VOIDmode indicates
7854 the register is not used for calling a function. If the machine
7855 has register windows, this gives only the outbound registers.
7856 INCOMING_REGNO gives the corresponding inbound register. */
7857 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
7859 /* For each register that may be used for returning values, this gives
7860 a mode used to copy the register's value. VOIDmode indicates the
7861 register is not used for returning values. If the machine has
7862 register windows, this gives only the outbound registers.
7863 INCOMING_REGNO gives the corresponding inbound register. */
7864 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
7866 /* For each register that may be used for calling a function, this
7867 gives the offset of that register into the block returned by
7868 __builtin_apply_args. 0 indicates that the register is not
7869 used for calling a function. */
7870 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
7872 /* Return the offset of register REGNO into the block returned by
7873 __builtin_apply_args. This is not declared static, since it is
7874 needed in objc-act.c. */
7876 int
7877 apply_args_register_offset (regno)
7878 int regno;
7880 apply_args_size ();
7882 /* Arguments are always put in outgoing registers (in the argument
7883 block) when that makes sense. */
7884 #ifdef OUTGOING_REGNO
7885 regno = OUTGOING_REGNO(regno);
7886 #endif
7887 return apply_args_reg_offset[regno];
7890 /* Return the size required for the block returned by __builtin_apply_args,
7891 and initialize apply_args_mode. */
7893 static int
7894 apply_args_size ()
7896 static int size = -1;
7897 int align, regno;
7898 enum machine_mode mode;
7900 /* The values computed by this function never change. */
7901 if (size < 0)
7903 /* The first value is the incoming arg-pointer. */
7904 size = GET_MODE_SIZE (Pmode);
7906 /* The second value is the structure value address unless this is
7907 passed as an "invisible" first argument. */
7908 if (struct_value_rtx)
7909 size += GET_MODE_SIZE (Pmode);
7911 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7912 if (FUNCTION_ARG_REGNO_P (regno))
7914 /* Search for the proper mode for copying this register's
7915 value. I'm not sure this is right, but it works so far. */
7916 enum machine_mode best_mode = VOIDmode;
7918 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
7919 mode != VOIDmode;
7920 mode = GET_MODE_WIDER_MODE (mode))
7921 if (HARD_REGNO_MODE_OK (regno, mode)
7922 && HARD_REGNO_NREGS (regno, mode) == 1)
7923 best_mode = mode;
7925 if (best_mode == VOIDmode)
7926 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
7927 mode != VOIDmode;
7928 mode = GET_MODE_WIDER_MODE (mode))
7929 if (HARD_REGNO_MODE_OK (regno, mode)
7930 && (mov_optab->handlers[(int) mode].insn_code
7931 != CODE_FOR_nothing))
7932 best_mode = mode;
7934 mode = best_mode;
7935 if (mode == VOIDmode)
7936 abort ();
7938 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7939 if (size % align != 0)
7940 size = CEIL (size, align) * align;
7941 apply_args_reg_offset[regno] = size;
7942 size += GET_MODE_SIZE (mode);
7943 apply_args_mode[regno] = mode;
7945 else
7947 apply_args_mode[regno] = VOIDmode;
7948 apply_args_reg_offset[regno] = 0;
7951 return size;
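/* A sketch of the resulting layout, assuming a hypothetical target
   with argument registers r0 (SImode) and f0 (DFmode), a 4-byte
   Pmode, and no struct_value_rtx:

       offset  0:  incoming arg pointer  (4 bytes)
       offset  4:  r0                    (4 bytes)
       offset  8:  f0                    (8 bytes, aligned to 8)

   so apply_args_size would return 16, with apply_args_reg_offset
   recording 4 for r0 and 8 for f0.  */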
7954 /* Return the size required for the block returned by __builtin_apply,
7955 and initialize apply_result_mode. */
7957 static int
7958 apply_result_size ()
7960 static int size = -1;
7961 int align, regno;
7962 enum machine_mode mode;
7964 /* The values computed by this function never change. */
7965 if (size < 0)
7967 size = 0;
7969 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7970 if (FUNCTION_VALUE_REGNO_P (regno))
7972 /* Search for the proper mode for copying this register's
7973 value. I'm not sure this is right, but it works so far. */
7974 enum machine_mode best_mode = VOIDmode;
7976 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
7977 mode != TImode;
7978 mode = GET_MODE_WIDER_MODE (mode))
7979 if (HARD_REGNO_MODE_OK (regno, mode))
7980 best_mode = mode;
7982 if (best_mode == VOIDmode)
7983 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
7984 mode != VOIDmode;
7985 mode = GET_MODE_WIDER_MODE (mode))
7986 if (HARD_REGNO_MODE_OK (regno, mode)
7987 && (mov_optab->handlers[(int) mode].insn_code
7988 != CODE_FOR_nothing))
7989 best_mode = mode;
7991 mode = best_mode;
7992 if (mode == VOIDmode)
7993 abort ();
7995 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7996 if (size % align != 0)
7997 size = CEIL (size, align) * align;
7998 size += GET_MODE_SIZE (mode);
7999 apply_result_mode[regno] = mode;
8001 else
8002 apply_result_mode[regno] = VOIDmode;
8004 /* Allow targets that use untyped_call and untyped_return to override
8005 the size so that machine-specific information can be stored here. */
8006 #ifdef APPLY_RESULT_SIZE
8007 size = APPLY_RESULT_SIZE;
8008 #endif
8010 return size;
8013 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
8014 /* Create a vector describing the result block RESULT. If SAVEP is true,
8015 the result block is used to save the values; otherwise it is used to
8016 restore the values. */
8018 static rtx
8019 result_vector (savep, result)
8020 int savep;
8021 rtx result;
8023 int regno, size, align, nelts;
8024 enum machine_mode mode;
8025 rtx reg, mem;
8026 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
8028 size = nelts = 0;
8029 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8030 if ((mode = apply_result_mode[regno]) != VOIDmode)
8032 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8033 if (size % align != 0)
8034 size = CEIL (size, align) * align;
8035 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
8036 mem = change_address (result, mode,
8037 plus_constant (XEXP (result, 0), size));
8038 savevec[nelts++] = (savep
8039 ? gen_rtx (SET, VOIDmode, mem, reg)
8040 : gen_rtx (SET, VOIDmode, reg, mem));
8041 size += GET_MODE_SIZE (mode);
8043 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
8045 #endif /* HAVE_untyped_call or HAVE_untyped_return */
8047 /* Save the state required to perform an untyped call with the same
8048 arguments as were passed to the current function. */
8050 static rtx
8051 expand_builtin_apply_args ()
8053 rtx registers;
8054 int size, align, regno;
8055 enum machine_mode mode;
8057 /* Create a block where the arg-pointer, structure value address,
8058 and argument registers can be saved. */
8059 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
8061 /* Walk past the arg-pointer and structure value address. */
8062 size = GET_MODE_SIZE (Pmode);
8063 if (struct_value_rtx)
8064 size += GET_MODE_SIZE (Pmode);
8066 /* Save each register used in calling a function to the block. */
8067 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8068 if ((mode = apply_args_mode[regno]) != VOIDmode)
8070 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8071 if (size % align != 0)
8072 size = CEIL (size, align) * align;
8073 emit_move_insn (change_address (registers, mode,
8074 plus_constant (XEXP (registers, 0),
8075 size)),
8076 gen_rtx (REG, mode, INCOMING_REGNO (regno)));
8077 size += GET_MODE_SIZE (mode);
8080 /* Save the arg pointer to the block. */
8081 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
8082 copy_to_reg (virtual_incoming_args_rtx));
8083 size = GET_MODE_SIZE (Pmode);
8085 /* Save the structure value address unless this is passed as an
8086 "invisible" first argument. */
8087 if (struct_value_incoming_rtx)
8089 emit_move_insn (change_address (registers, Pmode,
8090 plus_constant (XEXP (registers, 0),
8091 size)),
8092 copy_to_reg (struct_value_incoming_rtx));
8093 size += GET_MODE_SIZE (Pmode);
8096 /* Return the address of the block. */
8097 return copy_addr_to_reg (XEXP (registers, 0));
8100 /* Perform an untyped call and save the state required to perform an
8101 untyped return of whatever value was returned by the given function. */
8103 static rtx
8104 expand_builtin_apply (function, arguments, argsize)
8105 rtx function, arguments, argsize;
8107 int size, align, regno;
8108 enum machine_mode mode;
8109 rtx incoming_args, result, reg, dest, call_insn;
8110 rtx old_stack_level = 0;
8111 rtx call_fusage = 0;
8113 /* Create a block where the return registers can be saved. */
8114 result = assign_stack_local (BLKmode, apply_result_size (), -1);
8116 /* ??? The argsize value should be adjusted here. */
8118 /* Fetch the arg pointer from the ARGUMENTS block. */
8119 incoming_args = gen_reg_rtx (Pmode);
8120 emit_move_insn (incoming_args,
8121 gen_rtx (MEM, Pmode, arguments));
8122 #ifndef STACK_GROWS_DOWNWARD
8123 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
8124 incoming_args, 0, OPTAB_LIB_WIDEN);
8125 #endif
8127 /* Perform postincrements before actually calling the function. */
8128 emit_queue ();
8130 /* Push a new argument block and copy the arguments. */
8131 do_pending_stack_adjust ();
8132 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
8134 /* Push a block of memory onto the stack to store the memory arguments.
8135 Save the address in a register, and copy the memory arguments. ??? I
8136 haven't figured out how the calling convention macros affect this,
8137 but it's likely that the source and/or destination addresses in
8138 the block copy will need updating in machine specific ways. */
8139 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
8140 emit_block_move (gen_rtx (MEM, BLKmode, dest),
8141 gen_rtx (MEM, BLKmode, incoming_args),
8142 argsize,
8143 PARM_BOUNDARY / BITS_PER_UNIT);
8145 /* Refer to the argument block. */
8146 apply_args_size ();
8147 arguments = gen_rtx (MEM, BLKmode, arguments);
8149 /* Walk past the arg-pointer and structure value address. */
8150 size = GET_MODE_SIZE (Pmode);
8151 if (struct_value_rtx)
8152 size += GET_MODE_SIZE (Pmode);
8154 /* Restore each of the registers previously saved. Make USE insns
8155 for each of these registers for use in making the call. */
8156 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8157 if ((mode = apply_args_mode[regno]) != VOIDmode)
8159 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8160 if (size % align != 0)
8161 size = CEIL (size, align) * align;
8162 reg = gen_rtx (REG, mode, regno);
8163 emit_move_insn (reg,
8164 change_address (arguments, mode,
8165 plus_constant (XEXP (arguments, 0),
8166 size)));
8168 use_reg (&call_fusage, reg);
8169 size += GET_MODE_SIZE (mode);
8172 /* Restore the structure value address unless this is passed as an
8173 "invisible" first argument. */
8174 size = GET_MODE_SIZE (Pmode);
8175 if (struct_value_rtx)
8177 rtx value = gen_reg_rtx (Pmode);
8178 emit_move_insn (value,
8179 change_address (arguments, Pmode,
8180 plus_constant (XEXP (arguments, 0),
8181 size)));
8182 emit_move_insn (struct_value_rtx, value);
8183 if (GET_CODE (struct_value_rtx) == REG)
8184 use_reg (&call_fusage, struct_value_rtx);
8185 size += GET_MODE_SIZE (Pmode);
8188 /* All arguments and registers used for the call are set up by now! */
8189 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
8191 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
8192 and we don't want to load it into a register as an optimization,
8193 because prepare_call_address already did it if it should be done. */
8194 if (GET_CODE (function) != SYMBOL_REF)
8195 function = memory_address (FUNCTION_MODE, function);
8197 /* Generate the actual call instruction and save the return value. */
8198 #ifdef HAVE_untyped_call
8199 if (HAVE_untyped_call)
8200 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
8201 result, result_vector (1, result)));
8202 else
8203 #endif
8204 #ifdef HAVE_call_value
8205 if (HAVE_call_value)
8207 rtx valreg = 0;
8209 /* Locate the unique return register. It is not possible to
8210 express a call that sets more than one return register using
8211 call_value; use untyped_call for that. In fact, untyped_call
8212 only needs to save the return registers in the given block. */
8213 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8214 if ((mode = apply_result_mode[regno]) != VOIDmode)
8216 if (valreg)
8217 abort (); /* HAVE_untyped_call required. */
8218 valreg = gen_rtx (REG, mode, regno);
8221 emit_call_insn (gen_call_value (valreg,
8222 gen_rtx (MEM, FUNCTION_MODE, function),
8223 const0_rtx, NULL_RTX, const0_rtx));
8225 emit_move_insn (change_address (result, GET_MODE (valreg),
8226 XEXP (result, 0)),
8227 valreg);
8229 else
8230 #endif
8231 abort ();
8233 /* Find the CALL insn we just emitted. */
8234 for (call_insn = get_last_insn ();
8235 call_insn && GET_CODE (call_insn) != CALL_INSN;
8236 call_insn = PREV_INSN (call_insn))
8239 if (! call_insn)
8240 abort ();
8242 /* Put the register usage information on the CALL. If there is already
8243 some usage information, put ours at the end. */
8244 if (CALL_INSN_FUNCTION_USAGE (call_insn))
8246 rtx link;
8248 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
8249 link = XEXP (link, 1))
8252 XEXP (link, 1) = call_fusage;
8254 else
8255 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
8257 /* Restore the stack. */
8258 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
8260 /* Return the address of the result block. */
8261 return copy_addr_to_reg (XEXP (result, 0));
8264 /* Perform an untyped return. */
8266 static void
8267 expand_builtin_return (result)
8268 rtx result;
8270 int size, align, regno;
8271 enum machine_mode mode;
8272 rtx reg;
8273 rtx call_fusage = 0;
8275 apply_result_size ();
8276 result = gen_rtx (MEM, BLKmode, result);
8278 #ifdef HAVE_untyped_return
8279 if (HAVE_untyped_return)
8281 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
8282 emit_barrier ();
8283 return;
8285 #endif
8287 /* Restore the return value and note that each value is used. */
8288 size = 0;
8289 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8290 if ((mode = apply_result_mode[regno]) != VOIDmode)
8292 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8293 if (size % align != 0)
8294 size = CEIL (size, align) * align;
8295 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
8296 emit_move_insn (reg,
8297 change_address (result, mode,
8298 plus_constant (XEXP (result, 0),
8299 size)));
8301 push_to_sequence (call_fusage);
8302 emit_insn (gen_rtx (USE, VOIDmode, reg));
8303 call_fusage = get_insns ();
8304 end_sequence ();
8305 size += GET_MODE_SIZE (mode);
8308 /* Put the USE insns before the return. */
8309 emit_insns (call_fusage);
8311 /* Return whatever values were restored by jumping directly to the end
8312 of the function. */
8313 expand_null_return ();
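/* Taken together, the three functions above implement untyped call
   forwarding.  A sketch of the intended source-level use, where foo
   and the block size 64 are merely illustrative:

       void *wrapper ()
       {
         void *args = __builtin_apply_args ();
         void *result = __builtin_apply ((void (*) ()) foo, args, 64);
         __builtin_return (result);
       }

   Here 64 is a caller-chosen upper bound on the size of the pushed
   argument block; see the ??? comment about argsize in
   expand_builtin_apply.  */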
8316 /* Expand code for a post- or pre- increment or decrement
8317 and return the RTX for the result.
8318 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8320 static rtx
8321 expand_increment (exp, post)
8322 register tree exp;
8323 int post;
8325 register rtx op0, op1;
8326 register rtx temp, value;
8327 register tree incremented = TREE_OPERAND (exp, 0);
8328 optab this_optab = add_optab;
8329 int icode;
8330 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8331 int op0_is_copy = 0;
8332 int single_insn = 0;
8333 /* 1 means we can't store into OP0 directly,
8334 because it is a subreg narrower than a word,
8335 and we don't dare clobber the rest of the word. */
8336 int bad_subreg = 0;
8338 if (output_bytecode)
8340 bc_expand_expr (exp);
8341 return NULL_RTX;
8344 /* Stabilize any component ref that might need to be
8345 evaluated more than once below. */
8346 if (!post
8347 || TREE_CODE (incremented) == BIT_FIELD_REF
8348 || (TREE_CODE (incremented) == COMPONENT_REF
8349 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8350 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8351 incremented = stabilize_reference (incremented);
8352 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8353 ones into save exprs so that they don't accidentally get evaluated
8354 more than once by the code below. */
8355 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8356 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8357 incremented = save_expr (incremented);
8359 /* Compute the operands as RTX.
8360 Note whether OP0 is the actual lvalue or a copy of it:
8361 I believe it is a copy iff it is a register or subreg
8362 and insns were generated in computing it. */
8364 temp = get_last_insn ();
8365 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
8367 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8368 in place but instead must do sign- or zero-extension during assignment,
8369 so we copy it into a new register and let the code below use it as
8370 a copy.
8372 Note that we can safely modify this SUBREG since it is known not to be
8373 shared (it was made by the expand_expr call above). */
8375 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8377 if (post)
8378 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8379 else
8380 bad_subreg = 1;
8382 else if (GET_CODE (op0) == SUBREG
8383 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8385 /* We cannot increment this SUBREG in place. If we are
8386 post-incrementing, get a copy of the old value. Otherwise,
8387 just mark that we cannot increment in place. */
8388 if (post)
8389 op0 = copy_to_reg (op0);
8390 else
8391 bad_subreg = 1;
8394 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8395 && temp != get_last_insn ());
8396 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8398 /* Decide whether incrementing or decrementing. */
8399 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8400 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8401 this_optab = sub_optab;
8403 /* Convert decrement by a constant into a negative increment. */
8404 if (this_optab == sub_optab
8405 && GET_CODE (op1) == CONST_INT)
8407 op1 = GEN_INT (- INTVAL (op1));
8408 this_optab = add_optab;
8411 /* For a preincrement, see if we can do this with a single instruction. */
8412 if (!post)
8414 icode = (int) this_optab->handlers[(int) mode].insn_code;
8415 if (icode != (int) CODE_FOR_nothing
8416 /* Make sure that OP0 is valid for operands 0 and 1
8417 of the insn we want to queue. */
8418 && (*insn_operand_predicate[icode][0]) (op0, mode)
8419 && (*insn_operand_predicate[icode][1]) (op0, mode)
8420 && (*insn_operand_predicate[icode][2]) (op1, mode))
8421 single_insn = 1;
8424 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8425 then we cannot just increment OP0. We must therefore contrive to
8426 increment the original value. Then, for postincrement, we can return
8427 OP0 since it is a copy of the old value. For preincrement, expand here
8428 unless we can do it with a single insn.
8430 Likewise if storing directly into OP0 would clobber high bits
8431 we need to preserve (bad_subreg). */
8432 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8434 /* This is the easiest way to increment the value wherever it is.
8435 Problems with multiple evaluation of INCREMENTED are prevented
8436 because either (1) it is a component_ref or preincrement,
8437 in which case it was stabilized above, or (2) it is an array_ref
8438 with constant index in an array in a register, which is
8439 safe to reevaluate. */
8440 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8441 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8442 ? MINUS_EXPR : PLUS_EXPR),
8443 TREE_TYPE (exp),
8444 incremented,
8445 TREE_OPERAND (exp, 1));
8446 temp = expand_assignment (incremented, newexp, ! post, 0);
8447 return post ? op0 : temp;
8450 if (post)
8452 /* We have a true reference to the value in OP0.
8453 If there is an insn to add or subtract in this mode, queue it.
8454 Queueing the increment insn avoids the register shuffling
8455 that often results if we must increment now and first save
8456 the old value for subsequent use. */
8458 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8459 op0 = stabilize (op0);
8460 #endif
8462 icode = (int) this_optab->handlers[(int) mode].insn_code;
8463 if (icode != (int) CODE_FOR_nothing
8464 /* Make sure that OP0 is valid for operands 0 and 1
8465 of the insn we want to queue. */
8466 && (*insn_operand_predicate[icode][0]) (op0, mode)
8467 && (*insn_operand_predicate[icode][1]) (op0, mode))
8469 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
8470 op1 = force_reg (mode, op1);
8472 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8476 /* Preincrement, or we can't increment with one simple insn. */
8477 if (post)
8478 /* Save a copy of the value before inc or dec, to return it later. */
8479 temp = value = copy_to_reg (op0);
8480 else
8481 /* Arrange to return the incremented value. */
8482 /* Copy the rtx because expand_binop will protect it from the queue,
8483 and the results of that would be invalid for us to return
8484 if our caller does emit_queue before using our result. */
8485 temp = copy_rtx (value = op0);
8487 /* Increment however we can. */
8488 op1 = expand_binop (mode, this_optab, value, op1, op0,
8489 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
8490 /* Make sure the value is stored into OP0. */
8491 if (op1 != op0)
8492 emit_move_insn (op0, op1);
8494 return temp;
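/* An illustration of the post/pre distinction handled above, for an
   int I living in a pseudo register:

       i++   =>  temp = copy of i; i = i + 1;  result is temp
       ++i   =>  i = i + 1;                    result is i

   and when the target has a suitable add pattern, the postincrement
   is merely queued via enqueue_insn, so the old value can be used
   without first shuffling it into a save register.  */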
8497 /* Expand all function calls contained within EXP, innermost ones first.
8498 But don't look within expressions that have sequence points.
8499 For each CALL_EXPR, record the rtx for its value
8500 in the CALL_EXPR_RTL field. */
8502 static void
8503 preexpand_calls (exp)
8504 tree exp;
8506 register int nops, i;
8507 int type = TREE_CODE_CLASS (TREE_CODE (exp));
8509 if (! do_preexpand_calls)
8510 return;
8512 /* Only expressions and references can contain calls. */
8514 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
8515 return;
8517 switch (TREE_CODE (exp))
8519 case CALL_EXPR:
8520 /* Do nothing if already expanded. */
8521 if (CALL_EXPR_RTL (exp) != 0)
8522 return;
8524 /* Do nothing to built-in functions. */
8525 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
8526 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
8527 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
8528 /* Do nothing if the call returns a variable-sized object. */
8529 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
8530 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
8531 return;
8533 case COMPOUND_EXPR:
8534 case COND_EXPR:
8535 case TRUTH_ANDIF_EXPR:
8536 case TRUTH_ORIF_EXPR:
8537 /* If we find one of these, then we can be sure
8538 the adjust will be done for it (since it makes jumps).
8539 Do it now, so that if this is inside an argument
8540 of a function, we don't get the stack adjustment
8541 after some other args have already been pushed. */
8542 do_pending_stack_adjust ();
8543 return;
8545 case BLOCK:
8546 case RTL_EXPR:
8547 case WITH_CLEANUP_EXPR:
8548 return;
8550 case SAVE_EXPR:
8551 if (SAVE_EXPR_RTL (exp) != 0)
8552 return;
8555 nops = tree_code_length[(int) TREE_CODE (exp)];
8556 for (i = 0; i < nops; i++)
8557 if (TREE_OPERAND (exp, i) != 0)
8559 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
8560 if (type == 'e' || type == '<' || type == '1' || type == '2'
8561 || type == 'r')
8562 preexpand_calls (TREE_OPERAND (exp, i));
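/* For example, given a tree for f (g (x)) + h (y), this walk expands
   the calls to f (whose argument g (x) expand_call evaluates first)
   and to h, recording their rtx in CALL_EXPR_RTL, before the
   PLUS_EXPR itself is expanded; the arms of expressions with
   sequence points, such as COMPOUND_EXPR, are deliberately not
   walked into.  */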
8566 /* At the start of a function, record that we have no previously-pushed
8567 arguments waiting to be popped. */
8569 void
8570 init_pending_stack_adjust ()
8572 pending_stack_adjust = 0;
8575 /* When exiting from function, if safe, clear out any pending stack adjust
8576 so the adjustment won't get done. */
8578 void
8579 clear_pending_stack_adjust ()
8581 #ifdef EXIT_IGNORE_STACK
8582 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
8583 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
8584 && ! flag_inline_functions)
8585 pending_stack_adjust = 0;
8586 #endif
8589 /* Pop any previously-pushed arguments that have not been popped yet. */
8591 void
8592 do_pending_stack_adjust ()
8594 if (inhibit_defer_pop == 0)
8596 if (pending_stack_adjust != 0)
8597 adjust_stack (GEN_INT (pending_stack_adjust));
8598 pending_stack_adjust = 0;
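/* As an illustration: after expanding two successive calls such as
   f (1); g (2); on a machine with push insns, the pops for both
   argument blocks accumulate in pending_stack_adjust and are emitted
   here as a single adjust_stack, rather than one stack-pointer
   adjustment per call.  */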
8602 /* Defer the expansion of all cleanups up to OLD_CLEANUPS.
8603 Returns the cleanups to be performed. */
8605 static tree
8606 defer_cleanups_to (old_cleanups)
8607 tree old_cleanups;
8609 tree new_cleanups = NULL_TREE;
8610 tree cleanups = cleanups_this_call;
8611 tree last = NULL_TREE;
8613 while (cleanups_this_call != old_cleanups)
8615 (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
8616 last = cleanups_this_call;
8617 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
8620 if (last)
8622 /* Remove the list from the chain of cleanups. */
8623 TREE_CHAIN (last) = NULL_TREE;
8625 /* Reverse them so that we can build them in the right order. */
8626 cleanups = nreverse (cleanups);
8628 while (cleanups)
8630 if (new_cleanups)
8631 new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
8632 TREE_VALUE (cleanups), new_cleanups);
8633 else
8634 new_cleanups = TREE_VALUE (cleanups);
8636 cleanups = TREE_CHAIN (cleanups);
8640 return new_cleanups;
8643 /* Expand all cleanups up to OLD_CLEANUPS.
8644 Needed here, and also for language-dependent calls. */
8646 void
8647 expand_cleanups_to (old_cleanups)
8648 tree old_cleanups;
8650 while (cleanups_this_call != old_cleanups)
8652 (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
8653 expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
8654 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
8658 /* Expand conditional expressions. */
8660 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
8661 LABEL is an rtx of code CODE_LABEL, in this function and all the
8662 functions here. */
8664 void
8665 jumpifnot (exp, label)
8666 tree exp;
8667 rtx label;
8669 do_jump (exp, label, NULL_RTX);
8672 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
8674 void
8675 jumpif (exp, label)
8676 tree exp;
8677 rtx label;
8679 do_jump (exp, NULL_RTX, label);
8682 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
8683 the result is zero, or IF_TRUE_LABEL if the result is one.
8684 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
8685 meaning fall through in that case.
8687 do_jump always does any pending stack adjust except when it does not
8688 actually perform a jump. An example where there is no jump
8689 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
8691 This function is responsible for optimizing cases such as
8692 &&, || and comparison operators in EXP. */
8694 void
8695 do_jump (exp, if_false_label, if_true_label)
8696 tree exp;
8697 rtx if_false_label, if_true_label;
8699 register enum tree_code code = TREE_CODE (exp);
8700 /* Some cases need to create a label to jump to
8701 in order to properly fall through.
8702 These cases set DROP_THROUGH_LABEL nonzero. */
8703 rtx drop_through_label = 0;
8704 rtx temp;
8705 rtx comparison = 0;
8706 int i;
8707 tree type;
8708 enum machine_mode mode;
8710 emit_queue ();
8712 switch (code)
8714 case ERROR_MARK:
8715 break;
8717 case INTEGER_CST:
8718 temp = integer_zerop (exp) ? if_false_label : if_true_label;
8719 if (temp)
8720 emit_jump (temp);
8721 break;
8723 #if 0
8724 /* This is not true with #pragma weak */
8725 case ADDR_EXPR:
8726 /* The address of something can never be zero. */
8727 if (if_true_label)
8728 emit_jump (if_true_label);
8729 break;
8730 #endif
8732 case NOP_EXPR:
8733 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
8734 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
8735 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
8736 goto normal;
8737 case CONVERT_EXPR:
8738 /* If we are narrowing the operand, we have to do the compare in the
8739 narrower mode. */
8740 if ((TYPE_PRECISION (TREE_TYPE (exp))
8741 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8742 goto normal;
8743 case NON_LVALUE_EXPR:
8744 case REFERENCE_EXPR:
8745 case ABS_EXPR:
8746 case NEGATE_EXPR:
8747 case LROTATE_EXPR:
8748 case RROTATE_EXPR:
8749 /* These cannot change zero->non-zero or vice versa. */
8750 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8751 break;
8753 #if 0
8754 /* This is never less insns than evaluating the PLUS_EXPR followed by
8755 a test and can be longer if the test is eliminated. */
8756 case PLUS_EXPR:
8757 /* Reduce to minus. */
8758 exp = build (MINUS_EXPR, TREE_TYPE (exp),
8759 TREE_OPERAND (exp, 0),
8760 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
8761 TREE_OPERAND (exp, 1))));
8762 /* Process as MINUS. */
8763 #endif
8765 case MINUS_EXPR:
8766 /* Non-zero iff operands of minus differ. */
8767 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
8768 TREE_OPERAND (exp, 0),
8769 TREE_OPERAND (exp, 1)),
8770 NE, NE);
8771 break;
8773 case BIT_AND_EXPR:
8774 /* If we are AND'ing with a small constant, do this comparison in the
8775 smallest type that fits. If the machine doesn't have comparisons
8776 that small, it will be converted back to the wider comparison.
8777 This helps if we are testing the sign bit of a narrower object.
8778 combine can't do this for us because it can't know whether a
8779 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
8781 if (! SLOW_BYTE_ACCESS
8782 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8783 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
8784 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
8785 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
8786 && (type = type_for_mode (mode, 1)) != 0
8787 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8788 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8789 != CODE_FOR_nothing))
8791 do_jump (convert (type, exp), if_false_label, if_true_label);
8792 break;
8794 goto normal;
8796 case TRUTH_NOT_EXPR:
8797 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8798 break;
8800 case TRUTH_ANDIF_EXPR:
8802 rtx seq1, seq2;
8803 tree cleanups, old_cleanups;
8805 if (if_false_label == 0)
8806 if_false_label = drop_through_label = gen_label_rtx ();
8807 start_sequence ();
8808 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
8809 seq1 = get_insns ();
8810 end_sequence ();
8812 old_cleanups = cleanups_this_call;
8813 start_sequence ();
8814 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8815 seq2 = get_insns ();
8816 end_sequence ();
8818 cleanups = defer_cleanups_to (old_cleanups);
8819 if (cleanups)
8821 rtx flag = gen_reg_rtx (word_mode);
8822 tree new_cleanups;
8823 tree cond;
8825 /* Flag cleanups as not needed. */
8826 emit_move_insn (flag, const0_rtx);
8827 emit_insns (seq1);
8829 /* Flag cleanups as needed. */
8830 emit_move_insn (flag, const1_rtx);
8831 emit_insns (seq2);
8833 /* Convert FLAG, which is an rtx, into a tree. */
8834 cond = make_node (RTL_EXPR);
8835 TREE_TYPE (cond) = integer_type_node;
8836 RTL_EXPR_RTL (cond) = flag;
8837 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
8839 new_cleanups = build (COND_EXPR, void_type_node,
8840 truthvalue_conversion (cond),
8841 cleanups, integer_zero_node);
8842 new_cleanups = fold (new_cleanups);
8844 /* Now add in the conditionalized cleanups. */
8845 cleanups_this_call
8846 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
8847 (*interim_eh_hook) (NULL_TREE);
8849 else
8851 emit_insns (seq1);
8852 emit_insns (seq2);
8855 break;
8857 case TRUTH_ORIF_EXPR:
8859 rtx seq1, seq2;
8860 tree cleanups, old_cleanups;
8862 if (if_true_label == 0)
8863 if_true_label = drop_through_label = gen_label_rtx ();
8864 start_sequence ();
8865 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
8866 seq1 = get_insns ();
8867 end_sequence ();
8869 old_cleanups = cleanups_this_call;
8870 start_sequence ();
8871 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8872 seq2 = get_insns ();
8873 end_sequence ();
8875 cleanups = defer_cleanups_to (old_cleanups);
8876 if (cleanups)
8878 rtx flag = gen_reg_rtx (word_mode);
8879 tree new_cleanups;
8880 tree cond;
8882 /* Flag cleanups as not needed. */
8883 emit_move_insn (flag, const0_rtx);
8884 emit_insns (seq1);
8886 /* Flag cleanups as needed. */
8887 emit_move_insn (flag, const1_rtx);
8888 emit_insns (seq2);
8890 /* Convert FLAG, which is an rtx, into a tree. */
8891 cond = make_node (RTL_EXPR);
8892 TREE_TYPE (cond) = integer_type_node;
8893 RTL_EXPR_RTL (cond) = flag;
8894 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
8896 new_cleanups = build (COND_EXPR, void_type_node,
8897 truthvalue_conversion (cond),
8898 cleanups, integer_zero_node);
8899 new_cleanups = fold (new_cleanups);
8901 /* Now add in the conditionalized cleanups. */
8902 cleanups_this_call
8903 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
8904 (*interim_eh_hook) (NULL_TREE);
8906 else
8908 emit_insns (seq1);
8909 emit_insns (seq2);
8912 break;
8914 case COMPOUND_EXPR:
8915 push_temp_slots ();
8916 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8917 free_temp_slots ();
8918 pop_temp_slots ();
8919 emit_queue ();
8920 do_pending_stack_adjust ();
8921 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8922 break;
8924 case COMPONENT_REF:
8925 case BIT_FIELD_REF:
8926 case ARRAY_REF:
8928 int bitsize, bitpos, unsignedp;
8929 enum machine_mode mode;
8930 tree type;
8931 tree offset;
8932 int volatilep = 0;
8934 /* Get description of this reference. We don't actually care
8935 about the underlying object here. */
8936 get_inner_reference (exp, &bitsize, &bitpos, &offset,
8937 &mode, &unsignedp, &volatilep);
8939 type = type_for_size (bitsize, unsignedp);
8940 if (! SLOW_BYTE_ACCESS
8941 && type != 0 && bitsize >= 0
8942 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8943 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8944 != CODE_FOR_nothing))
8946 do_jump (convert (type, exp), if_false_label, if_true_label);
8947 break;
8949 goto normal;
8952 case COND_EXPR:
8953 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
8954 if (integer_onep (TREE_OPERAND (exp, 1))
8955 && integer_zerop (TREE_OPERAND (exp, 2)))
8956 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8958 else if (integer_zerop (TREE_OPERAND (exp, 1))
8959 && integer_onep (TREE_OPERAND (exp, 2)))
8960 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8962 else
8964 register rtx label1 = gen_label_rtx ();
8965 drop_through_label = gen_label_rtx ();
8966 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
8967 /* Now the THEN-expression. */
8968 do_jump (TREE_OPERAND (exp, 1),
8969 if_false_label ? if_false_label : drop_through_label,
8970 if_true_label ? if_true_label : drop_through_label);
8971 /* In case the do_jump just above never jumps. */
8972 do_pending_stack_adjust ();
8973 emit_label (label1);
8974 /* Now the ELSE-expression. */
8975 do_jump (TREE_OPERAND (exp, 2),
8976 if_false_label ? if_false_label : drop_through_label,
8977 if_true_label ? if_true_label : drop_through_label);
8979 break;
8981 case EQ_EXPR:
8982 if (integer_zerop (TREE_OPERAND (exp, 1)))
8983 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8984 else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8985 == MODE_INT)
8987 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8988 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
8989 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
8990 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
8991 else
8992 comparison = compare (exp, EQ, EQ);
8993 break;
8995 case NE_EXPR:
8996 if (integer_zerop (TREE_OPERAND (exp, 1)))
8997 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8998 else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8999 == MODE_INT)
9001 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9002 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
9003 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
9004 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9005 else
9006 comparison = compare (exp, NE, NE);
9007 break;
9009 case LT_EXPR:
9010 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9011 == MODE_INT)
9012 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9013 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9014 else
9015 comparison = compare (exp, LT, LTU);
9016 break;
9018 case LE_EXPR:
9019 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9020 == MODE_INT)
9021 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9022 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9023 else
9024 comparison = compare (exp, LE, LEU);
9025 break;
9027 case GT_EXPR:
9028 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9029 == MODE_INT)
9030 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9031 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9032 else
9033 comparison = compare (exp, GT, GTU);
9034 break;
9036 case GE_EXPR:
9037 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9038 == MODE_INT)
9039 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9040 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9041 else
9042 comparison = compare (exp, GE, GEU);
9043 break;
9045 default:
9046 normal:
9047 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9048 #if 0
9049 /* This is not needed any more and causes poor code since it causes
9050 comparisons and tests from non-SI objects to have different code
9051 sequences. */
9052 /* Copy to register to avoid generating bad insns by cse
9053 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9054 if (!cse_not_expected && GET_CODE (temp) == MEM)
9055 temp = copy_to_reg (temp);
9056 #endif
9057 do_pending_stack_adjust ();
9058 if (GET_CODE (temp) == CONST_INT)
9059 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
9060 else if (GET_CODE (temp) == LABEL_REF)
9061 comparison = const_true_rtx;
9062 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9063 && !can_compare_p (GET_MODE (temp)))
9064 /* Note swapping the labels gives us not-equal. */
9065 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9066 else if (GET_MODE (temp) != VOIDmode)
9067 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
9068 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9069 GET_MODE (temp), NULL_RTX, 0);
9070 else
9071 abort ();
9074 /* Do any postincrements in the expression that was tested. */
9075 emit_queue ();
9077 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
9078 straight into a conditional jump instruction as the jump condition.
9079 Otherwise, all the work has been done already. */
9081 if (comparison == const_true_rtx)
9083 if (if_true_label)
9084 emit_jump (if_true_label);
9086 else if (comparison == const0_rtx)
9088 if (if_false_label)
9089 emit_jump (if_false_label);
9091 else if (comparison)
9092 do_jump_for_compare (comparison, if_false_label, if_true_label);
9094 if (drop_through_label)
9096 /* If do_jump produces code that might be jumped around,
9097 do any stack adjusts from that code, before the place
9098 where control merges in. */
9099 do_pending_stack_adjust ();
9100 emit_label (drop_through_label);
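/* A sketch of what the TRUTH_ANDIF_EXPR case above emits for
   `if (a && b)' when jumpifnot supplies a false label:

       <test a, jumping to if_false_label when a is zero>
       <test b, jumping to if_false_label when b is zero>
       <code for the then-clause>

   Both tests share the false label, so no boolean value for a && b
   is ever materialized in a register.  */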
9104 /* Given a comparison expression EXP for values too wide to be compared
9105 with one insn, test the comparison and jump to the appropriate label.
9106 The code of EXP is ignored; we always test GT if SWAP is 0,
9107 and LT if SWAP is 1. */
9109 static void
9110 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9111 tree exp;
9112 int swap;
9113 rtx if_false_label, if_true_label;
9115 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9116 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9117 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9118 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9119 rtx drop_through_label = 0;
9120 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9121 int i;
9123 if (! if_true_label || ! if_false_label)
9124 drop_through_label = gen_label_rtx ();
9125 if (! if_true_label)
9126 if_true_label = drop_through_label;
9127 if (! if_false_label)
9128 if_false_label = drop_through_label;
9130 /* Compare a word at a time, high order first. */
9131 for (i = 0; i < nwords; i++)
9133 rtx comp;
9134 rtx op0_word, op1_word;
9136 if (WORDS_BIG_ENDIAN)
9138 op0_word = operand_subword_force (op0, i, mode);
9139 op1_word = operand_subword_force (op1, i, mode);
9141 else
9143 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9144 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9147 /* All but high-order word must be compared as unsigned. */
9148 comp = compare_from_rtx (op0_word, op1_word,
9149 (unsignedp || i > 0) ? GTU : GT,
9150 unsignedp, word_mode, NULL_RTX, 0);
9151 if (comp == const_true_rtx)
9152 emit_jump (if_true_label);
9153 else if (comp != const0_rtx)
9154 do_jump_for_compare (comp, NULL_RTX, if_true_label);
9156 /* Consider lower words only if these are equal. */
9157 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
9158 NULL_RTX, 0);
9159 if (comp == const_true_rtx)
9160 emit_jump (if_false_label);
9161 else if (comp != const0_rtx)
9162 do_jump_for_compare (comp, NULL_RTX, if_false_label);
9165 if (if_false_label)
9166 emit_jump (if_false_label);
9167 if (drop_through_label)
9168 emit_label (drop_through_label);
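#if 0
/* Editor's illustrative sketch, not part of the original file: the
   word-at-a-time "greater than" strategy above, restated on plain
   arrays.  The name words_greater and the layout (word [nwords-1]
   most significant) are hypothetical.  */
static int
words_greater (op0, op1, nwords, unsignedp)
     unsigned long *op0, *op1;
     int nwords, unsignedp;
{
  int i;

  /* Compare a word at a time, high order first; all but the
     high-order word compare unsigned.  */
  for (i = nwords - 1; i >= 0; i--)
    {
      if (i == nwords - 1 && ! unsignedp)
	{
	  if ((long) op0[i] != (long) op1[i])
	    return (long) op0[i] > (long) op1[i];
	}
      else if (op0[i] != op1[i])
	return op0[i] > op1[i];
      /* Lower words are considered only when these words are equal.  */
    }
  return 0;
}
#endif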
9171 /* Compare OP0 with OP1, word at a time, in mode MODE.
9172 UNSIGNEDP says to do unsigned comparison.
9173 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9175 void
9176 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9177 enum machine_mode mode;
9178 int unsignedp;
9179 rtx op0, op1;
9180 rtx if_false_label, if_true_label;
9182 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9183 rtx drop_through_label = 0;
9184 int i;
9186 if (! if_true_label || ! if_false_label)
9187 drop_through_label = gen_label_rtx ();
9188 if (! if_true_label)
9189 if_true_label = drop_through_label;
9190 if (! if_false_label)
9191 if_false_label = drop_through_label;
9193 /* Compare a word at a time, high order first. */
9194 for (i = 0; i < nwords; i++)
9196 rtx comp;
9197 rtx op0_word, op1_word;
9199 if (WORDS_BIG_ENDIAN)
9201 op0_word = operand_subword_force (op0, i, mode);
9202 op1_word = operand_subword_force (op1, i, mode);
9204 else
9206 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9207 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9210 /* All but high-order word must be compared as unsigned. */
9211 comp = compare_from_rtx (op0_word, op1_word,
9212 (unsignedp || i > 0) ? GTU : GT,
9213 unsignedp, word_mode, NULL_RTX, 0);
9214 if (comp == const_true_rtx)
9215 emit_jump (if_true_label);
9216 else if (comp != const0_rtx)
9217 do_jump_for_compare (comp, NULL_RTX, if_true_label);
9219 /* Consider lower words only if these are equal. */
9220 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
9221 NULL_RTX, 0);
9222 if (comp == const_true_rtx)
9223 emit_jump (if_false_label);
9224 else if (comp != const0_rtx)
9225 do_jump_for_compare (comp, NULL_RTX, if_false_label);
9228 if (if_false_label)
9229 emit_jump (if_false_label);
9230 if (drop_through_label)
9231 emit_label (drop_through_label);
9234 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9235 with one insn, test the comparison and jump to the appropriate label. */
9237 static void
9238 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9239 tree exp;
9240 rtx if_false_label, if_true_label;
9242 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9243 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9244 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9245 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9246 int i;
9247 rtx drop_through_label = 0;
9249 if (! if_false_label)
9250 drop_through_label = if_false_label = gen_label_rtx ();
9252 for (i = 0; i < nwords; i++)
9254 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
9255 operand_subword_force (op1, i, mode),
9256 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9257 word_mode, NULL_RTX, 0);
9258 if (comp == const0_rtx)
9259 emit_jump (if_false_label);
9260 else if (comp != const_true_rtx)
9261 do_jump_for_compare (comp, if_false_label, NULL_RTX);
9264 if (if_true_label)
9265 emit_jump (if_true_label);
9266 if (drop_through_label)
9267 emit_label (drop_through_label);
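/* For instance, comparing two DImode values on a 32-bit host expands
   to two word-sized EQ tests: control jumps to IF_FALSE_LABEL as soon
   as either word pair differs, and reaches IF_TRUE_LABEL only when
   both pairs match.  */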
9270 /* Jump according to whether OP0 is 0.
9271 We assume that OP0 has an integer mode that is too wide
9272 for the available compare insns. */
9274 static void
9275 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9276 rtx op0;
9277 rtx if_false_label, if_true_label;
9279 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9280 int i;
9281 rtx drop_through_label = 0;
9283 if (! if_false_label)
9284 drop_through_label = if_false_label = gen_label_rtx ();
9286 for (i = 0; i < nwords; i++)
9288 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
9289 GET_MODE (op0)),
9290 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
9291 if (comp == const0_rtx)
9292 emit_jump (if_false_label);
9293 else if (comp != const_true_rtx)
9294 do_jump_for_compare (comp, if_false_label, NULL_RTX);
9297 if (if_true_label)
9298 emit_jump (if_true_label);
9299 if (drop_through_label)
9300 emit_label (drop_through_label);
9303 /* Given a comparison expression in rtl form, output conditional branches to
9304 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
9306 static void
9307 do_jump_for_compare (comparison, if_false_label, if_true_label)
9308 rtx comparison, if_false_label, if_true_label;
9310 if (if_true_label)
9312 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
9313 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
9314 else
9315 abort ();
9317 if (if_false_label)
9318 emit_jump (if_false_label);
9320 else if (if_false_label)
9322 rtx insn;
9323 rtx prev = get_last_insn ();
9324 rtx branch = 0;
9326 /* Output the branch with the opposite condition. Then try to invert
9327 what is generated. If more than one insn is a branch, or if the
9328 branch is not the last insn written, abort. If we can't invert
9329 the branch, make a true label, redirect this jump to that,
9330 emit a jump to the false label and define the true label. */
9332 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
9333 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
9334 else
9335 abort ();
9337 /* Here we get the first insn that was just emitted. It used to be the
9338 case that, on some machines, emitting the branch would discard
9339 the previous compare insn and emit a replacement. This isn't
9340 done anymore, but abort if we see that PREV is deleted. */
9342 if (prev == 0)
9343 insn = get_insns ();
9344 else if (INSN_DELETED_P (prev))
9345 abort ();
9346 else
9347 insn = NEXT_INSN (prev);
9349 for (; insn; insn = NEXT_INSN (insn))
9350 if (GET_CODE (insn) == JUMP_INSN)
9352 if (branch)
9353 abort ();
9354 branch = insn;
9357 if (branch != get_last_insn ())
9358 abort ();
9360 JUMP_LABEL (branch) = if_false_label;
9361 if (! invert_jump (branch, if_false_label))
9363 if_true_label = gen_label_rtx ();
9364 redirect_jump (branch, if_true_label);
9365 emit_jump (if_false_label);
9366 emit_label (if_true_label);
9371 /* Generate code for a comparison expression EXP
9372 (including code to compute the values to be compared)
9373 and set (CC0) according to the result.
9374 SIGNED_CODE should be the rtx operation for this comparison for
9375 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9377 We force a stack adjustment unless there are currently
9378 things pushed on the stack that aren't yet used. */
9380 static rtx
9381 compare (exp, signed_code, unsigned_code)
9382 register tree exp;
9383 enum rtx_code signed_code, unsigned_code;
9385 register rtx op0
9386 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9387 register rtx op1
9388 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9389 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
9390 register enum machine_mode mode = TYPE_MODE (type);
9391 int unsignedp = TREE_UNSIGNED (type);
9392 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
9394 return compare_from_rtx (op0, op1, code, unsignedp, mode,
9395 ((mode == BLKmode)
9396 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
9397 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
9400 /* Like compare but expects the values to compare as two rtx's.
9401 The decision as to signed or unsigned comparison must be made by the caller.
9403 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9404 compared.
9406 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9407 size of MODE should be used. */
9409 rtx
9410 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9411 register rtx op0, op1;
9412 enum rtx_code code;
9413 int unsignedp;
9414 enum machine_mode mode;
9415 rtx size;
9416 int align;
9418 rtx tem;
9420 /* If one operand is constant, make it the second one. Only do this
9421 if the other operand is not constant as well. */
9423 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9424 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9426 tem = op0;
9427 op0 = op1;
9428 op1 = tem;
9429 code = swap_condition (code);
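/* For example, (const_int 3) LT (reg) becomes (reg) GT (const_int 3);
   swap_condition exchanges LT/GT, LE/GE, LTU/GTU and LEU/GEU, and
   leaves EQ and NE unchanged.  */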
9432 if (flag_force_mem)
9434 op0 = force_not_mem (op0);
9435 op1 = force_not_mem (op1);
9438 do_pending_stack_adjust ();
9440 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9441 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9442 return tem;
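/* E.g. comparing (const_int 3) with (const_int 5) under LT folds to
   const_true_rtx here, so no compare insn need be emitted at all.  */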
9444 #if 0
9445 /* There's no need to do this now that combine.c can eliminate lots of
9446 sign extensions. This can be less efficient in certain cases on other
9447 machines. */
9449 /* If this is a signed equality comparison, we can do it as an
9450 unsigned comparison since zero-extension is cheaper than sign
9451 extension and comparisons with zero are done as unsigned. This is
9452 the case even on machines that can do fast sign extension, since
9453 zero-extension is easier to combine with other operations than
9454 sign-extension is. If we are comparing against a constant, we must
9455 convert it to what it would look like unsigned. */
9456 if ((code == EQ || code == NE) && ! unsignedp
9457 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9459 if (GET_CODE (op1) == CONST_INT
9460 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9461 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9462 unsignedp = 1;
9464 #endif
9466 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9468 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
9471 /* Generate code to calculate EXP using a store-flag instruction
9472 and return an rtx for the result. EXP is either a comparison
9473 or a TRUTH_NOT_EXPR whose operand is a comparison.
9475 If TARGET is nonzero, store the result there if convenient.
9477 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
9478 cheap.
9480 Return zero if there is no suitable set-flag instruction
9481 available on this machine.
9483 Once expand_expr has been called on the arguments of the comparison,
9484 we are committed to doing the store flag, since it is not safe to
9485 re-evaluate the expression. We emit the store-flag insn by calling
9486 emit_store_flag, but only expand the arguments if we have a reason
9487 to believe that emit_store_flag will be successful. If we think that
9488 it will, but it isn't, we have to simulate the store-flag with a
9489 set/jump/set sequence. */
9491 static rtx
9492 do_store_flag (exp, target, mode, only_cheap)
9493 tree exp;
9494 rtx target;
9495 enum machine_mode mode;
9496 int only_cheap;
9498 enum rtx_code code;
9499 tree arg0, arg1, type;
9500 tree tem;
9501 enum machine_mode operand_mode;
9502 int invert = 0;
9503 int unsignedp;
9504 rtx op0, op1;
9505 enum insn_code icode;
9506 rtx subtarget = target;
9507 rtx result, label, pattern, jump_pat;
9509 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9510 result at the end. We can't simply invert the test since it would
9511 have already been inverted if it were valid. This case occurs for
9512 some floating-point comparisons. */
9514 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9515 invert = 1, exp = TREE_OPERAND (exp, 0);
9517 arg0 = TREE_OPERAND (exp, 0);
9518 arg1 = TREE_OPERAND (exp, 1);
9519 type = TREE_TYPE (arg0);
9520 operand_mode = TYPE_MODE (type);
9521 unsignedp = TREE_UNSIGNED (type);
9523 /* We won't bother with BLKmode store-flag operations because it would mean
9524 passing a lot of information to emit_store_flag. */
9525 if (operand_mode == BLKmode)
9526 return 0;
9528 STRIP_NOPS (arg0);
9529 STRIP_NOPS (arg1);
9531 /* Get the rtx comparison code to use. We know that EXP is a comparison
9532 operation of some type. Some comparisons against 1 and -1 can be
9533 converted to comparisons with zero. Do so here so that the tests
9534 below will be aware that we have a comparison with zero. These
9535 tests will not catch constants in the first operand, but constants
9536 are rarely passed as the first operand. */
9538 switch (TREE_CODE (exp))
9540 case EQ_EXPR:
9541 code = EQ;
9542 break;
9543 case NE_EXPR:
9544 code = NE;
9545 break;
9546 case LT_EXPR:
9547 if (integer_onep (arg1))
9548 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9549 else
9550 code = unsignedp ? LTU : LT;
9551 break;
9552 case LE_EXPR:
9553 if (! unsignedp && integer_all_onesp (arg1))
9554 arg1 = integer_zero_node, code = LT;
9555 else
9556 code = unsignedp ? LEU : LE;
9557 break;
9558 case GT_EXPR:
9559 if (! unsignedp && integer_all_onesp (arg1))
9560 arg1 = integer_zero_node, code = GE;
9561 else
9562 code = unsignedp ? GTU : GT;
9563 break;
9564 case GE_EXPR:
9565 if (integer_onep (arg1))
9566 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9567 else
9568 code = unsignedp ? GEU : GE;
9569 break;
9570 default:
9571 abort ();
9574 /* Put a constant second. */
9575 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9577 tem = arg0; arg0 = arg1; arg1 = tem;
9578 code = swap_condition (code);
9581 /* If this is an equality or inequality test of a single bit, we can
9582 do this by shifting the bit being tested to the low-order bit and
9583 masking the result with the constant 1. If the condition was EQ,
9584 we xor it with 1. This does not require an scc insn and is faster
9585 than an scc insn even if we have it. */
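/* For example, with 32-bit ints,
   (x & 0x10) != 0 becomes (x >> 4) & 1, and
   (x & 0x10) == 0 becomes ((x >> 4) & 1) ^ 1;
   when bit 31 (the sign bit) is tested, the shift already leaves
   just that bit, so the AND can be omitted.  */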
9587 if ((code == NE || code == EQ)
9588 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9589 && integer_pow2p (TREE_OPERAND (arg0, 1))
9590 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
9592 tree inner = TREE_OPERAND (arg0, 0);
9593 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
9594 NULL_RTX, VOIDmode, 0)));
9595 int ops_unsignedp;
9597 /* If INNER is a right shift of a constant and it plus BITNUM does
9598 not overflow, adjust BITNUM and INNER. */
9600 if (TREE_CODE (inner) == RSHIFT_EXPR
9601 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
9602 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
9603 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
9604 < TYPE_PRECISION (type)))
9606 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
9607 inner = TREE_OPERAND (inner, 0);
9610 /* If we are going to be able to omit the AND below, we must do our
9611 operations as unsigned. If we must use the AND, we have a choice.
9612 Normally unsigned is faster, but for some machines signed is. */
9613 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
9614 #ifdef LOAD_EXTEND_OP
9615 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
9616 #else
9617 : 1
9618 #endif
9619 );
9621 if (subtarget == 0 || GET_CODE (subtarget) != REG
9622 || GET_MODE (subtarget) != operand_mode
9623 || ! safe_from_p (subtarget, inner))
9624 subtarget = 0;
9626 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
9628 if (bitnum != 0)
9629 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
9630 size_int (bitnum), subtarget, ops_unsignedp);
9632 if (GET_MODE (op0) != mode)
9633 op0 = convert_to_mode (mode, op0, ops_unsignedp);
9635 if ((code == EQ && ! invert) || (code == NE && invert))
9636 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
9637 ops_unsignedp, OPTAB_LIB_WIDEN);
9639 /* Put the AND last so it can combine with more things. */
9640 if (bitnum != TYPE_PRECISION (type) - 1)
9641 op0 = expand_and (op0, const1_rtx, subtarget);
9643 return op0;
9646 /* Now see if we are likely to be able to do this. Return if not. */
9647 if (! can_compare_p (operand_mode))
9648 return 0;
9649 icode = setcc_gen_code[(int) code];
9650 if (icode == CODE_FOR_nothing
9651 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
9653 /* We can only do this if it is one of the special cases that
9654 can be handled without an scc insn. */
9655 if ((code == LT && integer_zerop (arg1))
9656 || (! only_cheap && code == GE && integer_zerop (arg1)))
9657 ;
9658 else if (BRANCH_COST >= 0
9659 && ! only_cheap && (code == NE || code == EQ)
9660 && TREE_CODE (type) != REAL_TYPE
9661 && ((abs_optab->handlers[(int) operand_mode].insn_code
9662 != CODE_FOR_nothing)
9663 || (ffs_optab->handlers[(int) operand_mode].insn_code
9664 != CODE_FOR_nothing)))
9665 ;
9666 else
9667 return 0;
9670 preexpand_calls (exp);
9671 if (subtarget == 0 || GET_CODE (subtarget) != REG
9672 || GET_MODE (subtarget) != operand_mode
9673 || ! safe_from_p (subtarget, arg1))
9674 subtarget = 0;
9676 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
9677 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
9679 if (target == 0)
9680 target = gen_reg_rtx (mode);
9682 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9683 because, if emit_store_flag does anything, it will succeed and
9684 OP0 and OP1 will not be used subsequently. */
9686 result = emit_store_flag (target, code,
9687 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9688 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9689 operand_mode, unsignedp, 1);
9691 if (result)
9693 if (invert)
9694 result = expand_binop (mode, xor_optab, result, const1_rtx,
9695 result, 0, OPTAB_LIB_WIDEN);
9696 return result;
9699 /* If this failed, we have to do this with set/compare/jump/set code. */
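/* The fallback emits, in effect (with INVERT clear):
     target = 1;  if (op0 <code> op1) goto label;  target = 0;
   label:  */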
9700 if (target == 0 || GET_CODE (target) != REG
9701 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9702 target = gen_reg_rtx (GET_MODE (target));
9704 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9705 result = compare_from_rtx (op0, op1, code, unsignedp,
9706 operand_mode, NULL_RTX, 0);
9707 if (GET_CODE (result) == CONST_INT)
9708 return (((result == const0_rtx && ! invert)
9709 || (result != const0_rtx && invert))
9710 ? const0_rtx : const1_rtx);
9712 label = gen_label_rtx ();
9713 if (bcc_gen_fctn[(int) code] == 0)
9714 abort ();
9716 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9717 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9718 emit_label (label);
9720 return target;
9723 /* Generate a tablejump instruction (used for switch statements). */
9725 #ifdef HAVE_tablejump
9727 /* INDEX is the value being switched on, with the lowest value
9728 in the table already subtracted.
9729 MODE is its expected mode (needed if INDEX is constant).
9730 RANGE is the length of the jump table.
9731 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9733 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9734 index value is out of range. */
9736 void
9737 do_tablejump (index, mode, range, table_label, default_label)
9738 rtx index, range, table_label, default_label;
9739 enum machine_mode mode;
9741 register rtx temp, vector;
9743 /* Do an unsigned comparison (in the proper mode) between the index
9744 expression and the value which represents the length of the range.
9745 Since we just finished subtracting the lower bound of the range
9746 from the index expression, this comparison allows us to simultaneously
9747 check that the original index expression value is both greater than
9748 or equal to the minimum value of the range and less than or equal to
9749 the maximum value of the range. */
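/* For example, for case values 5 through 9 the lower bound 5 has
   already been subtracted, so RANGE is 4.  An original index of 3
   is now (unsigned) -2, a huge value, so the single GTU test against
   4 rejects values below the range as well as those above it.  */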
9751 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
9752 emit_jump_insn (gen_bgtu (default_label));
9754 /* If index is in range, it must fit in Pmode.
9755 Convert to Pmode so we can index with it. */
9756 if (mode != Pmode)
9757 index = convert_to_mode (Pmode, index, 1);
9759 /* Don't let a MEM slip thru, because then INDEX that comes
9760 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9761 and break_out_memory_refs will go to work on it and mess it up. */
9762 #ifdef PIC_CASE_VECTOR_ADDRESS
9763 if (flag_pic && GET_CODE (index) != REG)
9764 index = copy_to_mode_reg (Pmode, index);
9765 #endif
9767 /* If flag_force_addr were to affect this address
9768 it could interfere with the tricky assumptions made
9769 about addresses that contain label-refs,
9770 which may be valid only very near the tablejump itself. */
9771 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9772 GET_MODE_SIZE, because this indicates how large insns are. The other
9773 uses should all be Pmode, because they are addresses. This code
9774 could fail if addresses and insns are not the same size. */
9775 index = gen_rtx (PLUS, Pmode,
9776 gen_rtx (MULT, Pmode, index,
9777 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9778 gen_rtx (LABEL_REF, Pmode, table_label));
9779 #ifdef PIC_CASE_VECTOR_ADDRESS
9780 if (flag_pic)
9781 index = PIC_CASE_VECTOR_ADDRESS (index);
9782 else
9783 #endif
9784 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9785 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9786 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
9787 RTX_UNCHANGING_P (vector) = 1;
9788 convert_move (temp, vector, 0);
9790 emit_jump_insn (gen_tablejump (temp, table_label));
9792 #ifndef CASE_VECTOR_PC_RELATIVE
9793 /* If we are generating PIC code or if the table is PC-relative, the
9794 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9795 if (! flag_pic)
9796 emit_barrier ();
9797 #endif
9800 #endif /* HAVE_tablejump */
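#if 0
/* Editor's illustrative sketch, not part of the original file: the
   emitted sequence behaves like this GNU C fragment with computed
   gotos.  LOW, N and the label names are hypothetical.  */
  static void *table[N] = { &&case_low, &&case_low_plus_1 /* , ... */ };
  index -= LOW;
  if ((unsigned) index > N - 1)
    goto default_label;
  goto *table[index];
#endif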
9803 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
9804 to that value is on the top of the stack. The resulting type is TYPE, and
9805 the source declaration is DECL. */
9807 void
9808 bc_load_memory (type, decl)
9809 tree type, decl;
9811 enum bytecode_opcode opcode;
9814 /* Bit fields are special. We only know about signed and
9815 unsigned ints, and enums. The latter are treated as
9816 signed integers. */
9818 if (DECL_BIT_FIELD (decl))
9819 if (TREE_CODE (type) == ENUMERAL_TYPE
9820 || TREE_CODE (type) == INTEGER_TYPE)
9821 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
9822 else
9823 abort ();
9824 else
9825 /* See corresponding comment in bc_store_memory(). */
9826 if (TYPE_MODE (type) == BLKmode
9827 || TYPE_MODE (type) == VOIDmode)
9828 return;
9829 else
9830 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
9832 if (opcode == neverneverland)
9833 abort ();
9835 bc_emit_bytecode (opcode);
9837 #ifdef DEBUG_PRINT_CODE
9838 fputc ('\n', stderr);
9839 #endif
9843 /* Store the contents of the second stack slot to the address in the
9844 top stack slot. DECL is the declaration of the destination and is used
9845 to determine whether we're dealing with a bitfield. */
9847 void
9848 bc_store_memory (type, decl)
9849 tree type, decl;
9851 enum bytecode_opcode opcode;
9854 if (DECL_BIT_FIELD (decl))
9856 if (TREE_CODE (type) == ENUMERAL_TYPE
9857 || TREE_CODE (type) == INTEGER_TYPE)
9858 opcode = sstoreBI;
9859 else
9860 abort ();
9862 else
9863 if (TYPE_MODE (type) == BLKmode)
9865 /* Copy structure. This expands to a block copy instruction, storeBLK.
9866 In addition to the arguments expected by the other store instructions,
9867 it also expects a type size (SImode) on top of the stack, which is the
9868 structure size in size units (usually bytes). The first two arguments
9869 are already on the stack, so we just put the size on level 1. For some
9870 other languages the size may be variable; this is why we don't encode
9871 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
9873 bc_expand_expr (TYPE_SIZE (type));
9874 opcode = storeBLK;
9876 else
9877 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
9879 if (opcode == neverneverland)
9880 abort ();
9882 bc_emit_bytecode (opcode);
9884 #ifdef DEBUG_PRINT_CODE
9885 fputc ('\n', stderr);
9886 #endif
9890 /* Allocate local stack space sufficient to hold a value of the given
9891 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
9892 integral power of 2. A special case is locals of type VOID, which
9893 have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
9894 remapped into the corresponding attribute of SI. */
9896 rtx
9897 bc_allocate_local (size, alignment)
9898 int size, alignment;
9900 rtx retval;
9901 int byte_alignment;
9903 if (size < 0)
9904 abort ();
9906 /* Normalize size and alignment */
9907 if (!size)
9908 size = UNITS_PER_WORD;
9910 if (alignment < BITS_PER_UNIT)
9911 byte_alignment = 1 << (INT_ALIGN - 1);
9912 else
9913 /* Align */
9914 byte_alignment = alignment / BITS_PER_UNIT;
9916 if (local_vars_size & (byte_alignment - 1))
9917 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
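/* E.g. with local_vars_size == 13 and byte_alignment == 4:
   13 & 3 == 1, so 4 - 1 == 3 is added, rounding up to 16.  */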
9919 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
9920 local_vars_size += size;
9922 return retval;
9926 /* Allocate variable-sized local array. Variable-sized arrays are
9927 actually pointers to the address in memory where they are stored. */
9929 rtx
9930 bc_allocate_variable_array (size)
9931 tree size;
9933 rtx retval;
9934 const int ptralign = (1 << (PTR_ALIGN - 1));
9936 /* Align pointer */
9937 if (local_vars_size & ptralign)
9938 local_vars_size += ptralign - (local_vars_size & ptralign);
9940 /* Note down local space needed: pointer to block; also return
9941 dummy rtx */
9943 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
9944 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
9945 return retval;
9949 /* Push the machine address for the given external variable offset. */
9950 void
9951 bc_load_externaddr (externaddr)
9952 rtx externaddr;
9954 bc_emit_bytecode (constP);
9955 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
9956 BYTECODE_BC_LABEL (externaddr)->offset);
9958 #ifdef DEBUG_PRINT_CODE
9959 fputc ('\n', stderr);
9960 #endif
9964 static char *
9965 bc_strdup (s)
9966 char *s;
9968 char *new = (char *) xmalloc ((strlen (s) + 1) * sizeof *s);
9969 strcpy (new, s);
9970 return new;
9974 /* Like above, but expects an IDENTIFIER. */
9975 void
9976 bc_load_externaddr_id (id, offset)
9977 tree id;
9978 int offset;
9980 if (!IDENTIFIER_POINTER (id))
9981 abort ();
9983 bc_emit_bytecode (constP);
9984 bc_emit_code_labelref (bc_strdup (IDENTIFIER_POINTER (id)), offset);
9986 #ifdef DEBUG_PRINT_CODE
9987 fputc ('\n', stderr);
9988 #endif
9992 /* Push the machine address for the given local variable offset. */
9993 void
9994 bc_load_localaddr (localaddr)
9995 rtx localaddr;
9997 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
10001 /* Push the machine address for the given parameter offset.
10002 NOTE: offset is in bits. */
10003 void
10004 bc_load_parmaddr (parmaddr)
10005 rtx parmaddr;
10007 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
10008 / BITS_PER_UNIT));
10012 /* Convert a[i] into *(a + i). */
10013 tree
10014 bc_canonicalize_array_ref (exp)
10015 tree exp;
10017 tree type = TREE_TYPE (exp);
10018 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
10019 TREE_OPERAND (exp, 0));
10020 tree index = TREE_OPERAND (exp, 1);
10023 /* Convert the integer argument to a type the same size as a pointer
10024 so the multiply won't overflow spuriously. */
10026 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
10027 index = convert (type_for_size (POINTER_SIZE, 0), index);
10029 /* The array address isn't volatile even if the array is.
10030 (Of course this isn't terribly relevant since the bytecode
10031 translator treats nearly everything as volatile anyway.) */
10032 TREE_THIS_VOLATILE (array_adr) = 0;
10034 return build1 (INDIRECT_REF, type,
10035 fold (build (PLUS_EXPR,
10036 TYPE_POINTER_TO (type),
10037 array_adr,
10038 fold (build (MULT_EXPR,
10039 TYPE_POINTER_TO (type),
10040 index,
10041 size_in_bytes (type))))));
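/* So for "int a[10]" the reference a[i] is rewritten, in effect, as
   *(&a[0] + i * sizeof (int)), with the index first widened to the
   precision of a pointer.  */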
10045 /* Load the address of the component referenced by the given
10046 COMPONENT_REF expression.
10048 Returns innermost lvalue. */
10050 tree
10051 bc_expand_component_address (exp)
10052 tree exp;
10054 tree tem, chain;
10055 enum machine_mode mode;
10056 int bitpos = 0;
10057 HOST_WIDE_INT SIval;
10060 tem = TREE_OPERAND (exp, 1);
10061 mode = DECL_MODE (tem);
10064 /* Compute cumulative bit offset for nested component refs
10065 and array refs, and find the ultimate containing object. */
10067 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
10069 if (TREE_CODE (tem) == COMPONENT_REF)
10070 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
10071 else
10072 if (TREE_CODE (tem) == ARRAY_REF
10073 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
10074 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
10076 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
10077 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
10078 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
10079 else
10080 break;
10083 bc_expand_expr (tem);
10086 /* For bitfields also push their offset and size */
10087 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
10088 bc_push_offset_and_size (bitpos, TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 1))));
10089 else
10090 if ((SIval = bitpos / BITS_PER_UNIT))
10091 bc_emit_instruction (addconstPSI, SIval);
10093 return (TREE_OPERAND (exp, 1));
10097 /* Emit code to push two SI constants */
10098 void
10099 bc_push_offset_and_size (offset, size)
10100 HOST_WIDE_INT offset, size;
10102 bc_emit_instruction (constSI, offset);
10103 bc_emit_instruction (constSI, size);
10107 /* Emit byte code to push the address of the given lvalue expression to
10108 the stack. If it's a bit field, we also push offset and size info.
10110 Returns innermost component, which allows us to determine not only
10111 its type, but also whether it's a bitfield. */
10113 tree
10114 bc_expand_address (exp)
10115 tree exp;
10117 /* Safeguard */
10118 if (!exp || TREE_CODE (exp) == ERROR_MARK)
10119 return (exp);
10122 switch (TREE_CODE (exp))
10124 case ARRAY_REF:
10126 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
10128 case COMPONENT_REF:
10130 return (bc_expand_component_address (exp));
10132 case INDIRECT_REF:
10134 bc_expand_expr (TREE_OPERAND (exp, 0));
10136 /* For variable-sized types: retrieve pointer. Sometimes the
10137 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
10138 also make sure we have an operand, just in case... */
10140 if (TREE_OPERAND (exp, 0)
10141 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
10142 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
10143 bc_emit_instruction (loadP);
10145 /* If packed, also return offset and size */
10146 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
10148 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
10149 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
10151 return (TREE_OPERAND (exp, 0));
10153 case FUNCTION_DECL:
10155 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
10156 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
10157 break;
10159 case PARM_DECL:
10161 bc_load_parmaddr (DECL_RTL (exp));
10163 /* For variable-sized types: retrieve pointer */
10164 if (TYPE_SIZE (TREE_TYPE (exp))
10165 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
10166 bc_emit_instruction (loadP);
10168 /* If packed, also return offset and size */
10169 if (DECL_BIT_FIELD (exp))
10170 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
10171 TREE_INT_CST_LOW (DECL_SIZE (exp)));
10173 break;
10175 case RESULT_DECL:
10177 bc_emit_instruction (returnP);
10178 break;
10180 case VAR_DECL:
10182 #if 0
10183 if (BYTECODE_LABEL (DECL_RTL (exp)))
10184 bc_load_externaddr (DECL_RTL (exp));
10185 #endif
10187 if (DECL_EXTERNAL (exp))
10188 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
10189 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
10190 else
10191 bc_load_localaddr (DECL_RTL (exp));
10193 /* For variable-sized types: retrieve pointer */
10194 if (TYPE_SIZE (TREE_TYPE (exp))
10195 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
10196 bc_emit_instruction (loadP);
10198 /* If packed, also return offset and size */
10199 if (DECL_BIT_FIELD (exp))
10200 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
10201 TREE_INT_CST_LOW (DECL_SIZE (exp)));
10203 break;
10205 case STRING_CST:
10207 rtx r;
10209 bc_emit_bytecode (constP);
10210 r = output_constant_def (exp);
10211 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
10213 #ifdef DEBUG_PRINT_CODE
10214 fputc ('\n', stderr);
10215 #endif
10217 break;
10219 default:
10221 abort ();
10222 break;
10225 /* Most lvalues don't have components. */
10226 return (exp);
10230 /* Emit a type code to be used by the runtime support in handling
10231 parameter passing. The type code consists of the machine mode
10232 plus the minimal alignment shifted left 8 bits. */
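/* For example, a 32-bit-aligned SImode value whose mode number is 8
   (a hypothetical figure) would get the code 8 | (32 << 8) == 0x2008,
   since TYPE_ALIGN is measured in bits.  */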
10234 tree
10235 bc_runtime_type_code (type)
10236 tree type;
10238 int val;
10240 switch (TREE_CODE (type))
10242 case VOID_TYPE:
10243 case INTEGER_TYPE:
10244 case REAL_TYPE:
10245 case COMPLEX_TYPE:
10246 case ENUMERAL_TYPE:
10247 case POINTER_TYPE:
10248 case RECORD_TYPE:
10250 val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
10251 break;
10253 case ERROR_MARK:
10255 val = 0;
10256 break;
10258 default:
10260 abort ();
10262 return build_int_2 (val, 0);
10266 /* Generate constructor label */
10267 char *
10268 bc_gen_constr_label ()
10270 static int label_counter;
10271 static char label[20];
10273 sprintf (label, "*LR%d", label_counter++);
10275 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
10279 /* Evaluate constructor CONSTR and return pointer to it on level one. We
10280 expand the constructor data as static data, and push a pointer to it.
10281 The pointer is put in the pointer table and is retrieved by a constP
10282 bytecode instruction. We then loop and store each constructor member in
10283 the corresponding component. Finally, we return the original pointer on
10284 the stack. */
10286 void
10287 bc_expand_constructor (constr)
10288 tree constr;
10290 char *l;
10291 HOST_WIDE_INT ptroffs;
10292 rtx constr_rtx;
10295 /* Literal constructors are handled as constants, whereas
10296 non-literals are evaluated and stored element by element
10297 into the data segment. */
10299 /* Allocate space in proper segment and push pointer to space on stack. */
10302 l = bc_gen_constr_label ();
10304 if (TREE_CONSTANT (constr))
10306 text_section ();
10308 bc_emit_const_labeldef (l);
10309 bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
10311 else
10313 data_section ();
10315 bc_emit_data_labeldef (l);
10316 bc_output_data_constructor (constr);
10320 /* Add reference to pointer table and recall pointer to stack;
10321 this code is common for both types of constructors: literals
10322 and non-literals. */
10324 ptroffs = bc_define_pointer (l);
10325 bc_emit_instruction (constP, ptroffs);
10327 /* This is all that has to be done if it's a literal. */
10328 if (TREE_CONSTANT (constr))
10329 return;
10332 /* At this point, we have the pointer to the structure on top of the stack.
10333 Generate sequences of store_memory calls for the constructor. */
10335 /* constructor type is structure */
10336 if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
10338 register tree elt;
10340 /* If the constructor has fewer fields than the structure,
10341 clear the whole structure first. */
10343 if (list_length (CONSTRUCTOR_ELTS (constr))
10344 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
10346 bc_emit_instruction (duplicate);
10347 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
10348 bc_emit_instruction (clearBLK);
10351 /* Store each element of the constructor into the corresponding
10352 field of TARGET. */
10354 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
10356 register tree field = TREE_PURPOSE (elt);
10357 register enum machine_mode mode;
10358 int bitsize;
10359 int bitpos;
10360 int unsignedp;
10362 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
10363 mode = DECL_MODE (field);
10364 unsignedp = TREE_UNSIGNED (field);
10366 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
10368 bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
10369 /* The alignment of TARGET is
10370 at least what its type requires. */
10371 VOIDmode, 0,
10372 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
10373 int_size_in_bytes (TREE_TYPE (constr)));
10376 else
10378 /* Constructor type is array */
10379 if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
10381 register tree elt;
10382 register int i;
10383 tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
10384 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
10385 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
10386 tree elttype = TREE_TYPE (TREE_TYPE (constr));
10388 /* If the constructor has fewer elements than the array,
10389 clear the whole array first. */
10391 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
10393 bc_emit_instruction (duplicate);
10394 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
10395 bc_emit_instruction (clearBLK);
10399 /* Store each element of the constructor into the corresponding
10400 element of TARGET, determined by counting the elements. */
10402 for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
10403 elt;
10404 elt = TREE_CHAIN (elt), i++)
10406 register enum machine_mode mode;
10407 int bitsize;
10408 int bitpos;
10409 int unsignedp;
10411 mode = TYPE_MODE (elttype);
10412 bitsize = GET_MODE_BITSIZE (mode);
10413 unsignedp = TREE_UNSIGNED (elttype);
10415 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
10416 /* * TYPE_SIZE_UNIT (elttype) */ );
10418 bc_store_field (elt, bitsize, bitpos, mode,
10419 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
10420 /* The alignment of TARGET is
10421 at least what its type requires. */
10422 VOIDmode, 0,
10423 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
10424 int_size_in_bytes (TREE_TYPE (constr)));
10431 /* Store the value of EXP (an expression tree) into member FIELD of
10432 structure at address on stack, which has type TYPE, mode MODE and
10433 occupies BITSIZE bits, starting BITPOS bits from the beginning of the
10434 structure.
10436 ALIGN is the alignment that TARGET is known to have, measured in bytes.
10437 TOTAL_SIZE is its size in bytes, or -1 if variable. */
10439 void
10440 bc_store_field (field, bitsize, bitpos, mode, exp, type,
10441 value_mode, unsignedp, align, total_size)
10442 int bitsize, bitpos;
10443 enum machine_mode mode;
10444 tree field, exp, type;
10445 enum machine_mode value_mode;
10446 int unsignedp;
10447 int align;
10448 int total_size;
10451 /* Expand expression and copy pointer */
10452 bc_expand_expr (exp);
10453 bc_emit_instruction (over);
10456 /* If the component is a bit field, we cannot use addressing to access
10457 it. Use bit-field techniques to store in it. */
10459 if (DECL_BIT_FIELD (field))
10461 bc_store_bit_field (bitpos, bitsize, unsignedp);
10462 return;
10464 else
10465 /* Not bit field */
10467 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
10469 /* Advance pointer to the desired member */
10470 if (offset)
10471 bc_emit_instruction (addconstPSI, offset);
10473 /* Store */
10474 bc_store_memory (type, field);
10479 /* Store SI/SU in bitfield */
10480 void
10481 bc_store_bit_field (offset, size, unsignedp)
10482 int offset, size, unsignedp;
10484 /* Push bitfield offset and size */
10485 bc_push_offset_and_size (offset, size);
10487 /* Store */
10488 bc_emit_instruction (sstoreBI);
10492 /* Load SI/SU from bitfield */
10493 void
10494 bc_load_bit_field (offset, size, unsignedp)
10495 int offset, size, unsignedp;
10497 /* Push bitfield offset and size */
10498 bc_push_offset_and_size (offset, size);
10500 /* Load: sign-extend if signed, else zero-extend */
10501 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
10505 /* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
10506 (adjust stack pointer upwards), negative means add that number of
10507 levels (adjust the stack pointer downwards). Only positive values
10508 normally make sense. */
10510 void
10511 bc_adjust_stack (nlevels)
10512 int nlevels;
10514 switch (nlevels)
10516 case 0:
10517 break;
10519 case 2:
10520 bc_emit_instruction (drop);
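/* Fall through: dropping two levels is just two single drops.  */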
10522 case 1:
10523 bc_emit_instruction (drop);
10524 break;
10526 default:
10528 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
10529 stack_depth -= nlevels;
10532 #if defined (VALIDATE_STACK_FOR_BC)
10533 VALIDATE_STACK_FOR_BC ();
10534 #endif