/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92, 93, 94, 95, 1996 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"

#include "bytecode.h"
#include "bc-opcode.h"
#include "bc-typecd.h"
#include "bc-optab.h"
#include "bc-emit.h"
#define CEIL(x,y) (((x) + (y) - 1) / (y))
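/* Worked example (added for illustration, not part of the original source):
   CEIL rounds up, e.g. CEIL (10, 4) == (10 + 4 - 1) / 4 == 3, which is how
   convert_move below computes the number of words needed to hold a
   10-byte value when UNITS_PER_WORD is 4.  */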
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int len;
  int offset;
  int reverse;
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};
/* Used to generate bytecodes: keep track of size of local variables,
   as well as depth of arithmetic stack.  (Notice that variables are
   stored on the machine's stack, not the arithmetic stack.)  */

extern int local_vars_size;
extern int stack_depth;
extern int max_stack_depth;
extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;

static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
				    struct move_by_pieces *));
static void clear_by_pieces PROTO((rtx, int, int));
static void clear_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
				     struct clear_by_pieces *));
static int is_zeros_p PROTO((tree));
static int mostly_zeros_p PROTO((tree));
static void store_constructor PROTO((tree, rtx, int));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
			      enum machine_mode, int, int, int));
static int get_inner_unaligned_p PROTO((tree));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree));
static int fixed_type_p PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx,
				 enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int));
rtx bc_expand_increment PROTO((struct increment_operator *, tree));
tree bc_runtime_type_code PROTO((tree));
rtx bc_allocate_local PROTO((int, int));
void bc_store_memory PROTO((tree, tree));
tree bc_expand_component_address PROTO((tree));
tree bc_expand_address PROTO((tree));
void bc_expand_constructor PROTO((tree));
void bc_adjust_stack PROTO((int));
tree bc_canonicalize_array_ref PROTO((tree));
void bc_load_memory PROTO((tree, tree));
void bc_load_externaddr PROTO((rtx));
void bc_load_externaddr_id PROTO((tree, int));
void bc_load_localaddr PROTO((rtx));
void bc_load_parmaddr PROTO((rtx));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
static tree defer_cleanups_to PROTO((tree));
extern void (*interim_eh_hook) PROTO((tree));
extern tree truthvalue_conversion PROTO((tree));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif

/* Maps used to convert modes to const, load, and store bytecodes.  */
enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
/* Initialize maps used to convert modes to const, load, and store
   bytecodes.  */
void
bc_init_mode_to_opcode_maps ()
{
  int mode;

  for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
    mode_to_const_map[mode] =
      mode_to_load_map[mode] =
	mode_to_store_map[mode] = neverneverland;

#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
  mode_to_const_map[(int) SYM] = CONST; \
  mode_to_load_map[(int) SYM] = LOAD; \
  mode_to_store_map[(int) SYM] = STORE;

#include "modemap.def"
#undef DEF_MODEMAP
}
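/* Illustrative sketch (not in the original source): a modemap.def entry
   of the hypothetical form

       DEF_MODEMAP (SImode, ..., constSI, loadSI, storeSI)

   expands under the #define above into

       mode_to_const_map[(int) SImode] = constSI;
       mode_to_load_map[(int) SImode] = loadSI;
       mode_to_store_map[(int) SImode] = storeSI;

   so each map gets one assignment per mode listed in modemap.def.  */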
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx (REG, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));

	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
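#if 0
/* Illustrative sketch (not in the original source) of the calling protocol
   described above; OP and TARGET are hypothetical rtx's.  Every rtx that
   might be a QUEUED must pass through protect_from_queue immediately before
   being put in an insn, and emit_queue flushes the pending increments.  */
op = protect_from_queue (op, 0);	/* read access only */
emit_insn (gen_move_insn (target, op));
emit_queue ();				/* output queued postincrements */
#endif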
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
	|| queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;
#ifdef HAVE_extendqfhf2
      if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfsf2
      if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfdf2
      if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfxf2
      if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqftf2
      if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendhftqf2
      if (HAVE_extendhftqf2 && from_mode == HFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendhfsf2
      if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfdf2
      if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfxf2
      if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhftf2
      if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsfxf2
      if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddfxf2
      if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;
	    }
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion */			/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
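#if 0
/* Illustrative only (not in the original source): widening a QImode
   register BYTE into an SImode register WORD.  The last argument selects
   zero-extension; passing 0 instead would sign-extend.  */
rtx byte = gen_reg_rtx (QImode);
rtx word = gen_reg_rtx (SImode);
convert_move (word, byte, 1);		/* zero-extend */
#endif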
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
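#if 0
/* Illustrative only (not in the original source): a CONST_INT carries no
   mode of its own, so OLDMODE tells convert_modes how wide the constant
   really is.  Converting -1 in QImode to SImode with UNSIGNEDP nonzero
   goes through the sign/zero-extension code above and yields
   GEN_INT (0xff) rather than -1.  */
rtx wide = convert_modes (SImode, QImode, GEN_INT (-1), 1);
#endif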
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
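/* Worked example (added for illustration, not part of the original source),
   assuming MOVE_MAX == 4, ALIGN == 4, and SImode/HImode move insns exist:
   for L == 10, two SImode moves cover 8 bytes and one HImode move covers
   the remaining 2, so this returns 3 -- below the default MOVE_RATIO, so
   emit_block_move would copy such a block by pieces.  */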
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      MEM_IN_STRUCT_P (to1) = data->to_struct;
      from1 =
	(data->autinc_from
	 ? gen_rtx (MEM, mode, data->from_addr)
	 : change_address (data->from, mode,
			   plus_constant (data->from_addr, data->offset)));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= GET_MODE_MASK (mode)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
			 XEXP (y, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }
}
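#if 0
/* Illustrative only (not in the original source): X and Y are hypothetical
   BLKmode MEMs.  A 16-byte copy at 4-byte alignment is usually cheap enough
   to expand as scalar moves via move_by_pieces; larger or variable sizes
   fall through to a movstr pattern or the library call above.  */
emit_block_move (x, y, GEN_INT (16), 4);
#endif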
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
  rtx pat, last;

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx (REG, word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}
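/* Worked example (added for illustration, not part of the original source):
   on a big-endian machine with UNITS_PER_WORD == 4, a 3-byte BLKmode value
   sits in the low-order bits of its register, so the shift above moves it
   left by (4 - 3) * 8 = 8 bits into the high-order bytes, which big-endian
   byte order places at the lowest memory addresses.  */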
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx (EXPR_LIST, VOIDmode,
	       gen_rtx (USE, VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
}
/* Generate several move instructions to clear LEN bytes of block TO.
   (A MEM rtx with BLKmode).  The caller must pass TO through
   protect_from_queue before calling.  ALIGN (in bytes) is maximum alignment
   we can assume.  */

static void
clear_by_pieces (to, len, align)
     rtx to;
     int len, align;
{
  struct clear_by_pieces data;
  rtx to_addr = XEXP (to, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.to = to;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data.autinc_to
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	clear_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}
/* Subroutine of clear_by_pieces.  Clear as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
clear_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct clear_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, const0_rtx));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
1984 /* Write zeros through the storage of OBJECT.
1985 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
1986 the maximum alignment we can is has, measured in bytes. */
1988 void
1989 clear_storage (object, size, align)
1990 rtx object;
1991 rtx size;
1992 int align;
1994 if (GET_MODE (object) == BLKmode)
1996 object = protect_from_queue (object, 1);
1997 size = protect_from_queue (size, 0);
1999 if (GET_CODE (size) == CONST_INT
2000 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2001 clear_by_pieces (object, INTVAL (size), align);
2003 else
2005 /* Try the most limited insn first, because there's no point
2006 including more than one in the machine description unless
2007 the more limited one has some advantage. */
2009 rtx opalign = GEN_INT (align);
2010 enum machine_mode mode;
2012 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2013 mode = GET_MODE_WIDER_MODE (mode))
2015 enum insn_code code = clrstr_optab[(int) mode];
2017 if (code != CODE_FOR_nothing
2018 /* We don't need MODE to be narrower than
2019 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2020 the mode mask, as it is returned by the macro, it will
2021 definitely be less than the actual mode mask. */
2022 && ((GET_CODE (size) == CONST_INT
2023 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2024 <= GET_MODE_MASK (mode)))
2025 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2026 && (insn_operand_predicate[(int) code][0] == 0
2027 || (*insn_operand_predicate[(int) code][0]) (object,
2028 BLKmode))
2029 && (insn_operand_predicate[(int) code][2] == 0
2030 || (*insn_operand_predicate[(int) code][2]) (opalign,
2031 VOIDmode)))
2033 rtx op1;
2034 rtx last = get_last_insn ();
2035 rtx pat;
2037 op1 = convert_to_mode (mode, size, 1);
2038 if (insn_operand_predicate[(int) code][1] != 0
2039 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2040 mode))
2041 op1 = copy_to_mode_reg (mode, op1);
2043 pat = GEN_FCN ((int) code) (object, op1, opalign);
2044 if (pat)
2046 emit_insn (pat);
2047 return;
2049 else
2050 delete_insns_since (last);
2055 #ifdef TARGET_MEM_FUNCTIONS
2056 emit_library_call (memset_libfunc, 0,
2057 VOIDmode, 3,
2058 XEXP (object, 0), Pmode,
2059 const0_rtx, TYPE_MODE (integer_type_node),
2060 convert_to_mode (TYPE_MODE (sizetype),
2061 size, TREE_UNSIGNED (sizetype)),
2062 TYPE_MODE (sizetype));
2063 #else
2064 emit_library_call (bzero_libfunc, 0,
2065 VOIDmode, 2,
2066 XEXP (object, 0), Pmode,
2067 convert_to_mode (TYPE_MODE (integer_type_node),
2068 size,
2069 TREE_UNSIGNED (integer_type_node)),
2070 TYPE_MODE (integer_type_node));
2071 #endif
2074 else
2075 emit_move_insn (object, const0_rtx);
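/* Usage sketch with a hypothetical caller, not from the original
   source: zeroing a 16-byte, word-aligned BLKmode temporary might look
   like

	rtx obj = assign_stack_temp (BLKmode, 16, 0);
	clear_storage (obj, GEN_INT (16), UNITS_PER_WORD);

   For a constant size this small, the move_by_pieces_ninsns test above
   normally selects clear_by_pieces rather than a clrstr pattern or the
   memset/bzero library call.  */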
2078 /* Generate code to copy Y into X.
2079 Both Y and X must have the same mode, except that
2080 Y can be a constant with VOIDmode.
2081 This mode cannot be BLKmode; use emit_block_move for that.
2083 Return the last instruction emitted. */
2085 rtx
2086 emit_move_insn (x, y)
2087 rtx x, y;
2089 enum machine_mode mode = GET_MODE (x);
2091 x = protect_from_queue (x, 1);
2092 y = protect_from_queue (y, 0);
2094 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2095 abort ();
2097 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2098 y = force_const_mem (mode, y);
2100 /* If X or Y are memory references, verify that their addresses are valid
2101 for the machine. */
2102 if (GET_CODE (x) == MEM
2103 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2104 && ! push_operand (x, GET_MODE (x)))
2105 || (flag_force_addr
2106 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2107 x = change_address (x, VOIDmode, XEXP (x, 0));
2109 if (GET_CODE (y) == MEM
2110 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2111 || (flag_force_addr
2112 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2113 y = change_address (y, VOIDmode, XEXP (y, 0));
2115 if (mode == BLKmode)
2116 abort ();
2118 return emit_move_insn_1 (x, y);
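/* Usage sketch, hypothetical and not from the original source: moving
   a constant into a fresh pseudo-register passes through the address
   and constant validation above before reaching emit_move_insn_1:

	rtx reg = gen_reg_rtx (SImode);
	emit_move_insn (reg, GEN_INT (42));

   Had the constant not satisfied LEGITIMATE_CONSTANT_P, it would first
   have been placed in the constant pool by force_const_mem.  */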
2121 /* Low level part of emit_move_insn.
2122 Called just like emit_move_insn, but assumes X and Y
2123 are basically valid. */
2125 rtx
2126 emit_move_insn_1 (x, y)
2127 rtx x, y;
2129 enum machine_mode mode = GET_MODE (x);
2130 enum machine_mode submode;
2131 enum mode_class class = GET_MODE_CLASS (mode);
2132 int i;
2134 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2135 return
2136 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2138 /* Expand complex moves by moving real part and imag part, if possible. */
2139 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2140 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2141 * BITS_PER_UNIT),
2142 (class == MODE_COMPLEX_INT
2143 ? MODE_INT : MODE_FLOAT),
2145 && (mov_optab->handlers[(int) submode].insn_code
2146 != CODE_FOR_nothing))
2148 /* Don't split destination if it is a stack push. */
2149 int stack = push_operand (x, GET_MODE (x));
2150 rtx insns;
2152 /* If this is a stack push, push the highpart first, so it
2153 will be in the argument order.
2155 In that case, change_address is used only to convert
2156 the mode, not to change the address. */
2157 if (stack)
2159 /* Note that the real part always precedes the imag part in memory
2160 regardless of the machine's endianness. */
2161 #ifdef STACK_GROWS_DOWNWARD
2162 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2163 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2164 gen_imagpart (submode, y)));
2165 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2166 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2167 gen_realpart (submode, y)));
2168 #else
2169 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2170 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2171 gen_realpart (submode, y)));
2172 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2173 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2174 gen_imagpart (submode, y)));
2175 #endif
2177 else
2179 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2180 (gen_realpart (submode, x), gen_realpart (submode, y)));
2181 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2182 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2185 return get_last_insn ();
2188 /* This will handle any multi-word mode that lacks a move_insn pattern.
2189 However, you will get better code if you define such patterns,
2190 even if they must turn into multiple assembler instructions. */
2191 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2193 rtx last_insn = 0;
2194 rtx insns;
2196 #ifdef PUSH_ROUNDING
2198 /* If X is a push on the stack, do the push now and replace
2199 X with a reference to the stack pointer. */
2200 if (push_operand (x, GET_MODE (x)))
2202 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2203 x = change_address (x, VOIDmode, stack_pointer_rtx);
2205 #endif
2207 /* Show the output dies here. */
2208 if (x != y)
2209 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
2211 for (i = 0;
2212 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2213 i++)
2215 rtx xpart = operand_subword (x, i, 1, mode);
2216 rtx ypart = operand_subword (y, i, 1, mode);
2218 /* If we can't get a part of Y, put Y into memory if it is a
2219 constant. Otherwise, force it into a register. If we still
2220 can't get a part of Y, abort. */
2221 if (ypart == 0 && CONSTANT_P (y))
2223 y = force_const_mem (mode, y);
2224 ypart = operand_subword (y, i, 1, mode);
2226 else if (ypart == 0)
2227 ypart = operand_subword_force (y, i, mode);
2229 if (xpart == 0 || ypart == 0)
2230 abort ();
2232 last_insn = emit_move_insn (xpart, ypart);
2235 return last_insn;
2237 else
2238 abort ();
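/* Illustrative sketch, not part of the original source: on a 32-bit
   target with no DImode move pattern, the multi-word branch above
   reduces an 8-byte move to word-sized pieces, conceptually:

	int nwords = (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1)
		     / UNITS_PER_WORD;
	int i;

	for (i = 0; i < nwords; i++)
	  emit_move_insn (operand_subword_force (x, i, mode),
			  operand_subword_force (y, i, mode));

   The real loop additionally spills constants to memory when a subword
   cannot be extracted, and clobbers X up front so later passes know the
   old value is dead.  */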
2241 /* Pushing data onto the stack. */
2243 /* Push a block of length SIZE (perhaps variable)
2244 and return an rtx to address the beginning of the block.
2245 Note that it is not possible for the value returned to be a QUEUED.
2246 The value may be virtual_outgoing_args_rtx.
2248 EXTRA is the number of bytes of padding to push in addition to SIZE.
2249 BELOW nonzero means this padding comes at low addresses;
2250 otherwise, the padding comes at high addresses. */
2252 rtx
2253 push_block (size, extra, below)
2254 rtx size;
2255 int extra, below;
2257 register rtx temp;
2259 size = convert_modes (Pmode, ptr_mode, size, 1);
2260 if (CONSTANT_P (size))
2261 anti_adjust_stack (plus_constant (size, extra));
2262 else if (GET_CODE (size) == REG && extra == 0)
2263 anti_adjust_stack (size);
2264 else
2266 rtx temp = copy_to_mode_reg (Pmode, size);
2267 if (extra != 0)
2268 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2269 temp, 0, OPTAB_LIB_WIDEN);
2270 anti_adjust_stack (temp);
2273 #ifdef STACK_GROWS_DOWNWARD
2274 temp = virtual_outgoing_args_rtx;
2275 if (extra != 0 && below)
2276 temp = plus_constant (temp, extra);
2277 #else
2278 if (GET_CODE (size) == CONST_INT)
2279 temp = plus_constant (virtual_outgoing_args_rtx,
2280 - INTVAL (size) - (below ? 0 : extra));
2281 else if (extra != 0 && !below)
2282 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2283 negate_rtx (Pmode, plus_constant (size, extra)));
2284 else
2285 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2286 negate_rtx (Pmode, size));
2287 #endif
2289 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
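/* Usage sketch, hypothetical and not from the original source:
   reserving an 8-byte argument block with 4 bytes of padding below it
   would be written

	rtx blk = push_block (GEN_INT (8), 4, 1);

   This adjusts the stack by 12 bytes and returns the address of the
   8-byte block itself; since BELOW is nonzero the padding occupies the
   low addresses, so on a STACK_GROWS_DOWNWARD machine the result is
   virtual_outgoing_args_rtx plus 4.  */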
2292 rtx
2293 gen_push_operand ()
2295 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
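/* For example (a sketch, not original text): on a 32-bit target where
   STACK_PUSH_CODE is PRE_DEC, the expression returned is

	(pre_dec:SI (reg:SI sp))

   and wrapping it in a MEM, as the callers below do, yields a push
   destination that decrements the stack pointer as a side effect of
   the store.  */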
2298 /* Generate code to push X onto the stack, assuming it has mode MODE and
2299 type TYPE.
2300 MODE is redundant except when X is a CONST_INT (since they don't
2301 carry mode info).
2302 SIZE is an rtx for the size of data to be copied (in bytes),
2303 needed only if X is BLKmode.
2305 ALIGN (in bytes) is maximum alignment we can assume.
2307 If PARTIAL and REG are both nonzero, then copy that many of the first
2308 words of X into registers starting with REG, and push the rest of X.
2309 The amount of space pushed is decreased by PARTIAL words,
2310 rounded *down* to a multiple of PARM_BOUNDARY.
2311 REG must be a hard register in this case.
2312 If REG is zero but PARTIAL is not, take all other actions for an
2313 argument partially in registers, but do not actually load any
2314 registers.
2316 EXTRA is the amount in bytes of extra space to leave next to this arg.
2317 This is ignored if an argument block has already been allocated.
2319 On a machine that lacks real push insns, ARGS_ADDR is the address of
2320 the bottom of the argument block for this call. We use indexing off there
2321 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2322 argument block has not been preallocated.
2324 ARGS_SO_FAR is the size of args previously pushed for this call. */
2326 void
2327 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2328 args_addr, args_so_far)
2329 register rtx x;
2330 enum machine_mode mode;
2331 tree type;
2332 rtx size;
2333 int align;
2334 int partial;
2335 rtx reg;
2336 int extra;
2337 rtx args_addr;
2338 rtx args_so_far;
2340 rtx xinner;
2341 enum direction stack_direction
2342 #ifdef STACK_GROWS_DOWNWARD
2343 = downward;
2344 #else
2345 = upward;
2346 #endif
2348 /* Decide where to pad the argument: `downward' for below,
2349 `upward' for above, or `none' for don't pad it.
2350 Default is below for small data on big-endian machines; else above. */
2351 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2353 /* Invert direction if stack is post-update. */
2354 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2355 if (where_pad != none)
2356 where_pad = (where_pad == downward ? upward : downward);
2358 xinner = x = protect_from_queue (x, 0);
2360 if (mode == BLKmode)
2362 /* Copy a block into the stack, entirely or partially. */
2364 register rtx temp;
2365 int used = partial * UNITS_PER_WORD;
2366 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2367 int skip;
2369 if (size == 0)
2370 abort ();
2372 used -= offset;
2374 /* USED is now the # of bytes we need not copy to the stack
2375 because registers will take care of them. */
2377 if (partial != 0)
2378 xinner = change_address (xinner, BLKmode,
2379 plus_constant (XEXP (xinner, 0), used));
2381 /* If the partial register-part of the arg counts in its stack size,
2382 skip the part of stack space corresponding to the registers.
2383 Otherwise, start copying to the beginning of the stack space,
2384 by setting SKIP to 0. */
2385 #ifndef REG_PARM_STACK_SPACE
2386 skip = 0;
2387 #else
2388 skip = used;
2389 #endif
2391 #ifdef PUSH_ROUNDING
2392 /* Do it with several push insns if that doesn't take lots of insns
2393 and if there is no difficulty with push insns that skip bytes
2394 on the stack for alignment purposes. */
2395 if (args_addr == 0
2396 && GET_CODE (size) == CONST_INT
2397 && skip == 0
2398 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2399 < MOVE_RATIO)
2400 /* Here we avoid the case of a structure whose weak alignment
2401 forces many pushes of a small amount of data,
2402 and such small pushes do rounding that causes trouble. */
2403 && ((! SLOW_UNALIGNED_ACCESS)
2404 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2405 || PUSH_ROUNDING (align) == align)
2406 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2408 /* Push padding now if padding above and stack grows down,
2409 or if padding below and stack grows up.
2410 But if space already allocated, this has already been done. */
2411 if (extra && args_addr == 0
2412 && where_pad != none && where_pad != stack_direction)
2413 anti_adjust_stack (GEN_INT (extra));
2415 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2416 INTVAL (size) - used, align);
2418 else
2419 #endif /* PUSH_ROUNDING */
2421 /* Otherwise make space on the stack and copy the data
2422 to the address of that space. */
2424 /* Deduct words put into registers from the size we must copy. */
2425 if (partial != 0)
2427 if (GET_CODE (size) == CONST_INT)
2428 size = GEN_INT (INTVAL (size) - used);
2429 else
2430 size = expand_binop (GET_MODE (size), sub_optab, size,
2431 GEN_INT (used), NULL_RTX, 0,
2432 OPTAB_LIB_WIDEN);
2435 /* Get the address of the stack space.
2436 In this case, we do not deal with EXTRA separately.
2437 A single stack adjust will do. */
2438 if (! args_addr)
2440 temp = push_block (size, extra, where_pad == downward);
2441 extra = 0;
2443 else if (GET_CODE (args_so_far) == CONST_INT)
2444 temp = memory_address (BLKmode,
2445 plus_constant (args_addr,
2446 skip + INTVAL (args_so_far)));
2447 else
2448 temp = memory_address (BLKmode,
2449 plus_constant (gen_rtx (PLUS, Pmode,
2450 args_addr, args_so_far),
2451 skip));
2453 /* TEMP is the address of the block. Copy the data there. */
2454 if (GET_CODE (size) == CONST_INT
2455 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2456 < MOVE_RATIO))
2458 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2459 INTVAL (size), align);
2460 goto ret;
2462 /* Try the most limited insn first, because there's no point
2463 including more than one in the machine description unless
2464 the more limited one has some advantage. */
2465 #ifdef HAVE_movstrqi
2466 if (HAVE_movstrqi
2467 && GET_CODE (size) == CONST_INT
2468 && ((unsigned) INTVAL (size)
2469 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2471 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2472 xinner, size, GEN_INT (align));
2473 if (pat != 0)
2475 emit_insn (pat);
2476 goto ret;
2479 #endif
2480 #ifdef HAVE_movstrhi
2481 if (HAVE_movstrhi
2482 && GET_CODE (size) == CONST_INT
2483 && ((unsigned) INTVAL (size)
2484 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2486 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2487 xinner, size, GEN_INT (align));
2488 if (pat != 0)
2490 emit_insn (pat);
2491 goto ret;
2494 #endif
2495 #ifdef HAVE_movstrsi
2496 if (HAVE_movstrsi)
2498 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2499 xinner, size, GEN_INT (align));
2500 if (pat != 0)
2502 emit_insn (pat);
2503 goto ret;
2506 #endif
2507 #ifdef HAVE_movstrdi
2508 if (HAVE_movstrdi)
2510 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2511 xinner, size, GEN_INT (align));
2512 if (pat != 0)
2514 emit_insn (pat);
2515 goto ret;
2518 #endif
2520 #ifndef ACCUMULATE_OUTGOING_ARGS
2521 /* If the source is referenced relative to the stack pointer,
2522 copy it to another register to stabilize it. We do not need
2523 to do this if we know that we won't be changing sp. */
2525 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2526 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2527 temp = copy_to_reg (temp);
2528 #endif
2530 /* Make inhibit_defer_pop nonzero around the library call
2531 to force it to pop the bcopy-arguments right away. */
2532 NO_DEFER_POP;
2533 #ifdef TARGET_MEM_FUNCTIONS
2534 emit_library_call (memcpy_libfunc, 0,
2535 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2536 convert_to_mode (TYPE_MODE (sizetype),
2537 size, TREE_UNSIGNED (sizetype)),
2538 TYPE_MODE (sizetype));
2539 #else
2540 emit_library_call (bcopy_libfunc, 0,
2541 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2542 convert_to_mode (TYPE_MODE (integer_type_node),
2543 size,
2544 TREE_UNSIGNED (integer_type_node)),
2545 TYPE_MODE (integer_type_node));
2546 #endif
2547 OK_DEFER_POP;
2550 else if (partial > 0)
2552 /* Scalar partly in registers. */
2554 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2555 int i;
2556 int not_stack;
2557 /* # words of start of argument
2558 that we must make space for but need not store. */
2559 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2560 int args_offset = INTVAL (args_so_far);
2561 int skip;
2563 /* Push padding now if padding above and stack grows down,
2564 or if padding below and stack grows up.
2565 But if space already allocated, this has already been done. */
2566 if (extra && args_addr == 0
2567 && where_pad != none && where_pad != stack_direction)
2568 anti_adjust_stack (GEN_INT (extra));
2570 /* If we make space by pushing it, we might as well push
2571 the real data. Otherwise, we can leave OFFSET nonzero
2572 and leave the space uninitialized. */
2573 if (args_addr == 0)
2574 offset = 0;
2576 /* Now NOT_STACK gets the number of words that we don't need to
2577 allocate on the stack. */
2578 not_stack = partial - offset;
2580 /* If the partial register-part of the arg counts in its stack size,
2581 skip the part of stack space corresponding to the registers.
2582 Otherwise, start copying to the beginning of the stack space,
2583 by setting SKIP to 0. */
2584 #ifndef REG_PARM_STACK_SPACE
2585 skip = 0;
2586 #else
2587 skip = not_stack;
2588 #endif
2590 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2591 x = validize_mem (force_const_mem (mode, x));
2593 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2594 SUBREGs of such registers are not allowed. */
2595 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2596 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2597 x = copy_to_reg (x);
2599 /* Loop over all the words allocated on the stack for this arg. */
2600 /* We can do it by words, because any scalar bigger than a word
2601 has a size that is a multiple of a word. */
2602 #ifndef PUSH_ARGS_REVERSED
2603 for (i = not_stack; i < size; i++)
2604 #else
2605 for (i = size - 1; i >= not_stack; i--)
2606 #endif
2607 if (i >= not_stack + offset)
2608 emit_push_insn (operand_subword_force (x, i, mode),
2609 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2610 0, args_addr,
2611 GEN_INT (args_offset + ((i - not_stack + skip)
2612 * UNITS_PER_WORD)));
2614 else
2616 rtx addr;
2618 /* Push padding now if padding above and stack grows down,
2619 or if padding below and stack grows up.
2620 But if space already allocated, this has already been done. */
2621 if (extra && args_addr == 0
2622 && where_pad != none && where_pad != stack_direction)
2623 anti_adjust_stack (GEN_INT (extra));
2625 #ifdef PUSH_ROUNDING
2626 if (args_addr == 0)
2627 addr = gen_push_operand ();
2628 else
2629 #endif
2630 if (GET_CODE (args_so_far) == CONST_INT)
2631 addr
2632 = memory_address (mode,
2633 plus_constant (args_addr, INTVAL (args_so_far)));
2634 else
2635 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2636 args_so_far));
2638 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2641 ret:
2642 /* If part should go in registers, copy that part
2643 into the appropriate registers. Do this now, at the end,
2644 since mem-to-mem copies above may do function calls. */
2645 if (partial > 0 && reg != 0)
2646 move_block_to_reg (REGNO (reg), x, partial, mode);
2648 if (extra && args_addr == 0 && where_pad == stack_direction)
2649 anti_adjust_stack (GEN_INT (extra));
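/* Worked example, an assumption-laden sketch rather than original
   text: pushing a 4-word scalar with PARTIAL == 1 and REG set, and
   assuming PARM_BOUNDARY equals BITS_PER_WORD, leaves words 1..3 for
   the stack.  With no preallocated argument block the loop above
   pushes them one word at a time (the loop direction depends on
   PUSH_ARGS_REVERSED), and the final move_block_to_reg call loads
   word 0 into REG, so the net stack space consumed is
   3 * UNITS_PER_WORD.  */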
2652 /* Expand an assignment that stores the value of FROM into TO.
2653 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2654 (This may contain a QUEUED rtx;
2655 if the value is constant, this rtx is a constant.)
2656 Otherwise, the returned value is NULL_RTX.
2658 SUGGEST_REG is no longer actually used.
2659 It used to mean, copy the value through a register
2660 and return that register, if that is possible.
2661 We now use WANT_VALUE to decide whether to do this. */
2663 rtx
2664 expand_assignment (to, from, want_value, suggest_reg)
2665 tree to, from;
2666 int want_value;
2667 int suggest_reg;
2669 register rtx to_rtx = 0;
2670 rtx result;
2672 /* Don't crash if the lhs of the assignment was erroneous. */
2674 if (TREE_CODE (to) == ERROR_MARK)
2676 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2677 return want_value ? result : NULL_RTX;
2680 if (output_bytecode)
2682 tree dest_innermost;
2684 bc_expand_expr (from);
2685 bc_emit_instruction (duplicate);
2687 dest_innermost = bc_expand_address (to);
2689 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2690 take care of it here. */
2692 bc_store_memory (TREE_TYPE (to), dest_innermost);
2693 return NULL;
2696 /* Assignment of a structure component needs special treatment
2697 if the structure component's rtx is not simply a MEM.
2698 Assignment of an array element at a constant index, and assignment of
2699 an array element in an unaligned packed structure field, have the same
2700 problem. */
2702 if (TREE_CODE (to) == COMPONENT_REF
2703 || TREE_CODE (to) == BIT_FIELD_REF
2704 || (TREE_CODE (to) == ARRAY_REF
2705 && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2706 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
2707 || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
2709 enum machine_mode mode1;
2710 int bitsize;
2711 int bitpos;
2712 tree offset;
2713 int unsignedp;
2714 int volatilep = 0;
2715 tree tem;
2716 int alignment;
2718 push_temp_slots ();
2719 tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2720 &mode1, &unsignedp, &volatilep);
2722 /* If we are going to use store_bit_field and extract_bit_field,
2723 make sure to_rtx will be safe for multiple use. */
2725 if (mode1 == VOIDmode && want_value)
2726 tem = stabilize_reference (tem);
2728 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
2729 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2730 if (offset != 0)
2732 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2734 if (GET_CODE (to_rtx) != MEM)
2735 abort ();
2736 to_rtx = change_address (to_rtx, VOIDmode,
2737 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
2738 force_reg (ptr_mode, offset_rtx)));
2739 /* If we have a variable offset, the known alignment
2740 is only that of the innermost structure containing the field.
2741 (Actually, we could sometimes do better by using the
2742 alignment of an element of the innermost array, but no need.) */
2743 if (TREE_CODE (to) == COMPONENT_REF
2744 || TREE_CODE (to) == BIT_FIELD_REF)
2745 alignment
2746 = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
2748 if (volatilep)
2750 if (GET_CODE (to_rtx) == MEM)
2752 /* When the offset is zero, to_rtx is the address of the
2753 structure we are storing into, and hence may be shared.
2754 We must make a new MEM before setting the volatile bit. */
2755 if (offset == 0)
2756 to_rtx = change_address (to_rtx, VOIDmode, XEXP (to_rtx, 0));
2757 MEM_VOLATILE_P (to_rtx) = 1;
2759 #if 0 /* This was turned off because, when a field is volatile
2760 in an object which is not volatile, the object may be in a register,
2761 and then we would abort over here. */
2762 else
2763 abort ();
2764 #endif
2767 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2768 (want_value
2769 /* Spurious cast makes HPUX compiler happy. */
2770 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2771 : VOIDmode),
2772 unsignedp,
2773 /* Required alignment of containing datum. */
2774 alignment,
2775 int_size_in_bytes (TREE_TYPE (tem)));
2776 preserve_temp_slots (result);
2777 free_temp_slots ();
2778 pop_temp_slots ();
2780 /* If the value is meaningful, convert RESULT to the proper mode.
2781 Otherwise, return nothing. */
2782 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2783 TYPE_MODE (TREE_TYPE (from)),
2784 result,
2785 TREE_UNSIGNED (TREE_TYPE (to)))
2786 : NULL_RTX);
2789 /* If the rhs is a function call and its value is not an aggregate,
2790 call the function before we start to compute the lhs.
2791 This is needed for correct code for cases such as
2792 val = setjmp (buf) on machines where reference to val
2793 requires loading up part of an address in a separate insn.
2795 Don't do this if TO is a VAR_DECL whose DECL_RTL is a REG, since it might be
2796 a promoted variable where the zero- or sign-extension needs to be done.
2797 Handling this in the normal way is safe because no computation is done
2798 before the call. */
2799 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2800 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2802 rtx value;
2804 push_temp_slots ();
2805 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2806 if (to_rtx == 0)
2807 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2809 if (GET_MODE (to_rtx) == BLKmode)
2810 emit_block_move (to_rtx, value, expr_size (from),
2811 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
2812 else
2813 emit_move_insn (to_rtx, value);
2814 preserve_temp_slots (to_rtx);
2815 free_temp_slots ();
2816 pop_temp_slots ();
2817 return want_value ? to_rtx : NULL_RTX;
2820 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2821 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2823 if (to_rtx == 0)
2824 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2826 /* Don't move directly into a return register. */
2827 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2829 rtx temp;
2831 push_temp_slots ();
2832 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2833 emit_move_insn (to_rtx, temp);
2834 preserve_temp_slots (to_rtx);
2835 free_temp_slots ();
2836 pop_temp_slots ();
2837 return want_value ? to_rtx : NULL_RTX;
2840 /* In case we are returning the contents of an object which overlaps
2841 the place the value is being stored, use a safe function when copying
2842 a value through a pointer into a structure value return block. */
2843 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2844 && current_function_returns_struct
2845 && !current_function_returns_pcc_struct)
2847 rtx from_rtx, size;
2849 push_temp_slots ();
2850 size = expr_size (from);
2851 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2853 #ifdef TARGET_MEM_FUNCTIONS
2854 emit_library_call (memcpy_libfunc, 0,
2855 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2856 XEXP (from_rtx, 0), Pmode,
2857 convert_to_mode (TYPE_MODE (sizetype),
2858 size, TREE_UNSIGNED (sizetype)),
2859 TYPE_MODE (sizetype));
2860 #else
2861 emit_library_call (bcopy_libfunc, 0,
2862 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2863 XEXP (to_rtx, 0), Pmode,
2864 convert_to_mode (TYPE_MODE (integer_type_node),
2865 size, TREE_UNSIGNED (integer_type_node)),
2866 TYPE_MODE (integer_type_node));
2867 #endif
2869 preserve_temp_slots (to_rtx);
2870 free_temp_slots ();
2871 pop_temp_slots ();
2872 return want_value ? to_rtx : NULL_RTX;
2875 /* Compute FROM and store the value in the rtx we got. */
2877 push_temp_slots ();
2878 result = store_expr (from, to_rtx, want_value);
2879 preserve_temp_slots (result);
2880 free_temp_slots ();
2881 pop_temp_slots ();
2882 return want_value ? result : NULL_RTX;
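/* Usage sketch with hypothetical trees, not from the original source:
   for a C assignment `x = y + 1' the front end effectively performs

	expand_assignment (x_decl,
			   build (PLUS_EXPR, type, y_decl,
				  integer_one_node),
			   0, 0);

   where X_DECL, Y_DECL and TYPE stand for the relevant VAR_DECL and
   type nodes.  With WANT_VALUE == 0 the result is NULL_RTX; passing 1
   instead returns an rtx for the stored value in the mode of the
   left-hand side.  */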
2885 /* Generate code for computing expression EXP,
2886 and storing the value into TARGET.
2887 TARGET may contain a QUEUED rtx.
2889 If WANT_VALUE is nonzero, return a copy of the value
2890 not in TARGET, so that we can be sure to use the proper
2891 value in a containing expression even if TARGET has something
2892 else stored in it. If possible, we copy the value through a pseudo
2893 and return that pseudo. Or, if the value is constant, we try to
2894 return the constant. In some cases, we return a pseudo
2895 copied *from* TARGET.
2897 If the mode is BLKmode then we may return TARGET itself.
2898 It turns out that in BLKmode it doesn't cause a problem,
2899 because C has no operators that could combine two different
2900 assignments into the same BLKmode object with different values
2901 with no sequence point. Will other languages need this to
2902 be more thorough?
2904 If WANT_VALUE is 0, we return NULL, to make sure
2905 to catch quickly any cases where the caller uses the value
2906 and fails to set WANT_VALUE. */
2908 rtx
2909 store_expr (exp, target, want_value)
2910 register tree exp;
2911 register rtx target;
2912 int want_value;
2914 register rtx temp;
2915 int dont_return_target = 0;
2917 if (TREE_CODE (exp) == COMPOUND_EXPR)
2919 /* Perform first part of compound expression, then assign from second
2920 part. */
2921 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2922 emit_queue ();
2923 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
2925 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2927 /* For conditional expression, get safe form of the target. Then
2928 test the condition, doing the appropriate assignment on either
2929 side. This avoids the creation of unnecessary temporaries.
2930 For non-BLKmode, it is more efficient not to do this. */
2932 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2934 emit_queue ();
2935 target = protect_from_queue (target, 1);
2937 do_pending_stack_adjust ();
2938 NO_DEFER_POP;
2939 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2940 store_expr (TREE_OPERAND (exp, 1), target, 0);
2941 emit_queue ();
2942 emit_jump_insn (gen_jump (lab2));
2943 emit_barrier ();
2944 emit_label (lab1);
2945 store_expr (TREE_OPERAND (exp, 2), target, 0);
2946 emit_queue ();
2947 emit_label (lab2);
2948 OK_DEFER_POP;
2949 return want_value ? target : NULL_RTX;
2951 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
2952 && GET_MODE (target) != BLKmode)
2953 /* If target is in memory and caller wants value in a register instead,
2954 arrange that. Pass TARGET as target for expand_expr so that,
2955 if EXP is another assignment, WANT_VALUE will be nonzero for it.
2956 We know expand_expr will not use the target in that case.
2957 Don't do this if TARGET is volatile because we are supposed
2958 to write it and then read it. */
2960 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2961 GET_MODE (target), 0);
2962 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2963 temp = copy_to_reg (temp);
2964 dont_return_target = 1;
2966 else if (queued_subexp_p (target))
2967 /* If target contains a postincrement, let's not risk
2968 using it as the place to generate the rhs. */
2970 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2972 /* Expand EXP into a new pseudo. */
2973 temp = gen_reg_rtx (GET_MODE (target));
2974 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2976 else
2977 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2979 /* If target is volatile, ANSI requires accessing the value
2980 *from* the target, if it is accessed. So make that happen.
2981 In no case return the target itself. */
2982 if (! MEM_VOLATILE_P (target) && want_value)
2983 dont_return_target = 1;
2985 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2986 /* If this is a scalar in a register that is stored in a wider mode
2987 than the declared mode, compute the result into its declared mode
2988 and then convert to the wider mode. Our value is the computed
2989 expression. */
2991 /* If we don't want a value, we can do the conversion inside EXP,
2992 which will often result in some optimizations. Do the conversion
2993 in two steps: first change the signedness, if needed, then
2994 the extension. */
2995 if (! want_value)
2997 if (TREE_UNSIGNED (TREE_TYPE (exp))
2998 != SUBREG_PROMOTED_UNSIGNED_P (target))
2999 exp
3000 = convert
3001 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3002 TREE_TYPE (exp)),
3003 exp);
3005 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3006 SUBREG_PROMOTED_UNSIGNED_P (target)),
3007 exp);
3010 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3012 /* If TEMP is a volatile MEM and we want a result value, make
3013 the access now so it gets done only once. Likewise if
3014 it contains TARGET. */
3015 if (GET_CODE (temp) == MEM && want_value
3016 && (MEM_VOLATILE_P (temp)
3017 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3018 temp = copy_to_reg (temp);
3020 /* If TEMP is a VOIDmode constant, use convert_modes to make
3021 sure that we properly convert it. */
3022 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3023 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3024 TYPE_MODE (TREE_TYPE (exp)), temp,
3025 SUBREG_PROMOTED_UNSIGNED_P (target));
3027 convert_move (SUBREG_REG (target), temp,
3028 SUBREG_PROMOTED_UNSIGNED_P (target));
3029 return want_value ? temp : NULL_RTX;
3031 else
3033 temp = expand_expr (exp, target, GET_MODE (target), 0);
3034 /* Return TARGET if it's a specified hardware register.
3035 If TARGET is a volatile mem ref, either return TARGET
3036 or return a reg copied *from* TARGET; ANSI requires this.
3038 Otherwise, if TEMP is not TARGET, return TEMP
3039 if it is constant (for efficiency),
3040 or if we really want the correct value. */
3041 if (!(target && GET_CODE (target) == REG
3042 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3043 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3044 && temp != target
3045 && (CONSTANT_P (temp) || want_value))
3046 dont_return_target = 1;
3049 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3050 the same as that of TARGET, adjust the constant. This is needed, for
3051 example, in case it is a CONST_DOUBLE and we want only a word-sized
3052 value. */
3053 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3054 && TREE_CODE (exp) != ERROR_MARK
3055 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3056 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3057 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3059 /* If value was not generated in the target, store it there.
3060 Convert the value to TARGET's type first if necessary. */
3062 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
3064 target = protect_from_queue (target, 1);
3065 if (GET_MODE (temp) != GET_MODE (target)
3066 && GET_MODE (temp) != VOIDmode)
3068 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3069 if (dont_return_target)
3071 /* In this case, we will return TEMP,
3072 so make sure it has the proper mode.
3073 But don't forget to store the value into TARGET. */
3074 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3075 emit_move_insn (target, temp);
3077 else
3078 convert_move (target, temp, unsignedp);
3081 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3083 /* Handle copying a string constant into an array.
3084 The string constant may be shorter than the array.
3085 So copy just the string's actual length, and clear the rest. */
3086 rtx size;
3087 rtx addr;
3089 /* Get the size of the data type of the string,
3090 which is actually the size of the target. */
3091 size = expr_size (exp);
3092 if (GET_CODE (size) == CONST_INT
3093 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3094 emit_block_move (target, temp, size,
3095 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3096 else
3098 /* Compute the size of the data to copy from the string. */
3099 tree copy_size
3100 = size_binop (MIN_EXPR,
3101 make_tree (sizetype, size),
3102 convert (sizetype,
3103 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3104 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3105 VOIDmode, 0);
3106 rtx label = 0;
3108 /* Copy that much. */
3109 emit_block_move (target, temp, copy_size_rtx,
3110 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3112 /* Figure out how much is left in TARGET that we have to clear.
3113 Do all calculations in ptr_mode. */
3115 addr = XEXP (target, 0);
3116 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3118 if (GET_CODE (copy_size_rtx) == CONST_INT)
3120 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3121 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3123 else
3125 addr = force_reg (ptr_mode, addr);
3126 addr = expand_binop (ptr_mode, add_optab, addr,
3127 copy_size_rtx, NULL_RTX, 0,
3128 OPTAB_LIB_WIDEN);
3130 size = expand_binop (ptr_mode, sub_optab, size,
3131 copy_size_rtx, NULL_RTX, 0,
3132 OPTAB_LIB_WIDEN);
3134 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3135 GET_MODE (size), 0, 0);
3136 label = gen_label_rtx ();
3137 emit_jump_insn (gen_blt (label));
3140 if (size != const0_rtx)
3142 #ifdef TARGET_MEM_FUNCTIONS
3143 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3144 addr, Pmode,
3145 const0_rtx, TYPE_MODE (integer_type_node),
3146 convert_to_mode (TYPE_MODE (sizetype),
3147 size,
3148 TREE_UNSIGNED (sizetype)),
3149 TYPE_MODE (sizetype));
3150 #else
3151 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3152 addr, Pmode,
3153 convert_to_mode (TYPE_MODE (integer_type_node),
3154 size,
3155 TREE_UNSIGNED (integer_type_node)),
3156 TYPE_MODE (integer_type_node));
3157 #endif
3160 if (label)
3161 emit_label (label);
3164 else if (GET_MODE (temp) == BLKmode)
3165 emit_block_move (target, temp, expr_size (exp),
3166 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3167 else
3168 emit_move_insn (target, temp);
3171 /* If we don't want a value, return NULL_RTX. */
3172 if (! want_value)
3173 return NULL_RTX;
3175 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3176 ??? The latter test doesn't seem to make sense. */
3177 else if (dont_return_target && GET_CODE (temp) != MEM)
3178 return temp;
3180 /* Return TARGET itself if it is a hard register. */
3181 else if (want_value && GET_MODE (target) != BLKmode
3182 && ! (GET_CODE (target) == REG
3183 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3184 return copy_to_reg (target);
3186 else
3187 return target;
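/* Illustrative sketch, not part of the original source: the STRING_CST
   branch above is the rtl equivalent of the following C fragment, with
   ARRAY_SIZE and STRING_LEN standing in for the values of expr_size
   and TREE_STRING_LENGTH:

	int n = string_len < array_size ? string_len : array_size;
	memcpy (target, string, n);
	if (array_size > n)
	  memset (target + n, 0, array_size - n);

   so initializing `char buf[8] = "hi"' copies the 3 bytes "hi\0" and
   clears the remaining 5.  */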
3190 /* Return 1 if EXP just contains zeros. */
3192 static int
3193 is_zeros_p (exp)
3194 tree exp;
3196 tree elt;
3198 switch (TREE_CODE (exp))
3200 case CONVERT_EXPR:
3201 case NOP_EXPR:
3202 case NON_LVALUE_EXPR:
3203 return is_zeros_p (TREE_OPERAND (exp, 0));
3205 case INTEGER_CST:
3206 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3208 case COMPLEX_CST:
3209 return
3210 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3212 case REAL_CST:
3213 return REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst0);
3215 case CONSTRUCTOR:
3216 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3217 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3218 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3219 if (! is_zeros_p (TREE_VALUE (elt)))
3220 return 0;
3222 return 1;
3225 return 0;
3228 /* Return 1 if EXP contains mostly (3/4) zeros. */
3230 static int
3231 mostly_zeros_p (exp)
3232 tree exp;
3234 if (TREE_CODE (exp) == CONSTRUCTOR)
3236 int elts = 0, zeros = 0;
3237 tree elt = CONSTRUCTOR_ELTS (exp);
3238 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3240 /* If there are no ranges of true bits, it is all zero. */
3241 return elt == NULL_TREE;
3243 for (; elt; elt = TREE_CHAIN (elt))
3245 /* We do not handle the case where the index is a RANGE_EXPR,
3246 so the statistic will be somewhat inaccurate.
3247 We do make a more accurate count in store_constructor itself,
3248 and since this function is only used for nested array elements,
3249 this should be close enough. */
3250 if (mostly_zeros_p (TREE_VALUE (elt)))
3251 zeros++;
3252 elts++;
3255 return 4 * zeros >= 3 * elts;
3258 return is_zeros_p (exp);
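/* Worked example, not original text: the test `4 * zeros >= 3 * elts'
   asks whether at least three quarters of the elements are zero.  For
   a 10-element constructor, 8 zero elements give 32 >= 30, so it
   counts as mostly zero, while 7 give 28 < 30 and it does not.  */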
3261 /* Helper function for store_constructor.
3262 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3263 TYPE is the type of the CONSTRUCTOR, not the element type.
3264 CLEARED is as for store_constructor. */
3266 static void
3267 store_constructor_field (target, bitsize, bitpos,
3268 mode, exp, type, cleared)
3269 rtx target;
3270 int bitsize, bitpos;
3271 enum machine_mode mode;
3272 tree exp, type;
3273 int cleared;
3275 if (TREE_CODE (exp) == CONSTRUCTOR
3276 && (bitpos % BITS_PER_UNIT) == 0)
3278 bitpos /= BITS_PER_UNIT;
3279 store_constructor (exp,
3280 change_address (target, VOIDmode,
3281 plus_constant (XEXP (target, 0),
3282 bitpos)),
3283 cleared);
3285 else
3286 store_field (target, bitsize, bitpos, mode, exp,
3287 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3288 int_size_in_bytes (type));
3291 /* Store the value of constructor EXP into the rtx TARGET.
3292 TARGET is either a REG or a MEM.
3293 CLEARED is true if TARGET is known to have been zero'd. */
3295 static void
3296 store_constructor (exp, target, cleared)
3297 tree exp;
3298 rtx target;
3299 int cleared;
3301 tree type = TREE_TYPE (exp);
3303 /* We know our target cannot conflict, since safe_from_p has been called. */
3304 #if 0
3305 /* Don't try copying piece by piece into a hard register
3306 since that is vulnerable to being clobbered by EXP.
3307 Instead, construct in a pseudo register and then copy it all. */
3308 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3310 rtx temp = gen_reg_rtx (GET_MODE (target));
3311 store_constructor (exp, temp, 0);
3312 emit_move_insn (target, temp);
3313 return;
3315 #endif
3317 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3318 || TREE_CODE (type) == QUAL_UNION_TYPE)
3320 register tree elt;
3322 /* Inform later passes that the whole union value is dead. */
3323 if (TREE_CODE (type) == UNION_TYPE
3324 || TREE_CODE (type) == QUAL_UNION_TYPE)
3325 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3327 /* If we are building a static constructor into a register,
3328 set the initial value as zero so we can fold the value into
3329 a constant. But if more than one register is involved,
3330 this probably loses. */
3331 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3332 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3334 if (! cleared)
3335 emit_move_insn (target, const0_rtx);
3337 cleared = 1;
3340 /* If the constructor has fewer fields than the structure
3341 or if we are initializing the structure to mostly zeros,
3342 clear the whole structure first. */
3343 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3344 != list_length (TYPE_FIELDS (type)))
3345 || mostly_zeros_p (exp))
3347 if (! cleared)
3348 clear_storage (target, expr_size (exp),
3349 TYPE_ALIGN (type) / BITS_PER_UNIT);
3351 cleared = 1;
3353 else
3354 /* Inform later passes that the old value is dead. */
3355 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3357 /* Store each element of the constructor into
3358 the corresponding field of TARGET. */
3360 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3362 register tree field = TREE_PURPOSE (elt);
3363 register enum machine_mode mode;
3364 int bitsize;
3365 int bitpos = 0;
3366 int unsignedp;
3367 tree pos, constant = 0, offset = 0;
3368 rtx to_rtx = target;
3370 /* Just ignore missing fields.
3371 We cleared the whole structure, above,
3372 if any fields are missing. */
3373 if (field == 0)
3374 continue;
3376 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3377 continue;
3379 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3380 unsignedp = TREE_UNSIGNED (field);
3381 mode = DECL_MODE (field);
3382 if (DECL_BIT_FIELD (field))
3383 mode = VOIDmode;
3385 pos = DECL_FIELD_BITPOS (field);
3386 if (TREE_CODE (pos) == INTEGER_CST)
3387 constant = pos;
3388 else if (TREE_CODE (pos) == PLUS_EXPR
3389 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3390 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3391 else
3392 offset = pos;
3394 if (constant)
3395 bitpos = TREE_INT_CST_LOW (constant);
3397 if (offset)
3399 rtx offset_rtx;
3401 if (contains_placeholder_p (offset))
3402 offset = build (WITH_RECORD_EXPR, sizetype,
3403 offset, exp);
3405 offset = size_binop (FLOOR_DIV_EXPR, offset,
3406 size_int (BITS_PER_UNIT));
3408 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3409 if (GET_CODE (to_rtx) != MEM)
3410 abort ();
3412 to_rtx
3413 = change_address (to_rtx, VOIDmode,
3414 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
3415 force_reg (ptr_mode, offset_rtx)));
3417 if (TREE_READONLY (field))
3419 if (GET_CODE (to_rtx) == MEM)
3420 to_rtx = change_address (to_rtx, GET_MODE (to_rtx),
3421 XEXP (to_rtx, 0));
3422 RTX_UNCHANGING_P (to_rtx) = 1;
3425 store_constructor_field (to_rtx, bitsize, bitpos,
3426 mode, TREE_VALUE (elt), type, cleared);
3429 else if (TREE_CODE (type) == ARRAY_TYPE)
3431 register tree elt;
3432 register int i;
3433 int need_to_clear;
3434 tree domain = TYPE_DOMAIN (type);
3435 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3436 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3437 tree elttype = TREE_TYPE (type);
3439 /* If the constructor has fewer elements than the array,
3440 clear the whole array first. Similarly if this is a
3441 static constructor of a non-BLKmode object. */
3442 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3443 need_to_clear = 1;
3444 else
3446 HOST_WIDE_INT count = 0, zero_count = 0;
3447 need_to_clear = 0;
3448 /* This loop is a more accurate version of the loop in
3449 mostly_zeros_p (it handles RANGE_EXPR in an index).
3450 It is also needed to check for missing elements. */
3451 for (elt = CONSTRUCTOR_ELTS (exp);
3452 elt != NULL_TREE;
3453 elt = TREE_CHAIN (elt), i++)
3455 tree index = TREE_PURPOSE (elt);
3456 HOST_WIDE_INT this_node_count;
3457 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3459 tree lo_index = TREE_OPERAND (index, 0);
3460 tree hi_index = TREE_OPERAND (index, 1);
3461 if (TREE_CODE (lo_index) != INTEGER_CST
3462 || TREE_CODE (hi_index) != INTEGER_CST)
3464 need_to_clear = 1;
3465 break;
3467 this_node_count = TREE_INT_CST_LOW (hi_index)
3468 - TREE_INT_CST_LOW (lo_index) + 1;
3470 else
3471 this_node_count = 1;
3472 count += this_node_count;
3473 if (mostly_zeros_p (TREE_VALUE (elt)))
3474 zero_count += this_node_count;
3476 if (4 * zero_count >= 3 * count)
3477 need_to_clear = 1;
3479 if (need_to_clear)
3481 if (! cleared)
3482 clear_storage (target, expr_size (exp),
3483 TYPE_ALIGN (type) / BITS_PER_UNIT);
3484 cleared = 1;
3486 else
3487 /* Inform later passes that the old value is dead. */
3488 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3490 /* Store each element of the constructor into
3491 the corresponding element of TARGET, determined
3492 by counting the elements. */
3493 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3494 elt;
3495 elt = TREE_CHAIN (elt), i++)
3497 register enum machine_mode mode;
3498 int bitsize;
3499 int bitpos;
3500 int unsignedp;
3501 tree value = TREE_VALUE (elt);
3502 tree index = TREE_PURPOSE (elt);
3503 rtx xtarget = target;
3505 if (cleared && is_zeros_p (value))
3506 continue;
3508 mode = TYPE_MODE (elttype);
3509 bitsize = GET_MODE_BITSIZE (mode);
3510 unsignedp = TREE_UNSIGNED (elttype);
3512 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3514 tree lo_index = TREE_OPERAND (index, 0);
3515 tree hi_index = TREE_OPERAND (index, 1);
3516 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3517 struct nesting *loop;
3518 tree position;
3520 if (TREE_CODE (lo_index) == INTEGER_CST
3521 && TREE_CODE (hi_index) == INTEGER_CST)
3523 HOST_WIDE_INT lo = TREE_INT_CST_LOW (lo_index);
3524 HOST_WIDE_INT hi = TREE_INT_CST_LOW (hi_index);
3525 HOST_WIDE_INT count = hi - lo + 1;
3527 /* If the range is constant and "small", unroll the loop.
3528 We must also use store_field if the target is not MEM. */
3529 if (GET_CODE (target) != MEM
3530 || count <= 2
3531 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3532 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3533 <= 40 * 8))
3535 lo -= minelt; hi -= minelt;
3536 for (; lo <= hi; lo++)
3538 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3539 store_constructor_field (target, bitsize, bitpos,
3540 mode, value, type, cleared);
3544 else
3546 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3547 loop_top = gen_label_rtx ();
3548 loop_end = gen_label_rtx ();
3550 unsignedp = TREE_UNSIGNED (domain);
3552 index = build_decl (VAR_DECL, NULL_TREE, domain);
3554 DECL_RTL (index) = index_r
3555 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3556 &unsignedp, 0));
3558 if (TREE_CODE (value) == SAVE_EXPR
3559 && SAVE_EXPR_RTL (value) == 0)
3561 /* Make sure value gets expanded once before the loop. */
3562 expand_expr (value, const0_rtx, VOIDmode, 0);
3563 emit_queue ();
3565 store_expr (lo_index, index_r, 0);
3566 loop = expand_start_loop (0);
3568 /* Assign value to element index. */
3569 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3570 size_int (BITS_PER_UNIT));
3571 position = size_binop (MULT_EXPR,
3572 size_binop (MINUS_EXPR, index,
3573 TYPE_MIN_VALUE (domain)),
3574 position);
3575 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3576 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3577 xtarget = change_address (target, mode, addr);
3578 if (TREE_CODE (value) == CONSTRUCTOR)
3579 store_constructor (value, xtarget, cleared);
3580 else
3581 store_expr (value, xtarget, 0);
3583 expand_exit_loop_if_false (loop,
3584 build (LT_EXPR, integer_type_node,
3585 index, hi_index));
3587 expand_increment (build (PREINCREMENT_EXPR,
3588 TREE_TYPE (index),
3589 index, integer_one_node), 0);
3590 expand_end_loop ();
3591 emit_label (loop_end);
3593 /* Needed by stupid register allocation, to extend the
3594 lifetime of pseudo-regs used by target past the end
3595 of the loop. */
3596 emit_insn (gen_rtx (USE, GET_MODE (target), target));
3599 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3600 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3602 rtx pos_rtx, addr;
3603 tree position;
3605 if (index == 0)
3606 index = size_int (i);
3608 if (minelt)
3609 index = size_binop (MINUS_EXPR, index,
3610 TYPE_MIN_VALUE (domain));
3611 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3612 size_int (BITS_PER_UNIT));
3613 position = size_binop (MULT_EXPR, index, position);
3614 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3615 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3616 xtarget = change_address (target, mode, addr);
3617 store_expr (value, xtarget, 0);
3619 else
3621 if (index != 0)
3622 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3623 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3624 else
3625 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3626 store_constructor_field (target, bitsize, bitpos,
3627 mode, value, type, cleared);
3631 /* set constructor assignments */
3632 else if (TREE_CODE (type) == SET_TYPE)
3634 tree elt = CONSTRUCTOR_ELTS (exp);
3635 rtx xtarget = XEXP (target, 0);
3636 int set_word_size = TYPE_ALIGN (type);
3637 int nbytes = int_size_in_bytes (type), nbits;
3638 tree domain = TYPE_DOMAIN (type);
3639 tree domain_min, domain_max, bitlength;
3641 /* The default implementation strategy is to extract the constant
3642 parts of the constructor, use that to initialize the target,
3643 and then "or" in whatever non-constant ranges we need in addition.
3645 If a large set is all zero or all ones, it is
3646 probably better to set it using memset (if available) or bzero.
3647 Also, if a large set has just a single range, it may also be
3648 better to first clear the whole set (using
3649 bzero/memset), and then set the bits we want. */
3651 /* Check for all zeros. */
3652 if (elt == NULL_TREE)
3654 if (!cleared)
3655 clear_storage (target, expr_size (exp),
3656 TYPE_ALIGN (type) / BITS_PER_UNIT);
3657 return;
3660 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3661 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3662 bitlength = size_binop (PLUS_EXPR,
3663 size_binop (MINUS_EXPR, domain_max, domain_min),
3664 size_one_node);
3666 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
3667 abort ();
3668 nbits = TREE_INT_CST_LOW (bitlength);
3670 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3671 are "complicated" (more than one range), initialize (the
3672 constant parts) by copying from a constant. */
3673 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
3674 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
3676 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3677 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
3678 char *bit_buffer = (char*) alloca (nbits);
3679 HOST_WIDE_INT word = 0;
3680 int bit_pos = 0;
3681 int ibit = 0;
3682 int offset = 0; /* In bytes from beginning of set. */
3683 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
3684 for (;;)
3686 if (bit_buffer[ibit])
3688 if (BYTES_BIG_ENDIAN)
3689 word |= (1 << (set_word_size - 1 - bit_pos));
3690 else
3691 word |= 1 << bit_pos;
3693 bit_pos++; ibit++;
3694 if (bit_pos >= set_word_size || ibit == nbits)
3696 if (word != 0 || ! cleared)
3698 rtx datum = GEN_INT (word);
3699 rtx to_rtx;
3700 /* The assumption here is that it is safe to use XEXP if
3701 the set is multi-word, but not if it's single-word. */
3702 if (GET_CODE (target) == MEM)
3704 to_rtx = plus_constant (XEXP (target, 0), offset);
3705 to_rtx = change_address (target, mode, to_rtx);
3707 else if (offset == 0)
3708 to_rtx = target;
3709 else
3710 abort ();
3711 emit_move_insn (to_rtx, datum);
3713 if (ibit == nbits)
3714 break;
3715 word = 0;
3716 bit_pos = 0;
3717 offset += set_word_size / BITS_PER_UNIT;
3721 else if (!cleared)
3723 /* Don't bother clearing storage if the set is all ones. */
3724 if (TREE_CHAIN (elt) != NULL_TREE
3725 || (TREE_PURPOSE (elt) == NULL_TREE
3726 ? nbits != 1
3727 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
3728 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
3729 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
3730 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
3731 != nbits))))
3732 clear_storage (target, expr_size (exp),
3733 TYPE_ALIGN (type) / BITS_PER_UNIT);
3736 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
3738 /* start of range of element or NULL */
3739 tree startbit = TREE_PURPOSE (elt);
3740 /* end of range of element, or element value */
3741 tree endbit = TREE_VALUE (elt);
3742 HOST_WIDE_INT startb, endb;
3743 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
3745 bitlength_rtx = expand_expr (bitlength,
3746 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
3748 /* handle non-range tuple element like [ expr ] */
3749 if (startbit == NULL_TREE)
3751 startbit = save_expr (endbit);
3752 endbit = startbit;
3754 startbit = convert (sizetype, startbit);
3755 endbit = convert (sizetype, endbit);
3756 if (! integer_zerop (domain_min))
3758 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
3759 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
3761 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
3762 EXPAND_CONST_ADDRESS);
3763 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
3764 EXPAND_CONST_ADDRESS);
3766 if (REG_P (target))
3768 targetx = assign_stack_temp (GET_MODE (target),
3769 GET_MODE_SIZE (GET_MODE (target)),
3770 0);
3771 emit_move_insn (targetx, target);
3773 else if (GET_CODE (target) == MEM)
3774 targetx = target;
3775 else
3776 abort ();
3778 #ifdef TARGET_MEM_FUNCTIONS
3779 /* Optimization: If startbit and endbit are
3780 constants divisible by BITS_PER_UNIT,
3781 call memset instead. */
3782 if (TREE_CODE (startbit) == INTEGER_CST
3783 && TREE_CODE (endbit) == INTEGER_CST
3784 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
3785 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
3787 emit_library_call (memset_libfunc, 0,
3788 VOIDmode, 3,
3789 plus_constant (XEXP (targetx, 0),
3790 startb / BITS_PER_UNIT),
3791 Pmode,
3792 constm1_rtx, TYPE_MODE (integer_type_node),
3793 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3794 TYPE_MODE (sizetype));
3796 else
3797 #endif
3799 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
3800 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
3801 bitlength_rtx, TYPE_MODE (sizetype),
3802 startbit_rtx, TYPE_MODE (sizetype),
3803 endbit_rtx, TYPE_MODE (sizetype));
3805 if (REG_P (target))
3806 emit_move_insn (target, targetx);
3810 else
3811 abort ();
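/* Illustrative sketch, not part of the original source: the
   word-filling loop above packs one bit per set member, honoring
   BYTES_BIG_ENDIAN.  For a single 32-bit set word the packing step is
   equivalent to:

	static unsigned long
	pack_word (bits, n, big_endian)
	     char *bits;
	     int n, big_endian;
	{
	  unsigned long word = 0;
	  int i;

	  for (i = 0; i < n; i++)
	    if (bits[i])
	      word |= 1UL << (big_endian ? 31 - i : i);
	  return word;
	}

   so the set {0, 2} packs to 0x5 with a little-endian bit layout and
   to 0xA0000000 with a big-endian one.  */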
3814 /* Store the value of EXP (an expression tree)
3815 into a subfield of TARGET which has mode MODE and occupies
3816 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3817 If MODE is VOIDmode, it means that we are storing into a bit-field.
3819 If VALUE_MODE is VOIDmode, return nothing in particular.
3820 UNSIGNEDP is not used in this case.
3822 Otherwise, return an rtx for the value stored. This rtx
3823 has mode VALUE_MODE if that is convenient to do.
3824 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3826 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3827 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
3829 static rtx
3830 store_field (target, bitsize, bitpos, mode, exp, value_mode,
3831 unsignedp, align, total_size)
3832 rtx target;
3833 int bitsize, bitpos;
3834 enum machine_mode mode;
3835 tree exp;
3836 enum machine_mode value_mode;
3837 int unsignedp;
3838 int align;
3839 int total_size;
3841 HOST_WIDE_INT width_mask = 0;
3843 if (bitsize < HOST_BITS_PER_WIDE_INT)
3844 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
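/* For instance, with bitsize == 3 this sets
   width_mask == ((HOST_WIDE_INT) 1 << 3) - 1 == 7, a mask covering
   just the low BITSIZE bits.  */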
3846 /* If we are storing into an unaligned field of an aligned union that is
3847 in a register, we may have the mode of TARGET being an integer mode but
3848 MODE == BLKmode. In that case, get an aligned object whose size and
3849 alignment are the same as TARGET and store TARGET into it (we can avoid
3850 the store if the field being stored is the entire width of TARGET). Then
3851 call ourselves recursively to store the field into a BLKmode version of
3852 that object. Finally, load from the object into TARGET. This is not
3853 very efficient in general, but should only be slightly more expensive
3854 than the otherwise-required unaligned accesses. Perhaps this can be
3855 cleaned up later. */
3857 if (mode == BLKmode
3858 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
3860 rtx object = assign_stack_temp (GET_MODE (target),
3861 GET_MODE_SIZE (GET_MODE (target)), 0);
3862 rtx blk_object = copy_rtx (object);
3864 MEM_IN_STRUCT_P (object) = 1;
3865 MEM_IN_STRUCT_P (blk_object) = 1;
3866 PUT_MODE (blk_object, BLKmode);
3868 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
3869 emit_move_insn (object, target);
3871 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
3872 align, total_size);
3874 /* Even though we aren't returning target, we need to
3875 give it the updated value. */
3876 emit_move_insn (target, object);
3878 return blk_object;
3881 /* If the structure is in a register or if the component
3882 is a bit field, we cannot use addressing to access it.
3883 Use bit-field techniques or SUBREG to store in it. */
3885 if (mode == VOIDmode
3886 || (mode != BLKmode && ! direct_store[(int) mode])
3887 || GET_CODE (target) == REG
3888 || GET_CODE (target) == SUBREG
3889 /* If the field isn't aligned enough to store as an ordinary memref,
3890 store it as a bit field. */
3891 || (SLOW_UNALIGNED_ACCESS
3892 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
3893 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
3895 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3897 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
3898 MODE. */
3899 if (mode != VOIDmode && mode != BLKmode
3900 && mode != TYPE_MODE (TREE_TYPE (exp)))
3901 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
3903 /* Store the value in the bitfield. */
3904 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
3905 if (value_mode != VOIDmode)
3907 /* The caller wants an rtx for the value. */
3908 /* If possible, avoid refetching from the bitfield itself. */
3909 if (width_mask != 0
3910 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
3912 tree count;
3913 enum machine_mode tmode;
3915 if (unsignedp)
3916 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
3917 tmode = GET_MODE (temp);
3918 if (tmode == VOIDmode)
3919 tmode = value_mode;
3920 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
3921 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
3922 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
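/* A worked instance of the shift pair above, assuming an 8-bit TMODE
   and bitsize == 3: the stored bits 0b101 are shifted left by
   8 - 3 == 5 to 0b10100000, then arithmetically right by 5, which
   yields -3, the value sign-extended from 3 bits.  */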
3924 return extract_bit_field (target, bitsize, bitpos, unsignedp,
3925 NULL_RTX, value_mode, 0, align,
3926 total_size);
3928 return const0_rtx;
3930 else
3932 rtx addr = XEXP (target, 0);
3933 rtx to_rtx;
3935 /* If a value is wanted, it must be the lhs;
3936 so make the address stable for multiple use. */
3938 if (value_mode != VOIDmode && GET_CODE (addr) != REG
3939 && ! CONSTANT_ADDRESS_P (addr)
3940 /* A frame-pointer reference is already stable. */
3941 && ! (GET_CODE (addr) == PLUS
3942 && GET_CODE (XEXP (addr, 1)) == CONST_INT
3943 && (XEXP (addr, 0) == virtual_incoming_args_rtx
3944 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
3945 addr = copy_to_reg (addr);
3947 /* Now build a reference to just the desired component. */
3949 to_rtx = change_address (target, mode,
3950 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
3951 MEM_IN_STRUCT_P (to_rtx) = 1;
3953 return store_expr (exp, to_rtx, value_mode != VOIDmode);
3957 /* Return true if any object containing the innermost array is an unaligned
3958 packed structure field. */
3960 static int
3961 get_inner_unaligned_p (exp)
3962 tree exp;
3964 int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));
3966 while (1)
3968 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3970 if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
3971 < needed_alignment)
3972 return 1;
3974 else if (TREE_CODE (exp) != ARRAY_REF
3975 && TREE_CODE (exp) != NON_LVALUE_EXPR
3976 && ! ((TREE_CODE (exp) == NOP_EXPR
3977 || TREE_CODE (exp) == CONVERT_EXPR)
3978 && (TYPE_MODE (TREE_TYPE (exp))
3979 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3980 break;
3982 exp = TREE_OPERAND (exp, 0);
3985 return 0;
3988 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
3989 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
3990 ARRAY_REFs and find the ultimate containing object, which we return.
3992 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
3993 bit position, and *PUNSIGNEDP to the signedness of the field.
3994 If the position of the field is variable, we store a tree
3995 giving the variable offset (in units) in *POFFSET.
3996 This offset is in addition to the bit position.
3997 If the position is not variable, we store 0 in *POFFSET.
3999 If any of the extraction expressions is volatile,
4000 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4002 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4003 is a mode that can be used to access the field. In that case, *PBITSIZE
4004 is redundant.
4006 If the field describes a variable-sized object, *PMODE is set to
4007 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4008 this case, but the address of the object can be found. */
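/* An illustrative call, assuming EXP is the COMPONENT_REF for `s.f'
   where F is a 5-bit bit-field starting at bit 10 of S:

     int bitsize, bitpos, unsignedp;
     int volatilep = 0;
     tree offset;
     enum machine_mode mode1;
     tree inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                       &mode1, &unsignedp, &volatilep);

   leaves bitsize == 5, bitpos == 10, offset == 0 and mode1 == VOIDmode
   (it is a bit-field), and returns the tree for S.  */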
4010 tree
4011 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4012 punsignedp, pvolatilep)
4013 tree exp;
4014 int *pbitsize;
4015 int *pbitpos;
4016 tree *poffset;
4017 enum machine_mode *pmode;
4018 int *punsignedp;
4019 int *pvolatilep;
4021 tree orig_exp = exp;
4022 tree size_tree = 0;
4023 enum machine_mode mode = VOIDmode;
4024 tree offset = integer_zero_node;
4026 if (TREE_CODE (exp) == COMPONENT_REF)
4028 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4029 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4030 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4031 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4033 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4035 size_tree = TREE_OPERAND (exp, 1);
4036 *punsignedp = TREE_UNSIGNED (exp);
4038 else
4040 mode = TYPE_MODE (TREE_TYPE (exp));
4041 *pbitsize = GET_MODE_BITSIZE (mode);
4042 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4045 if (size_tree)
4047 if (TREE_CODE (size_tree) != INTEGER_CST)
4048 mode = BLKmode, *pbitsize = -1;
4049 else
4050 *pbitsize = TREE_INT_CST_LOW (size_tree);
4053 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4054 and find the ultimate containing object. */
4056 *pbitpos = 0;
4058 while (1)
4060 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4062 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4063 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4064 : TREE_OPERAND (exp, 2));
4065 tree constant = integer_zero_node, var = pos;
4067 /* If this field hasn't been filled in yet, don't go
4068 past it. This should only happen when folding expressions
4069 made during type construction. */
4070 if (pos == 0)
4071 break;
4073 /* Assume here that the offset is a multiple of a unit.
4074 If not, there should be an explicitly added constant. */
4075 if (TREE_CODE (pos) == PLUS_EXPR
4076 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4077 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4078 else if (TREE_CODE (pos) == INTEGER_CST)
4079 constant = pos, var = integer_zero_node;
4081 *pbitpos += TREE_INT_CST_LOW (constant);
4083 if (var)
4084 offset = size_binop (PLUS_EXPR, offset,
4085 size_binop (EXACT_DIV_EXPR, var,
4086 size_int (BITS_PER_UNIT)));
4089 else if (TREE_CODE (exp) == ARRAY_REF)
4091 /* This code is based on the code in case ARRAY_REF in expand_expr
4092 below. We assume here that the size of an array element is
4093 always an integral multiple of BITS_PER_UNIT. */
4095 tree index = TREE_OPERAND (exp, 1);
4096 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4097 tree low_bound
4098 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4099 tree index_type = TREE_TYPE (index);
4101 if (! integer_zerop (low_bound))
4102 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4104 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4106 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4107 index);
4108 index_type = TREE_TYPE (index);
4111 index = fold (build (MULT_EXPR, index_type, index,
4112 TYPE_SIZE (TREE_TYPE (exp))));
4114 if (TREE_CODE (index) == INTEGER_CST
4115 && TREE_INT_CST_HIGH (index) == 0)
4116 *pbitpos += TREE_INT_CST_LOW (index);
4117 else
4118 offset = size_binop (PLUS_EXPR, offset,
4119 size_binop (FLOOR_DIV_EXPR, index,
4120 size_int (BITS_PER_UNIT)));
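/* For example, for `a[3]' with 32-bit int elements, INDEX folds to
   3 * 32 == 96 above, so the constant branch adds 96 to *PBITPOS;
   a variable index instead contributes INDEX / BITS_PER_UNIT bytes
   to OFFSET.  */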
4122 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4123 && ! ((TREE_CODE (exp) == NOP_EXPR
4124 || TREE_CODE (exp) == CONVERT_EXPR)
4125 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4126 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4127 != UNION_TYPE))
4128 && (TYPE_MODE (TREE_TYPE (exp))
4129 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4130 break;
4132 /* If any reference in the chain is volatile, the effect is volatile. */
4133 if (TREE_THIS_VOLATILE (exp))
4134 *pvolatilep = 1;
4135 exp = TREE_OPERAND (exp, 0);
4138 /* If this was a bit-field, see if there is a mode that allows direct
4139 access in case EXP is in memory. */
4140 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
4142 mode = mode_for_size (*pbitsize, MODE_INT, 0);
4143 if (mode == BLKmode)
4144 mode = VOIDmode;
4147 if (integer_zerop (offset))
4148 offset = 0;
4150 if (offset != 0 && contains_placeholder_p (offset))
4151 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4153 *pmode = mode;
4154 *poffset = offset;
4155 return exp;
4158 /* Given an rtx VALUE that may contain additions and multiplications,
4159 return an equivalent value that just refers to a register or memory.
4160 This is done by generating instructions to perform the arithmetic
4161 and returning a pseudo-register containing the value.
4163 The returned value may be a REG, SUBREG, MEM or constant. */
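/* Illustrative behavior, assuming VALUE is
   (plus:SI (mult:SI (reg:SI 70) (const_int 4)) (const_int 8)):
   force_operand emits a multiply and an add and returns a register
   holding the value of reg 70 * 4 + 8.  */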
4165 rtx
4166 force_operand (value, target)
4167 rtx value, target;
4169 register optab binoptab = 0;
4170 /* Use a temporary to force order of execution of calls to
4171 `force_operand'. */
4172 rtx tmp;
4173 register rtx op2;
4174 /* Use subtarget as the target for operand 0 of a binary operation. */
4175 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4177 if (GET_CODE (value) == PLUS)
4178 binoptab = add_optab;
4179 else if (GET_CODE (value) == MINUS)
4180 binoptab = sub_optab;
4181 else if (GET_CODE (value) == MULT)
4183 op2 = XEXP (value, 1);
4184 if (!CONSTANT_P (op2)
4185 && !(GET_CODE (op2) == REG && op2 != subtarget))
4186 subtarget = 0;
4187 tmp = force_operand (XEXP (value, 0), subtarget);
4188 return expand_mult (GET_MODE (value), tmp,
4189 force_operand (op2, NULL_RTX),
4190 target, 0);
4193 if (binoptab)
4195 op2 = XEXP (value, 1);
4196 if (!CONSTANT_P (op2)
4197 && !(GET_CODE (op2) == REG && op2 != subtarget))
4198 subtarget = 0;
4199 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4201 binoptab = add_optab;
4202 op2 = negate_rtx (GET_MODE (value), op2);
4205 /* Check for an addition with OP2 a constant integer and our first
4206 operand a PLUS of a virtual register and something else. In that
4207 case, we want to emit the sum of the virtual register and the
4208 constant first and then add the other value. This allows virtual
4209 register instantiation to simply modify the constant rather than
4210 creating another one around this addition. */
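/* Sketch of that case, assuming VALUE is
   (plus (plus (reg virtual-stack-vars) (reg 70)) (const_int 4)):
   we first form virtual-stack-vars + 4, which instantiation can later
   fold into a single frame offset, and only then add reg 70.  */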
4211 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4212 && GET_CODE (XEXP (value, 0)) == PLUS
4213 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4214 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4215 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4217 rtx temp = expand_binop (GET_MODE (value), binoptab,
4218 XEXP (XEXP (value, 0), 0), op2,
4219 subtarget, 0, OPTAB_LIB_WIDEN);
4220 return expand_binop (GET_MODE (value), binoptab, temp,
4221 force_operand (XEXP (XEXP (value, 0), 1), 0),
4222 target, 0, OPTAB_LIB_WIDEN);
4225 tmp = force_operand (XEXP (value, 0), subtarget);
4226 return expand_binop (GET_MODE (value), binoptab, tmp,
4227 force_operand (op2, NULL_RTX),
4228 target, 0, OPTAB_LIB_WIDEN);
4229 /* We give UNSIGNEDP = 0 to expand_binop
4230 because the only operations we are expanding here are signed ones. */
4232 return value;
4235 /* Subroutine of expand_expr:
4236 save the non-copied parts (LIST) of an expr (LHS), and return a list
4237 which can restore these values to their previous values,
4238 should something modify their storage. */
4240 static tree
4241 save_noncopied_parts (lhs, list)
4242 tree lhs;
4243 tree list;
4245 tree tail;
4246 tree parts = 0;
4248 for (tail = list; tail; tail = TREE_CHAIN (tail))
4249 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4250 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4251 else
4253 tree part = TREE_VALUE (tail);
4254 tree part_type = TREE_TYPE (part);
4255 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
4256 rtx target = assign_temp (part_type, 0, 1, 1);
4257 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
4258 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
4259 parts = tree_cons (to_be_saved,
4260 build (RTL_EXPR, part_type, NULL_TREE,
4261 (tree) target),
4262 parts);
4263 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4265 return parts;
4268 /* Subroutine of expand_expr:
4269 record the non-copied parts (LIST) of an expr (LHS), and return a list
4270 which specifies the initial values of these parts. */
4272 static tree
4273 init_noncopied_parts (lhs, list)
4274 tree lhs;
4275 tree list;
4277 tree tail;
4278 tree parts = 0;
4280 for (tail = list; tail; tail = TREE_CHAIN (tail))
4281 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4282 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4283 else
4285 tree part = TREE_VALUE (tail);
4286 tree part_type = TREE_TYPE (part);
4287 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
4288 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4290 return parts;
4293 /* Subroutine of expand_expr: return nonzero iff there is no way that
4294 EXP can reference X, which is being modified. */
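/* For example, if X is the pseudo register holding the variable A,
   then safe_from_p (x, <tree for B + C>) returns 1 whenever neither
   B nor C involves A, so B + C may be computed directly into X.  */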
4296 static int
4297 safe_from_p (x, exp)
4298 rtx x;
4299 tree exp;
4301 rtx exp_rtl = 0;
4302 int i, nops;
4304 if (x == 0
4305 /* If EXP has varying size, we MUST use a target since we currently
4306 have no way of allocating temporaries of variable size. So we
4307 assume here that something at a higher level has prevented a
4308 clash. This is somewhat bogus, but the best we can do. Only
4309 do this when X is BLKmode. */
4310 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4311 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
4312 && GET_MODE (x) == BLKmode))
4313 return 1;
4315 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4316 find the underlying pseudo. */
4317 if (GET_CODE (x) == SUBREG)
4319 x = SUBREG_REG (x);
4320 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4321 return 0;
4324 /* If X is a location in the outgoing argument area, it is always safe. */
4325 if (GET_CODE (x) == MEM
4326 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4327 || (GET_CODE (XEXP (x, 0)) == PLUS
4328 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
4329 return 1;
4331 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4333 case 'd':
4334 exp_rtl = DECL_RTL (exp);
4335 break;
4337 case 'c':
4338 return 1;
4340 case 'x':
4341 if (TREE_CODE (exp) == TREE_LIST)
4342 return ((TREE_VALUE (exp) == 0
4343 || safe_from_p (x, TREE_VALUE (exp)))
4344 && (TREE_CHAIN (exp) == 0
4345 || safe_from_p (x, TREE_CHAIN (exp))));
4346 else
4347 return 0;
4349 case '1':
4350 return safe_from_p (x, TREE_OPERAND (exp, 0));
4352 case '2':
4353 case '<':
4354 return (safe_from_p (x, TREE_OPERAND (exp, 0))
4355 && safe_from_p (x, TREE_OPERAND (exp, 1)));
4357 case 'e':
4358 case 'r':
4359 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4360 the expression. If it is set, we conflict iff we are that rtx or
4361 both are in memory. Otherwise, we check all operands of the
4362 expression recursively. */
4364 switch (TREE_CODE (exp))
4366 case ADDR_EXPR:
4367 return (staticp (TREE_OPERAND (exp, 0))
4368 || safe_from_p (x, TREE_OPERAND (exp, 0)));
4370 case INDIRECT_REF:
4371 if (GET_CODE (x) == MEM)
4372 return 0;
4373 break;
4375 case CALL_EXPR:
4376 exp_rtl = CALL_EXPR_RTL (exp);
4377 if (exp_rtl == 0)
4379 /* Assume that the call will clobber all hard registers and
4380 all of memory. */
4381 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4382 || GET_CODE (x) == MEM)
4383 return 0;
4386 break;
4388 case RTL_EXPR:
4389 /* If a sequence exists, we would have to scan every instruction
4390 in the sequence to see if it was safe. This is probably not
4391 worthwhile. */
4392 if (RTL_EXPR_SEQUENCE (exp))
4393 return 0;
4395 exp_rtl = RTL_EXPR_RTL (exp);
4396 break;
4398 case WITH_CLEANUP_EXPR:
4399 exp_rtl = RTL_EXPR_RTL (exp);
4400 break;
4402 case CLEANUP_POINT_EXPR:
4403 return safe_from_p (x, TREE_OPERAND (exp, 0));
4405 case SAVE_EXPR:
4406 exp_rtl = SAVE_EXPR_RTL (exp);
4407 break;
4409 case BIND_EXPR:
4410 /* The only operand we look at is operand 1. The rest aren't
4411 part of the expression. */
4412 return safe_from_p (x, TREE_OPERAND (exp, 1));
4414 case METHOD_CALL_EXPR:
4415 /* This takes a rtx argument, but shouldn't appear here. */
4416 abort ();
4419 /* If we have an rtx, we do not need to scan our operands. */
4420 if (exp_rtl)
4421 break;
4423 nops = tree_code_length[(int) TREE_CODE (exp)];
4424 for (i = 0; i < nops; i++)
4425 if (TREE_OPERAND (exp, i) != 0
4426 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
4427 return 0;
4430 /* If we have an rtl, find any enclosed object. Then see if we conflict
4431 with it. */
4432 if (exp_rtl)
4434 if (GET_CODE (exp_rtl) == SUBREG)
4436 exp_rtl = SUBREG_REG (exp_rtl);
4437 if (GET_CODE (exp_rtl) == REG
4438 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
4439 return 0;
4442 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4443 are memory and EXP is not readonly. */
4444 return ! (rtx_equal_p (x, exp_rtl)
4445 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
4446 && ! TREE_READONLY (exp)));
4449 /* If we reach here, it is safe. */
4450 return 1;
4453 /* Subroutine of expand_expr: return nonzero iff EXP is an
4454 expression whose type is statically determinable. */
4456 static int
4457 fixed_type_p (exp)
4458 tree exp;
4460 if (TREE_CODE (exp) == PARM_DECL
4461 || TREE_CODE (exp) == VAR_DECL
4462 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4463 || TREE_CODE (exp) == COMPONENT_REF
4464 || TREE_CODE (exp) == ARRAY_REF)
4465 return 1;
4466 return 0;
4469 /* expand_expr: generate code for computing expression EXP.
4470 An rtx for the computed value is returned. The value is never null.
4471 In the case of a void EXP, const0_rtx is returned.
4473 The value may be stored in TARGET if TARGET is nonzero.
4474 TARGET is just a suggestion; callers must assume that
4475 the rtx returned may not be the same as TARGET.
4477 If TARGET is CONST0_RTX, it means that the value will be ignored.
4479 If TMODE is not VOIDmode, it suggests generating the
4480 result in mode TMODE. But this is done only when convenient.
4481 Otherwise, TMODE is ignored and the value is generated in its natural mode.
4482 TMODE is just a suggestion; callers must assume that
4483 the rtx returned may not have mode TMODE.
4485 Note that TARGET may have neither TMODE nor MODE. In that case, it
4486 probably will not be used.
4488 If MODIFIER is EXPAND_SUM then when EXP is an addition
4489 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4490 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4491 products as above, or REG or MEM, or constant.
4492 Ordinarily in such cases we would output mul or add instructions
4493 and then return a pseudo reg containing the sum.
4495 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4496 it also marks a label as absolutely required (it can't be dead).
4497 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4498 This is used for outputting expressions used in initializers.
4500 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4501 with a constant address even if that address is not normally legitimate.
4502 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
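/* An illustrative call, assuming EXP is the tree for `p + 4' with P
   already live in a pseudo register:

     rtx sum = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_SUM);

   may return (plus:SI (reg:SI 70) (const_int 4)) without emitting an
   add insn, leaving the caller free to fold the sum into an address.  */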
4504 rtx
4505 expand_expr (exp, target, tmode, modifier)
4506 register tree exp;
4507 rtx target;
4508 enum machine_mode tmode;
4509 enum expand_modifier modifier;
4511 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4512 This is static so it will be accessible to our recursive callees. */
4513 static tree placeholder_list = 0;
4514 register rtx op0, op1, temp;
4515 tree type = TREE_TYPE (exp);
4516 int unsignedp = TREE_UNSIGNED (type);
4517 register enum machine_mode mode = TYPE_MODE (type);
4518 register enum tree_code code = TREE_CODE (exp);
4519 optab this_optab;
4520 /* Use subtarget as the target for operand 0 of a binary operation. */
4521 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4522 rtx original_target = target;
4523 /* Maybe defer this until we are sure we aren't doing bytecode?  */
4524 int ignore = (target == const0_rtx
4525 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4526 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4527 || code == COND_EXPR)
4528 && TREE_CODE (type) == VOID_TYPE));
4529 tree context;
4532 if (output_bytecode && modifier != EXPAND_INITIALIZER)
4534 bc_expand_expr (exp);
4535 return NULL;
4538 /* Don't use hard regs as subtargets, because the combiner
4539 can only handle pseudo regs. */
4540 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4541 subtarget = 0;
4542 /* Avoid subtargets inside loops,
4543 since they hide some invariant expressions. */
4544 if (preserve_subexpressions_p ())
4545 subtarget = 0;
4547 /* If we are going to ignore this result, we need only do something
4548 if there is a side-effect somewhere in the expression. If there
4549 is, short-circuit the most common cases here. Note that we must
4550 not call expand_expr with anything but const0_rtx in case this
4551 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
4553 if (ignore)
4555 if (! TREE_SIDE_EFFECTS (exp))
4556 return const0_rtx;
4558 /* Ensure we reference a volatile object even if value is ignored. */
4559 if (TREE_THIS_VOLATILE (exp)
4560 && TREE_CODE (exp) != FUNCTION_DECL
4561 && mode != VOIDmode && mode != BLKmode)
4563 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
4564 if (GET_CODE (temp) == MEM)
4565 temp = copy_to_reg (temp);
4566 return const0_rtx;
4569 if (TREE_CODE_CLASS (code) == '1')
4570 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4571 VOIDmode, modifier);
4572 else if (TREE_CODE_CLASS (code) == '2'
4573 || TREE_CODE_CLASS (code) == '<')
4575 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4576 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
4577 return const0_rtx;
4579 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4580 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4581 /* If the second operand has no side effects, just evaluate
4582 the first. */
4583 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4584 VOIDmode, modifier);
4586 target = 0;
4589 /* If we will do cse, generate all results into pseudo registers
4590 since 1) that allows cse to find more things
4591 and 2) otherwise cse could produce an insn the machine
4592 cannot support. */
4594 if (! cse_not_expected && mode != BLKmode && target
4595 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4596 target = subtarget;
4598 switch (code)
4600 case LABEL_DECL:
4602 tree function = decl_function_context (exp);
4603 /* Handle using a label in a containing function. */
4604 if (function != current_function_decl && function != 0)
4606 struct function *p = find_function_data (function);
4607 /* Allocate in the memory associated with the function
4608 that the label is in. */
4609 push_obstacks (p->function_obstack,
4610 p->function_maybepermanent_obstack);
4612 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4613 label_rtx (exp), p->forced_labels);
4614 pop_obstacks ();
4616 else if (modifier == EXPAND_INITIALIZER)
4617 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4618 label_rtx (exp), forced_labels);
4619 temp = gen_rtx (MEM, FUNCTION_MODE,
4620 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
4621 if (function != current_function_decl && function != 0)
4622 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4623 return temp;
4626 case PARM_DECL:
4627 if (DECL_RTL (exp) == 0)
4629 error_with_decl (exp, "prior parameter's size depends on `%s'");
4630 return CONST0_RTX (mode);
4633 /* ... fall through ... */
4635 case VAR_DECL:
4636 /* If a static var's type was incomplete when the decl was written,
4637 but the type is complete now, lay out the decl now. */
4638 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4639 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4641 push_obstacks_nochange ();
4642 end_temporary_allocation ();
4643 layout_decl (exp, 0);
4644 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
4645 pop_obstacks ();
4648 /* ... fall through ... */
4650 case FUNCTION_DECL:
4651 case RESULT_DECL:
4652 if (DECL_RTL (exp) == 0)
4653 abort ();
4655 /* Ensure the variable is marked as used even if it doesn't go
4656 through a parser.  If it hasn't been used yet, write out an
4657 external definition.  */
4658 if (! TREE_USED (exp))
4660 assemble_external (exp);
4661 TREE_USED (exp) = 1;
4664 /* Show we haven't gotten RTL for this yet. */
4665 temp = 0;
4667 /* Handle variables inherited from containing functions. */
4668 context = decl_function_context (exp);
4670 /* We treat inline_function_decl as an alias for the current function
4671 because that is the inline function whose vars, types, etc.
4672 are being merged into the current function.
4673 See expand_inline_function. */
4675 if (context != 0 && context != current_function_decl
4676 && context != inline_function_decl
4677 /* If var is static, we don't need a static chain to access it. */
4678 && ! (GET_CODE (DECL_RTL (exp)) == MEM
4679 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
4681 rtx addr;
4683 /* Mark as non-local and addressable. */
4684 DECL_NONLOCAL (exp) = 1;
4685 mark_addressable (exp);
4686 if (GET_CODE (DECL_RTL (exp)) != MEM)
4687 abort ();
4688 addr = XEXP (DECL_RTL (exp), 0);
4689 if (GET_CODE (addr) == MEM)
4690 addr = gen_rtx (MEM, Pmode,
4691 fix_lexical_addr (XEXP (addr, 0), exp));
4692 else
4693 addr = fix_lexical_addr (addr, exp);
4694 temp = change_address (DECL_RTL (exp), mode, addr);
4697 /* This is the case of an array whose size is to be determined
4698 from its initializer, while the initializer is still being parsed.
4699 See expand_decl. */
4701 else if (GET_CODE (DECL_RTL (exp)) == MEM
4702 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
4703 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
4704 XEXP (DECL_RTL (exp), 0));
4706 /* If DECL_RTL is memory, we are in the normal case: if the address
4707 is not valid, or it is not a register and -fforce-addr is
4708 specified, get the address into a register.  */
4710 else if (GET_CODE (DECL_RTL (exp)) == MEM
4711 && modifier != EXPAND_CONST_ADDRESS
4712 && modifier != EXPAND_SUM
4713 && modifier != EXPAND_INITIALIZER
4714 && (! memory_address_p (DECL_MODE (exp),
4715 XEXP (DECL_RTL (exp), 0))
4716 || (flag_force_addr
4717 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
4718 temp = change_address (DECL_RTL (exp), VOIDmode,
4719 copy_rtx (XEXP (DECL_RTL (exp), 0)));
4721 /* If we got something, return it.  But first, set the alignment
4722 if the address is a register.  */
4723 if (temp != 0)
4725 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
4726 mark_reg_pointer (XEXP (temp, 0),
4727 DECL_ALIGN (exp) / BITS_PER_UNIT);
4729 return temp;
4732 /* If the mode of DECL_RTL does not match that of the decl, it
4733 must be a promoted value. We return a SUBREG of the wanted mode,
4734 but mark it so that we know that it was already extended. */
4736 if (GET_CODE (DECL_RTL (exp)) == REG
4737 && GET_MODE (DECL_RTL (exp)) != mode)
4739 /* Get the signedness used for this variable. Ensure we get the
4740 same mode we got when the variable was declared. */
4741 if (GET_MODE (DECL_RTL (exp))
4742 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
4743 abort ();
4745 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
4746 SUBREG_PROMOTED_VAR_P (temp) = 1;
4747 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4748 return temp;
4751 return DECL_RTL (exp);
4753 case INTEGER_CST:
4754 return immed_double_const (TREE_INT_CST_LOW (exp),
4755 TREE_INT_CST_HIGH (exp),
4756 mode);
4758 case CONST_DECL:
4759 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
4761 case REAL_CST:
4762 /* If optimized, generate immediate CONST_DOUBLE
4763 which will be turned into memory by reload if necessary.
4765 We used to force a register so that loop.c could see it. But
4766 this does not allow gen_* patterns to perform optimizations with
4767 the constants. It also produces two insns in cases like "x = 1.0;".
4768 On most machines, floating-point constants are not permitted in
4769 many insns, so we'd end up copying it to a register in any case.
4771 Now, we do the copying in expand_binop, if appropriate. */
4772 return immed_real_const (exp);
4774 case COMPLEX_CST:
4775 case STRING_CST:
4776 if (! TREE_CST_RTL (exp))
4777 output_constant_def (exp);
4779 /* TREE_CST_RTL probably contains a constant address.
4780 On RISC machines where a constant address isn't valid,
4781 make some insns to get that address into a register. */
4782 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
4783 && modifier != EXPAND_CONST_ADDRESS
4784 && modifier != EXPAND_INITIALIZER
4785 && modifier != EXPAND_SUM
4786 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
4787 || (flag_force_addr
4788 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
4789 return change_address (TREE_CST_RTL (exp), VOIDmode,
4790 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
4791 return TREE_CST_RTL (exp);
4793 case SAVE_EXPR:
4794 context = decl_function_context (exp);
4796 /* We treat inline_function_decl as an alias for the current function
4797 because that is the inline function whose vars, types, etc.
4798 are being merged into the current function.
4799 See expand_inline_function. */
4800 if (context == current_function_decl || context == inline_function_decl)
4801 context = 0;
4803 /* If this is non-local, handle it. */
4804 if (context)
4806 temp = SAVE_EXPR_RTL (exp);
4807 if (temp && GET_CODE (temp) == REG)
4809 put_var_into_stack (exp);
4810 temp = SAVE_EXPR_RTL (exp);
4812 if (temp == 0 || GET_CODE (temp) != MEM)
4813 abort ();
4814 return change_address (temp, mode,
4815 fix_lexical_addr (XEXP (temp, 0), exp));
4817 if (SAVE_EXPR_RTL (exp) == 0)
4819 if (mode == VOIDmode)
4820 temp = const0_rtx;
4821 else
4822 temp = assign_temp (type, 0, 0, 0);
4824 SAVE_EXPR_RTL (exp) = temp;
4825 if (!optimize && GET_CODE (temp) == REG)
4826 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
4827 save_expr_regs);
4829 /* If the mode of TEMP does not match that of the expression, it
4830 must be a promoted value. We pass store_expr a SUBREG of the
4831 wanted mode but mark it so that we know that it was already
4832 extended. Note that `unsignedp' was modified above in
4833 this case. */
4835 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
4837 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
4838 SUBREG_PROMOTED_VAR_P (temp) = 1;
4839 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4842 if (temp == const0_rtx)
4843 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4844 else
4845 store_expr (TREE_OPERAND (exp, 0), temp, 0);
4848 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
4849 must be a promoted value. We return a SUBREG of the wanted mode,
4850 but mark it so that we know that it was already extended. */
4852 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
4853 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
4855 /* Compute the signedness and make the proper SUBREG. */
4856 promote_mode (type, mode, &unsignedp, 0);
4857 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
4858 SUBREG_PROMOTED_VAR_P (temp) = 1;
4859 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4860 return temp;
4863 return SAVE_EXPR_RTL (exp);
4865 case PLACEHOLDER_EXPR:
4866 /* If there is an object on the head of the placeholder list,
4867 see if some object in its references is of type TYPE.  For
4868 further information, see tree.def.  */
4869 if (placeholder_list)
4871 tree object;
4872 tree old_list = placeholder_list;
4874 for (object = TREE_PURPOSE (placeholder_list);
4875 (TYPE_MAIN_VARIANT (TREE_TYPE (object))
4876 != TYPE_MAIN_VARIANT (type))
4877 && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
4878 || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
4879 || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
4880 || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
4881 object = TREE_OPERAND (object, 0))
4884 if (object != 0
4885 && (TYPE_MAIN_VARIANT (TREE_TYPE (object))
4886 == TYPE_MAIN_VARIANT (type)))
4888 /* Expand this object skipping the list entries before
4889 it was found in case it is also a PLACEHOLDER_EXPR.
4890 In that case, we want to translate it using subsequent
4891 entries. */
4892 placeholder_list = TREE_CHAIN (placeholder_list);
4893 temp = expand_expr (object, original_target, tmode, modifier);
4894 placeholder_list = old_list;
4895 return temp;
4899 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
4900 abort ();
4902 case WITH_RECORD_EXPR:
4903 /* Put the object on the placeholder list, expand our first operand,
4904 and pop the list. */
4905 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
4906 placeholder_list);
4907 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
4908 tmode, modifier);
4909 placeholder_list = TREE_CHAIN (placeholder_list);
4910 return target;
4912 case EXIT_EXPR:
4913 expand_exit_loop_if_false (NULL_PTR,
4914 invert_truthvalue (TREE_OPERAND (exp, 0)));
4915 return const0_rtx;
4917 case LOOP_EXPR:
4918 push_temp_slots ();
4919 expand_start_loop (1);
4920 expand_expr_stmt (TREE_OPERAND (exp, 0));
4921 expand_end_loop ();
4922 pop_temp_slots ();
4924 return const0_rtx;
4926 case BIND_EXPR:
4928 tree vars = TREE_OPERAND (exp, 0);
4929 int vars_need_expansion = 0;
4931 /* Need to open a binding contour here because
4932 if there are any cleanups they must be contained here.  */
4933 expand_start_bindings (0);
4935 /* Mark the corresponding BLOCK for output in its proper place. */
4936 if (TREE_OPERAND (exp, 2) != 0
4937 && ! TREE_USED (TREE_OPERAND (exp, 2)))
4938 insert_block (TREE_OPERAND (exp, 2));
4940 /* If VARS have not yet been expanded, expand them now. */
4941 while (vars)
4943 if (DECL_RTL (vars) == 0)
4945 vars_need_expansion = 1;
4946 expand_decl (vars);
4948 expand_decl_init (vars);
4949 vars = TREE_CHAIN (vars);
4952 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4954 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
4956 return temp;
4959 case RTL_EXPR:
4960 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
4961 abort ();
4962 emit_insns (RTL_EXPR_SEQUENCE (exp));
4963 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
4964 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
4965 free_temps_for_rtl_expr (exp);
4966 return RTL_EXPR_RTL (exp);
4968 case CONSTRUCTOR:
4969 /* If we don't need the result, just ensure we evaluate any
4970 subexpressions. */
4971 if (ignore)
4973 tree elt;
4974 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4975 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
4976 return const0_rtx;
4979 /* All elts simple constants => refer to a constant in memory. But
4980 if this is a non-BLKmode mode, let it store a field at a time
4981 since that should make a CONST_INT or CONST_DOUBLE when we
4982 fold. Likewise, if we have a target we can use, it is best to
4983 store directly into the target unless the type is large enough
4984 that memcpy will be used. If we are making an initializer and
4985 all operands are constant, put it in memory as well. */
4986 else if ((TREE_STATIC (exp)
4987 && ((mode == BLKmode
4988 && ! (target != 0 && safe_from_p (target, exp)))
4989 || TREE_ADDRESSABLE (exp)
4990 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
4991 && (move_by_pieces_ninsns
4992 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
4993 TYPE_ALIGN (type) / BITS_PER_UNIT)
4994 > MOVE_RATIO)
4995 && ! mostly_zeros_p (exp))))
4996 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
4998 rtx constructor = output_constant_def (exp);
4999 if (modifier != EXPAND_CONST_ADDRESS
5000 && modifier != EXPAND_INITIALIZER
5001 && modifier != EXPAND_SUM
5002 && (! memory_address_p (GET_MODE (constructor),
5003 XEXP (constructor, 0))
5004 || (flag_force_addr
5005 && GET_CODE (XEXP (constructor, 0)) != REG)))
5006 constructor = change_address (constructor, VOIDmode,
5007 XEXP (constructor, 0));
5008 return constructor;
5011 else
5013 if (target == 0 || ! safe_from_p (target, exp))
5015 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5016 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5017 else
5018 target = assign_temp (type, 0, 1, 1);
5021 if (TREE_READONLY (exp))
5023 if (GET_CODE (target) == MEM)
5024 target = change_address (target, GET_MODE (target),
5025 XEXP (target, 0));
5026 RTX_UNCHANGING_P (target) = 1;
5029 store_constructor (exp, target, 0);
5030 return target;
5033 case INDIRECT_REF:
5035 tree exp1 = TREE_OPERAND (exp, 0);
5036 tree exp2;
5038 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
5039 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
5040 This code has the same general effect as simply doing
5041 expand_expr on the save expr, except that the expression PTR
5042 is computed for use as a memory address. This means different
5043 code, suitable for indexing, may be generated. */
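/* For instance, in `*p += 1' the front end wraps P in a SAVE_EXPR so
   that it is computed only once; expanding it here with EXPAND_SUM
   lets that single computation be used as a memory address for both
   the load and the store.  */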
5044 if (TREE_CODE (exp1) == SAVE_EXPR
5045 && SAVE_EXPR_RTL (exp1) == 0
5046 && TYPE_MODE (TREE_TYPE (exp1)) == ptr_mode)
5048 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
5049 VOIDmode, EXPAND_SUM);
5050 op0 = memory_address (mode, temp);
5051 op0 = copy_all_regs (op0);
5052 SAVE_EXPR_RTL (exp1) = op0;
5054 else
5056 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5057 op0 = memory_address (mode, op0);
5060 temp = gen_rtx (MEM, mode, op0);
5061 /* If address was computed by addition,
5062 mark this as an element of an aggregate. */
5063 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5064 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5065 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
5066 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
5067 || (TREE_CODE (exp1) == ADDR_EXPR
5068 && (exp2 = TREE_OPERAND (exp1, 0))
5069 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
5070 MEM_IN_STRUCT_P (temp) = 1;
5071 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
5073 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5074 here, because, in C and C++, the fact that a location is accessed
5075 through a pointer to const does not mean that the value there can
5076 never change. Languages where it can never change should
5077 also set TREE_STATIC. */
5078 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
5079 return temp;
5082 case ARRAY_REF:
5083 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5084 abort ();
5087 tree array = TREE_OPERAND (exp, 0);
5088 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5089 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5090 tree index = TREE_OPERAND (exp, 1);
5091 tree index_type = TREE_TYPE (index);
5092 int i;
5094 if (TREE_CODE (low_bound) != INTEGER_CST
5095 && contains_placeholder_p (low_bound))
5096 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
5098 /* Optimize the special-case of a zero lower bound.
5100 We convert the low_bound to sizetype to avoid some problems
5101 with constant folding. (E.g. suppose the lower bound is 1,
5102 and its mode is QI. Without the conversion, (ARRAY
5103 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5104 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5106 But sizetype isn't quite right either (especially if
5107 the lowbound is negative). FIXME */
5109 if (! integer_zerop (low_bound))
5110 index = fold (build (MINUS_EXPR, index_type, index,
5111 convert (sizetype, low_bound)));
5113 if ((TREE_CODE (index) != INTEGER_CST
5114 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5115 && (! SLOW_UNALIGNED_ACCESS || ! get_inner_unaligned_p (exp)))
5117 /* Nonconstant array index or nonconstant element size, and
5118 not an array in an unaligned (packed) structure field.
5119 Generate the tree for *(&array+index) and expand that,
5120 except do it in a language-independent way
5121 and don't complain about non-lvalue arrays.
5122 `mark_addressable' should already have been called
5123 for any array for which this case will be reached. */
5125 /* Don't forget the const or volatile flag from the array
5126 element. */
5127 tree variant_type = build_type_variant (type,
5128 TREE_READONLY (exp),
5129 TREE_THIS_VOLATILE (exp));
5130 tree array_adr = build1 (ADDR_EXPR,
5131 build_pointer_type (variant_type), array);
5132 tree elt;
5133 tree size = size_in_bytes (type);
5135 /* Convert the integer argument to a type the same size as sizetype
5136 so the multiply won't overflow spuriously. */
5137 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
5138 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
5139 index);
5141 if (TREE_CODE (size) != INTEGER_CST
5142 && contains_placeholder_p (size))
5143 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
5145 /* Don't think the address has side effects
5146 just because the array does.
5147 (In some cases the address might have side effects,
5148 and we fail to record that fact here. However, it should not
5149 matter, since expand_expr should not care.) */
5150 TREE_SIDE_EFFECTS (array_adr) = 0;
5152 elt
5153 = build1
5154 (INDIRECT_REF, type,
5155 fold (build (PLUS_EXPR,
5156 TYPE_POINTER_TO (variant_type),
5157 array_adr,
5158 fold
5159 (build1
5160 (NOP_EXPR,
5161 TYPE_POINTER_TO (variant_type),
5162 fold (build (MULT_EXPR, TREE_TYPE (index),
5163 index,
5164 convert (TREE_TYPE (index),
5165 size))))))));
5167 /* Volatility, etc., of new expression is same as old
5168 expression. */
5169 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
5170 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
5171 TREE_READONLY (elt) = TREE_READONLY (exp);
5173 return expand_expr (elt, target, tmode, modifier);
5176 /* Fold an expression like: "foo"[2].
5177 This is not done in fold so it won't happen inside &.
5178 Don't fold if this is for wide characters since it's too
5179 difficult to do correctly and this is a very rare case. */
5181 if (TREE_CODE (array) == STRING_CST
5182 && TREE_CODE (index) == INTEGER_CST
5183 && !TREE_INT_CST_HIGH (index)
5184 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
5185 && GET_MODE_CLASS (mode) == MODE_INT
5186 && GET_MODE_SIZE (mode) == 1)
5187 return GEN_INT (TREE_STRING_POINTER (array)[i]);
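/* E.g. "foo"[2] is folded here to GEN_INT ('o'), i.e. (const_int 111)
   on an ASCII host, with no memory reference emitted at all.  */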
5189 /* If this is a constant index into a constant array,
5190 just get the value from the array. Handle both the cases when
5191 we have an explicit constructor and when our operand is a variable
5192 that was declared const. */
5194 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5196 if (TREE_CODE (index) == INTEGER_CST
5197 && TREE_INT_CST_HIGH (index) == 0)
5199 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5201 i = TREE_INT_CST_LOW (index);
5202 while (elem && i--)
5203 elem = TREE_CHAIN (elem);
5204 if (elem)
5205 return expand_expr (fold (TREE_VALUE (elem)), target,
5206 tmode, modifier);
5210 else if (optimize >= 1
5211 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5212 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5213 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5215 if (TREE_CODE (index) == INTEGER_CST
5216 && TREE_INT_CST_HIGH (index) == 0)
5218 tree init = DECL_INITIAL (array);
5220 i = TREE_INT_CST_LOW (index);
5221 if (TREE_CODE (init) == CONSTRUCTOR)
5223 tree elem = CONSTRUCTOR_ELTS (init);
5225 while (elem
5226 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
5227 elem = TREE_CHAIN (elem);
5228 if (elem)
5229 return expand_expr (fold (TREE_VALUE (elem)), target,
5230 tmode, modifier);
5232 else if (TREE_CODE (init) == STRING_CST
5233 && i < TREE_STRING_LENGTH (init))
5234 return GEN_INT (TREE_STRING_POINTER (init)[i]);
5239 /* Treat array-ref with constant index as a component-ref. */
5241 case COMPONENT_REF:
5242 case BIT_FIELD_REF:
5243 /* If the operand is a CONSTRUCTOR, we can just extract the
5244 appropriate field if it is present. Don't do this if we have
5245 already written the data since we want to refer to that copy
5246 and varasm.c assumes that's what we'll do. */
5247 if (code != ARRAY_REF
5248 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5249 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
5251 tree elt;
5253 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5254 elt = TREE_CHAIN (elt))
5255 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
5256 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5260 enum machine_mode mode1;
5261 int bitsize;
5262 int bitpos;
5263 tree offset;
5264 int volatilep = 0;
5265 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
5266 &mode1, &unsignedp, &volatilep);
5267 int alignment;
5269 /* If we got back the original object, something is wrong. Perhaps
5270 we are evaluating an expression too early. In any event, don't
5271 infinitely recurse. */
5272 if (tem == exp)
5273 abort ();
5275 /* If TEM's type is a union of variable size, pass TARGET to the inner
5276 computation, since it will need a temporary and TARGET is known
5277 to suffice.  This occurs in unchecked conversion in Ada.  */
5279 op0 = expand_expr (tem,
5280 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5281 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5282 != INTEGER_CST)
5283 ? target : NULL_RTX),
5284 VOIDmode,
5285 modifier == EXPAND_INITIALIZER ? modifier : 0);
5287 /* If this is a constant, put it into a register if it is a
5288 legitimate constant and memory if it isn't. */
5289 if (CONSTANT_P (op0))
5291 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
5292 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
5293 op0 = force_reg (mode, op0);
5294 else
5295 op0 = validize_mem (force_const_mem (mode, op0));
5298 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
5299 if (offset != 0)
5301 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5303 if (GET_CODE (op0) != MEM)
5304 abort ();
5305 op0 = change_address (op0, VOIDmode,
5306 gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
5307 force_reg (ptr_mode, offset_rtx)));
5308 /* If we have a variable offset, the known alignment
5309 is only that of the innermost structure containing the field.
5310 (Actually, we could sometimes do better by using the
5311 size of an element of the innermost array, but no need.) */
5312 if (TREE_CODE (exp) == COMPONENT_REF
5313 || TREE_CODE (exp) == BIT_FIELD_REF)
5314 alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5315 / BITS_PER_UNIT);
5318 /* Don't forget about volatility even if this is a bitfield. */
5319 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5321 op0 = copy_rtx (op0);
5322 MEM_VOLATILE_P (op0) = 1;
5325 /* In cases where an aligned union has an unaligned object
5326 as a field, we might be extracting a BLKmode value from
5327 an integer-mode (e.g., SImode) object. Handle this case
5328 by doing the extract into an object as wide as the field
5329 (which we know to be the width of a basic mode), then
5330 storing into memory, and changing the mode to BLKmode. */
5331 if (mode1 == VOIDmode
5332 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5333 || (modifier != EXPAND_CONST_ADDRESS
5334 && modifier != EXPAND_SUM
5335 && modifier != EXPAND_INITIALIZER
5336 && ((mode1 != BLKmode && ! direct_load[(int) mode1])
5337 /* If the field isn't aligned enough to fetch as a memref,
5338 fetch it as a bit field. */
5339 || (SLOW_UNALIGNED_ACCESS
5340 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5341 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
5343 enum machine_mode ext_mode = mode;
5345 if (ext_mode == BLKmode)
5346 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5348 if (ext_mode == BLKmode)
5349 abort ();
5351 op0 = validize_mem (op0);
5353 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5354 mark_reg_pointer (XEXP (op0, 0), alignment);
5356 op0 = extract_bit_field (op0, bitsize, bitpos,
5357 unsignedp, target, ext_mode, ext_mode,
5358 alignment,
5359 int_size_in_bytes (TREE_TYPE (tem)));
5360 if (mode == BLKmode)
5362 rtx new = assign_stack_temp (ext_mode,
5363 bitsize / BITS_PER_UNIT, 0);
5365 emit_move_insn (new, op0);
5366 op0 = copy_rtx (new);
5367 PUT_MODE (op0, BLKmode);
5368 MEM_IN_STRUCT_P (op0) = 1;
5371 return op0;
5374 /* If the result is BLKmode, use that to access the object
5375 now as well. */
5376 if (mode == BLKmode)
5377 mode1 = BLKmode;
5379 /* Get a reference to just this component. */
5380 if (modifier == EXPAND_CONST_ADDRESS
5381 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5382 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
5383 (bitpos / BITS_PER_UNIT)));
5384 else
5385 op0 = change_address (op0, mode1,
5386 plus_constant (XEXP (op0, 0),
5387 (bitpos / BITS_PER_UNIT)));
5388 if (GET_CODE (XEXP (op0, 0)) == REG)
5389 mark_reg_pointer (XEXP (op0, 0), alignment);
5391 MEM_IN_STRUCT_P (op0) = 1;
5392 MEM_VOLATILE_P (op0) |= volatilep;
5393 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
5394 return op0;
5395 if (target == 0)
5396 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5397 convert_move (target, op0, unsignedp);
5398 return target;
5401 case OFFSET_REF:
5403 tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
5404 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
5405 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
5406 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
5407 MEM_IN_STRUCT_P (temp) = 1;
5408 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
5409 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
5410 a location is accessed through a pointer to const does not mean
5411 that the value there can never change. */
5412 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
5413 #endif
5414 return temp;
5417 /* Intended for a reference to a buffer of a file-object in Pascal.
5418 But it's not certain that a special tree code will really be
5419 necessary for these. INDIRECT_REF might work for them. */
5420 case BUFFER_REF:
5421 abort ();
5423 case IN_EXPR:
5425 /* Pascal set IN expression.
5427 Algorithm:
5428 rlo = set_low - (set_low%bits_per_word);
5429 the_word = set [ (index - rlo)/bits_per_word ];
5430 bit_index = index % bits_per_word;
5431 bitmask = 1 << bit_index;
5432 return !!(the_word & bitmask); */
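/* A worked instance of the algorithm above, assuming
   bits_per_word == BITS_PER_UNIT == 8, set_low == 16 and index == 21:

     rlo       = 16 - (16 % 8)       == 16
     the_word  = set [(21 - 16) / 8] == set [0]
     bit_index = 21 % 8              == 5
     bitmask   = 1 << 5              == 32

   so the result is bit 5 of the first byte of the set.  */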
5434 tree set = TREE_OPERAND (exp, 0);
5435 tree index = TREE_OPERAND (exp, 1);
5436 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
5437 tree set_type = TREE_TYPE (set);
5438 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
5439 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
5440 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
5441 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
5442 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
5443 rtx setval = expand_expr (set, 0, VOIDmode, 0);
5444 rtx setaddr = XEXP (setval, 0);
5445 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
5446 rtx rlow;
5447 rtx diff, quo, rem, addr, bit, result;
5449 preexpand_calls (exp);
5451 /* If domain is empty, answer is no. Likewise if index is constant
5452 and out of bounds. */
5453 if ((TREE_CODE (set_high_bound) == INTEGER_CST
5454 && TREE_CODE (set_low_bound) == INTEGER_CST
5455 && tree_int_cst_lt (set_high_bound, set_low_bound)
5456 || (TREE_CODE (index) == INTEGER_CST
5457 && TREE_CODE (set_low_bound) == INTEGER_CST
5458 && tree_int_cst_lt (index, set_low_bound))
5459 || (TREE_CODE (set_high_bound) == INTEGER_CST
5460 && TREE_CODE (index) == INTEGER_CST
5461 && tree_int_cst_lt (set_high_bound, index))))
5462 return const0_rtx;
5464 if (target == 0)
5465 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5467 /* If we get here, we have to generate the code for both cases
5468 (in range and out of range). */
5470 op0 = gen_label_rtx ();
5471 op1 = gen_label_rtx ();
5473 if (! (GET_CODE (index_val) == CONST_INT
5474 && GET_CODE (lo_r) == CONST_INT))
5476 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
5477 GET_MODE (index_val), iunsignedp, 0);
5478 emit_jump_insn (gen_blt (op1));
5481 if (! (GET_CODE (index_val) == CONST_INT
5482 && GET_CODE (hi_r) == CONST_INT))
5484 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
5485 GET_MODE (index_val), iunsignedp, 0);
5486 emit_jump_insn (gen_bgt (op1));
5489 /* Calculate the element number of bit zero in the first word
5490 of the set. */
5491 if (GET_CODE (lo_r) == CONST_INT)
5492 rlow = GEN_INT (INTVAL (lo_r)
5493 & ~ ((HOST_WIDE_INT) BITS_PER_UNIT - 1));
5494 else
5495 rlow = expand_binop (index_mode, and_optab, lo_r,
5496 GEN_INT (~ ((HOST_WIDE_INT) BITS_PER_UNIT - 1)),
5497 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5499 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
5500 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5502 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
5503 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5504 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
5505 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5507 addr = memory_address (byte_mode,
5508 expand_binop (index_mode, add_optab, quo,
5509 setaddr, NULL_RTX, iunsignedp,
5510 OPTAB_LIB_WIDEN));
5512 /* Extract the bit we want to examine */
5513 bit = expand_shift (RSHIFT_EXPR, byte_mode,
5514 gen_rtx (MEM, byte_mode, addr),
5515 make_tree (TREE_TYPE (index), rem),
5516 NULL_RTX, 1);
5517 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
5518 GET_MODE (target) == byte_mode ? target : 0,
5519 1, OPTAB_LIB_WIDEN);
5521 if (result != target)
5522 convert_move (target, result, 1);
5524 /* Output the code to handle the out-of-range case. */
5525 emit_jump (op0);
5526 emit_label (op1);
5527 emit_move_insn (target, const0_rtx);
5528 emit_label (op0);
5529 return target;
5532 case WITH_CLEANUP_EXPR:
5533 if (RTL_EXPR_RTL (exp) == 0)
5535 RTL_EXPR_RTL (exp)
5536 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5537 cleanups_this_call
5538 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
5539 /* That's it for this cleanup. */
5540 TREE_OPERAND (exp, 2) = 0;
5541 (*interim_eh_hook) (NULL_TREE);
5543 return RTL_EXPR_RTL (exp);
5545 case CLEANUP_POINT_EXPR:
5547 extern int temp_slot_level;
5548 tree old_cleanups = cleanups_this_call;
5549 int old_temp_level = target_temp_slot_level;
5550 push_temp_slots ();
5551 target_temp_slot_level = temp_slot_level;
5552 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5553 /* If we're going to use this value, load it up now. */
5554 if (! ignore)
5555 op0 = force_not_mem (op0);
5556 expand_cleanups_to (old_cleanups);
5557 preserve_temp_slots (op0);
5558 free_temp_slots ();
5559 pop_temp_slots ();
5560 target_temp_slot_level = old_temp_level;
5562 return op0;
5564 case CALL_EXPR:
5565 /* Check for a built-in function. */
5566 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5567 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5568 == FUNCTION_DECL)
5569 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5570 return expand_builtin (exp, target, subtarget, tmode, ignore);
5572 /* If this call was expanded already by preexpand_calls,
5573 just return the result we got. */
5574 if (CALL_EXPR_RTL (exp) != 0)
5575 return CALL_EXPR_RTL (exp);
5577 return expand_call (exp, target, ignore);
5579 case NON_LVALUE_EXPR:
5580 case NOP_EXPR:
5581 case CONVERT_EXPR:
5582 case REFERENCE_EXPR:
5583 if (TREE_CODE (type) == UNION_TYPE)
5585 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
5586 if (target == 0)
5588 if (mode != BLKmode)
5589 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5590 else
5591 target = assign_temp (type, 0, 1, 1);
5594 if (GET_CODE (target) == MEM)
5595 /* Store data into beginning of memory target. */
5596 store_expr (TREE_OPERAND (exp, 0),
5597 change_address (target, TYPE_MODE (valtype), 0), 0);
5599 else if (GET_CODE (target) == REG)
5600 /* Store this field into a union of the proper type. */
5601 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
5602 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
5603 VOIDmode, 0, 1,
5604 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
5605 else
5606 abort ();
5608 /* Return the entire union. */
5609 return target;
5612 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5614 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
5615 modifier);
5617 /* If the signedness of the conversion differs and OP0 is
5618 a promoted SUBREG, clear that indication since we now
5619 have to do the proper extension. */
5620 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
5621 && GET_CODE (op0) == SUBREG)
5622 SUBREG_PROMOTED_VAR_P (op0) = 0;
5624 return op0;
5627 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
5628 if (GET_MODE (op0) == mode)
5629 return op0;
5631 /* If OP0 is a constant, just convert it into the proper mode. */
5632 if (CONSTANT_P (op0))
5633 return
5634 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5635 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5637 if (modifier == EXPAND_INITIALIZER)
5638 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
5640 if (target == 0)
5641 return
5642 convert_to_mode (mode, op0,
5643 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5644 else
5645 convert_move (target, op0,
5646 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5647 return target;
5649 case PLUS_EXPR:
5650 /* We come here from MINUS_EXPR when the second operand is a constant. */
5651 plus_expr:
5652 this_optab = add_optab;
5654 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
5655 something else, make sure we add the register to the constant and
5656 then to the other thing. This case can occur during strength
5657 reduction and doing it this way will produce better code if the
5658 frame pointer or argument pointer is eliminated.
5660 fold-const.c will ensure that the constant is always in the inner
5661 PLUS_EXPR, so the only case we need to do anything about is if
5662 sp, ap, or fp is our second argument, in which case we must swap
5663 the innermost first argument and our second argument. */
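/* Illustrative sketch (not in the original code): given the tree
   (PLUS_EXPR (PLUS_EXPR x 4) fp), where FP is the RTL_EXPR for the
   frame pointer, the swap below produces
   (PLUS_EXPR (PLUS_EXPR fp 4) x); after frame-pointer elimination
   rewrites FP as, say, sp+12, the constants 4 and 12 fold into one
   displacement instead of needing a separate add insn. */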
5665 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5666 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
5667 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
5668 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
5669 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
5670 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
5672 tree t = TREE_OPERAND (exp, 1);
5674 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5675 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
5678 /* If the result is to be ptr_mode and we are adding an integer to
5679 something, we might be forming a constant. So try to use
5680 plus_constant. If it produces a sum and we can't accept it,
5681 use force_operand. This allows P = &ARR[const] to generate
5682 efficient code on machines where a SYMBOL_REF is not a valid
5683 address.
5685 If this is an EXPAND_SUM call, always return the sum. */
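/* A minimal example of the case above (illustrative): for
     int arr[10];  int *p = &arr[3];
   with 4-byte ints the address expands to
   (plus (symbol_ref "arr") (const_int 12)), which plus_constant
   folds into a single CONST; force_operand is used afterwards only
   when the folded sum is not an acceptable operand. */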
5686 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
5687 || mode == ptr_mode)
5689 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
5690 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5691 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
5693 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
5694 EXPAND_SUM);
5695 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
5696 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5697 op1 = force_operand (op1, target);
5698 return op1;
5701 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5702 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5703 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
5705 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
5706 EXPAND_SUM);
5707 if (! CONSTANT_P (op0))
5709 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5710 VOIDmode, modifier);
5711 /* Don't go to both_summands if modifier
5712 says it's not right to return a PLUS. */
5713 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5714 goto binop2;
5715 goto both_summands;
5717 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
5718 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5719 op0 = force_operand (op0, target);
5720 return op0;
5724 /* No sense saving up arithmetic to be done
5725 if it's all in the wrong mode to form part of an address.
5726 And force_operand won't know whether to sign-extend or
5727 zero-extend. */
5728 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5729 || mode != ptr_mode)
5730 goto binop;
5732 preexpand_calls (exp);
5733 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5734 subtarget = 0;
5736 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
5737 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
5739 both_summands:
5740 /* Make sure any term that's a sum with a constant comes last. */
5741 if (GET_CODE (op0) == PLUS
5742 && CONSTANT_P (XEXP (op0, 1)))
5744 temp = op0;
5745 op0 = op1;
5746 op1 = temp;
5748 /* If adding to a sum including a constant,
5749 associate it to put the constant outside. */
5750 if (GET_CODE (op1) == PLUS
5751 && CONSTANT_P (XEXP (op1, 1)))
5753 rtx constant_term = const0_rtx;
5755 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
5756 if (temp != 0)
5757 op0 = temp;
5758 /* Ensure that MULT comes first if there is one. */
5759 else if (GET_CODE (op0) == MULT)
5760 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
5761 else
5762 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
5764 /* Let's also eliminate constants from op0 if possible. */
5765 op0 = eliminate_constant_term (op0, &constant_term);
5767 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
5768 their sum should be a constant. Form it into OP1, since the
5769 result we want will then be OP0 + OP1. */
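/* Worked example (illustrative): with op0 = (plus x 3) and
   op1 = (plus y 4), Y is first folded into OP0, the constant 3 is
   stripped back out by eliminate_constant_term, and 3 + 4 is formed
   into OP1, so the final result is (plus (plus x y) (const_int 7)). */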
5771 temp = simplify_binary_operation (PLUS, mode, constant_term,
5772 XEXP (op1, 1));
5773 if (temp != 0)
5774 op1 = temp;
5775 else
5776 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
5779 /* Put a constant term last and put a multiplication first. */
5780 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
5781 temp = op1, op1 = op0, op0 = temp;
5783 temp = simplify_binary_operation (PLUS, mode, op0, op1);
5784 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
5786 case MINUS_EXPR:
5787 /* For initializers, we are allowed to return a MINUS of two
5788 symbolic constants; here we handle all cases where both
5789 operands are constant. */
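/* For example (a sketch, not from the source): a file-scope
   initializer such as
     static int delta = (char *) &b - (char *) &a;
   reaches here with EXPAND_INITIALIZER and yields
   (minus (symbol_ref "b") (symbol_ref "a")), a difference the
   assembler and linker can resolve without emitting any insns. */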
5792 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5793 && really_constant_p (TREE_OPERAND (exp, 0))
5794 && really_constant_p (TREE_OPERAND (exp, 1)))
5796 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
5797 VOIDmode, modifier);
5798 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5799 VOIDmode, modifier);
5801 /* If the last operand is a CONST_INT, use plus_constant of
5802 the negated constant. Else make the MINUS. */
5803 if (GET_CODE (op1) == CONST_INT)
5804 return plus_constant (op0, - INTVAL (op1));
5805 else
5806 return gen_rtx (MINUS, mode, op0, op1);
5808 /* Convert A - const to A + (-const). */
5809 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
5811 tree negated = fold (build1 (NEGATE_EXPR, type,
5812 TREE_OPERAND (exp, 1)));
5814 /* Deal with the case where we can't negate the constant
5815 in TYPE. */
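/* Illustrative case: for 32-bit unsigned x, x - 1 cannot become
   x + (-1) directly, since -1 is not representable in TYPE; the
   code below redoes the sum as (int) x + (-1) and converts back,
   which yields the same bit pattern under wraparound arithmetic. */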
5816 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
5818 tree newtype = signed_type (type);
5819 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
5820 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
5821 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
5823 if (! TREE_OVERFLOW (newneg))
5824 return expand_expr (convert (type,
5825 build (PLUS_EXPR, newtype,
5826 newop0, newneg)),
5827 target, tmode, modifier);
5829 else
5831 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
5832 goto plus_expr;
5835 this_optab = sub_optab;
5836 goto binop;
5838 case MULT_EXPR:
5839 preexpand_calls (exp);
5840 /* If first operand is constant, swap them.
5841 Thus the following special case checks need only
5842 check the second operand. */
5843 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5845 register tree t1 = TREE_OPERAND (exp, 0);
5846 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
5847 TREE_OPERAND (exp, 1) = t1;
5850 /* Attempt to return something suitable for generating an
5851 indexed address, for machines that support that. */
5853 if (modifier == EXPAND_SUM && mode == ptr_mode
5854 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5855 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
5857 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
5859 /* Apply distributive law if OP0 is x+c. */
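/* E.g. (illustrative): if OP0 expanded to (plus x (const_int 4)) and
   the multiplier is 8, this returns
   (plus (mult x (const_int 8)) (const_int 32)), a form that indexed
   or scaled addressing modes can often match directly. */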
5860 if (GET_CODE (op0) == PLUS
5861 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
5862 return gen_rtx (PLUS, mode,
5863 gen_rtx (MULT, mode, XEXP (op0, 0),
5864 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
5865 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
5866 * INTVAL (XEXP (op0, 1))));
5868 if (GET_CODE (op0) != REG)
5869 op0 = force_operand (op0, NULL_RTX);
5870 if (GET_CODE (op0) != REG)
5871 op0 = copy_to_mode_reg (mode, op0);
5873 return gen_rtx (MULT, mode, op0,
5874 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
5877 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5878 subtarget = 0;
5880 /* Check for multiplying things that have been extended
5881 from a narrower type. If this machine supports multiplying
5882 in that narrower type with a result in the desired type,
5883 do it that way, and avoid the explicit type-conversion. */
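/* A sketch of what this catches (assuming 16-bit HImode shorts and
   a 32-bit SImode result): for
     short a, b;  int p = a * b;
   the tree is (int) a * (int) b; instead of extending both operands
   and doing a full SImode multiply, a widening-multiply pattern
   (e.g. the standard mulhisi3 pattern) is used via smul_widen_optab
   or umul_widen_optab below, when the machine provides one. */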
5884 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
5885 && TREE_CODE (type) == INTEGER_TYPE
5886 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5887 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
5888 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5889 && int_fits_type_p (TREE_OPERAND (exp, 1),
5890 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5891 /* Don't use a widening multiply if a shift will do. */
5892 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
5893 > HOST_BITS_PER_WIDE_INT)
5894 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
5895 ||
5896 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
5897 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
5898 ==
5899 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
5900 /* If both operands are extended, they must either both
5901 be zero-extended or both be sign-extended. */
5902 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
5903 ==
5904 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
5906 enum machine_mode innermode
5907 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
5908 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5909 ? smul_widen_optab : umul_widen_optab);
5910 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5911 ? umul_widen_optab : smul_widen_optab);
5912 if (mode == GET_MODE_WIDER_MODE (innermode))
5914 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
5916 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5917 NULL_RTX, VOIDmode, 0);
5918 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
5919 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5920 VOIDmode, 0);
5921 else
5922 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
5923 NULL_RTX, VOIDmode, 0);
5924 goto binop2;
5926 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5927 && innermode == word_mode)
5929 rtx htem;
5930 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5931 NULL_RTX, VOIDmode, 0);
5932 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
5933 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5934 VOIDmode, 0);
5935 else
5936 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
5937 NULL_RTX, VOIDmode, 0);
5938 temp = expand_binop (mode, other_optab, op0, op1, target,
5939 unsignedp, OPTAB_LIB_WIDEN);
5940 htem = expand_mult_highpart_adjust (innermode,
5941 gen_highpart (innermode, temp),
5942 op0, op1,
5943 gen_highpart (innermode, temp),
5944 unsignedp);
5945 emit_move_insn (gen_highpart (innermode, temp), htem);
5946 return temp;
5950 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5951 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5952 return expand_mult (mode, op0, op1, target, unsignedp);
5954 case TRUNC_DIV_EXPR:
5955 case FLOOR_DIV_EXPR:
5956 case CEIL_DIV_EXPR:
5957 case ROUND_DIV_EXPR:
5958 case EXACT_DIV_EXPR:
5959 preexpand_calls (exp);
5960 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5961 subtarget = 0;
5962 /* Possible optimization: compute the dividend with EXPAND_SUM;
5963 then, if the divisor is constant, we can optimize the case
5964 where some terms of the dividend have coefficients divisible by it. */
5965 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5966 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5967 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
5969 case RDIV_EXPR:
5970 this_optab = flodiv_optab;
5971 goto binop;
5973 case TRUNC_MOD_EXPR:
5974 case FLOOR_MOD_EXPR:
5975 case CEIL_MOD_EXPR:
5976 case ROUND_MOD_EXPR:
5977 preexpand_calls (exp);
5978 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5979 subtarget = 0;
5980 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5981 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5982 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
5984 case FIX_ROUND_EXPR:
5985 case FIX_FLOOR_EXPR:
5986 case FIX_CEIL_EXPR:
5987 abort (); /* Not used for C. */
5989 case FIX_TRUNC_EXPR:
5990 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5991 if (target == 0)
5992 target = gen_reg_rtx (mode);
5993 expand_fix (target, op0, unsignedp);
5994 return target;
5996 case FLOAT_EXPR:
5997 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5998 if (target == 0)
5999 target = gen_reg_rtx (mode);
6000 /* expand_float can't figure out what to do if FROM has VOIDmode.
6001 So give it the correct mode. With -O, cse will optimize this. */
6002 if (GET_MODE (op0) == VOIDmode)
6003 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6004 op0);
6005 expand_float (target, op0,
6006 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6007 return target;
6009 case NEGATE_EXPR:
6010 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6011 temp = expand_unop (mode, neg_optab, op0, target, 0);
6012 if (temp == 0)
6013 abort ();
6014 return temp;
6016 case ABS_EXPR:
6017 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6019 /* Handle complex values specially. */
6020 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6021 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6022 return expand_complex_abs (mode, op0, target, unsignedp);
6024 /* Unsigned abs is simply the operand. Testing here means we don't
6025 risk generating incorrect code below. */
6026 if (TREE_UNSIGNED (type))
6027 return op0;
6029 return expand_abs (mode, op0, target, unsignedp,
6030 safe_from_p (target, TREE_OPERAND (exp, 0)));
6032 case MAX_EXPR:
6033 case MIN_EXPR:
6034 target = original_target;
6035 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
6036 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
6037 || GET_MODE (target) != mode
6038 || (GET_CODE (target) == REG
6039 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6040 target = gen_reg_rtx (mode);
6041 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6042 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6044 /* First try to do it with a special MIN or MAX instruction.
6045 If that does not win, use a conditional jump to select the proper
6046 value. */
6047 this_optab = (TREE_UNSIGNED (type)
6048 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6049 : (code == MIN_EXPR ? smin_optab : smax_optab));
6051 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6052 OPTAB_WIDEN);
6053 if (temp != 0)
6054 return temp;
6056 /* At this point, a MEM target is no longer useful; we will get better
6057 code without it. */
6059 if (GET_CODE (target) == MEM)
6060 target = gen_reg_rtx (mode);
6062 if (target != op0)
6063 emit_move_insn (target, op0);
6065 op0 = gen_label_rtx ();
6067 /* If this mode is an integer too wide to compare properly,
6068 compare word by word. Rely on cse to optimize constant cases. */
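/* Example (illustrative): MAX_EXPR on DImode values on a 32-bit
   machine with no DImode compare pattern; can_compare_p (DImode)
   is false, so the test is done by comparing the high-order words
   first and, only if they are equal, the low-order words. */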
6069 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
6071 if (code == MAX_EXPR)
6072 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6073 target, op1, NULL_RTX, op0);
6074 else
6075 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6076 op1, target, NULL_RTX, op0);
6077 emit_move_insn (target, op1);
6079 else
6081 if (code == MAX_EXPR)
6082 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6083 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6084 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
6085 else
6086 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6087 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6088 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
6089 if (temp == const0_rtx)
6090 emit_move_insn (target, op1);
6091 else if (temp != const_true_rtx)
6093 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6094 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6095 else
6096 abort ();
6097 emit_move_insn (target, op1);
6100 emit_label (op0);
6101 return target;
6103 case BIT_NOT_EXPR:
6104 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6105 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6106 if (temp == 0)
6107 abort ();
6108 return temp;
6110 case FFS_EXPR:
6111 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6112 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6113 if (temp == 0)
6114 abort ();
6115 return temp;
6117 /* ??? Can optimize bitwise operations with one arg constant.
6118 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6119 and (a bitwise1 b) bitwise2 b (etc)
6120 but that is probably not worth while. */
6122 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6123 boolean values when we want in all cases to compute both of them. In
6124 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6125 as actual zero-or-1 values and then bitwise anding. In cases where
6126 there cannot be any side effects, better code would be made by
6127 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6128 how to recognize those cases. */
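/* For instance (illustrative): with side-effect-free 0-or-1
   operands, a && b (TRUTH_ANDIF_EXPR) must short-circuit and so
   needs a conditional jump, while TRUTH_AND_EXPR may evaluate both
   operands and emit a single AND insn, avoiding the branch. */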
6130 case TRUTH_AND_EXPR:
6131 case BIT_AND_EXPR:
6132 this_optab = and_optab;
6133 goto binop;
6135 case TRUTH_OR_EXPR:
6136 case BIT_IOR_EXPR:
6137 this_optab = ior_optab;
6138 goto binop;
6140 case TRUTH_XOR_EXPR:
6141 case BIT_XOR_EXPR:
6142 this_optab = xor_optab;
6143 goto binop;
6145 case LSHIFT_EXPR:
6146 case RSHIFT_EXPR:
6147 case LROTATE_EXPR:
6148 case RROTATE_EXPR:
6149 preexpand_calls (exp);
6150 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6151 subtarget = 0;
6152 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6153 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6154 unsignedp);
6156 /* Could determine the answer when only additive constants differ. Also,
6157 the addition of one can be handled by changing the condition. */
6158 case LT_EXPR:
6159 case LE_EXPR:
6160 case GT_EXPR:
6161 case GE_EXPR:
6162 case EQ_EXPR:
6163 case NE_EXPR:
6164 preexpand_calls (exp);
6165 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6166 if (temp != 0)
6167 return temp;
6169 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
6170 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6171 && original_target
6172 && GET_CODE (original_target) == REG
6173 && (GET_MODE (original_target)
6174 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6176 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6177 VOIDmode, 0);
6179 if (temp != original_target)
6180 temp = copy_to_reg (temp);
6182 op1 = gen_label_rtx ();
6183 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
6184 GET_MODE (temp), unsignedp, 0);
6185 emit_jump_insn (gen_beq (op1));
6186 emit_move_insn (temp, const1_rtx);
6187 emit_label (op1);
6188 return temp;
6191 /* If no set-flag instruction, must generate a conditional
6192 store into a temporary variable. Drop through
6193 and handle this like && and ||. */
6195 case TRUTH_ANDIF_EXPR:
6196 case TRUTH_ORIF_EXPR:
6197 if (! ignore
6198 && (target == 0 || ! safe_from_p (target, exp)
6199 /* Make sure we don't have a hard reg (such as function's return
6200 value) live across basic blocks, if not optimizing. */
6201 || (!optimize && GET_CODE (target) == REG
6202 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
6203 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6205 if (target)
6206 emit_clr_insn (target);
6208 op1 = gen_label_rtx ();
6209 jumpifnot (exp, op1);
6211 if (target)
6212 emit_0_to_1_insn (target);
6214 emit_label (op1);
6215 return ignore ? const0_rtx : target;
6217 case TRUTH_NOT_EXPR:
6218 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6219 /* The parser is careful to generate TRUTH_NOT_EXPR
6220 only with operands that are always zero or one. */
6221 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
6222 target, 1, OPTAB_LIB_WIDEN);
6223 if (temp == 0)
6224 abort ();
6225 return temp;
6227 case COMPOUND_EXPR:
6228 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6229 emit_queue ();
6230 return expand_expr (TREE_OPERAND (exp, 1),
6231 (ignore ? const0_rtx : target),
6232 VOIDmode, 0);
6234 case COND_EXPR:
6236 rtx flag = NULL_RTX;
6237 tree left_cleanups = NULL_TREE;
6238 tree right_cleanups = NULL_TREE;
6240 /* Used to save a pointer to the place to put the setting of
6241 the flag that indicates if this side of the conditional was
6242 taken. We backpatch the code if we find out later that we
6243 have any conditional cleanups that need to be performed. */
6244 rtx dest_right_flag = NULL_RTX;
6245 rtx dest_left_flag = NULL_RTX;
6247 /* Note that COND_EXPRs whose type is a structure or union
6248 are required to be constructed to contain assignments of
6249 a temporary variable, so that we can evaluate them here
6250 for side effect only. If type is void, we must do likewise. */
6252 /* If an arm of the branch requires a cleanup,
6253 only that cleanup is performed. */
6255 tree singleton = 0;
6256 tree binary_op = 0, unary_op = 0;
6257 tree old_cleanups = cleanups_this_call;
6259 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6260 convert it to our mode, if necessary. */
6261 if (integer_onep (TREE_OPERAND (exp, 1))
6262 && integer_zerop (TREE_OPERAND (exp, 2))
6263 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6265 if (ignore)
6267 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6268 modifier);
6269 return const0_rtx;
6272 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
6273 if (GET_MODE (op0) == mode)
6274 return op0;
6276 if (target == 0)
6277 target = gen_reg_rtx (mode);
6278 convert_move (target, op0, unsignedp);
6279 return target;
6282 /* If we are not to produce a result, we have no target. Otherwise,
6283 if a target was specified use it; it will not be used as an
6284 intermediate target unless it is safe. If no target, use a
6285 temporary. */
6287 if (ignore)
6288 temp = 0;
6289 else if (original_target
6290 && safe_from_p (original_target, TREE_OPERAND (exp, 0))
6291 && GET_MODE (original_target) == mode
6292 && ! (GET_CODE (original_target) == MEM
6293 && MEM_VOLATILE_P (original_target)))
6294 temp = original_target;
6295 else
6296 temp = assign_temp (type, 0, 0, 1);
6298 /* Check for X ? A + B : A. If we have this, we can copy
6299 A to the output and conditionally add B. Similarly for unary
6300 operations. Don't do this if X has side-effects because
6301 those side effects might affect A or B and the "?" operation is
6302 a sequence point in ANSI. (We test for side effects later.) */
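/* Sketch of the transformation (illustrative): for
     t = x ? a + b : a;
   A is stored into the target, a jump around the addition is taken
   when X is false, and otherwise B is added into the target in
   place, so A need not be evaluated separately on both arms. */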
6304 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6305 && operand_equal_p (TREE_OPERAND (exp, 2),
6306 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6307 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6308 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6309 && operand_equal_p (TREE_OPERAND (exp, 1),
6310 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6311 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6312 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6313 && operand_equal_p (TREE_OPERAND (exp, 2),
6314 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6315 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6316 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6317 && operand_equal_p (TREE_OPERAND (exp, 1),
6318 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6319 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6321 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
6322 operation, do this as A + (X != 0). Similarly for other simple
6323 binary operators. */
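/* Illustrative: for t = x ? a + 1 : a, if the machine has a
   store-flag (scc) instruction, do_store_flag materializes x != 0
   as a 0-or-1 value and a branch-free t = a + (x != 0) is emitted
   through the optab chosen below. */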
6324 if (temp && singleton && binary_op
6325 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6326 && (TREE_CODE (binary_op) == PLUS_EXPR
6327 || TREE_CODE (binary_op) == MINUS_EXPR
6328 || TREE_CODE (binary_op) == BIT_IOR_EXPR
6329 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
6330 && integer_onep (TREE_OPERAND (binary_op, 1))
6331 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6333 rtx result;
6334 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6335 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6336 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
6337 : xor_optab);
6339 /* If we had X ? A : A + 1, do this as A + (X == 0).
6341 We have to invert the truth value here and then put it
6342 back later if do_store_flag fails. We cannot simply copy
6343 TREE_OPERAND (exp, 0) to another variable and modify that
6344 because invert_truthvalue can modify the tree pointed to
6345 by its argument. */
6346 if (singleton == TREE_OPERAND (exp, 1))
6347 TREE_OPERAND (exp, 0)
6348 = invert_truthvalue (TREE_OPERAND (exp, 0));
6350 result = do_store_flag (TREE_OPERAND (exp, 0),
6351 (safe_from_p (temp, singleton)
6352 ? temp : NULL_RTX),
6353 mode, BRANCH_COST <= 1);
6355 if (result)
6357 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
6358 return expand_binop (mode, boptab, op1, result, temp,
6359 unsignedp, OPTAB_LIB_WIDEN);
6361 else if (singleton == TREE_OPERAND (exp, 1))
6362 TREE_OPERAND (exp, 0)
6363 = invert_truthvalue (TREE_OPERAND (exp, 0));
6366 do_pending_stack_adjust ();
6367 NO_DEFER_POP;
6368 op0 = gen_label_rtx ();
6370 flag = gen_reg_rtx (word_mode);
6371 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6373 if (temp != 0)
6375 /* If the target conflicts with the other operand of the
6376 binary op, we can't use it. Also, we can't use the target
6377 if it is a hard register, because evaluating the condition
6378 might clobber it. */
6379 if ((binary_op
6380 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
6381 || (GET_CODE (temp) == REG
6382 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
6383 temp = gen_reg_rtx (mode);
6384 store_expr (singleton, temp, 0);
6386 else
6387 expand_expr (singleton,
6388 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6389 dest_left_flag = get_last_insn ();
6390 if (singleton == TREE_OPERAND (exp, 1))
6391 jumpif (TREE_OPERAND (exp, 0), op0);
6392 else
6393 jumpifnot (TREE_OPERAND (exp, 0), op0);
6395 /* Allows cleanups up to here. */
6396 old_cleanups = cleanups_this_call;
6397 if (binary_op && temp == 0)
6398 /* Just touch the other operand. */
6399 expand_expr (TREE_OPERAND (binary_op, 1),
6400 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6401 else if (binary_op)
6402 store_expr (build (TREE_CODE (binary_op), type,
6403 make_tree (type, temp),
6404 TREE_OPERAND (binary_op, 1)),
6405 temp, 0);
6406 else
6407 store_expr (build1 (TREE_CODE (unary_op), type,
6408 make_tree (type, temp)),
6409 temp, 0);
6410 op1 = op0;
6411 dest_right_flag = get_last_insn ();
6413 #if 0
6414 /* This is now done in jump.c and is better done there because it
6415 produces shorter register lifetimes. */
6417 /* Check for both possibilities, either constants or variables
6418 in registers (but not the same as the target!). If so, can
6419 save branches by assigning one, branching, and assigning the
6420 other. */
6421 else if (temp && GET_MODE (temp) != BLKmode
6422 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
6423 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
6424 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
6425 && DECL_RTL (TREE_OPERAND (exp, 1))
6426 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
6427 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
6428 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
6429 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
6430 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
6431 && DECL_RTL (TREE_OPERAND (exp, 2))
6432 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
6433 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
6435 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6436 temp = gen_reg_rtx (mode);
6437 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6438 dest_left_flag = get_last_insn ();
6439 jumpifnot (TREE_OPERAND (exp, 0), op0);
6441 /* Allows cleanups up to here. */
6442 old_cleanups = cleanups_this_call;
6443 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6444 op1 = op0;
6445 dest_right_flag = get_last_insn ();
6447 #endif
6448 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6449 comparison operator. If we have one of these cases, set the
6450 output to A, branch on A (cse will merge these two references),
6451 then set the output to FOO. */
6452 else if (temp
6453 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6454 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6455 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6456 TREE_OPERAND (exp, 1), 0)
6457 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6458 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
6460 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6461 temp = gen_reg_rtx (mode);
6462 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6463 dest_left_flag = get_last_insn ();
6464 jumpif (TREE_OPERAND (exp, 0), op0);
6466 /* Allows cleanups up to here. */
6467 old_cleanups = cleanups_this_call;
6468 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6469 op1 = op0;
6470 dest_right_flag = get_last_insn ();
6472 else if (temp
6473 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6474 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6475 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6476 TREE_OPERAND (exp, 2), 0)
6477 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6478 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
6480 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6481 temp = gen_reg_rtx (mode);
6482 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6483 dest_left_flag = get_last_insn ();
6484 jumpifnot (TREE_OPERAND (exp, 0), op0);
6486 /* Allows cleanups up to here. */
6487 old_cleanups = cleanups_this_call;
6488 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6489 op1 = op0;
6490 dest_right_flag = get_last_insn ();
6492 else
6494 op1 = gen_label_rtx ();
6495 jumpifnot (TREE_OPERAND (exp, 0), op0);
6497 /* Allows cleanups up to here. */
6498 old_cleanups = cleanups_this_call;
6499 if (temp != 0)
6500 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6501 else
6502 expand_expr (TREE_OPERAND (exp, 1),
6503 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6504 dest_left_flag = get_last_insn ();
6506 /* Handle conditional cleanups, if any. */
6507 left_cleanups = defer_cleanups_to (old_cleanups);
6509 emit_queue ();
6510 emit_jump_insn (gen_jump (op1));
6511 emit_barrier ();
6512 emit_label (op0);
6513 if (temp != 0)
6514 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6515 else
6516 expand_expr (TREE_OPERAND (exp, 2),
6517 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6518 dest_right_flag = get_last_insn ();
6521 /* Handle conditional cleanups, if any. */
6522 right_cleanups = defer_cleanups_to (old_cleanups);
6524 emit_queue ();
6525 emit_label (op1);
6526 OK_DEFER_POP;
6528 /* Add back in any conditional cleanups. */
6529 if (left_cleanups || right_cleanups)
6531 tree new_cleanups;
6532 tree cond;
6533 rtx last;
6535 /* Now that we know that a flag is needed, go back and add in the
6536 setting of the flag. */
6538 /* Do the left side flag. */
6539 last = get_last_insn ();
6540 /* Flag left cleanups as needed. */
6541 emit_move_insn (flag, const1_rtx);
6542 /* ??? deprecated, use sequences instead. */
6543 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
6545 /* Do the right side flag. */
6546 last = get_last_insn ();
6547 /* Flag right cleanups as needed. */
6548 emit_move_insn (flag, const0_rtx);
6549 /* ??? deprecated, use sequences instead. */
6550 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
6552 /* All cleanups must be on the function_obstack. */
6553 push_obstacks_nochange ();
6554 resume_temporary_allocation ();
6556 /* Convert FLAG, which is an rtx, into a tree. */
6557 cond = make_node (RTL_EXPR);
6558 TREE_TYPE (cond) = integer_type_node;
6559 RTL_EXPR_RTL (cond) = flag;
6560 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
6561 cond = save_expr (cond);
6563 if (! left_cleanups)
6564 left_cleanups = integer_zero_node;
6565 if (! right_cleanups)
6566 right_cleanups = integer_zero_node;
6567 new_cleanups = build (COND_EXPR, void_type_node,
6568 truthvalue_conversion (cond),
6569 left_cleanups, right_cleanups);
6570 new_cleanups = fold (new_cleanups);
6572 pop_obstacks ();
6574 /* Now add in the conditionalized cleanups. */
6575 cleanups_this_call
6576 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
6577 (*interim_eh_hook) (NULL_TREE);
6579 return temp;
6582 case TARGET_EXPR:
6584 int need_exception_region = 0;
6585 /* Something needs to be initialized, but we didn't know
6586 where that thing was when building the tree. For example,
6587 it could be the return value of a function, or a parameter
6588 to a function that is laid out on the stack, or a temporary
6589 variable which must be passed by reference.
6591 We guarantee that the expression will either be constructed
6592 or copied into our original target. */
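/* A typical instance (a sketch; the exact trees are built by the
   language front end): in C++, for
     S s = f ();
   a TARGET_EXPR with slot s lets f's return value be constructed
   directly into s's storage instead of into a temporary that would
   then have to be copied. */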
6594 tree slot = TREE_OPERAND (exp, 0);
6595 tree exp1;
6596 rtx temp;
6598 if (TREE_CODE (slot) != VAR_DECL)
6599 abort ();
6601 if (! ignore)
6602 target = original_target;
6604 if (target == 0)
6606 if (DECL_RTL (slot) != 0)
6608 target = DECL_RTL (slot);
6609 /* If we have already expanded the slot, don't do
6610 it again. (mrs) */
6611 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6612 return target;
6614 else
6616 target = assign_temp (type, 2, 1, 1);
6617 /* All temp slots at this level must not conflict. */
6618 preserve_temp_slots (target);
6619 DECL_RTL (slot) = target;
6621 /* Since SLOT is not known to the called function
6622 to belong to its stack frame, we must build an explicit
6623 cleanup. This case occurs when we must build up a reference
6624 to pass as an argument. In this case,
6625 it is very likely that such a reference need not be
6626 built here. */
6628 if (TREE_OPERAND (exp, 2) == 0)
6629 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
6630 if (TREE_OPERAND (exp, 2))
6632 cleanups_this_call = tree_cons (NULL_TREE,
6633 TREE_OPERAND (exp, 2),
6634 cleanups_this_call);
6635 need_exception_region = 1;
6639 else
6641 /* This case does occur when expanding a parameter which
6642 needs to be constructed on the stack. The target
6643 is the actual stack address that we want to initialize.
6644 The function we call will perform the cleanup in this case. */
6646 /* If we have already assigned it space, use that space,
6647 not the target that we were passed in, as our target
6648 parameter is only a hint. */
6649 if (DECL_RTL (slot) != 0)
6651 target = DECL_RTL (slot);
6652 /* If we have already expanded the slot, don't do
6653 it again. (mrs) */
6654 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6655 return target;
6658 DECL_RTL (slot) = target;
6661 exp1 = TREE_OPERAND (exp, 1);
6662 /* Mark it as expanded. */
6663 TREE_OPERAND (exp, 1) = NULL_TREE;
6665 temp = expand_expr (exp1, target, tmode, modifier);
6667 if (need_exception_region)
6668 (*interim_eh_hook) (NULL_TREE);
6670 return temp;
6673 case INIT_EXPR:
6675 tree lhs = TREE_OPERAND (exp, 0);
6676 tree rhs = TREE_OPERAND (exp, 1);
6677 tree noncopied_parts = 0;
6678 tree lhs_type = TREE_TYPE (lhs);
6680 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6681 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
6682 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
6683 TYPE_NONCOPIED_PARTS (lhs_type));
6684 while (noncopied_parts != 0)
6686 expand_assignment (TREE_VALUE (noncopied_parts),
6687 TREE_PURPOSE (noncopied_parts), 0, 0);
6688 noncopied_parts = TREE_CHAIN (noncopied_parts);
6690 return temp;
6693 case MODIFY_EXPR:
6695 /* If lhs is complex, expand calls in rhs before computing it.
6696 That's so we don't compute a pointer and save it over a call.
6697 If lhs is simple, compute it first so we can give it as a
6698 target if the rhs is just a call. This avoids an extra temp and copy,
6699 and prevents a partial subsumption that makes bad code.
6700 Actually we could treat component_ref's of vars like vars. */
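/* For instance (illustrative): for v = f () with V a simple
   VAR_DECL, V is computed first and handed down as the target, so
   the call stores its result directly into V; for a[i].x = f (),
   the call is expanded first so that the address of a[i].x is not
   computed and then kept live across the call. */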
6702 tree lhs = TREE_OPERAND (exp, 0);
6703 tree rhs = TREE_OPERAND (exp, 1);
6704 tree noncopied_parts = 0;
6705 tree lhs_type = TREE_TYPE (lhs);
6707 temp = 0;
6709 if (TREE_CODE (lhs) != VAR_DECL
6710 && TREE_CODE (lhs) != RESULT_DECL
6711 && TREE_CODE (lhs) != PARM_DECL)
6712 preexpand_calls (exp);
6714 /* Check for |= or &= of a bitfield of size one into another bitfield
6715 of size one. In this case, unless we need the result of the
6716 assignment, we can do this more efficiently with a
6717 test followed by an assignment, if necessary.
6719 ??? At this point, we can't get a BIT_FIELD_REF here. But if
6720 things change so we do, this code should be enhanced to
6721 support it. */
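/* Sketch of the shortcut (illustrative):
     struct { unsigned a : 1, b : 1; } s;
     s.a |= s.b;
   becomes: test s.b; if it is zero, skip; otherwise store 1 in s.a.
   This avoids a load-modify-store on the destination bitfield when
   the result of the assignment is not needed. */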
6722 if (ignore
6723 && TREE_CODE (lhs) == COMPONENT_REF
6724 && (TREE_CODE (rhs) == BIT_IOR_EXPR
6725 || TREE_CODE (rhs) == BIT_AND_EXPR)
6726 && TREE_OPERAND (rhs, 0) == lhs
6727 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
6728 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
6729 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
6731 rtx label = gen_label_rtx ();
6733 do_jump (TREE_OPERAND (rhs, 1),
6734 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
6735 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
6736 expand_assignment (lhs, convert (TREE_TYPE (rhs),
6737 (TREE_CODE (rhs) == BIT_IOR_EXPR
6738 ? integer_one_node
6739 : integer_zero_node)),
6740 0, 0);
6741 do_pending_stack_adjust ();
6742 emit_label (label);
6743 return const0_rtx;
6746 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
6747 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
6748 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
6749 TYPE_NONCOPIED_PARTS (lhs_type));
6751 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6752 while (noncopied_parts != 0)
6754 expand_assignment (TREE_PURPOSE (noncopied_parts),
6755 TREE_VALUE (noncopied_parts), 0, 0);
6756 noncopied_parts = TREE_CHAIN (noncopied_parts);
6758 return temp;
6761 case PREINCREMENT_EXPR:
6762 case PREDECREMENT_EXPR:
6763 return expand_increment (exp, 0);
6765 case POSTINCREMENT_EXPR:
6766 case POSTDECREMENT_EXPR:
6767 /* Faster to treat as pre-increment if result is not used. */
6768 return expand_increment (exp, ! ignore);
6770 case ADDR_EXPR:
6771 /* If nonzero, TEMP will be set to the address of something that might
6772 be a MEM corresponding to a stack slot. */
6773 temp = 0;
6775 /* Are we taking the address of a nested function? */
6776 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
6777 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
6779 op0 = trampoline_address (TREE_OPERAND (exp, 0));
6780 op0 = force_operand (op0, target);
6782 /* If we are taking the address of something erroneous, just
6783 return a zero. */
6784 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
6785 return const0_rtx;
6786 else
6788 /* We make sure to pass const0_rtx down if we came in with
6789 ignore set, to avoid doing the cleanups twice for something. */
6790 op0 = expand_expr (TREE_OPERAND (exp, 0),
6791 ignore ? const0_rtx : NULL_RTX, VOIDmode,
6792 (modifier == EXPAND_INITIALIZER
6793 ? modifier : EXPAND_CONST_ADDRESS));
6795 /* If we are going to ignore the result, OP0 will have been set
6796 to const0_rtx, so just return it. Don't get confused and
6797 think we are taking the address of the constant. */
6798 if (ignore)
6799 return op0;
6801 op0 = protect_from_queue (op0, 0);
6803 /* We would like the object in memory. If it is a constant,
6804 we can have it be statically allocated into memory. For
6805 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
6806 memory and store the value into it. */
6808 if (CONSTANT_P (op0))
6809 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6810 op0);
6811 else if (GET_CODE (op0) == MEM)
6813 mark_temp_addr_taken (op0);
6814 temp = XEXP (op0, 0);
6817 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6818 || GET_CODE (op0) == CONCAT)
6820 /* If this object is in a register, it must not
6821 be BLKmode. */
6822 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
6823 rtx memloc = assign_temp (inner_type, 1, 1, 1);
6825 mark_temp_addr_taken (memloc);
6826 emit_move_insn (memloc, op0);
6827 op0 = memloc;
6830 if (GET_CODE (op0) != MEM)
6831 abort ();
6833 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6835 temp = XEXP (op0, 0);
6836 #ifdef POINTERS_EXTEND_UNSIGNED
6837 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
6838 && mode == ptr_mode)
6839 temp = convert_memory_address (ptr_mode, temp);
6840 #endif
6841 return temp;
6844 op0 = force_operand (XEXP (op0, 0), target);
6847 if (flag_force_addr && GET_CODE (op0) != REG)
6848 op0 = force_reg (Pmode, op0);
6850 if (GET_CODE (op0) == REG
6851 && ! REG_USERVAR_P (op0))
6852 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
6854 /* If we might have had a temp slot, add an equivalent address
6855 for it. */
6856 if (temp != 0)
6857 update_temp_slot_address (temp, op0);
6859 #ifdef POINTERS_EXTEND_UNSIGNED
6860 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
6861 && mode == ptr_mode)
6862 op0 = convert_memory_address (ptr_mode, op0);
6863 #endif
6865 return op0;
6867 case ENTRY_VALUE_EXPR:
6868 abort ();
6870 /* COMPLEX type for Extended Pascal & Fortran. */
6871 case COMPLEX_EXPR:
6873 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6874 rtx insns;
6876 /* Get the rtx values of the operands. */
6877 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6878 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
6880 if (! target)
6881 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6883 start_sequence ();
6885 /* Move the real (op0) and imaginary (op1) parts to their location. */
6886 emit_move_insn (gen_realpart (mode, target), op0);
6887 emit_move_insn (gen_imagpart (mode, target), op1);
6889 insns = get_insns ();
6890 end_sequence ();
6892 /* Complex construction should appear as a single unit. */
6893 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
6894 each with a separate pseudo as destination.
6895 It's not correct for flow to treat them as a unit. */
6896 if (GET_CODE (target) != CONCAT)
6897 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
6898 else
6899 emit_insns (insns);
6901 return target;
6904 case REALPART_EXPR:
6905 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6906 return gen_realpart (mode, op0);
6908 case IMAGPART_EXPR:
6909 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6910 return gen_imagpart (mode, op0);
6912 case CONJ_EXPR:
6914 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6915 rtx imag_t;
6916 rtx insns;
6918 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6920 if (! target)
6921 target = gen_reg_rtx (mode);
6923 start_sequence ();
6925 /* Store the realpart and the negated imagpart to target. */
6926 emit_move_insn (gen_realpart (partmode, target),
6927 gen_realpart (partmode, op0));
6929 imag_t = gen_imagpart (partmode, target);
6930 temp = expand_unop (partmode, neg_optab,
6931 gen_imagpart (partmode, op0), imag_t, 0);
6932 if (temp != imag_t)
6933 emit_move_insn (imag_t, temp);
6935 insns = get_insns ();
6936 end_sequence ();
6938 /* Conjugate should appear as a single unit.
6939 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6940 each with a separate pseudo as destination.
6941 It's not correct for flow to treat them as a unit. */
6942 if (GET_CODE (target) != CONCAT)
6943 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
6944 else
6945 emit_insns (insns);
6947 return target;
6950 case ERROR_MARK:
6951 op0 = CONST0_RTX (tmode);
6952 if (op0 != 0)
6953 return op0;
6954 return const0_rtx;
6956 default:
6957 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
6960 /* Here to do an ordinary binary operator, generating an instruction
6961 from the optab already placed in `this_optab'. */
6962 binop:
6963 preexpand_calls (exp);
6964 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6965 subtarget = 0;
6966 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6967 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6968 binop2:
6969 temp = expand_binop (mode, this_optab, op0, op1, target,
6970 unsignedp, OPTAB_LIB_WIDEN);
6971 if (temp == 0)
6972 abort ();
6973 return temp;
6977 /* Emit bytecode to evaluate the given expression EXP to the stack. */
6978 void
6979 bc_expand_expr (exp)
6980 tree exp;
6982 enum tree_code code;
6983 tree type, arg0;
6984 rtx r;
6985 struct binary_operator *binoptab;
6986 struct unary_operator *unoptab;
6987 struct increment_operator *incroptab;
6988 struct bc_label *lab, *lab1;
6989 enum bytecode_opcode opcode;
6992 code = TREE_CODE (exp);
6994 switch (code)
6996 case PARM_DECL:
6998 if (DECL_RTL (exp) == 0)
7000 error_with_decl (exp, "prior parameter's size depends on `%s'");
7001 return;
7004 bc_load_parmaddr (DECL_RTL (exp));
7005 bc_load_memory (TREE_TYPE (exp), exp);
7007 return;
7009 case VAR_DECL:
7011 if (DECL_RTL (exp) == 0)
7012 abort ();
7014 #if 0
7015 if (BYTECODE_LABEL (DECL_RTL (exp)))
7016 bc_load_externaddr (DECL_RTL (exp));
7017 else
7018 bc_load_localaddr (DECL_RTL (exp));
7019 #endif
7020 if (TREE_PUBLIC (exp))
7021 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
7022 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
7023 else
7024 bc_load_localaddr (DECL_RTL (exp));
7026 bc_load_memory (TREE_TYPE (exp), exp);
7027 return;
7029 case INTEGER_CST:
7031 #ifdef DEBUG_PRINT_CODE
7032 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
7033 #endif
7034 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
7035 ? SImode
7036 : TYPE_MODE (TREE_TYPE (exp)))],
7037 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
7038 return;
7040 case REAL_CST:
7042 #if 0
7043 #ifdef DEBUG_PRINT_CODE
7044 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
7045 #endif
7046 /* FIX THIS: find a better way to pass real_cst's. -bson */
7047 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
7048 (double) TREE_REAL_CST (exp));
7049 #else
7050 abort ();
7051 #endif
7053 return;
7055 case CALL_EXPR:
7057 /* We build a call description vector describing the type of
7058 the return value and of the arguments; this call vector,
7059 together with a pointer to a location for the return value
7060 and the base of the argument list, is passed to the low
7061 level machine dependent call subroutine, which is responsible
7062 for putting the arguments wherever real functions expect
7063 them, as well as getting the return value back. */
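/* Rough shape of the vector built below (illustrative): for a call
   f (i, d) the calldesc holds, as integers,
     { nargs, ret-type code, ret size,
       type code and size for I, type code and size for D },
   i.e. the argument count followed by a type-code/size pair for the
   return value and for each argument, in order. */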
7065 tree calldesc = 0, arg;
7066 int nargs = 0, i;
7067 rtx retval;
7069 /* Push the evaluated args on the evaluation stack in reverse
7070 order. Also make an entry for each arg in the calldesc
7071 vector while we're at it. */
7073 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7075 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
7077 ++nargs;
7078 bc_expand_expr (TREE_VALUE (arg));
7080 calldesc = tree_cons ((tree) 0,
7081 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
7082 calldesc);
7083 calldesc = tree_cons ((tree) 0,
7084 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
7085 calldesc);
7088 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7090 /* Allocate a location for the return value and push its
7091 address on the evaluation stack. Also make an entry
7092 at the front of the calldesc for the return value type. */
7094 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7095 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
7096 bc_load_localaddr (retval);
7098 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
7099 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
7101 /* Prepend the argument count. */
7102 calldesc = tree_cons ((tree) 0,
7103 build_int_2 (nargs, 0),
7104 calldesc);
7106 /* Push the address of the call description vector on the stack. */
7107 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
7108 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
7109 build_index_type (build_int_2 (nargs * 2, 0)));
7110 r = output_constant_def (calldesc);
7111 bc_load_externaddr (r);
7113 /* Push the address of the function to be called. */
7114 bc_expand_expr (TREE_OPERAND (exp, 0));
7116 /* Call the function, popping its address and the calldesc vector
7117 address off the evaluation stack in the process. */
7118 bc_emit_instruction (call);
7120 /* Pop the arguments off the stack. */
7121 bc_adjust_stack (nargs);
7123 /* Load the return value onto the stack. */
7124 bc_load_localaddr (retval);
7125 bc_load_memory (type, TREE_OPERAND (exp, 0));
7127 return;
7129 case SAVE_EXPR:
7131 if (!SAVE_EXPR_RTL (exp))
7133 /* First time around: copy to a local variable. */
7134 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
7135 TYPE_ALIGN (TREE_TYPE(exp)));
7136 bc_expand_expr (TREE_OPERAND (exp, 0));
7137 bc_emit_instruction (duplicate);
7139 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7140 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7142 else
7144 /* Consecutive reference: use the saved copy. */
7145 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7146 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7148 return;
7150 #if 0
7151 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
7152 how are they handled instead? */
7153 case LET_STMT:
7155 TREE_USED (exp) = 1;
7156 bc_expand_expr (STMT_BODY (exp));
7157 return;
7158 #endif
7160 case NOP_EXPR:
7161 case CONVERT_EXPR:
7163 bc_expand_expr (TREE_OPERAND (exp, 0));
7164 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
7165 return;
7167 case MODIFY_EXPR:
7169 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
7170 return;
7172 case ADDR_EXPR:
7174 bc_expand_address (TREE_OPERAND (exp, 0));
7175 return;
7177 case INDIRECT_REF:
7179 bc_expand_expr (TREE_OPERAND (exp, 0));
7180 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7181 return;
7183 case ARRAY_REF:
7185 bc_expand_expr (bc_canonicalize_array_ref (exp));
7186 return;
7188 case COMPONENT_REF:
7190 bc_expand_component_address (exp);
7192 /* If we have a bitfield, generate a proper load. */
7193 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
7194 return;
7196 case COMPOUND_EXPR:
7198 bc_expand_expr (TREE_OPERAND (exp, 0));
7199 bc_emit_instruction (drop);
7200 bc_expand_expr (TREE_OPERAND (exp, 1));
7201 return;
7203 case COND_EXPR:
7205 bc_expand_expr (TREE_OPERAND (exp, 0));
7206 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7207 lab = bc_get_bytecode_label ();
7208 bc_emit_bytecode (xjumpifnot);
7209 bc_emit_bytecode_labelref (lab);
7211 #ifdef DEBUG_PRINT_CODE
7212 fputc ('\n', stderr);
7213 #endif
7214 bc_expand_expr (TREE_OPERAND (exp, 1));
7215 lab1 = bc_get_bytecode_label ();
7216 bc_emit_bytecode (jump);
7217 bc_emit_bytecode_labelref (lab1);
7219 #ifdef DEBUG_PRINT_CODE
7220 fputc ('\n', stderr);
7221 #endif
7223 bc_emit_bytecode_labeldef (lab);
7224 bc_expand_expr (TREE_OPERAND (exp, 2));
7225 bc_emit_bytecode_labeldef (lab1);
7226 return;
7228 case TRUTH_ANDIF_EXPR:
7230 opcode = xjumpifnot;
7231 goto andorif;
7233 case TRUTH_ORIF_EXPR:
7235 opcode = xjumpif;
7236 goto andorif;
7238 case PLUS_EXPR:
7240 binoptab = optab_plus_expr;
7241 goto binop;
7243 case MINUS_EXPR:
7245 binoptab = optab_minus_expr;
7246 goto binop;
7248 case MULT_EXPR:
7250 binoptab = optab_mult_expr;
7251 goto binop;
7253 case TRUNC_DIV_EXPR:
7254 case FLOOR_DIV_EXPR:
7255 case CEIL_DIV_EXPR:
7256 case ROUND_DIV_EXPR:
7257 case EXACT_DIV_EXPR:
7259 binoptab = optab_trunc_div_expr;
7260 goto binop;
7262 case TRUNC_MOD_EXPR:
7263 case FLOOR_MOD_EXPR:
7264 case CEIL_MOD_EXPR:
7265 case ROUND_MOD_EXPR:
7267 binoptab = optab_trunc_mod_expr;
7268 goto binop;
7270 case FIX_ROUND_EXPR:
7271 case FIX_FLOOR_EXPR:
7272 case FIX_CEIL_EXPR:
7273 abort (); /* Not used for C. */
7275 case FIX_TRUNC_EXPR:
7276 case FLOAT_EXPR:
7277 case MAX_EXPR:
7278 case MIN_EXPR:
7279 case FFS_EXPR:
7280 case LROTATE_EXPR:
7281 case RROTATE_EXPR:
7282 abort (); /* FIXME */
7284 case RDIV_EXPR:
7286 binoptab = optab_rdiv_expr;
7287 goto binop;
7289 case BIT_AND_EXPR:
7291 binoptab = optab_bit_and_expr;
7292 goto binop;
7294 case BIT_IOR_EXPR:
7296 binoptab = optab_bit_ior_expr;
7297 goto binop;
7299 case BIT_XOR_EXPR:
7301 binoptab = optab_bit_xor_expr;
7302 goto binop;
7304 case LSHIFT_EXPR:
7306 binoptab = optab_lshift_expr;
7307 goto binop;
7309 case RSHIFT_EXPR:
7311 binoptab = optab_rshift_expr;
7312 goto binop;
7314 case TRUTH_AND_EXPR:
7316 binoptab = optab_truth_and_expr;
7317 goto binop;
7319 case TRUTH_OR_EXPR:
7321 binoptab = optab_truth_or_expr;
7322 goto binop;
7324 case LT_EXPR:
7326 binoptab = optab_lt_expr;
7327 goto binop;
7329 case LE_EXPR:
7331 binoptab = optab_le_expr;
7332 goto binop;
7334 case GE_EXPR:
7336 binoptab = optab_ge_expr;
7337 goto binop;
7339 case GT_EXPR:
7341 binoptab = optab_gt_expr;
7342 goto binop;
7344 case EQ_EXPR:
7346 binoptab = optab_eq_expr;
7347 goto binop;
7349 case NE_EXPR:
7351 binoptab = optab_ne_expr;
7352 goto binop;
7354 case NEGATE_EXPR:
7356 unoptab = optab_negate_expr;
7357 goto unop;
7359 case BIT_NOT_EXPR:
7361 unoptab = optab_bit_not_expr;
7362 goto unop;
7364 case TRUTH_NOT_EXPR:
7366 unoptab = optab_truth_not_expr;
7367 goto unop;
7369 case PREDECREMENT_EXPR:
7371 incroptab = optab_predecrement_expr;
7372 goto increment;
7374 case PREINCREMENT_EXPR:
7376 incroptab = optab_preincrement_expr;
7377 goto increment;
7379 case POSTDECREMENT_EXPR:
7381 incroptab = optab_postdecrement_expr;
7382 goto increment;
7384 case POSTINCREMENT_EXPR:
7386 incroptab = optab_postincrement_expr;
7387 goto increment;
7389 case CONSTRUCTOR:
7391 bc_expand_constructor (exp);
7392 return;
7394 case ERROR_MARK:
7395 case RTL_EXPR:
7397 return;
7399 case BIND_EXPR:
7401 tree vars = TREE_OPERAND (exp, 0);
7402 int vars_need_expansion = 0;
7404 /* Need to open a binding contour here because
7405 if there are any cleanups they must be contained here.  */
7406 expand_start_bindings (0);
7408 /* Mark the corresponding BLOCK for output. */
7409 if (TREE_OPERAND (exp, 2) != 0)
7410 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
7412 /* If VARS have not yet been expanded, expand them now. */
7413 while (vars)
7415 if (DECL_RTL (vars) == 0)
7417 vars_need_expansion = 1;
7418 expand_decl (vars);
7420 expand_decl_init (vars);
7421 vars = TREE_CHAIN (vars);
7424 bc_expand_expr (TREE_OPERAND (exp, 1));
7426 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7428 return;
7432 abort ();
7434 binop:
7436 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
7437 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
7438 return;
7441 unop:
7443 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7444 return;
7447 andorif:
7449 bc_expand_expr (TREE_OPERAND (exp, 0));
7450 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7451 lab = bc_get_bytecode_label ();
7453 bc_emit_instruction (duplicate);
7454 bc_emit_bytecode (opcode);
7455 bc_emit_bytecode_labelref (lab);
7457 #ifdef DEBUG_PRINT_CODE
7458 fputc ('\n', stderr);
7459 #endif
7461 bc_emit_instruction (drop);
7463 bc_expand_expr (TREE_OPERAND (exp, 1));
7464 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
7465 bc_emit_bytecode_labeldef (lab);
7466 return;
7469 increment:
7471 type = TREE_TYPE (TREE_OPERAND (exp, 0));
7473 /* Push the quantum. */
7474 bc_expand_expr (TREE_OPERAND (exp, 1));
7476 /* Convert it to the lvalue's type. */
7477 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
7479 /* Push the address of the lvalue.  */
7480 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
7482 /* Perform the actual increment.  */
7483 bc_expand_increment (incroptab, type);
7484 return;
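/* Illustration (editorial sketch of the sequence above): for a
   preincrement such as `++i' on an int I, the code pushes the constant
   1, converts it to I's type, pushes &I, and then emits the
   preincrement opcode, leaving the incremented value on the bytecode
   stack.  */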
7487 /* Return the alignment in bits of EXP, a pointer valued expression.
7488 But don't return more than MAX_ALIGN no matter what.
7489 The alignment returned is, by default, the alignment of the thing that
7490 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7492 Otherwise, look at the expression to see if we can do better, i.e., if the
7493 expression is actually pointing at an object whose alignment is tighter. */
7495 static int
7496 get_pointer_alignment (exp, max_align)
7497 tree exp;
7498 unsigned max_align;
7500 unsigned align, inner;
7502 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7503 return 0;
7505 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7506 align = MIN (align, max_align);
7508 while (1)
7510 switch (TREE_CODE (exp))
7512 case NOP_EXPR:
7513 case CONVERT_EXPR:
7514 case NON_LVALUE_EXPR:
7515 exp = TREE_OPERAND (exp, 0);
7516 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7517 return align;
7518 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7519 align = MIN (inner, max_align);
7520 break;
7522 case PLUS_EXPR:
7523 /* If sum of pointer + int, restrict our maximum alignment to that
7524 imposed by the integer. If not, we can't do any better than
7525 ALIGN. */
7526 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7527 return align;
7529 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7530 & (max_align - 1))
7531 != 0)
7532 max_align >>= 1;
7534 exp = TREE_OPERAND (exp, 0);
7535 break;
7537 case ADDR_EXPR:
7538 /* See what we are pointing at and look at its alignment. */
7539 exp = TREE_OPERAND (exp, 0);
7540 if (TREE_CODE (exp) == FUNCTION_DECL)
7541 align = FUNCTION_BOUNDARY;
7542 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7543 align = DECL_ALIGN (exp);
7544 #ifdef CONSTANT_ALIGNMENT
7545 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7546 align = CONSTANT_ALIGNMENT (exp, align);
7547 #endif
7548 return MIN (align, max_align);
7550 default:
7551 return align;
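/* Worked example for the PLUS_EXPR case above: with BITS_PER_UNIT == 8,
   a constant byte offset of 2 is 16 bits, so a MAX_ALIGN of 64 is
   shifted 64 -> 32 -> 16 (16 & 63 and 16 & 31 are nonzero, 16 & 15 is
   zero) and the returned alignment is capped at 16 bits.  */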
7556 /* Return the tree node and offset if a given argument corresponds to
7557 a string constant. */
7559 static tree
7560 string_constant (arg, ptr_offset)
7561 tree arg;
7562 tree *ptr_offset;
7564 STRIP_NOPS (arg);
7566 if (TREE_CODE (arg) == ADDR_EXPR
7567 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7569 *ptr_offset = integer_zero_node;
7570 return TREE_OPERAND (arg, 0);
7572 else if (TREE_CODE (arg) == PLUS_EXPR)
7574 tree arg0 = TREE_OPERAND (arg, 0);
7575 tree arg1 = TREE_OPERAND (arg, 1);
7577 STRIP_NOPS (arg0);
7578 STRIP_NOPS (arg1);
7580 if (TREE_CODE (arg0) == ADDR_EXPR
7581 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7583 *ptr_offset = arg1;
7584 return TREE_OPERAND (arg0, 0);
7586 else if (TREE_CODE (arg1) == ADDR_EXPR
7587 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7589 *ptr_offset = arg0;
7590 return TREE_OPERAND (arg1, 0);
7594 return 0;
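/* Example: for the argument `"hello" + 2', ARG0 is the ADDR_EXPR of the
   STRING_CST "hello" and ARG1 is the INTEGER_CST 2, so the STRING_CST
   is returned with *PTR_OFFSET set to 2.  */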
7597 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7598 way, because it could contain a zero byte in the middle.
7599 TREE_STRING_LENGTH is the size of the character array, not the string.
7601 Unfortunately, string_constant can't access the values of const char
7602 arrays with initializers, so neither can we here.  */
7604 static tree
7605 c_strlen (src)
7606 tree src;
7608 tree offset_node;
7609 int offset, max;
7610 char *ptr;
7612 src = string_constant (src, &offset_node);
7613 if (src == 0)
7614 return 0;
7615 max = TREE_STRING_LENGTH (src);
7616 ptr = TREE_STRING_POINTER (src);
7617 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7619 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7620 compute the offset to the following null if we don't know where to
7621 start searching for it. */
7622 int i;
7623 for (i = 0; i < max; i++)
7624 if (ptr[i] == 0)
7625 return 0;
7626 /* We don't know the starting offset, but we do know that the string
7627 has no internal zero bytes. We can assume that the offset falls
7628 within the bounds of the string; otherwise, the programmer deserves
7629 what he gets. Subtract the offset from the length of the string,
7630 and return that. */
7631 /* This would perhaps not be valid if we were dealing with named
7632 arrays in addition to literal string constants. */
7633 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7636 /* We have a known offset into the string. Start searching there for
7637 a null character. */
7638 if (offset_node == 0)
7639 offset = 0;
7640 else
7642 /* Did we get a long long offset? If so, punt. */
7643 if (TREE_INT_CST_HIGH (offset_node) != 0)
7644 return 0;
7645 offset = TREE_INT_CST_LOW (offset_node);
7647 /* If the offset is known to be out of bounds, warn, and call strlen at
7648 runtime. */
7649 if (offset < 0 || offset > max)
7651 warning ("offset outside bounds of constant string");
7652 return 0;
7654 /* Use strlen to search for the first zero byte. Since any strings
7655 constructed with build_string will have nulls appended, we win even
7656 if we get handed something like (char[4])"abcd".
7658 Since OFFSET is our starting index into the string, no further
7659 calculation is needed. */
7660 return size_int (strlen (ptr + offset));
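/* Example: c_strlen of `"foo" + 1' finds no internal zero byte among
   the first three characters, picks up OFFSET == 1, and returns
   size_int (strlen ("oo")) == 2.  For `"foo\0bar" + n' with a
   non-constant N it returns 0, since the length would depend on which
   side of the embedded null N falls.  */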
7663 rtx
7664 expand_builtin_return_addr (fndecl_code, count, tem)
7665 enum built_in_function fndecl_code;
7666 int count;
7667 rtx tem;
7669 int i;
7671 /* Some machines need special handling before we can access
7672 arbitrary frames. For example, on the sparc, we must first flush
7673 all register windows to the stack. */
7674 #ifdef SETUP_FRAME_ADDRESSES
7675 SETUP_FRAME_ADDRESSES ();
7676 #endif
7678 /* On the sparc, the return address is not in the frame, it is in a
7679 register. There is no way to access it off of the current frame
7680 pointer, but it can be accessed off the previous frame pointer by
7681 reading the value from the register window save area. */
7682 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7683 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
7684 count--;
7685 #endif
7687 /* Scan back COUNT frames to the specified frame. */
7688 for (i = 0; i < count; i++)
7690 /* Assume the dynamic chain pointer is in the word that the
7691 frame address points to, unless otherwise specified. */
7692 #ifdef DYNAMIC_CHAIN_ADDRESS
7693 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7694 #endif
7695 tem = memory_address (Pmode, tem);
7696 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7699 /* For __builtin_frame_address, return what we've got. */
7700 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
7701 return tem;
7703 /* For __builtin_return_address, get the return address from that
7704 frame. */
7705 #ifdef RETURN_ADDR_RTX
7706 tem = RETURN_ADDR_RTX (count, tem);
7707 #else
7708 tem = memory_address (Pmode,
7709 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7710 tem = gen_rtx (MEM, Pmode, tem);
7711 #endif
7712 return tem;
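/* Illustration (ignoring the machine-specific adjustments above): for
   `__builtin_frame_address (0)' COUNT is 0, so the loop never runs and
   the frame pointer passed in TEM comes straight back; for
   `__builtin_return_address (1)' one link of the dynamic chain is
   followed before the saved return address is loaded from that
   frame.  */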
7715 /* Expand an expression EXP that calls a built-in function,
7716 with result going to TARGET if that's convenient
7717 (and in mode MODE if that's convenient).
7718 SUBTARGET may be used as the target for computing one of EXP's operands.
7719 IGNORE is nonzero if the value is to be ignored. */
7721 #define CALLED_AS_BUILT_IN(NODE) \
7722 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
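/* For example, CALLED_AS_BUILT_IN is true for a call spelled
   `__builtin_strlen (s)' but false for plain `strlen (s)'; the
   `!optimize' early exits below therefore still expand the explicitly
   __builtin_-prefixed forms inline.  */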
7724 static rtx
7725 expand_builtin (exp, target, subtarget, mode, ignore)
7726 tree exp;
7727 rtx target;
7728 rtx subtarget;
7729 enum machine_mode mode;
7730 int ignore;
7732 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7733 tree arglist = TREE_OPERAND (exp, 1);
7734 rtx op0;
7735 rtx lab1, insns;
7736 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
7737 optab builtin_optab;
7739 switch (DECL_FUNCTION_CODE (fndecl))
7741 case BUILT_IN_ABS:
7742 case BUILT_IN_LABS:
7743 case BUILT_IN_FABS:
7744 /* build_function_call changes these into ABS_EXPR. */
7745 abort ();
7747 case BUILT_IN_SIN:
7748 case BUILT_IN_COS:
7749 /* Treat these like sqrt, but only if the user asks for them. */
7750 if (! flag_fast_math)
7751 break;
7752 case BUILT_IN_FSQRT:
7753 /* If not optimizing, call the library function. */
7754 if (! optimize)
7755 break;
7757 if (arglist == 0
7758 /* Arg could be wrong type if user redeclared this fcn wrong. */
7759 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
7760 break;
7762 /* Stabilize and compute the argument. */
7763 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
7764 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
7766 exp = copy_node (exp);
7767 arglist = copy_node (arglist);
7768 TREE_OPERAND (exp, 1) = arglist;
7769 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
7771 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7773 /* Make a suitable register to place result in. */
7774 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7776 emit_queue ();
7777 start_sequence ();
7779 switch (DECL_FUNCTION_CODE (fndecl))
7781 case BUILT_IN_SIN:
7782 builtin_optab = sin_optab; break;
7783 case BUILT_IN_COS:
7784 builtin_optab = cos_optab; break;
7785 case BUILT_IN_FSQRT:
7786 builtin_optab = sqrt_optab; break;
7787 default:
7788 abort ();
7791 /* Compute into TARGET.
7792 Set TARGET to wherever the result comes back. */
7793 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7794 builtin_optab, op0, target, 0);
7796 /* If we were unable to expand via the builtin, stop the
7797 sequence (without outputting the insns) and break, causing
7798 a call to the library function.  */
7799 if (target == 0)
7801 end_sequence ();
7802 break;
7805 /* Check the results by default. But if flag_fast_math is turned on,
7806 then assume sqrt will always be called with valid arguments. */
7808 if (! flag_fast_math)
7810 /* Don't define the builtin FP instructions
7811 if your machine is not IEEE. */
7812 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
7813 abort ();
7815 lab1 = gen_label_rtx ();
7817 /* Test the result; if it is NaN, set errno=EDOM because
7818 the argument was not in the domain. */
7819 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
7820 emit_jump_insn (gen_beq (lab1));
7822 #ifdef TARGET_EDOM
7824 #ifdef GEN_ERRNO_RTX
7825 rtx errno_rtx = GEN_ERRNO_RTX;
7826 #else
7827 rtx errno_rtx
7828 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
7829 #endif
7831 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
7833 #else
7834 /* We can't set errno=EDOM directly; let the library call do it.
7835 Pop the arguments right away in case the call gets deleted. */
7836 NO_DEFER_POP;
7837 expand_call (exp, target, 0);
7838 OK_DEFER_POP;
7839 #endif
7841 emit_label (lab1);
7844 /* Output the entire sequence. */
7845 insns = get_insns ();
7846 end_sequence ();
7847 emit_insns (insns);
7849 return target;
7851 /* __builtin_apply_args returns a block of memory allocated on
7852 the stack into which are stored the arg pointer, structure
7853 value address, static chain, and all the registers that might
7854 possibly be used in performing a function call. The code is
7855 moved to the start of the function so the incoming values are
7856 saved. */
7857 case BUILT_IN_APPLY_ARGS:
7858 /* Don't do __builtin_apply_args more than once in a function.
7859 Save the result of the first call and reuse it. */
7860 if (apply_args_value != 0)
7861 return apply_args_value;
7863 /* When this function is called, it means that registers must be
7864 saved on entry to this function. So we migrate the
7865 call to the first insn of this function. */
7866 rtx temp;
7867 rtx seq;
7869 start_sequence ();
7870 temp = expand_builtin_apply_args ();
7871 seq = get_insns ();
7872 end_sequence ();
7874 apply_args_value = temp;
7876 /* Put the sequence after the NOTE that starts the function.
7877 If this is inside a SEQUENCE, make the outer-level insn
7878 chain current, so the code is placed at the start of the
7879 function. */
7880 push_topmost_sequence ();
7881 emit_insns_before (seq, NEXT_INSN (get_insns ()));
7882 pop_topmost_sequence ();
7883 return temp;
7886 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7887 FUNCTION with a copy of the parameters described by
7888 ARGUMENTS, and ARGSIZE. It returns a block of memory
7889 allocated on the stack into which are stored all the registers
7890 that might possibly be used for returning the result of a
7891 function. ARGUMENTS is the value returned by
7892 __builtin_apply_args. ARGSIZE is the number of bytes of
7893 arguments that must be copied. ??? How should this value be
7894 computed? We'll also need a safe worst case value for varargs
7895 functions. */
7896 case BUILT_IN_APPLY:
7897 if (arglist == 0
7898 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7899 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7900 || TREE_CHAIN (arglist) == 0
7901 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7902 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7903 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7904 return const0_rtx;
7905 else
7907 int i;
7908 tree t;
7909 rtx ops[3];
7911 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
7912 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
7914 return expand_builtin_apply (ops[0], ops[1], ops[2]);
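/* Usage sketch (hypothetical code): the three builtins cooperate to
   forward a call, e.g.

	void *args = __builtin_apply_args ();
	void *result = __builtin_apply ((void (*)()) fn, args, 64);
	__builtin_return (result);

   where FN and the worst-case argument size 64 are placeholders.  */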
7917 /* __builtin_return (RESULT) causes the function to return the
7918 value described by RESULT. RESULT is address of the block of
7919 memory returned by __builtin_apply. */
7920 case BUILT_IN_RETURN:
7921 if (arglist
7922 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7923 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
7924 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
7925 NULL_RTX, VOIDmode, 0));
7926 return const0_rtx;
7928 case BUILT_IN_SAVEREGS:
7929 /* Don't do __builtin_saveregs more than once in a function.
7930 Save the result of the first call and reuse it. */
7931 if (saveregs_value != 0)
7932 return saveregs_value;
7934 /* When this function is called, it means that registers must be
7935 saved on entry to this function. So we migrate the
7936 call to the first insn of this function. */
7937 rtx temp;
7938 rtx seq;
7940 /* Now really call the function. `expand_call' does not call
7941 expand_builtin, so there is no danger of infinite recursion here. */
7942 start_sequence ();
7944 #ifdef EXPAND_BUILTIN_SAVEREGS
7945 /* Do whatever the machine needs done in this case. */
7946 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
7947 #else
7948 /* The register where the function returns its value
7949 is likely to have something else in it, such as an argument.
7950 So preserve that register around the call. */
7952 if (value_mode != VOIDmode)
7954 rtx valreg = hard_libcall_value (value_mode);
7955 rtx saved_valreg = gen_reg_rtx (value_mode);
7957 emit_move_insn (saved_valreg, valreg);
7958 temp = expand_call (exp, target, ignore);
7959 emit_move_insn (valreg, saved_valreg);
7961 else
7962 /* Generate the call, putting the value in a pseudo. */
7963 temp = expand_call (exp, target, ignore);
7964 #endif
7966 seq = get_insns ();
7967 end_sequence ();
7969 saveregs_value = temp;
7971 /* Put the sequence after the NOTE that starts the function.
7972 If this is inside a SEQUENCE, make the outer-level insn
7973 chain current, so the code is placed at the start of the
7974 function. */
7975 push_topmost_sequence ();
7976 emit_insns_before (seq, NEXT_INSN (get_insns ()));
7977 pop_topmost_sequence ();
7978 return temp;
7981 /* __builtin_args_info (N) returns word N of the arg space info
7982 for the current function. The number and meanings of words
7983 is controlled by the definition of CUMULATIVE_ARGS. */
7984 case BUILT_IN_ARGS_INFO:
7986 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
7987 int i;
7988 int *word_ptr = (int *) &current_function_args_info;
7989 tree type, elts, result;
7991 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
7992 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
7993 __FILE__, __LINE__);
7995 if (arglist != 0)
7997 tree arg = TREE_VALUE (arglist);
7998 if (TREE_CODE (arg) != INTEGER_CST)
7999 error ("argument of `__builtin_args_info' must be constant");
8000 else
8002 int wordnum = TREE_INT_CST_LOW (arg);
8004 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8005 error ("argument of `__builtin_args_info' out of range");
8006 else
8007 return GEN_INT (word_ptr[wordnum]);
8010 else
8011 error ("missing argument in `__builtin_args_info'");
8013 return const0_rtx;
8015 #if 0
8016 for (i = 0; i < nwords; i++)
8017 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
8019 type = build_array_type (integer_type_node,
8020 build_index_type (build_int_2 (nwords, 0)));
8021 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8022 TREE_CONSTANT (result) = 1;
8023 TREE_STATIC (result) = 1;
8024 result = build (INDIRECT_REF, build_pointer_type (type), result);
8025 TREE_CONSTANT (result) = 1;
8026 return expand_expr (result, NULL_RTX, VOIDmode, 0);
8027 #endif
8030 /* Return the address of the first anonymous stack arg. */
8031 case BUILT_IN_NEXT_ARG:
8033 tree fntype = TREE_TYPE (current_function_decl);
8035 if ((TYPE_ARG_TYPES (fntype) == 0
8036 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8037 == void_type_node))
8038 && ! current_function_varargs)
8040 error ("`va_start' used in function with fixed args");
8041 return const0_rtx;
8044 if (arglist)
8046 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8047 tree arg = TREE_VALUE (arglist);
8049 /* Strip off all nops for the sake of the comparison. This
8050 is not quite the same as STRIP_NOPS. It does more.
8051 We must also strip off INDIRECT_REF for C++ reference
8052 parameters. */
8053 while (TREE_CODE (arg) == NOP_EXPR
8054 || TREE_CODE (arg) == CONVERT_EXPR
8055 || TREE_CODE (arg) == NON_LVALUE_EXPR
8056 || TREE_CODE (arg) == INDIRECT_REF)
8057 arg = TREE_OPERAND (arg, 0);
8058 if (arg != last_parm)
8059 warning ("second parameter of `va_start' not last named argument");
8061 else if (! current_function_varargs)
8062 /* Evidently an out of date version of <stdarg.h>; can't validate
8063 va_start's second argument, but can still work as intended. */
8064 warning ("`__builtin_next_arg' called without an argument");
8067 return expand_binop (Pmode, add_optab,
8068 current_function_internal_arg_pointer,
8069 current_function_arg_offset_rtx,
8070 NULL_RTX, 0, OPTAB_LIB_WIDEN);
8072 case BUILT_IN_CLASSIFY_TYPE:
8073 if (arglist != 0)
8075 tree type = TREE_TYPE (TREE_VALUE (arglist));
8076 enum tree_code code = TREE_CODE (type);
8077 if (code == VOID_TYPE)
8078 return GEN_INT (void_type_class);
8079 if (code == INTEGER_TYPE)
8080 return GEN_INT (integer_type_class);
8081 if (code == CHAR_TYPE)
8082 return GEN_INT (char_type_class);
8083 if (code == ENUMERAL_TYPE)
8084 return GEN_INT (enumeral_type_class);
8085 if (code == BOOLEAN_TYPE)
8086 return GEN_INT (boolean_type_class);
8087 if (code == POINTER_TYPE)
8088 return GEN_INT (pointer_type_class);
8089 if (code == REFERENCE_TYPE)
8090 return GEN_INT (reference_type_class);
8091 if (code == OFFSET_TYPE)
8092 return GEN_INT (offset_type_class);
8093 if (code == REAL_TYPE)
8094 return GEN_INT (real_type_class);
8095 if (code == COMPLEX_TYPE)
8096 return GEN_INT (complex_type_class);
8097 if (code == FUNCTION_TYPE)
8098 return GEN_INT (function_type_class);
8099 if (code == METHOD_TYPE)
8100 return GEN_INT (method_type_class);
8101 if (code == RECORD_TYPE)
8102 return GEN_INT (record_type_class);
8103 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8104 return GEN_INT (union_type_class);
8105 if (code == ARRAY_TYPE)
8107 if (TYPE_STRING_FLAG (type))
8108 return GEN_INT (string_type_class);
8109 else
8110 return GEN_INT (array_type_class);
8112 if (code == SET_TYPE)
8113 return GEN_INT (set_type_class);
8114 if (code == FILE_TYPE)
8115 return GEN_INT (file_type_class);
8116 if (code == LANG_TYPE)
8117 return GEN_INT (lang_type_class);
8119 return GEN_INT (no_type_class);
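/* Example: `__builtin_classify_type (1.5)' folds to real_type_class,
   and `__builtin_classify_type ("x")' to pointer_type_class, since the
   string argument decays to a pointer before it reaches this code.  */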
8121 case BUILT_IN_CONSTANT_P:
8122 if (arglist == 0)
8123 return const0_rtx;
8124 else
8126 tree arg = TREE_VALUE (arglist);
8128 STRIP_NOPS (arg);
8129 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
8130 || (TREE_CODE (arg) == ADDR_EXPR
8131 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8132 ? const1_rtx : const0_rtx);
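/* Example: `__builtin_constant_p (3)' and `__builtin_constant_p ("abc")'
   expand to const1_rtx here, while `__builtin_constant_p (x)' for a
   variable X expands to const0_rtx.  */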
8135 case BUILT_IN_FRAME_ADDRESS:
8136 /* The argument must be a nonnegative integer constant.
8137 It counts the number of frames to scan up the stack.
8138 The value is the address of that frame. */
8139 case BUILT_IN_RETURN_ADDRESS:
8140 /* The argument must be a nonnegative integer constant.
8141 It counts the number of frames to scan up the stack.
8142 The value is the return address saved in that frame. */
8143 if (arglist == 0)
8144 /* Warning about missing arg was already issued. */
8145 return const0_rtx;
8146 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
8148 error ("invalid arg to `__builtin_return_address'");
8149 return const0_rtx;
8151 else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8153 error ("invalid arg to `__builtin_return_address'");
8154 return const0_rtx;
8156 else
8158 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8159 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8160 hard_frame_pointer_rtx);
8162 /* For __builtin_frame_address, return what we've got. */
8163 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8164 return tem;
8166 if (GET_CODE (tem) != REG)
8167 tem = copy_to_reg (tem);
8168 return tem;
8171 case BUILT_IN_ALLOCA:
8172 if (arglist == 0
8173 /* Arg could be non-integer if user redeclared this fcn wrong. */
8174 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8175 break;
8177 /* Compute the argument. */
8178 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8180 /* Allocate the desired space. */
8181 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
8183 case BUILT_IN_FFS:
8184 /* If not optimizing, call the library function. */
8185 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8186 break;
8188 if (arglist == 0
8189 /* Arg could be non-integer if user redeclared this fcn wrong. */
8190 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8191 break;
8193 /* Compute the argument. */
8194 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8195 /* Compute ffs, into TARGET if possible.
8196 Set TARGET to wherever the result comes back. */
8197 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8198 ffs_optab, op0, target, 1);
8199 if (target == 0)
8200 abort ();
8201 return target;
8203 case BUILT_IN_STRLEN:
8204 /* If not optimizing, call the library function. */
8205 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8206 break;
8208 if (arglist == 0
8209 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8210 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8211 break;
8212 else
8214 tree src = TREE_VALUE (arglist);
8215 tree len = c_strlen (src);
8217 int align
8218 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8220 rtx result, src_rtx, char_rtx;
8221 enum machine_mode insn_mode = value_mode, char_mode;
8222 enum insn_code icode;
8224 /* If the length is known, just return it. */
8225 if (len != 0)
8226 return expand_expr (len, target, mode, 0);
8228 /* If SRC is not a pointer type, don't do this operation inline. */
8229 if (align == 0)
8230 break;
8232 /* Call a function if we can't compute strlen in the right mode. */
8234 while (insn_mode != VOIDmode)
8236 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8237 if (icode != CODE_FOR_nothing)
8238 break;
8240 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8242 if (insn_mode == VOIDmode)
8243 break;
8245 /* Make a place to write the result of the instruction. */
8246 result = target;
8247 if (! (result != 0
8248 && GET_CODE (result) == REG
8249 && GET_MODE (result) == insn_mode
8250 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8251 result = gen_reg_rtx (insn_mode);
8253 /* Make sure the operands are acceptable to the predicates. */
8255 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8256 result = gen_reg_rtx (insn_mode);
8258 src_rtx = memory_address (BLKmode,
8259 expand_expr (src, NULL_RTX, ptr_mode,
8260 EXPAND_NORMAL));
8261 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8262 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
8264 char_rtx = const0_rtx;
8265 char_mode = insn_operand_mode[(int)icode][2];
8266 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8267 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
8269 emit_insn (GEN_FCN (icode) (result,
8270 gen_rtx (MEM, BLKmode, src_rtx),
8271 char_rtx, GEN_INT (align)));
8273 /* Return the value in the proper mode for this function. */
8274 if (GET_MODE (result) == value_mode)
8275 return result;
8276 else if (target != 0)
8278 convert_move (target, result, 0);
8279 return target;
8281 else
8282 return convert_to_mode (value_mode, result, 0);
8285 case BUILT_IN_STRCPY:
8286 /* If not optimizing, call the library function. */
8287 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8288 break;
8290 if (arglist == 0
8291 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8292 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8293 || TREE_CHAIN (arglist) == 0
8294 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8295 break;
8296 else
8298 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
8300 if (len == 0)
8301 break;
8303 len = size_binop (PLUS_EXPR, len, integer_one_node);
8305 chainon (arglist, build_tree_list (NULL_TREE, len));
8308 /* Falls through to the memcpy expansion below.  */
8309 case BUILT_IN_MEMCPY:
8310 /* If not optimizing, call the library function. */
8311 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8312 break;
8314 if (arglist == 0
8315 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8316 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8317 || TREE_CHAIN (arglist) == 0
8318 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8319 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8320 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8321 break;
8322 else
8324 tree dest = TREE_VALUE (arglist);
8325 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8326 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8327 tree type;
8329 int src_align
8330 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8331 int dest_align
8332 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8333 rtx dest_rtx, dest_mem, src_mem;
8335 /* If either SRC or DEST is not a pointer type, don't do
8336 this operation in-line. */
8337 if (src_align == 0 || dest_align == 0)
8339 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8340 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8341 break;
8344 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8345 dest_mem = gen_rtx (MEM, BLKmode,
8346 memory_address (BLKmode, dest_rtx));
8347 /* There could be a void* cast on top of the object. */
8348 while (TREE_CODE (dest) == NOP_EXPR)
8349 dest = TREE_OPERAND (dest, 0);
8350 type = TREE_TYPE (TREE_TYPE (dest));
8351 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8352 src_mem = gen_rtx (MEM, BLKmode,
8353 memory_address (BLKmode,
8354 expand_expr (src, NULL_RTX,
8355 ptr_mode,
8356 EXPAND_SUM)));
8357 /* There could be a void* cast on top of the object. */
8358 while (TREE_CODE (src) == NOP_EXPR)
8359 src = TREE_OPERAND (src, 0);
8360 type = TREE_TYPE (TREE_TYPE (src));
8361 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
8363 /* Copy word part most expediently. */
8364 emit_block_move (dest_mem, src_mem,
8365 expand_expr (len, NULL_RTX, VOIDmode, 0),
8366 MIN (src_align, dest_align));
8367 return force_operand (dest_rtx, NULL_RTX);
8370 /* These comparison functions need an instruction that returns an actual
8371 index. An ordinary compare that just sets the condition codes
8372 is not enough. */
8373 #ifdef HAVE_cmpstrsi
8374 case BUILT_IN_STRCMP:
8375 /* If not optimizing, call the library function. */
8376 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8377 break;
8379 if (arglist == 0
8380 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8381 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8382 || TREE_CHAIN (arglist) == 0
8383 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8384 break;
8385 else if (!HAVE_cmpstrsi)
8386 break;
8388 tree arg1 = TREE_VALUE (arglist);
8389 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8390 tree offset;
8391 tree len, len2;
8393 len = c_strlen (arg1);
8394 if (len)
8395 len = size_binop (PLUS_EXPR, integer_one_node, len);
8396 len2 = c_strlen (arg2);
8397 if (len2)
8398 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
8400 /* If we don't have a constant length for the first, use the length
8401 of the second, if we know it. We don't require a constant for
8402 this case; some cost analysis could be done if both are available
8403 but neither is constant. For now, assume they're equally cheap.
8405 If both strings have constant lengths, use the smaller. This
8406 could arise if optimization results in strcmp being called with
8407 two fixed strings, or if the code was machine-generated. We should
8408 add some code to the `memcmp' handler below to deal with such
8409 situations, someday. */
8410 if (!len || TREE_CODE (len) != INTEGER_CST)
8412 if (len2)
8413 len = len2;
8414 else if (len == 0)
8415 break;
8417 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
8419 if (tree_int_cst_lt (len2, len))
8420 len = len2;
8423 chainon (arglist, build_tree_list (NULL_TREE, len));
8426 /* Falls through to the memcmp expansion below.  */
8427 case BUILT_IN_MEMCMP:
8428 /* If not optimizing, call the library function. */
8429 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8430 break;
8432 if (arglist == 0
8433 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8434 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8435 || TREE_CHAIN (arglist) == 0
8436 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8437 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8438 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8439 break;
8440 else if (!HAVE_cmpstrsi)
8441 break;
8443 tree arg1 = TREE_VALUE (arglist);
8444 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8445 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8446 rtx result;
8448 int arg1_align
8449 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8450 int arg2_align
8451 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8452 enum machine_mode insn_mode
8453 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
8455 /* If either argument is not a pointer, call the library function.  */
8456 if (arg1_align == 0 || arg2_align == 0)
8458 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
8459 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8460 break;
8463 /* Make a place to write the result of the instruction. */
8464 result = target;
8465 if (! (result != 0
8466 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
8467 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8468 result = gen_reg_rtx (insn_mode);
8470 emit_insn (gen_cmpstrsi (result,
8471 gen_rtx (MEM, BLKmode,
8472 expand_expr (arg1, NULL_RTX,
8473 ptr_mode,
8474 EXPAND_NORMAL)),
8475 gen_rtx (MEM, BLKmode,
8476 expand_expr (arg2, NULL_RTX,
8477 ptr_mode,
8478 EXPAND_NORMAL)),
8479 expand_expr (len, NULL_RTX, VOIDmode, 0),
8480 GEN_INT (MIN (arg1_align, arg2_align))));
8482 /* Return the value in the proper mode for this function. */
8483 mode = TYPE_MODE (TREE_TYPE (exp));
8484 if (GET_MODE (result) == mode)
8485 return result;
8486 else if (target != 0)
8488 convert_move (target, result, 0);
8489 return target;
8491 else
8492 return convert_to_mode (mode, result, 0);
8494 #else
8495 case BUILT_IN_STRCMP:
8496 case BUILT_IN_MEMCMP:
8497 break;
8498 #endif
8500 /* __builtin_setjmp is passed a pointer to an array of five words
8501 (not all will be used on all machines). It operates similarly to
8502 the C library function of the same name, but is more efficient.
8503 Much of the code below (and for longjmp) is copied from the handling
8504 of non-local gotos.
8506 NOTE: This is intended for use by GNAT and will only work when
8507 used the way GNAT uses it.  This code will likely NOT survive to
8508 the GCC 2.8.0 release.
8509 case BUILT_IN_SETJMP:
8510 if (arglist == 0
8511 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8512 break;
8515 rtx buf_addr
8516 = force_reg (Pmode, expand_expr (TREE_VALUE (arglist), subtarget,
8517 VOIDmode, 0));
8518 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
8519 enum machine_mode sa_mode = Pmode;
8520 rtx stack_save;
8522 if (target == 0 || GET_CODE (target) != REG
8523 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8524 target = gen_reg_rtx (value_mode);
8526 emit_queue ();
8528 emit_note (NULL_PTR, NOTE_INSN_SETJMP);
8529 current_function_calls_setjmp = 1;
8531 /* We store the frame pointer and the address of lab1 in the buffer
8532 and use the rest of it for the stack save area, which is
8533 machine-dependent. */
8534 emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
8535 virtual_stack_vars_rtx);
8536 emit_move_insn
8537 (validize_mem (gen_rtx (MEM, Pmode,
8538 plus_constant (buf_addr,
8539 GET_MODE_SIZE (Pmode)))),
8540 gen_rtx (LABEL_REF, Pmode, lab1));
8542 #ifdef HAVE_save_stack_nonlocal
8543 if (HAVE_save_stack_nonlocal)
8544 sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
8545 #endif
8547 stack_save = gen_rtx (MEM, sa_mode,
8548 plus_constant (buf_addr,
8549 2 * GET_MODE_SIZE (Pmode)));
8550 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8552 /* Set TARGET to zero and branch around the other case. */
8553 emit_move_insn (target, const0_rtx);
8554 emit_jump_insn (gen_jump (lab2));
8555 emit_barrier ();
8556 emit_label (lab1);
8558 /* Now put in the code to restore the frame pointer, and argument
8559 pointer, if needed. The code below is from expand_end_bindings
8560 in stmt.c; see detailed documentation there. */
8561 #ifdef HAVE_nonlocal_goto
8562 if (! HAVE_nonlocal_goto)
8563 #endif
8564 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8566 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8567 if (fixed_regs[ARG_POINTER_REGNUM])
8569 #ifdef ELIMINABLE_REGS
8570 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8571 int i;
8573 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8574 if (elim_regs[i].from == ARG_POINTER_REGNUM
8575 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8576 break;
8578 if (i == sizeof elim_regs / sizeof elim_regs [0])
8579 #endif
8581 /* Now restore our arg pointer from the address at which it
8582 was saved in our stack frame.
8583 If space hasn't been allocated for it yet, make
8584 some now. */
8585 if (arg_pointer_save_area == 0)
8586 arg_pointer_save_area
8587 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8588 emit_move_insn (virtual_incoming_args_rtx,
8589 copy_to_reg (arg_pointer_save_area));
8592 #endif
8594 /* The result to return is in the static chain pointer. */
8595 if (GET_MODE (static_chain_rtx) == GET_MODE (target))
8596 emit_move_insn (target, static_chain_rtx);
8597 else
8598 convert_move (target, static_chain_rtx, 0);
8600 emit_label (lab2);
8601 return target;
8604 /* __builtin_longjmp is passed a pointer to an array of five words
8605 and a value to return. It's similar to the C library longjmp
8606 function but works with __builtin_setjmp above. */
8607 case BUILT_IN_LONGJMP:
8608 if (arglist == 0 || TREE_CHAIN (arglist) == 0
8609 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8610 break;
8613 rtx buf_addr
8614 = force_reg (Pmode, expand_expr (TREE_VALUE (arglist), NULL_RTX,
8615 VOIDmode, 0));
8616 rtx fp = gen_rtx (MEM, Pmode, buf_addr);
8617 rtx lab = gen_rtx (MEM, Pmode,
8618 plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
8619 enum machine_mode sa_mode
8620 #ifdef HAVE_save_stack_nonlocal
8621 = (HAVE_save_stack_nonlocal
8622 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
8623 : Pmode);
8624 #else
8625 = Pmode;
8626 #endif
8627 rtx stack = gen_rtx (MEM, sa_mode,
8628 plus_constant (buf_addr,
8629 2 * GET_MODE_SIZE (Pmode)));
8630 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), NULL_RTX,
8631 VOIDmode, 0);
8633 /* Pick up FP, label, and SP from the block and jump. This code is
8634 from expand_goto in stmt.c; see there for detailed comments. */
8635 #ifdef HAVE_nonlocal_goto
8636 if (HAVE_nonlocal_goto)
8637 emit_insn (gen_nonlocal_goto (fp, lab, stack, value));
8638 else
8639 #endif
8641 emit_move_insn (hard_frame_pointer_rtx, fp);
8642 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
8644 /* Put the return value in the static chain register.  */
8645 emit_move_insn (static_chain_rtx, value);
8646 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
8647 emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
8648 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
8649 emit_indirect_jump (copy_to_reg (lab));
8652 return const0_rtx;
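/* Usage sketch (hypothetical code): the pair works like the library
   routines but with an explicit five-word buffer.

	void *buf[5];
	if (__builtin_setjmp (buf) == 0)
	  __builtin_longjmp (buf, 1);
	else
	  handle_resume ();

   Control re-enters after the setjmp with the longjmp value (here 1)
   delivered through the static chain register; handle_resume is a
   placeholder.  Only the GNAT-style use noted above is supported.  */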
8655 default: /* just do library call, if unknown builtin */
8656 error ("built-in function `%s' not currently supported",
8657 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
8660 /* The switch statement above can drop through to cause the function
8661 to be called normally. */
8663 return expand_call (exp, target, ignore);
8666 /* Built-in functions to perform an untyped call and return. */
8668 /* For each register that may be used for calling a function, this
8669 gives a mode used to copy the register's value. VOIDmode indicates
8670 the register is not used for calling a function. If the machine
8671 has register windows, this gives only the outbound registers.
8672 INCOMING_REGNO gives the corresponding inbound register. */
8673 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
8675 /* For each register that may be used for returning values, this gives
8676 a mode used to copy the register's value. VOIDmode indicates the
8677 register is not used for returning values. If the machine has
8678 register windows, this gives only the outbound registers.
8679 INCOMING_REGNO gives the corresponding inbound register. */
8680 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
8682 /* For each register that may be used for calling a function, this
8683 gives the offset of that register into the block returned by
8684 __builtin_apply_args. 0 indicates that the register is not
8685 used for calling a function. */
8686 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
8688 /* Return the offset of register REGNO into the block returned by
8689 __builtin_apply_args. This is not declared static, since it is
8690 needed in objc-act.c. */
8692 int
8693 apply_args_register_offset (regno)
8694 int regno;
8696 apply_args_size ();
8698 /* Arguments are always put in outgoing registers (in the argument
8699 block) when that makes sense.  */
8700 #ifdef OUTGOING_REGNO
8701 regno = OUTGOING_REGNO(regno);
8702 #endif
8703 return apply_args_reg_offset[regno];
8706 /* Return the size required for the block returned by __builtin_apply_args,
8707 and initialize apply_args_mode. */
8709 static int
8710 apply_args_size ()
8712 static int size = -1;
8713 int align, regno;
8714 enum machine_mode mode;
8716 /* The values computed by this function never change. */
8717 if (size < 0)
8719 /* The first value is the incoming arg-pointer. */
8720 size = GET_MODE_SIZE (Pmode);
8722 /* The second value is the structure value address unless this is
8723 passed as an "invisible" first argument. */
8724 if (struct_value_rtx)
8725 size += GET_MODE_SIZE (Pmode);
8727 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8728 if (FUNCTION_ARG_REGNO_P (regno))
8730 /* Search for the proper mode for copying this register's
8731 value. I'm not sure this is right, but it works so far. */
8732 enum machine_mode best_mode = VOIDmode;
8734 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
8735 mode != VOIDmode;
8736 mode = GET_MODE_WIDER_MODE (mode))
8737 if (HARD_REGNO_MODE_OK (regno, mode)
8738 && HARD_REGNO_NREGS (regno, mode) == 1)
8739 best_mode = mode;
8741 if (best_mode == VOIDmode)
8742 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
8743 mode != VOIDmode;
8744 mode = GET_MODE_WIDER_MODE (mode))
8745 if (HARD_REGNO_MODE_OK (regno, mode)
8746 && (mov_optab->handlers[(int) mode].insn_code
8747 != CODE_FOR_nothing))
8748 best_mode = mode;
8750 mode = best_mode;
8751 if (mode == VOIDmode)
8752 abort ();
8754 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8755 if (size % align != 0)
8756 size = CEIL (size, align) * align;
8757 apply_args_reg_offset[regno] = size;
8758 size += GET_MODE_SIZE (mode);
8759 apply_args_mode[regno] = mode;
8761 else
8763 apply_args_mode[regno] = VOIDmode;
8764 apply_args_reg_offset[regno] = 0;
8767 return size;
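/* Worked example (hypothetical target): with a 4-byte Pmode, no
   struct_value_rtx, and two 4-byte integer argument registers, SIZE
   starts at 4 for the arg pointer and ends up 12, with
   apply_args_reg_offset[] recording offsets 4 and 8 for the two
   registers.  */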
8770 /* Return the size required for the block returned by __builtin_apply,
8771 and initialize apply_result_mode. */
8773 static int
8774 apply_result_size ()
8776 static int size = -1;
8777 int align, regno;
8778 enum machine_mode mode;
8780 /* The values computed by this function never change. */
8781 if (size < 0)
8783 size = 0;
8785 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8786 if (FUNCTION_VALUE_REGNO_P (regno))
8788 /* Search for the proper mode for copying this register's
8789 value. I'm not sure this is right, but it works so far. */
8790 enum machine_mode best_mode = VOIDmode;
8792 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
8793 mode != TImode;
8794 mode = GET_MODE_WIDER_MODE (mode))
8795 if (HARD_REGNO_MODE_OK (regno, mode))
8796 best_mode = mode;
8798 if (best_mode == VOIDmode)
8799 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
8800 mode != VOIDmode;
8801 mode = GET_MODE_WIDER_MODE (mode))
8802 if (HARD_REGNO_MODE_OK (regno, mode)
8803 && (mov_optab->handlers[(int) mode].insn_code
8804 != CODE_FOR_nothing))
8805 best_mode = mode;
8807 mode = best_mode;
8808 if (mode == VOIDmode)
8809 abort ();
8811 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8812 if (size % align != 0)
8813 size = CEIL (size, align) * align;
8814 size += GET_MODE_SIZE (mode);
8815 apply_result_mode[regno] = mode;
8817 else
8818 apply_result_mode[regno] = VOIDmode;
8820 /* Allow targets that use untyped_call and untyped_return to override
8821 the size so that machine-specific information can be stored here. */
8822 #ifdef APPLY_RESULT_SIZE
8823 size = APPLY_RESULT_SIZE;
8824 #endif
8826 return size;
8829 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
8830 /* Create a vector describing the result block RESULT. If SAVEP is true,
8831 the result block is used to save the values; otherwise it is used to
8832 restore the values. */
8834 static rtx
8835 result_vector (savep, result)
8836 int savep;
8837 rtx result;
8839 int regno, size, align, nelts;
8840 enum machine_mode mode;
8841 rtx reg, mem;
8842 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
8844 size = nelts = 0;
8845 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8846 if ((mode = apply_result_mode[regno]) != VOIDmode)
8848 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8849 if (size % align != 0)
8850 size = CEIL (size, align) * align;
8851 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
8852 mem = change_address (result, mode,
8853 plus_constant (XEXP (result, 0), size));
8854 savevec[nelts++] = (savep
8855 ? gen_rtx (SET, VOIDmode, mem, reg)
8856 : gen_rtx (SET, VOIDmode, reg, mem));
8857 size += GET_MODE_SIZE (mode);
8859 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
8861 #endif /* HAVE_untyped_call or HAVE_untyped_return */
8863 /* Save the state required to perform an untyped call with the same
8864 arguments as were passed to the current function. */
8866 static rtx
8867 expand_builtin_apply_args ()
8869 rtx registers;
8870 int size, align, regno;
8871 enum machine_mode mode;
8873 /* Create a block where the arg-pointer, structure value address,
8874 and argument registers can be saved. */
8875 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
8877 /* Walk past the arg-pointer and structure value address. */
8878 size = GET_MODE_SIZE (Pmode);
8879 if (struct_value_rtx)
8880 size += GET_MODE_SIZE (Pmode);
8882 /* Save each register used in calling a function to the block. */
8883 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8884 if ((mode = apply_args_mode[regno]) != VOIDmode)
8886 rtx tem;
8888 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8889 if (size % align != 0)
8890 size = CEIL (size, align) * align;
8892 tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));
8894 #ifdef STACK_REGS
8895 /* For reg-stack.c's stack register housekeeping.
8896 Compare with a similar piece of code in function.c. */
8898 emit_insn (gen_rtx (USE, mode, tem));
8899 #endif
8901 emit_move_insn (change_address (registers, mode,
8902 plus_constant (XEXP (registers, 0),
8903 size)),
8904 tem);
8905 size += GET_MODE_SIZE (mode);
8908 /* Save the arg pointer to the block. */
8909 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
8910 copy_to_reg (virtual_incoming_args_rtx));
8911 size = GET_MODE_SIZE (Pmode);
8913 /* Save the structure value address unless this is passed as an
8914 "invisible" first argument. */
8915 if (struct_value_incoming_rtx)
8917 emit_move_insn (change_address (registers, Pmode,
8918 plus_constant (XEXP (registers, 0),
8919 size)),
8920 copy_to_reg (struct_value_incoming_rtx));
8921 size += GET_MODE_SIZE (Pmode);
8924 /* Return the address of the block. */
8925 return copy_addr_to_reg (XEXP (registers, 0));
8928 /* Perform an untyped call and save the state required to perform an
8929 untyped return of whatever value was returned by the given function. */
8931 static rtx
8932 expand_builtin_apply (function, arguments, argsize)
8933 rtx function, arguments, argsize;
8935 int size, align, regno;
8936 enum machine_mode mode;
8937 rtx incoming_args, result, reg, dest, call_insn;
8938 rtx old_stack_level = 0;
8939 rtx call_fusage = 0;
8941 /* Create a block where the return registers can be saved. */
8942 result = assign_stack_local (BLKmode, apply_result_size (), -1);
8944 /* ??? The argsize value should be adjusted here. */
8946 /* Fetch the arg pointer from the ARGUMENTS block. */
8947 incoming_args = gen_reg_rtx (Pmode);
8948 emit_move_insn (incoming_args,
8949 gen_rtx (MEM, Pmode, arguments));
8950 #ifndef STACK_GROWS_DOWNWARD
8951 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
8952 incoming_args, 0, OPTAB_LIB_WIDEN);
8953 #endif
8955 /* Perform postincrements before actually calling the function. */
8956 emit_queue ();
8958 /* Push a new argument block and copy the arguments. */
8959 do_pending_stack_adjust ();
8960 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
8962 /* Push a block of memory onto the stack to store the memory arguments.
8963 Save the address in a register, and copy the memory arguments. ??? I
8964 haven't figured out how the calling convention macros affect this,
8965 but it's likely that the source and/or destination addresses in
8966 the block copy will need updating in machine specific ways. */
8967 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
8968 emit_block_move (gen_rtx (MEM, BLKmode, dest),
8969 gen_rtx (MEM, BLKmode, incoming_args),
8970 argsize,
8971 PARM_BOUNDARY / BITS_PER_UNIT);
8973 /* Refer to the argument block. */
8974 apply_args_size ();
8975 arguments = gen_rtx (MEM, BLKmode, arguments);
8977 /* Walk past the arg-pointer and structure value address. */
8978 size = GET_MODE_SIZE (Pmode);
8979 if (struct_value_rtx)
8980 size += GET_MODE_SIZE (Pmode);
8982 /* Restore each of the registers previously saved. Make USE insns
8983 for each of these registers for use in making the call. */
8984 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8985 if ((mode = apply_args_mode[regno]) != VOIDmode)
8987 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8988 if (size % align != 0)
8989 size = CEIL (size, align) * align;
8990 reg = gen_rtx (REG, mode, regno);
8991 emit_move_insn (reg,
8992 change_address (arguments, mode,
8993 plus_constant (XEXP (arguments, 0),
8994 size)));
8996 use_reg (&call_fusage, reg);
8997 size += GET_MODE_SIZE (mode);
9000 /* Restore the structure value address unless this is passed as an
9001 "invisible" first argument. */
9002 size = GET_MODE_SIZE (Pmode);
9003 if (struct_value_rtx)
9005 rtx value = gen_reg_rtx (Pmode);
9006 emit_move_insn (value,
9007 change_address (arguments, Pmode,
9008 plus_constant (XEXP (arguments, 0),
9009 size)));
9010 emit_move_insn (struct_value_rtx, value);
9011 if (GET_CODE (struct_value_rtx) == REG)
9012 use_reg (&call_fusage, struct_value_rtx);
9013 size += GET_MODE_SIZE (Pmode);
9016 /* All arguments and registers used for the call are set up by now! */
9017 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9019 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
9020 and we don't want to load it into a register as an optimization,
9021 because prepare_call_address already did it if it should be done. */
9022 if (GET_CODE (function) != SYMBOL_REF)
9023 function = memory_address (FUNCTION_MODE, function);
9025 /* Generate the actual call instruction and save the return value. */
9026 #ifdef HAVE_untyped_call
9027 if (HAVE_untyped_call)
9028 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
9029 result, result_vector (1, result)));
9030 else
9031 #endif
9032 #ifdef HAVE_call_value
9033 if (HAVE_call_value)
9035 rtx valreg = 0;
9037 /* Locate the unique return register. It is not possible to
9038 express a call that sets more than one return register using
9039 call_value; use untyped_call for that. In fact, untyped_call
9040 only needs to save the return registers in the given block. */
9041 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9042 if ((mode = apply_result_mode[regno]) != VOIDmode)
9044 if (valreg)
9045 abort (); /* HAVE_untyped_call required. */
9046 valreg = gen_rtx (REG, mode, regno);
9049 emit_call_insn (gen_call_value (valreg,
9050 gen_rtx (MEM, FUNCTION_MODE, function),
9051 const0_rtx, NULL_RTX, const0_rtx));
9053 emit_move_insn (change_address (result, GET_MODE (valreg),
9054 XEXP (result, 0)),
9055 valreg);
9057 else
9058 #endif
9059 abort ();
9061 /* Find the CALL insn we just emitted. */
9062 for (call_insn = get_last_insn ();
9063 call_insn && GET_CODE (call_insn) != CALL_INSN;
9064 call_insn = PREV_INSN (call_insn))
9067 if (! call_insn)
9068 abort ();
9070 /* Put the register usage information on the CALL. If there is already
9071 some usage information, put ours at the end. */
9072 if (CALL_INSN_FUNCTION_USAGE (call_insn))
9074 rtx link;
9076 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9077 link = XEXP (link, 1))
9080 XEXP (link, 1) = call_fusage;
9082 else
9083 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
9085 /* Restore the stack. */
9086 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9088 /* Return the address of the result block. */
9089 return copy_addr_to_reg (XEXP (result, 0));
9092 /* Perform an untyped return. */
9094 static void
9095 expand_builtin_return (result)
9096 rtx result;
9098 int size, align, regno;
9099 enum machine_mode mode;
9100 rtx reg;
9101 rtx call_fusage = 0;
9103 apply_result_size ();
9104 result = gen_rtx (MEM, BLKmode, result);
9106 #ifdef HAVE_untyped_return
9107 if (HAVE_untyped_return)
9109 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9110 emit_barrier ();
9111 return;
9113 #endif
9115 /* Restore the return value and note that each value is used. */
9116 size = 0;
9117 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9118 if ((mode = apply_result_mode[regno]) != VOIDmode)
9120 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9121 if (size % align != 0)
9122 size = CEIL (size, align) * align;
9123 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9124 emit_move_insn (reg,
9125 change_address (result, mode,
9126 plus_constant (XEXP (result, 0),
9127 size)));
9129 push_to_sequence (call_fusage);
9130 emit_insn (gen_rtx (USE, VOIDmode, reg));
9131 call_fusage = get_insns ();
9132 end_sequence ();
9133 size += GET_MODE_SIZE (mode);
9136 /* Put the USE insns before the return. */
9137 emit_insns (call_fusage);
9139 /* Return whatever values were restored by jumping directly to the end
9140 of the function. */
9141 expand_null_return ();
9144 /* Expand code for a post- or pre- increment or decrement
9145 and return the RTX for the result.
9146 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9148 static rtx
9149 expand_increment (exp, post)
9150 register tree exp;
9151 int post;
9153 register rtx op0, op1;
9154 register rtx temp, value;
9155 register tree incremented = TREE_OPERAND (exp, 0);
9156 optab this_optab = add_optab;
9157 int icode;
9158 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9159 int op0_is_copy = 0;
9160 int single_insn = 0;
9161 /* 1 means we can't store into OP0 directly,
9162 because it is a subreg narrower than a word,
9163 and we don't dare clobber the rest of the word. */
9164 int bad_subreg = 0;
9166 if (output_bytecode)
9168 bc_expand_expr (exp);
9169 return NULL_RTX;
9172 /* Stabilize any component ref that might need to be
9173 evaluated more than once below. */
9174 if (!post
9175 || TREE_CODE (incremented) == BIT_FIELD_REF
9176 || (TREE_CODE (incremented) == COMPONENT_REF
9177 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9178 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9179 incremented = stabilize_reference (incremented);
9180 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9181 ones into save exprs so that they don't accidentally get evaluated
9182 more than once by the code below. */
9183 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9184 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9185 incremented = save_expr (incremented);
9187 /* Compute the operands as RTX.
9188 Note whether OP0 is the actual lvalue or a copy of it:
9189 I believe it is a copy iff it is a register or subreg
9190 and insns were generated in computing it. */
9192 temp = get_last_insn ();
9193 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9195 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9196 in place but instead must do sign- or zero-extension during assignment,
9197 so we copy it into a new register and let the code below use it as
9198 a copy.
9200 Note that we can safely modify this SUBREG since it is known not to be
9201 shared (it was made by the expand_expr call above). */
9203 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9205 if (post)
9206 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9207 else
9208 bad_subreg = 1;
9210 else if (GET_CODE (op0) == SUBREG
9211 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9213 /* We cannot increment this SUBREG in place. If we are
9214 post-incrementing, get a copy of the old value. Otherwise,
9215 just mark that we cannot increment in place. */
9216 if (post)
9217 op0 = copy_to_reg (op0);
9218 else
9219 bad_subreg = 1;
9222 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9223 && temp != get_last_insn ());
9224 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9226 /* Decide whether incrementing or decrementing. */
9227 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9228 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9229 this_optab = sub_optab;
9231 /* Convert decrement by a constant into a negative increment. */
9232 if (this_optab == sub_optab
9233 && GET_CODE (op1) == CONST_INT)
9235 op1 = GEN_INT (- INTVAL (op1));
9236 this_optab = add_optab;
9239 /* For a preincrement, see if we can do this with a single instruction. */
9240 if (!post)
9242 icode = (int) this_optab->handlers[(int) mode].insn_code;
9243 if (icode != (int) CODE_FOR_nothing
9244 /* Make sure that OP0 is valid for operands 0 and 1, and OP1 for
9245 operand 2, of the insn we want to queue. */
9246 && (*insn_operand_predicate[icode][0]) (op0, mode)
9247 && (*insn_operand_predicate[icode][1]) (op0, mode)
9248 && (*insn_operand_predicate[icode][2]) (op1, mode))
9249 single_insn = 1;
9252 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9253 then we cannot just increment OP0. We must therefore contrive to
9254 increment the original value. Then, for postincrement, we can return
9255 OP0 since it is a copy of the old value. For preincrement, expand here
9256 unless we can do it with a single insn.
9258 Likewise if storing directly into OP0 would clobber high bits
9259 we need to preserve (bad_subreg). */
9260 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9262 /* This is the easiest way to increment the value wherever it is.
9263 Problems with multiple evaluation of INCREMENTED are prevented
9264 because either (1) it is a component_ref or preincrement,
9265 in which case it was stabilized above, or (2) it is an array_ref
9266 with constant index in an array in a register, which is
9267 safe to reevaluate. */
9268 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9269 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9270 ? MINUS_EXPR : PLUS_EXPR),
9271 TREE_TYPE (exp),
9272 incremented,
9273 TREE_OPERAND (exp, 1));
9275 while (TREE_CODE (incremented) == NOP_EXPR
9276 || TREE_CODE (incremented) == CONVERT_EXPR)
9278 newexp = convert (TREE_TYPE (incremented), newexp);
9279 incremented = TREE_OPERAND (incremented, 0);
9282 temp = expand_assignment (incremented, newexp, ! post, 0);
9283 return post ? op0 : temp;
9286 if (post)
9288 /* We have a true reference to the value in OP0.
9289 If there is an insn to add or subtract in this mode, queue it.
9290 Queueing the increment insn avoids the register shuffling
9291 that often results if we must increment now and first save
9292 the old value for subsequent use. */
9294 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9295 op0 = stabilize (op0);
9296 #endif
9298 icode = (int) this_optab->handlers[(int) mode].insn_code;
9299 if (icode != (int) CODE_FOR_nothing
9300 /* Make sure that OP0 is valid for operands 0 and 1
9301 of the insn we want to queue. */
9302 && (*insn_operand_predicate[icode][0]) (op0, mode)
9303 && (*insn_operand_predicate[icode][1]) (op0, mode))
9305 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9306 op1 = force_reg (mode, op1);
9308 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9312 /* Preincrement, or we can't increment with one simple insn. */
9313 if (post)
9314 /* Save a copy of the value before inc or dec, to return it later. */
9315 temp = value = copy_to_reg (op0);
9316 else
9317 /* Arrange to return the incremented value. */
9318 /* Copy the rtx because expand_binop will protect from the queue,
9319 and the results of that would be invalid for us to return
9320 if our caller does emit_queue before using our result. */
9321 temp = copy_rtx (value = op0);
9323 /* Increment however we can. */
9324 op1 = expand_binop (mode, this_optab, value, op1, op0,
9325 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9326 /* Make sure the value is stored into OP0. */
9327 if (op1 != op0)
9328 emit_move_insn (op0, op1);
9330 return temp;
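/* Illustrative sketch (not part of the original file): the paths above
   implement the usual C semantics.  For a postincrement the old value is
   copied and returned; for a preincrement the new value is: */
#if 0
static int
post_increment (p)
     int *p;
{
  int temp = *p;	/* save the old value (copy_to_reg above) */
  *p = *p + 1;		/* store back into the true lvalue */
  return temp;		/* a postincrement yields the old value */
}
#endif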
9333 /* Expand all function calls contained within EXP, innermost ones first.
9334 But don't look within expressions that have sequence points.
9335 For each CALL_EXPR, record the rtx for its value
9336 in the CALL_EXPR_RTL field. */
9338 static void
9339 preexpand_calls (exp)
9340 tree exp;
9342 register int nops, i;
9343 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9345 if (! do_preexpand_calls)
9346 return;
9348 /* Only expressions and references can contain calls. */
9350 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9351 return;
9353 switch (TREE_CODE (exp))
9355 case CALL_EXPR:
9356 /* Do nothing if already expanded. */
9357 if (CALL_EXPR_RTL (exp) != 0)
9358 return;
9360 /* Do nothing to built-in functions. */
9361 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
9362 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
9363 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9364 /* Do nothing if the call returns a variable-sized object. */
9365 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9366 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9367 return;
9369 case COMPOUND_EXPR:
9370 case COND_EXPR:
9371 case TRUTH_ANDIF_EXPR:
9372 case TRUTH_ORIF_EXPR:
9373 /* If we find one of these, then we can be sure
9374 the adjust will be done for it (since it makes jumps).
9375 Do it now, so that if this is inside an argument
9376 of a function, we don't get the stack adjustment
9377 after some other args have already been pushed. */
9378 do_pending_stack_adjust ();
9379 return;
9381 case BLOCK:
9382 case RTL_EXPR:
9383 case WITH_CLEANUP_EXPR:
9384 case CLEANUP_POINT_EXPR:
9385 return;
9387 case SAVE_EXPR:
9388 if (SAVE_EXPR_RTL (exp) != 0)
9389 return;
9392 nops = tree_code_length[(int) TREE_CODE (exp)];
9393 for (i = 0; i < nops; i++)
9394 if (TREE_OPERAND (exp, i) != 0)
9396 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9397 if (type == 'e' || type == '<' || type == '1' || type == '2'
9398 || type == 'r')
9399 preexpand_calls (TREE_OPERAND (exp, i));
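/* For reference (an aside, not in the original file): the TREE_CODE_CLASS
   letters tested above are the ones from tree.h: 'e' ordinary expressions,
   '<' comparisons, '1' and '2' unary and binary arithmetic, and 'r'
   references to storage.  Other classes (constants, declarations, types)
   cannot contain CALL_EXPRs, so they are skipped. */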
9403 /* At the start of a function, record that we have no previously-pushed
9404 arguments waiting to be popped. */
9406 void
9407 init_pending_stack_adjust ()
9409 pending_stack_adjust = 0;
9412 /* When exiting from function, if safe, clear out any pending stack adjust
9413 so the adjustment won't get done. */
9415 void
9416 clear_pending_stack_adjust ()
9418 #ifdef EXIT_IGNORE_STACK
9419 if (optimize > 0
9420 && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
9421 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9422 && ! flag_inline_functions)
9423 pending_stack_adjust = 0;
9424 #endif
9427 /* Pop any previously-pushed arguments that have not been popped yet. */
9429 void
9430 do_pending_stack_adjust ()
9432 if (inhibit_defer_pop == 0)
9434 if (pending_stack_adjust != 0)
9435 adjust_stack (GEN_INT (pending_stack_adjust));
9436 pending_stack_adjust = 0;
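/* Illustrative example (not part of the original file): if two calls each
   pushed 8 bytes of arguments and their pops were deferred,
   pending_stack_adjust is 16 here and a single adjust_stack (GEN_INT (16))
   replaces two separate 8-byte pops. */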
9440 /* Defer the expansion of all cleanups up to OLD_CLEANUPS.
9441 Returns the cleanups to be performed. */
9443 static tree
9444 defer_cleanups_to (old_cleanups)
9445 tree old_cleanups;
9447 tree new_cleanups = NULL_TREE;
9448 tree cleanups = cleanups_this_call;
9449 tree last = NULL_TREE;
9451 while (cleanups_this_call != old_cleanups)
9453 (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
9454 last = cleanups_this_call;
9455 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9458 if (last)
9460 /* Remove the list from the chain of cleanups. */
9461 TREE_CHAIN (last) = NULL_TREE;
9463 /* Reverse them so that we can build them in the right order. */
9464 cleanups = nreverse (cleanups);
9466 /* All cleanups must be on the function_obstack. */
9467 push_obstacks_nochange ();
9468 resume_temporary_allocation ();
9470 while (cleanups)
9472 if (new_cleanups)
9473 new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
9474 TREE_VALUE (cleanups), new_cleanups);
9475 else
9476 new_cleanups = TREE_VALUE (cleanups);
9478 cleanups = TREE_CHAIN (cleanups);
9481 pop_obstacks ();
9484 return new_cleanups;
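/* Example (not from the original sources): if cleanups c1, c2, c3 were
   registered in that order, the list arrives newest-first (c3 c2 c1);
   after nreverse the loop above builds
     (COMPOUND_EXPR c3 (COMPOUND_EXPR c2 c1))
   so the deferred cleanups still run newest-first, like destructors. */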
9487 /* Expand all cleanups up to OLD_CLEANUPS.
9488 Needed here, and also for language-dependent calls. */
9490 void
9491 expand_cleanups_to (old_cleanups)
9492 tree old_cleanups;
9494 while (cleanups_this_call != old_cleanups)
9496 (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
9497 expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
9498 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9502 /* Expand conditional expressions. */
9504 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9505 LABEL is an rtx of code CODE_LABEL, in this function and all the
9506 functions here. */
9508 void
9509 jumpifnot (exp, label)
9510 tree exp;
9511 rtx label;
9513 do_jump (exp, label, NULL_RTX);
9516 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9518 void
9519 jumpif (exp, label)
9520 tree exp;
9521 rtx label;
9523 do_jump (exp, NULL_RTX, label);
9526 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9527 the result is zero, or IF_TRUE_LABEL if the result is one.
9528 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9529 meaning fall through in that case.
9531 do_jump always does any pending stack adjust except when it does not
9532 actually perform a jump. An example where there is no jump
9533 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9535 This function is responsible for optimizing cases such as
9536 &&, || and comparison operators in EXP. */
9538 void
9539 do_jump (exp, if_false_label, if_true_label)
9540 tree exp;
9541 rtx if_false_label, if_true_label;
9543 register enum tree_code code = TREE_CODE (exp);
9544 /* Some cases need to create a label to jump to
9545 in order to properly fall through.
9546 These cases set DROP_THROUGH_LABEL nonzero. */
9547 rtx drop_through_label = 0;
9548 rtx temp;
9549 rtx comparison = 0;
9550 int i;
9551 tree type;
9552 enum machine_mode mode;
9554 emit_queue ();
9556 switch (code)
9558 case ERROR_MARK:
9559 break;
9561 case INTEGER_CST:
9562 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9563 if (temp)
9564 emit_jump (temp);
9565 break;
9567 #if 0
9568 /* This is not true with #pragma weak */
9569 case ADDR_EXPR:
9570 /* The address of something can never be zero. */
9571 if (if_true_label)
9572 emit_jump (if_true_label);
9573 break;
9574 #endif
9576 case NOP_EXPR:
9577 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9578 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9579 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9580 goto normal;
9581 case CONVERT_EXPR:
9582 /* If we are narrowing the operand, we have to do the compare in the
9583 narrower mode. */
9584 if ((TYPE_PRECISION (TREE_TYPE (exp))
9585 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9586 goto normal;
9587 case NON_LVALUE_EXPR:
9588 case REFERENCE_EXPR:
9589 case ABS_EXPR:
9590 case NEGATE_EXPR:
9591 case LROTATE_EXPR:
9592 case RROTATE_EXPR:
9593 /* These cannot change zero->non-zero or vice versa. */
9594 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9595 break;
9597 #if 0
9598 /* This never takes fewer insns than evaluating the PLUS_EXPR followed by
9599 a test, and can be longer if the test is eliminated. */
9600 case PLUS_EXPR:
9601 /* Reduce to minus. */
9602 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9603 TREE_OPERAND (exp, 0),
9604 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9605 TREE_OPERAND (exp, 1))));
9606 /* Process as MINUS. */
9607 #endif
9609 case MINUS_EXPR:
9610 /* Non-zero iff operands of minus differ. */
9611 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
9612 TREE_OPERAND (exp, 0),
9613 TREE_OPERAND (exp, 1)),
9614 NE, NE);
9615 break;
9617 case BIT_AND_EXPR:
9618 /* If we are AND'ing with a small constant, do this comparison in the
9619 smallest type that fits. If the machine doesn't have comparisons
9620 that small, it will be converted back to the wider comparison.
9621 This helps if we are testing the sign bit of a narrower object.
9622 combine can't do this for us because it can't know whether a
9623 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9625 if (! SLOW_BYTE_ACCESS
9626 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9627 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9628 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
9629 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9630 && (type = type_for_mode (mode, 1)) != 0
9631 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9632 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9633 != CODE_FOR_nothing))
9635 do_jump (convert (type, exp), if_false_label, if_true_label);
9636 break;
9638 goto normal;
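/* Example of the narrowing above (not from the original sources): for
   `x & 0x80' with a 32-bit x, floor_log2 gives i = 7, so the test is
   retried as an 8-bit (QImode) unsigned comparison when the machine has
   one, in effect testing the sign bit of a single byte. */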
9640 case TRUTH_NOT_EXPR:
9641 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9642 break;
9644 case TRUTH_ANDIF_EXPR:
9646 rtx seq1, seq2;
9647 tree cleanups, old_cleanups;
9649 if (if_false_label == 0)
9650 if_false_label = drop_through_label = gen_label_rtx ();
9651 start_sequence ();
9652 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9653 seq1 = get_insns ();
9654 end_sequence ();
9656 old_cleanups = cleanups_this_call;
9657 start_sequence ();
9658 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9659 seq2 = get_insns ();
9660 end_sequence ();
9662 cleanups = defer_cleanups_to (old_cleanups);
9663 if (cleanups)
9665 rtx flag = gen_reg_rtx (word_mode);
9666 tree new_cleanups;
9667 tree cond;
9669 /* Flag cleanups as not needed. */
9670 emit_move_insn (flag, const0_rtx);
9671 emit_insns (seq1);
9673 /* Flag cleanups as needed. */
9674 emit_move_insn (flag, const1_rtx);
9675 emit_insns (seq2);
9677 /* All cleanups must be on the function_obstack. */
9678 push_obstacks_nochange ();
9679 resume_temporary_allocation ();
9681 /* Convert flag, which is an rtx, into a tree. */
9682 cond = make_node (RTL_EXPR);
9683 TREE_TYPE (cond) = integer_type_node;
9684 RTL_EXPR_RTL (cond) = flag;
9685 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
9686 cond = save_expr (cond);
9688 new_cleanups = build (COND_EXPR, void_type_node,
9689 truthvalue_conversion (cond),
9690 cleanups, integer_zero_node);
9691 new_cleanups = fold (new_cleanups);
9693 pop_obstacks ();
9695 /* Now add in the conditionalized cleanups. */
9696 cleanups_this_call
9697 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
9698 (*interim_eh_hook) (NULL_TREE);
9700 else
9702 emit_insns (seq1);
9703 emit_insns (seq2);
9706 break;
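/* Sketch of the flag trick above (not part of the original file): a cleanup
   made while expanding the second arm must run only if that arm was
   reached, so a flag records the fact at run time:

     flag = 0;
     if (! arm1) goto false_label;
     flag = 1;
     evaluate arm2 (may create a cleanup);
     ...
     at the cleanup point:  if (flag) run the cleanup;

   The same pattern is used for TRUTH_ORIF_EXPR below. */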
9708 case TRUTH_ORIF_EXPR:
9710 rtx seq1, seq2;
9711 tree cleanups, old_cleanups;
9713 if (if_true_label == 0)
9714 if_true_label = drop_through_label = gen_label_rtx ();
9715 start_sequence ();
9716 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9717 seq1 = get_insns ();
9718 end_sequence ();
9720 old_cleanups = cleanups_this_call;
9721 start_sequence ();
9722 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9723 seq2 = get_insns ();
9724 end_sequence ();
9726 cleanups = defer_cleanups_to (old_cleanups);
9727 if (cleanups)
9729 rtx flag = gen_reg_rtx (word_mode);
9730 tree new_cleanups;
9731 tree cond;
9733 /* Flag cleanups as not needed. */
9734 emit_move_insn (flag, const0_rtx);
9735 emit_insns (seq1);
9737 /* Flag cleanups as needed. */
9738 emit_move_insn (flag, const1_rtx);
9739 emit_insns (seq2);
9741 /* All cleanups must be on the function_obstack. */
9742 push_obstacks_nochange ();
9743 resume_temporary_allocation ();
9745 /* Convert flag, which is an rtx, into a tree. */
9746 cond = make_node (RTL_EXPR);
9747 TREE_TYPE (cond) = integer_type_node;
9748 RTL_EXPR_RTL (cond) = flag;
9749 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
9750 cond = save_expr (cond);
9752 new_cleanups = build (COND_EXPR, void_type_node,
9753 truthvalue_conversion (cond),
9754 cleanups, integer_zero_node);
9755 new_cleanups = fold (new_cleanups);
9757 pop_obstacks ();
9759 /* Now add in the conditionalized cleanups. */
9760 cleanups_this_call
9761 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
9762 (*interim_eh_hook) (NULL_TREE);
9764 else
9766 emit_insns (seq1);
9767 emit_insns (seq2);
9770 break;
9772 case COMPOUND_EXPR:
9773 push_temp_slots ();
9774 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9775 free_temp_slots ();
9776 pop_temp_slots ();
9777 emit_queue ();
9778 do_pending_stack_adjust ();
9779 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9780 break;
9782 case COMPONENT_REF:
9783 case BIT_FIELD_REF:
9784 case ARRAY_REF:
9786 int bitsize, bitpos, unsignedp;
9787 enum machine_mode mode;
9788 tree type;
9789 tree offset;
9790 int volatilep = 0;
9792 /* Get description of this reference. We don't actually care
9793 about the underlying object here. */
9794 get_inner_reference (exp, &bitsize, &bitpos, &offset,
9795 &mode, &unsignedp, &volatilep);
9797 type = type_for_size (bitsize, unsignedp);
9798 if (! SLOW_BYTE_ACCESS
9799 && type != 0 && bitsize >= 0
9800 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9801 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9802 != CODE_FOR_nothing))
9804 do_jump (convert (type, exp), if_false_label, if_true_label);
9805 break;
9807 goto normal;
9810 case COND_EXPR:
9811 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9812 if (integer_onep (TREE_OPERAND (exp, 1))
9813 && integer_zerop (TREE_OPERAND (exp, 2)))
9814 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9816 else if (integer_zerop (TREE_OPERAND (exp, 1))
9817 && integer_onep (TREE_OPERAND (exp, 2)))
9818 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9820 else
9822 register rtx label1 = gen_label_rtx ();
9823 drop_through_label = gen_label_rtx ();
9824 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9825 /* Now the THEN-expression. */
9826 do_jump (TREE_OPERAND (exp, 1),
9827 if_false_label ? if_false_label : drop_through_label,
9828 if_true_label ? if_true_label : drop_through_label);
9829 /* In case the do_jump just above never jumps. */
9830 do_pending_stack_adjust ();
9831 emit_label (label1);
9832 /* Now the ELSE-expression. */
9833 do_jump (TREE_OPERAND (exp, 2),
9834 if_false_label ? if_false_label : drop_through_label,
9835 if_true_label ? if_true_label : drop_through_label);
9837 break;
9839 case EQ_EXPR:
9841 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9843 if (integer_zerop (TREE_OPERAND (exp, 1)))
9844 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9845 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9846 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9847 do_jump
9848 (fold
9849 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9850 fold (build (EQ_EXPR, TREE_TYPE (exp),
9851 fold (build1 (REALPART_EXPR,
9852 TREE_TYPE (inner_type),
9853 TREE_OPERAND (exp, 0))),
9854 fold (build1 (REALPART_EXPR,
9855 TREE_TYPE (inner_type),
9856 TREE_OPERAND (exp, 1))))),
9857 fold (build (EQ_EXPR, TREE_TYPE (exp),
9858 fold (build1 (IMAGPART_EXPR,
9859 TREE_TYPE (inner_type),
9860 TREE_OPERAND (exp, 0))),
9861 fold (build1 (IMAGPART_EXPR,
9862 TREE_TYPE (inner_type),
9863 TREE_OPERAND (exp, 1))))))),
9864 if_false_label, if_true_label);
9865 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9866 && !can_compare_p (TYPE_MODE (inner_type)))
9867 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9868 else
9869 comparison = compare (exp, EQ, EQ);
9870 break;
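/* Example (not from the original sources): for complex operands the code
   above rewrites `a == b' as
     REALPART (a) == REALPART (b) && IMAGPART (a) == IMAGPART (b)
   and recurses, letting the TRUTH_ANDIF_EXPR case emit the two scalar
   comparisons; NE_EXPR below uses || instead. */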
9873 case NE_EXPR:
9875 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9877 if (integer_zerop (TREE_OPERAND (exp, 1)))
9878 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9879 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9880 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9881 do_jump
9882 (fold
9883 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9884 fold (build (NE_EXPR, TREE_TYPE (exp),
9885 fold (build1 (REALPART_EXPR,
9886 TREE_TYPE (inner_type),
9887 TREE_OPERAND (exp, 0))),
9888 fold (build1 (REALPART_EXPR,
9889 TREE_TYPE (inner_type),
9890 TREE_OPERAND (exp, 1))))),
9891 fold (build (NE_EXPR, TREE_TYPE (exp),
9892 fold (build1 (IMAGPART_EXPR,
9893 TREE_TYPE (inner_type),
9894 TREE_OPERAND (exp, 0))),
9895 fold (build1 (IMAGPART_EXPR,
9896 TREE_TYPE (inner_type),
9897 TREE_OPERAND (exp, 1))))))),
9898 if_false_label, if_true_label);
9899 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9900 && !can_compare_p (TYPE_MODE (inner_type)))
9901 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9902 else
9903 comparison = compare (exp, NE, NE);
9904 break;
9907 case LT_EXPR:
9908 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9909 == MODE_INT)
9910 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9911 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9912 else
9913 comparison = compare (exp, LT, LTU);
9914 break;
9916 case LE_EXPR:
9917 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9918 == MODE_INT)
9919 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9920 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9921 else
9922 comparison = compare (exp, LE, LEU);
9923 break;
9925 case GT_EXPR:
9926 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9927 == MODE_INT)
9928 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9929 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9930 else
9931 comparison = compare (exp, GT, GTU);
9932 break;
9934 case GE_EXPR:
9935 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9936 == MODE_INT)
9937 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9938 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9939 else
9940 comparison = compare (exp, GE, GEU);
9941 break;
9943 default:
9944 normal:
9945 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9946 #if 0
9947 /* This is no longer needed and produces poor code, since it makes
9948 comparisons and tests from non-SI objects use different code
9949 sequences. */
9950 /* Copy to register to avoid generating bad insns by cse
9951 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9952 if (!cse_not_expected && GET_CODE (temp) == MEM)
9953 temp = copy_to_reg (temp);
9954 #endif
9955 do_pending_stack_adjust ();
9956 if (GET_CODE (temp) == CONST_INT)
9957 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
9958 else if (GET_CODE (temp) == LABEL_REF)
9959 comparison = const_true_rtx;
9960 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9961 && !can_compare_p (GET_MODE (temp)))
9962 /* Note swapping the labels gives us not-equal. */
9963 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9964 else if (GET_MODE (temp) != VOIDmode)
9965 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
9966 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9967 GET_MODE (temp), NULL_RTX, 0);
9968 else
9969 abort ();
9972 /* Do any postincrements in the expression that was tested. */
9973 emit_queue ();
9975 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
9976 straight into a conditional jump instruction as the jump condition.
9977 Otherwise, all the work has been done already. */
9979 if (comparison == const_true_rtx)
9981 if (if_true_label)
9982 emit_jump (if_true_label);
9984 else if (comparison == const0_rtx)
9986 if (if_false_label)
9987 emit_jump (if_false_label);
9989 else if (comparison)
9990 do_jump_for_compare (comparison, if_false_label, if_true_label);
9992 if (drop_through_label)
9994 /* If do_jump produces code that might be jumped around,
9995 do any stack adjusts from that code before the place
9996 where control merges in. */
9997 do_pending_stack_adjust ();
9998 emit_label (drop_through_label);
10002 /* Given a comparison expression EXP for values too wide to be compared
10003 with one insn, test the comparison and jump to the appropriate label.
10004 The code of EXP is ignored; we always test GT if SWAP is 0,
10005 and LT if SWAP is 1. */
10007 static void
10008 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10009 tree exp;
10010 int swap;
10011 rtx if_false_label, if_true_label;
10013 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10014 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10015 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10016 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10017 rtx drop_through_label = 0;
10018 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10019 int i;
10021 if (! if_true_label || ! if_false_label)
10022 drop_through_label = gen_label_rtx ();
10023 if (! if_true_label)
10024 if_true_label = drop_through_label;
10025 if (! if_false_label)
10026 if_false_label = drop_through_label;
10028 /* Compare a word at a time, high order first. */
10029 for (i = 0; i < nwords; i++)
10031 rtx comp;
10032 rtx op0_word, op1_word;
10034 if (WORDS_BIG_ENDIAN)
10036 op0_word = operand_subword_force (op0, i, mode);
10037 op1_word = operand_subword_force (op1, i, mode);
10039 else
10041 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10042 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10045 /* All but the high-order word must be compared as unsigned. */
10046 comp = compare_from_rtx (op0_word, op1_word,
10047 (unsignedp || i > 0) ? GTU : GT,
10048 unsignedp, word_mode, NULL_RTX, 0);
10049 if (comp == const_true_rtx)
10050 emit_jump (if_true_label);
10051 else if (comp != const0_rtx)
10052 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10054 /* Consider lower words only if these are equal. */
10055 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10056 NULL_RTX, 0);
10057 if (comp == const_true_rtx)
10058 emit_jump (if_false_label);
10059 else if (comp != const0_rtx)
10060 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10063 if (if_false_label)
10064 emit_jump (if_false_label);
10065 if (drop_through_label)
10066 emit_label (drop_through_label);
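/* Illustrative sketch (not part of the original file): the loop above is
   the standard multiword comparison, written out in C for a two-word
   unsigned operand: */
#if 0
static int
two_word_gtu (hi0, lo0, hi1, lo1)
     unsigned long hi0, lo0, hi1, lo1;
{
  if (hi0 > hi1)	/* high-order words decide when they differ */
    return 1;
  if (hi0 != hi1)	/* i.e. hi0 < hi1 */
    return 0;
  return lo0 > lo1;	/* lower words are always compared unsigned */
}
#endif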
10069 /* Compare OP0 with OP1, word at a time, in mode MODE.
10070 UNSIGNEDP says to do unsigned comparison.
10071 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10073 void
10074 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10075 enum machine_mode mode;
10076 int unsignedp;
10077 rtx op0, op1;
10078 rtx if_false_label, if_true_label;
10080 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10081 rtx drop_through_label = 0;
10082 int i;
10084 if (! if_true_label || ! if_false_label)
10085 drop_through_label = gen_label_rtx ();
10086 if (! if_true_label)
10087 if_true_label = drop_through_label;
10088 if (! if_false_label)
10089 if_false_label = drop_through_label;
10091 /* Compare a word at a time, high order first. */
10092 for (i = 0; i < nwords; i++)
10094 rtx comp;
10095 rtx op0_word, op1_word;
10097 if (WORDS_BIG_ENDIAN)
10099 op0_word = operand_subword_force (op0, i, mode);
10100 op1_word = operand_subword_force (op1, i, mode);
10102 else
10104 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10105 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10108 /* All but the high-order word must be compared as unsigned. */
10109 comp = compare_from_rtx (op0_word, op1_word,
10110 (unsignedp || i > 0) ? GTU : GT,
10111 unsignedp, word_mode, NULL_RTX, 0);
10112 if (comp == const_true_rtx)
10113 emit_jump (if_true_label);
10114 else if (comp != const0_rtx)
10115 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10117 /* Consider lower words only if these are equal. */
10118 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10119 NULL_RTX, 0);
10120 if (comp == const_true_rtx)
10121 emit_jump (if_false_label);
10122 else if (comp != const0_rtx)
10123 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10126 if (if_false_label)
10127 emit_jump (if_false_label);
10128 if (drop_through_label)
10129 emit_label (drop_through_label);
10132 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10133 with one insn, test the comparison and jump to the appropriate label. */
10135 static void
10136 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10137 tree exp;
10138 rtx if_false_label, if_true_label;
10140 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10141 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10142 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10143 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10144 int i;
10145 rtx drop_through_label = 0;
10147 if (! if_false_label)
10148 drop_through_label = if_false_label = gen_label_rtx ();
10150 for (i = 0; i < nwords; i++)
10152 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10153 operand_subword_force (op1, i, mode),
10154 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10155 word_mode, NULL_RTX, 0);
10156 if (comp == const_true_rtx)
10157 emit_jump (if_false_label);
10158 else if (comp != const0_rtx)
10159 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10162 if (if_true_label)
10163 emit_jump (if_true_label);
10164 if (drop_through_label)
10165 emit_label (drop_through_label);
10168 /* Jump according to whether OP0 is 0.
10169 We assume that OP0 has an integer mode that is too wide
10170 for the available compare insns. */
10172 static void
10173 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10174 rtx op0;
10175 rtx if_false_label, if_true_label;
10177 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10178 int i;
10179 rtx drop_through_label = 0;
10181 if (! if_false_label)
10182 drop_through_label = if_false_label = gen_label_rtx ();
10184 for (i = 0; i < nwords; i++)
10186 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
10187 GET_MODE (op0)),
10188 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10189 if (comp == const_true_rtx)
10190 emit_jump (if_false_label);
10191 else if (comp != const0_rtx)
10192 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10195 if (if_true_label)
10196 emit_jump (if_true_label);
10197 if (drop_through_label)
10198 emit_label (drop_through_label);
10201 /* Given a comparison expression in rtl form, output conditional branches to
10202 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
10204 static void
10205 do_jump_for_compare (comparison, if_false_label, if_true_label)
10206 rtx comparison, if_false_label, if_true_label;
10208 if (if_true_label)
10210 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10211 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
10212 else
10213 abort ();
10215 if (if_false_label)
10216 emit_jump (if_false_label);
10218 else if (if_false_label)
10220 rtx insn;
10221 rtx prev = get_last_insn ();
10222 rtx branch = 0;
10224 /* Output the branch with the opposite condition. Then try to invert
10225 what is generated. If more than one insn is a branch, or if the
10226 branch is not the last insn written, abort. If we can't invert
10227 the branch, make a true label, redirect this jump to that,
10228 emit a jump to the false label and define the true label. */
10230 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10231 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
10232 else
10233 abort ();
10235 /* Here we get the first insn that was just emitted. It used to be the
10236 case that, on some machines, emitting the branch would discard
10237 the previous compare insn and emit a replacement. This isn't
10238 done anymore, but abort if we see that PREV is deleted. */
10240 if (prev == 0)
10241 insn = get_insns ();
10242 else if (INSN_DELETED_P (prev))
10243 abort ();
10244 else
10245 insn = NEXT_INSN (prev);
10247 for (; insn; insn = NEXT_INSN (insn))
10248 if (GET_CODE (insn) == JUMP_INSN)
10250 if (branch)
10251 abort ();
10252 branch = insn;
10255 if (branch != get_last_insn ())
10256 abort ();
10258 JUMP_LABEL (branch) = if_false_label;
10259 if (! invert_jump (branch, if_false_label))
10261 if_true_label = gen_label_rtx ();
10262 redirect_jump (branch, if_true_label);
10263 emit_jump (if_false_label);
10264 emit_label (if_true_label);
10269 /* Generate code for a comparison expression EXP
10270 (including code to compute the values to be compared)
10271 and set (CC0) according to the result.
10272 SIGNED_CODE should be the rtx operation for this comparison for
10273 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10275 We force a stack adjustment unless there are currently
10276 things pushed on the stack that aren't yet used. */
10278 static rtx
10279 compare (exp, signed_code, unsigned_code)
10280 register tree exp;
10281 enum rtx_code signed_code, unsigned_code;
10283 register rtx op0
10284 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10285 register rtx op1
10286 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10287 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
10288 register enum machine_mode mode = TYPE_MODE (type);
10289 int unsignedp = TREE_UNSIGNED (type);
10290 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
10292 return compare_from_rtx (op0, op1, code, unsignedp, mode,
10293 ((mode == BLKmode)
10294 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10295 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
10298 /* Like compare but expects the values to compare as two rtx's.
10299 The decision as to signed or unsigned comparison must be made by the caller.
10301 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10302 compared.
10304 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10305 size of MODE should be used. */
10308 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10309 register rtx op0, op1;
10310 enum rtx_code code;
10311 int unsignedp;
10312 enum machine_mode mode;
10313 rtx size;
10314 int align;
10316 rtx tem;
10318 /* If one operand is constant, make it the second one. Only do this
10319 if the other operand is not constant as well. */
10321 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10322 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10324 tem = op0;
10325 op0 = op1;
10326 op1 = tem;
10327 code = swap_condition (code);
10330 if (flag_force_mem)
10332 op0 = force_not_mem (op0);
10333 op1 = force_not_mem (op1);
10336 do_pending_stack_adjust ();
10338 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10339 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10340 return tem;
10342 #if 0
10343 /* There's no need to do this now that combine.c can eliminate lots of
10344 sign extensions. This can be less efficient in certain cases on other
10345 machines. */
10347 /* If this is a signed equality comparison, we can do it as an
10348 unsigned comparison since zero-extension is cheaper than sign
10349 extension and comparisons with zero are done as unsigned. This is
10350 the case even on machines that can do fast sign extension, since
10351 zero-extension is easier to combine with other operations than
10352 sign-extension is. If we are comparing against a constant, we must
10353 convert it to what it would look like unsigned. */
10354 if ((code == EQ || code == NE) && ! unsignedp
10355 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10357 if (GET_CODE (op1) == CONST_INT
10358 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10359 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10360 unsignedp = 1;
10362 #endif
10364 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10366 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
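/* Example (not from the original sources): `3 < x' arrives here as
   op0 = 3, op1 = x with code LT; the swap above turns it into x and 3
   with code GT = swap_condition (LT), so a constant always ends up as
   the second operand of the emitted compare. */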
10369 /* Generate code to calculate EXP using a store-flag instruction
10370 and return an rtx for the result. EXP is either a comparison
10371 or a TRUTH_NOT_EXPR whose operand is a comparison.
10373 If TARGET is nonzero, store the result there if convenient.
10375 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10376 cheap.
10378 Return zero if there is no suitable set-flag instruction
10379 available on this machine.
10381 Once expand_expr has been called on the arguments of the comparison,
10382 we are committed to doing the store flag, since it is not safe to
10383 re-evaluate the expression. We emit the store-flag insn by calling
10384 emit_store_flag, but only expand the arguments if we have a reason
10385 to believe that emit_store_flag will be successful. If we think that
10386 it will, but it isn't, we have to simulate the store-flag with a
10387 set/jump/set sequence. */
10389 static rtx
10390 do_store_flag (exp, target, mode, only_cheap)
10391 tree exp;
10392 rtx target;
10393 enum machine_mode mode;
10394 int only_cheap;
10396 enum rtx_code code;
10397 tree arg0, arg1, type;
10398 tree tem;
10399 enum machine_mode operand_mode;
10400 int invert = 0;
10401 int unsignedp;
10402 rtx op0, op1;
10403 enum insn_code icode;
10404 rtx subtarget = target;
10405 rtx result, label, pattern, jump_pat;
10407 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10408 result at the end. We can't simply invert the test since it would
10409 have already been inverted if it were valid. This case occurs for
10410 some floating-point comparisons. */
10412 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10413 invert = 1, exp = TREE_OPERAND (exp, 0);
10415 arg0 = TREE_OPERAND (exp, 0);
10416 arg1 = TREE_OPERAND (exp, 1);
10417 type = TREE_TYPE (arg0);
10418 operand_mode = TYPE_MODE (type);
10419 unsignedp = TREE_UNSIGNED (type);
10421 /* We won't bother with BLKmode store-flag operations because it would mean
10422 passing a lot of information to emit_store_flag. */
10423 if (operand_mode == BLKmode)
10424 return 0;
10426 STRIP_NOPS (arg0);
10427 STRIP_NOPS (arg1);
10429 /* Get the rtx comparison code to use. We know that EXP is a comparison
10430 operation of some type. Some comparisons against 1 and -1 can be
10431 converted to comparisons with zero. Do so here so that the tests
10432 below will be aware that we have a comparison with zero. These
10433 tests will not catch constants in the first operand, but constants
10434 are rarely passed as the first operand. */
10436 switch (TREE_CODE (exp))
10438 case EQ_EXPR:
10439 code = EQ;
10440 break;
10441 case NE_EXPR:
10442 code = NE;
10443 break;
10444 case LT_EXPR:
10445 if (integer_onep (arg1))
10446 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10447 else
10448 code = unsignedp ? LTU : LT;
10449 break;
10450 case LE_EXPR:
10451 if (! unsignedp && integer_all_onesp (arg1))
10452 arg1 = integer_zero_node, code = LT;
10453 else
10454 code = unsignedp ? LEU : LE;
10455 break;
10456 case GT_EXPR:
10457 if (! unsignedp && integer_all_onesp (arg1))
10458 arg1 = integer_zero_node, code = GE;
10459 else
10460 code = unsignedp ? GTU : GT;
10461 break;
10462 case GE_EXPR:
10463 if (integer_onep (arg1))
10464 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10465 else
10466 code = unsignedp ? GEU : GE;
10467 break;
10468 default:
10469 abort ();
10472 /* Put a constant second. */
10473 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10475 tem = arg0; arg0 = arg1; arg1 = tem;
10476 code = swap_condition (code);
10479 /* If this is an equality or inequality test of a single bit, we can
10480 do this by shifting the bit being tested to the low-order bit and
10481 masking the result with the constant 1. If the condition was EQ,
10482 we xor it with 1. This does not require an scc insn and is faster
10483 than an scc insn even if we have it. */
10485 if ((code == NE || code == EQ)
10486 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10487 && integer_pow2p (TREE_OPERAND (arg0, 1))
10488 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
10490 tree inner = TREE_OPERAND (arg0, 0);
10491 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
10492 NULL_RTX, VOIDmode, 0)));
10493 int ops_unsignedp;
10495 /* If INNER is a right shift of a constant and it plus BITNUM does
10496 not overflow, adjust BITNUM and INNER. */
10498 if (TREE_CODE (inner) == RSHIFT_EXPR
10499 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10500 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10501 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
10502 < TYPE_PRECISION (type)))
10504 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10505 inner = TREE_OPERAND (inner, 0);
10508 /* If we are going to be able to omit the AND below, we must do our
10509 operations as unsigned. If we must use the AND, we have a choice.
10510 Normally unsigned is faster, but for some machines signed is. */
10511 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10512 #ifdef LOAD_EXTEND_OP
10513 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10514 #else
10515 : 1
10516 #endif
10517 );
10519 if (subtarget == 0 || GET_CODE (subtarget) != REG
10520 || GET_MODE (subtarget) != operand_mode
10521 || ! safe_from_p (subtarget, inner))
10522 subtarget = 0;
10524 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10526 if (bitnum != 0)
10527 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10528 size_int (bitnum), subtarget, ops_unsignedp);
10530 if (GET_MODE (op0) != mode)
10531 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10533 if ((code == EQ && ! invert) || (code == NE && invert))
10534 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10535 ops_unsignedp, OPTAB_LIB_WIDEN);
10537 /* Put the AND last so it can combine with more things. */
10538 if (bitnum != TYPE_PRECISION (type) - 1)
10539 op0 = expand_and (op0, const1_rtx, subtarget);
10541 return op0;
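/* Illustrative sketch (not part of the original file): the single-bit test
   above, written as C: */
#if 0
/* Compute (X & (1 << BITNUM)) != 0 without a condition code. */
static unsigned int
bit_test_ne (x, bitnum)
     unsigned int x, bitnum;
{
  return (x >> bitnum) & 1;
}

/* For EQ the result is inverted by xoring with 1, as above; and when
   BITNUM is the sign bit the final AND can be omitted entirely. */
#endif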
10544 /* Now see if we are likely to be able to do this. Return if not. */
10545 if (! can_compare_p (operand_mode))
10546 return 0;
10547 icode = setcc_gen_code[(int) code];
10548 if (icode == CODE_FOR_nothing
10549 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
10551 /* We can only do this if it is one of the special cases that
10552 can be handled without an scc insn. */
10553 if ((code == LT && integer_zerop (arg1))
10554 || (! only_cheap && code == GE && integer_zerop (arg1)))
10556 else if (BRANCH_COST >= 0
10557 && ! only_cheap && (code == NE || code == EQ)
10558 && TREE_CODE (type) != REAL_TYPE
10559 && ((abs_optab->handlers[(int) operand_mode].insn_code
10560 != CODE_FOR_nothing)
10561 || (ffs_optab->handlers[(int) operand_mode].insn_code
10562 != CODE_FOR_nothing)))
10564 else
10565 return 0;
10568 preexpand_calls (exp);
10569 if (subtarget == 0 || GET_CODE (subtarget) != REG
10570 || GET_MODE (subtarget) != operand_mode
10571 || ! safe_from_p (subtarget, arg1))
10572 subtarget = 0;
10574 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10575 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10577 if (target == 0)
10578 target = gen_reg_rtx (mode);
10580 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10581 because, if the emit_store_flag does anything it will succeed and
10582 OP0 and OP1 will not be used subsequently. */
10584 result = emit_store_flag (target, code,
10585 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10586 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10587 operand_mode, unsignedp, 1);
10589 if (result)
10591 if (invert)
10592 result = expand_binop (mode, xor_optab, result, const1_rtx,
10593 result, 0, OPTAB_LIB_WIDEN);
10594 return result;
10597 /* If this failed, we have to do this with set/compare/jump/set code. */
10598 if (target == 0 || GET_CODE (target) != REG
10599 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10600 target = gen_reg_rtx (GET_MODE (target));
10602 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10603 result = compare_from_rtx (op0, op1, code, unsignedp,
10604 operand_mode, NULL_RTX, 0);
10605 if (GET_CODE (result) == CONST_INT)
10606 return (((result == const0_rtx && ! invert)
10607 || (result != const0_rtx && invert))
10608 ? const0_rtx : const1_rtx);
10610 label = gen_label_rtx ();
10611 if (bcc_gen_fctn[(int) code] == 0)
10612 abort ();
10614 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10615 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10616 emit_label (label);
10618 return target;
10621 /* Generate a tablejump instruction (used for switch statements). */
10623 #ifdef HAVE_tablejump
10625 /* INDEX is the value being switched on, with the lowest value
10626 in the table already subtracted.
10627 MODE is its expected mode (needed if INDEX is constant).
10628 RANGE is the length of the jump table.
10629 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10631 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10632 index value is out of range. */
10634 void
10635 do_tablejump (index, mode, range, table_label, default_label)
10636 rtx index, range, table_label, default_label;
10637 enum machine_mode mode;
10639 register rtx temp, vector;
10641 /* Do an unsigned comparison (in the proper mode) between the index
10642 expression and the value which represents the length of the range.
10643 Since we just finished subtracting the lower bound of the range
10644 from the index expression, this comparison allows us to simultaneously
10645 check that the original index expression value is both greater than
10646 or equal to the minimum value of the range and less than or equal to
10647 the maximum value of the range. */
10649 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
10650 emit_jump_insn (gen_bgtu (default_label));
10652 /* If index is in range, it must fit in Pmode.
10653 Convert to Pmode so we can index with it. */
10654 if (mode != Pmode)
10655 index = convert_to_mode (Pmode, index, 1);
10657 /* Don't let a MEM slip through, because then INDEX that comes
10658 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10659 and break_out_memory_refs will go to work on it and mess it up. */
10660 #ifdef PIC_CASE_VECTOR_ADDRESS
10661 if (flag_pic && GET_CODE (index) != REG)
10662 index = copy_to_mode_reg (Pmode, index);
10663 #endif
10665 /* If flag_force_addr were to affect this address
10666 it could interfere with the tricky assumptions made
10667 about addresses that contain label-refs,
10668 which may be valid only very near the tablejump itself. */
10669 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10670 GET_MODE_SIZE, because this indicates how large insns are. The other
10671 uses should all be Pmode, because they are addresses. This code
10672 could fail if addresses and insns are not the same size. */
10673 index = gen_rtx (PLUS, Pmode,
10674 gen_rtx (MULT, Pmode, index,
10675 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10676 gen_rtx (LABEL_REF, Pmode, table_label));
10677 #ifdef PIC_CASE_VECTOR_ADDRESS
10678 if (flag_pic)
10679 index = PIC_CASE_VECTOR_ADDRESS (index);
10680 else
10681 #endif
10682 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10683 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10684 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
10685 RTX_UNCHANGING_P (vector) = 1;
10686 convert_move (temp, vector, 0);
10688 emit_jump_insn (gen_tablejump (temp, table_label));
10690 #ifndef CASE_VECTOR_PC_RELATIVE
10691 /* If we are generating PIC code or if the table is PC-relative, the
10692 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10693 if (! flag_pic)
10694 emit_barrier ();
10695 #endif
10698 #endif /* HAVE_tablejump */
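/* Illustrative sketch (not part of the original file): in C terms the code
   above implements the classic dense switch dispatch:

     index -= minimum case value;	(done by our caller)
     if ((unsigned) index > range)	(one unsigned compare checks
       goto default_label;		 both bounds at once)
     goto *table[index];		(entry is GET_MODE_SIZE
					 (CASE_VECTOR_MODE) bytes) */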
10701 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
10702 to that value is on the top of the stack. The resulting type is TYPE, and
10703 the source declaration is DECL. */
10705 void
10706 bc_load_memory (type, decl)
10707 tree type, decl;
10709 enum bytecode_opcode opcode;
10712 /* Bit fields are special. We only know about signed and
10713 unsigned ints, and enums. The latter are treated as
10714 signed integers. */
10716 if (DECL_BIT_FIELD (decl))
10717 if (TREE_CODE (type) == ENUMERAL_TYPE
10718 || TREE_CODE (type) == INTEGER_TYPE)
10719 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
10720 else
10721 abort ();
10722 else
10723 /* See corresponding comment in bc_store_memory(). */
10724 if (TYPE_MODE (type) == BLKmode
10725 || TYPE_MODE (type) == VOIDmode)
10726 return;
10727 else
10728 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
10730 if (opcode == neverneverland)
10731 abort ();
10733 bc_emit_bytecode (opcode);
10735 #ifdef DEBUG_PRINT_CODE
10736 fputc ('\n', stderr);
10737 #endif
10741 /* Store the contents of the second stack slot to the address in the
10742 top stack slot. DECL is the declaration of the destination and is used
10743 to determine whether we're dealing with a bitfield. */
10745 void
10746 bc_store_memory (type, decl)
10747 tree type, decl;
10749 enum bytecode_opcode opcode;
10752 if (DECL_BIT_FIELD (decl))
10754 if (TREE_CODE (type) == ENUMERAL_TYPE
10755 || TREE_CODE (type) == INTEGER_TYPE)
10756 opcode = sstoreBI;
10757 else
10758 abort ();
10760 else
10761 if (TYPE_MODE (type) == BLKmode)
10763 /* Copy structure. This expands to a block copy instruction, storeBLK.
10764 In addition to the arguments expected by the other store instructions,
10765 it also expects a type size (SImode) on top of the stack, which is the
10766 structure size in size units (usually bytes). The first two arguments
10767 are already on the stack, so we just put the size on level 1. For some
10768 other languages the size may be variable; this is why we don't encode it
10769 as a storeBLK literal, but rather treat it as a full-fledged expression. */
10771 bc_expand_expr (TYPE_SIZE (type));
10772 opcode = storeBLK;
10774 else
10775 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
10777 if (opcode == neverneverland)
10778 abort ();
10780 bc_emit_bytecode (opcode);
10782 #ifdef DEBUG_PRINT_CODE
10783 fputc ('\n', stderr);
10784 #endif
10788 /* Allocate local stack space sufficient to hold a value of the given
10789 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
10790 integral power of 2. A special case is locals of type VOID, which
10791 have size 0 and alignment 1; any "voidish" SIZE or ALIGNMENT is
10792 remapped into the corresponding attribute of SI. */
10795 bc_allocate_local (size, alignment)
10796 int size, alignment;
10798 rtx retval;
10799 int byte_alignment;
10801 if (size < 0)
10802 abort ();
10804 /* Normalize size and alignment */
10805 if (!size)
10806 size = UNITS_PER_WORD;
10808 if (alignment < BITS_PER_UNIT)
10809 byte_alignment = 1 << (INT_ALIGN - 1);
10810 else
10811 /* Align */
10812 byte_alignment = alignment / BITS_PER_UNIT;
10814 if (local_vars_size & (byte_alignment - 1))
10815 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
10817 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
10818 local_vars_size += size;
10820 return retval;
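/* Example (not from the original sources): with local_vars_size == 5 and
   byte_alignment == 4, the adjustment above adds 3 so the new local starts
   at offset 8.  For power-of-two alignments it is equivalent to
     local_vars_size = (local_vars_size + byte_alignment - 1)
			& ~(byte_alignment - 1); */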
10824 /* Allocate variable-sized local array. Variable-sized arrays are
10825 actually pointers to the address in memory where they are stored. */
10828 bc_allocate_variable_array (size)
10829 tree size;
10831 rtx retval;
10832 const int ptralign = (1 << (PTR_ALIGN - 1));
10834 /* Align pointer */
10835 if (local_vars_size & ptralign)
10836 local_vars_size += ptralign - (local_vars_size & ptralign);
10838 /* Note down local space needed: pointer to block; also return
10839 dummy rtx */
10841 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
10842 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
10843 return retval;
10847 /* Push the machine address for the given external variable offset. */
10848 void
10849 bc_load_externaddr (externaddr)
10850 rtx externaddr;
10852 bc_emit_bytecode (constP);
10853 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
10854 BYTECODE_BC_LABEL (externaddr)->offset);
10856 #ifdef DEBUG_PRINT_CODE
10857 fputc ('\n', stderr);
10858 #endif
10862 static char *
10863 bc_strdup (s)
10864 char *s;
10866 char *new = (char *) xmalloc ((strlen (s) + 1) * sizeof *s);
10867 strcpy (new, s);
10868 return new;
10872 /* Like bc_load_externaddr above, but expects an IDENTIFIER. */
10873 void
10874 bc_load_externaddr_id (id, offset)
10875 tree id;
10876 int offset;
10878 if (!IDENTIFIER_POINTER (id))
10879 abort ();
10881 bc_emit_bytecode (constP);
10882 bc_emit_code_labelref (bc_strdup (IDENTIFIER_POINTER (id)), offset);
10884 #ifdef DEBUG_PRINT_CODE
10885 fputc ('\n', stderr);
10886 #endif
10890 /* Push the machine address for the given local variable offset. */
10891 void
10892 bc_load_localaddr (localaddr)
10893 rtx localaddr;
10895 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
10899 /* Push the machine address for the given parameter offset.
10900 NOTE: offset is in bits. */
10901 void
10902 bc_load_parmaddr (parmaddr)
10903 rtx parmaddr;
10905 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
10906 / BITS_PER_UNIT));
10910 /* Convert a[i] into *(a + i). */
10911 tree
10912 bc_canonicalize_array_ref (exp)
10913 tree exp;
10915 tree type = TREE_TYPE (exp);
10916 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
10917 TREE_OPERAND (exp, 0));
10918 tree index = TREE_OPERAND (exp, 1);
10921 /* Convert the integer argument to a type the same size as a pointer
10922 so the multiply won't overflow spuriously. */
10924 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
10925 index = convert (type_for_size (POINTER_SIZE, 0), index);
10927 /* The array address isn't volatile even if the array is.
10928 (Of course this isn't terribly relevant since the bytecode
10929 translator treats nearly everything as volatile anyway.) */
10930 TREE_THIS_VOLATILE (array_adr) = 0;
10932 return build1 (INDIRECT_REF, type,
10933 fold (build (PLUS_EXPR,
10934 TYPE_POINTER_TO (type),
10935 array_adr,
10936 fold (build (MULT_EXPR,
10937 TYPE_POINTER_TO (type),
10938 index,
10939 size_in_bytes (type))))));
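/* Example (not from the original sources): for `int a[10]' the reference
   a[3] becomes, in byte terms,
     *(&a[0] + 3 * sizeof (int))
   i.e. an INDIRECT_REF of a PLUS_EXPR whose second operand is the index
   scaled by size_in_bytes (type). */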
10943 /* Load the address of the component referenced by the given
10944 COMPONENT_REF expression.
10946 Returns innermost lvalue. */
10948 tree
10949 bc_expand_component_address (exp)
10950 tree exp;
10952 tree tem, chain;
10953 enum machine_mode mode;
10954 int bitpos = 0;
10955 HOST_WIDE_INT SIval;
10958 tem = TREE_OPERAND (exp, 1);
10959 mode = DECL_MODE (tem);
10962 /* Compute cumulative bit offset for nested component refs
10963 and array refs, and find the ultimate containing object. */
10965 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
10967 if (TREE_CODE (tem) == COMPONENT_REF)
10968 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
10969 else
10970 if (TREE_CODE (tem) == ARRAY_REF
10971 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
10972 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
10974 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
10975 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
10976 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
10977 else
10978 break;
10981 bc_expand_expr (tem);
10984 /* For bitfields also push their offset and size */
10985 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
10986 bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
10987 else
10988 if (SIval = bitpos / BITS_PER_UNIT)
10989 bc_emit_instruction (addconstPSI, SIval);
10991 return (TREE_OPERAND (exp, 1));
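/* Example (not from the original sources): for `s.outer.inner' the loop
   above sums DECL_FIELD_BITPOS of `inner' and `outer', expands the address
   of `s', and then adds the total offset in bytes with addconstPSI, unless
   the field is a bit field, in which case its bit offset and size are
   pushed instead for the bit-field loads. */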

/* Emit code to push two SI constants.  */

void
bc_push_offset_and_size (offset, size)
     HOST_WIDE_INT offset, size;
{
  bc_emit_instruction (constSI, offset);
  bc_emit_instruction (constSI, size);
}

/* Emit byte code to push the address of the given lvalue expression to
   the stack.  If it's a bit field, we also push offset and size info.

   Returns innermost component, which allows us to determine not only
   its type, but also whether it's a bitfield.  */

tree
bc_expand_address (exp)
     tree exp;
{
  /* Safeguard */
  if (!exp || TREE_CODE (exp) == ERROR_MARK)
    return (exp);


  switch (TREE_CODE (exp))
    {
    case ARRAY_REF:

      return (bc_expand_address (bc_canonicalize_array_ref (exp)));

    case COMPONENT_REF:

      return (bc_expand_component_address (exp));

    case INDIRECT_REF:

      bc_expand_expr (TREE_OPERAND (exp, 0));

      /* For variable-sized types: retrieve pointer.  Sometimes the
	 TYPE_SIZE tree is NULL.  Is this a bug or a feature?  Let's
	 also make sure we have an operand, just in case...  */

      if (TREE_OPERAND (exp, 0)
	  && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
	bc_emit_instruction (loadP);

      /* If packed, also return offset and size.  */
      if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
	bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
				 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));

      return (TREE_OPERAND (exp, 0));

    case FUNCTION_DECL:

      bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
			     BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
      break;

    case PARM_DECL:

      bc_load_parmaddr (DECL_RTL (exp));

      /* For variable-sized types: retrieve pointer.  */
      if (TYPE_SIZE (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
	bc_emit_instruction (loadP);

      /* If packed, also return offset and size.  */
      if (DECL_BIT_FIELD (exp))
	bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
				 TREE_INT_CST_LOW (DECL_SIZE (exp)));

      break;

    case RESULT_DECL:

      bc_emit_instruction (returnP);
      break;

    case VAR_DECL:

#if 0
      if (BYTECODE_LABEL (DECL_RTL (exp)))
	bc_load_externaddr (DECL_RTL (exp));
#endif

      if (DECL_EXTERNAL (exp))
	bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
			       (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
      else
	bc_load_localaddr (DECL_RTL (exp));

      /* For variable-sized types: retrieve pointer.  */
      if (TYPE_SIZE (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
	bc_emit_instruction (loadP);

      /* If packed, also return offset and size.  */
      if (DECL_BIT_FIELD (exp))
	bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
				 TREE_INT_CST_LOW (DECL_SIZE (exp)));

      break;

    case STRING_CST:
      {
	rtx r;

	bc_emit_bytecode (constP);
	r = output_constant_def (exp);
	bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);

#ifdef DEBUG_PRINT_CODE
	fputc ('\n', stderr);
#endif
      }
      break;

    default:

      abort ();
      break;
    }

  /* Most lvalues don't have components.  */
  return (exp);
}

/* Emit a type code to be used by the runtime support in handling
   parameter passing.  The type code consists of the machine mode in
   the low 8 bits, or'ed with the minimal alignment shifted left 8
   bits.  */

tree
bc_runtime_type_code (type)
     tree type;
{
  int val;

  switch (TREE_CODE (type))
    {
    case VOID_TYPE:
    case INTEGER_TYPE:
    case REAL_TYPE:
    case COMPLEX_TYPE:
    case ENUMERAL_TYPE:
    case POINTER_TYPE:
    case RECORD_TYPE:

      val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
      break;

    case ERROR_MARK:

      val = 0;
      break;

    default:

      abort ();
    }
  return build_int_2 (val, 0);
}
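
/* Worked example of the encoding (illustrative; the numeric value of a
   machine mode is machine-generated and varies by target): for an `int'
   with TYPE_MODE SImode and TYPE_ALIGN 32, val = (int) SImode | 32 << 8,
   so the runtime can recover the mode as val & 0xff and the alignment
   as val >> 8.  */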

/* Generate constructor label.  */

char *
bc_gen_constr_label ()
{
  static int label_counter;
  static char label[20];

  sprintf (label, "*LR%d", label_counter++);

  return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
}

/* Evaluate constructor CONSTR and return pointer to it on level one.  We
   expand the constructor data as static data, and push a pointer to it.
   The pointer is put in the pointer table and is retrieved by a constP
   bytecode instruction.  We then loop and store each constructor member in
   the corresponding component.  Finally, we return the original pointer on
   the stack.  */

void
bc_expand_constructor (constr)
     tree constr;
{
  char *l;
  HOST_WIDE_INT ptroffs;
  rtx constr_rtx;


  /* Literal constructors are handled as constants, whereas
     non-literals are evaluated and stored element by element
     into the data segment.  */

  /* Allocate space in proper segment and push pointer to space on stack.  */

  l = bc_gen_constr_label ();

  if (TREE_CONSTANT (constr))
    {
      text_section ();

      bc_emit_const_labeldef (l);
      bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
    }
  else
    {
      data_section ();

      bc_emit_data_labeldef (l);
      bc_output_data_constructor (constr);
    }


  /* Add reference to pointer table and recall pointer to stack;
     this code is common for both types of constructors: literals
     and non-literals.  */

  ptroffs = bc_define_pointer (l);
  bc_emit_instruction (constP, ptroffs);

  /* This is all that has to be done if it's a literal.  */
  if (TREE_CONSTANT (constr))
    return;


  /* At this point, we have the pointer to the structure on top of the stack.
     Generate sequences of store_memory calls for the constructor.  */

  /* Constructor type is structure.  */
  if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
    {
      register tree elt;

      /* If the constructor has fewer fields than the structure,
	 clear the whole structure first.  */

      if (list_length (CONSTRUCTOR_ELTS (constr))
	  != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
	{
	  bc_emit_instruction (duplicate);
	  bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
	  bc_emit_instruction (clearBLK);
	}

      /* Store each element of the constructor into the corresponding
	 field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
	{
	  register tree field = TREE_PURPOSE (elt);
	  register enum machine_mode mode;
	  int bitsize;
	  int bitpos;
	  int unsignedp;

	  bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
	  mode = DECL_MODE (field);
	  unsignedp = TREE_UNSIGNED (field);

	  bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));

	  bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
			  /* The alignment of TARGET is
			     at least what its type requires.  */
			  VOIDmode, 0,
			  TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
			  int_size_in_bytes (TREE_TYPE (constr)));
	}
    }
  /* Constructor type is array.  */
  else if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
    {
      register tree elt;
      register int i;
      tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
      int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
      int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
      tree elttype = TREE_TYPE (TREE_TYPE (constr));

      /* If the constructor has fewer elements than the array,
	 clear the whole array first.  */

      if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
	{
	  bc_emit_instruction (duplicate);
	  bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
	  bc_emit_instruction (clearBLK);
	}


      /* Store each element of the constructor into the corresponding
	 element of TARGET, determined by counting the elements.  */

      for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
	   elt;
	   elt = TREE_CHAIN (elt), i++)
	{
	  register enum machine_mode mode;
	  int bitsize;
	  int bitpos;
	  int unsignedp;

	  mode = TYPE_MODE (elttype);
	  bitsize = GET_MODE_BITSIZE (mode);
	  unsignedp = TREE_UNSIGNED (elttype);

	  bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
		    /* * TYPE_SIZE_UNIT (elttype) */ );

	  bc_store_field (elt, bitsize, bitpos, mode,
			  TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
			  /* The alignment of TARGET is
			     at least what its type requires.  */
			  VOIDmode, 0,
			  TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
			  int_size_in_bytes (TREE_TYPE (constr)));
	}
    }
}
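
/* Illustrative opcode trace (hypothetical sizes and pointer-table
   offsets; only instructions used above appear): for a non-literal,
   partial constructor such as `struct { int x, y; } s = { f () };'
   the code emitted is roughly

	constP	 <offset of *LR0>	; recall pointer to static space
	duplicate
	constSI	 8			; int_size_in_bytes (struct)
	clearBLK			; fewer inits than fields: clear
	...code for f ()...		; then bc_store_field stores x

   leaving the original pointer on level one of the stack.  */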

/* Store the value of EXP (an expression tree) into member FIELD of
   structure at address on stack, which has type TYPE, mode MODE and
   occupies BITSIZE bits, starting BITPOS bits from the beginning of the
   structure.

   ALIGN is the alignment that TARGET is known to have, measured in bytes.
   TOTAL_SIZE is its size in bytes, or -1 if variable.  */

void
bc_store_field (field, bitsize, bitpos, mode, exp, type,
		value_mode, unsignedp, align, total_size)
     int bitsize, bitpos;
     enum machine_mode mode;
     tree field, exp, type;
     enum machine_mode value_mode;
     int unsignedp;
     int align;
     int total_size;
{
  /* Expand expression and copy pointer.  */
  bc_expand_expr (exp);
  bc_emit_instruction (over);


  /* If the component is a bit field, we cannot use addressing to access
     it.  Use bit-field techniques to store in it.  */

  if (DECL_BIT_FIELD (field))
    {
      bc_store_bit_field (bitpos, bitsize, unsignedp);
      return;
    }
  else
    /* Not bit field */
    {
      HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;

      /* Advance pointer to the desired member.  */
      if (offset)
	bc_emit_instruction (addconstPSI, offset);

      /* Store */
      bc_store_memory (type, field);
    }
}
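
/* Stack discipline of the non-bit-field path above, assuming `over'
   copies the second stack entry to the top (as its use here suggests):

	[ptr]			caller has pushed the structure address
	[ptr, val]		after bc_expand_expr (exp)
	[ptr, val, ptr]		after over
	[ptr, val, ptr+off]	after addconstPSI offset
	[ptr]			after the store consumes val and the
				member address  */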

/* Store SI/SU in bitfield.  */

void
bc_store_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
  /* Push bitfield offset and size.  */
  bc_push_offset_and_size (offset, size);

  /* Store */
  bc_emit_instruction (sstoreBI);
}

/* Load SI/SU from bitfield.  */

void
bc_load_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
  /* Push bitfield offset and size.  */
  bc_push_offset_and_size (offset, size);

  /* Load: sign-extend if signed, else zero-extend.  */
  bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
}
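
/* For illustration: loading a signed bitfield of 5 bits at bit offset 3
   emits

	constSI	3	; offset
	constSI	5	; size
	sxloadBI	; sign-extending load

   while an unsigned field of the same shape uses zxloadBI instead.  */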

/* Adjust interpreter stack by NLEVELS.  Positive means drop NLEVELS
   (adjust stack pointer upwards), negative means add that number of
   levels (adjust the stack pointer downwards).  Only positive values
   normally make sense.  */

void
bc_adjust_stack (nlevels)
     int nlevels;
{
  switch (nlevels)
    {
    case 0:
      break;

    case 2:
      bc_emit_instruction (drop);
      /* Falls through to drop the second level as well.  */

    case 1:
      bc_emit_instruction (drop);
      break;

    default:

      bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
      stack_depth -= nlevels;
    }

#if defined (VALIDATE_STACK_FOR_BC)
  VALIDATE_STACK_FOR_BC ();
#endif
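
  /* For illustration: bc_adjust_stack (2) emits two consecutive `drop'
     instructions via the fall-through above, while bc_adjust_stack (5)
     emits a single adjstackSI 5.  */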