Import final gcc2 snapshot (990109)
official-gcc.git / gcc / expr.c
/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include "config.h"
#include "system.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "defaults.h"

#include "bytecode.h"
#include "bc-opcode.h"
#include "bc-typecd.h"
#include "bc-optab.h"
#include "bc-emit.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))
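/* e.g. CEIL (7, 4) == 2: seven bytes occupy two four-byte words.  */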
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* Don't check memory usage, since code is being emitted to check a memory
   usage.  Used when current_function_check_memory_usage is true, to avoid
   infinite recursion.  */
static int in_check_memory_usage;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int to_readonly;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int from_readonly;
  int len;
  int offset;
  int reverse;
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */
struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};

/* Used to generate bytecodes: keep track of size of local variables,
   as well as depth of arithmetic stack.  (Notice that variables are
   stored on the machine's stack, not the arithmetic stack.)  */

static rtx get_push_address	PROTO ((int));
extern int local_vars_size;
extern int stack_depth;
extern int max_stack_depth;
extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;

static rtx enqueue_insn		PROTO((rtx, rtx));
static int queued_subexp_p	PROTO((rtx));
static void init_queue		PROTO((void));
static void move_by_pieces	PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1	PROTO((rtx (*) PVPROTO((rtx, ...)),
				       enum machine_mode,
				       struct move_by_pieces *));
static void clear_by_pieces	PROTO((rtx, int, int));
static void clear_by_pieces_1	PROTO((rtx (*) PVPROTO((rtx, ...)),
				       enum machine_mode,
				       struct clear_by_pieces *));
static int is_zeros_p		PROTO((tree));
static int mostly_zeros_p	PROTO((tree));
static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
					    tree, tree, int, int));
static void store_constructor	PROTO((tree, rtx, int, int));
static rtx store_field		PROTO((rtx, int, int, enum machine_mode, tree,
				       enum machine_mode, int, int, int));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p		PROTO((rtx, tree, int));
static int fixed_type_p		PROTO((tree));
static rtx var_rtx		PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant	PROTO((tree, tree *));
static tree c_strlen		PROTO((tree));
static rtx expand_builtin	PROTO((tree, rtx, rtx,
				       enum machine_mode, int));
static int apply_args_size	PROTO((void));
static int apply_result_size	PROTO((void));
static rtx result_vector	PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply	PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment	PROTO((tree, int, int));
void bc_expand_increment	PROTO((struct increment_operator *, tree));
rtx bc_allocate_local		PROTO((int, int));
void bc_store_memory		PROTO((tree, tree));
tree bc_expand_component_address PROTO((tree));
tree bc_expand_address		PROTO((tree));
void bc_expand_constructor	PROTO((tree));
void bc_adjust_stack		PROTO((int));
tree bc_canonicalize_array_ref	PROTO((tree));
void bc_load_memory		PROTO((tree, tree));
void bc_load_externaddr		PROTO((rtx));
void bc_load_externaddr_id	PROTO((tree, int));
void bc_load_localaddr		PROTO((rtx));
void bc_load_parmaddr		PROTO((rtx));
static void preexpand_calls	PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare	PROTO((rtx, rtx, rtx));
static rtx compare		PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag	PROTO((tree, rtx, enum machine_mode, int));
extern tree truthvalue_conversion PROTO((tree));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif
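/* Illustration (hypothetical 32-bit target with MOVE_RATIO of 15):
   copying a word-aligned 32-byte block costs 32/4 = 8 SImode moves,
   which is below the ratio, so emit_block_move below expands it inline
   via move_by_pieces; a 128-byte block would cost 32 such moves, so a
   movstr pattern or a memcpy/bcopy libcall is used instead.  */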
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif

/* Maps used to convert modes to const, load, and store bytecodes.  */
enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];

/* Initialize maps used to convert modes to const, load, and store
   bytecodes.  */

void
bc_init_mode_to_opcode_maps ()
{
  int mode;

  for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
    mode_to_const_map[mode]
      = mode_to_load_map[mode]
      = mode_to_store_map[mode] = neverneverland;

#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
  mode_to_const_map[(int) SYM] = CONST; \
  mode_to_load_map[(int) SYM] = LOAD; \
  mode_to_store_map[(int) SYM] = STORE;

#include "modemap.def"
#undef DEF_MODEMAP
}

/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  rtx mem, mem1;
  char *free_point;

  start_sequence ();

  /* Since we are on the permanent obstack, we must be sure we save this
     spot AFTER we call start_sequence, since it will reuse the rtl it
     makes.  */
  free_point = (char *) oballoc (0);

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
  obfree (free_point);
}
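/* The loop above re-uses one dummy SET insn as a probe: for each mode it
   plugs (mem -> reg) and (reg -> mem) operand pairs into the pattern and
   asks recog whether any insn in the machine description matches.  If no
   hard register in that mode can be loaded or stored directly, the
   corresponding direct_load/direct_store entry stays 0, and later code
   avoids touching memory in that mode.  */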
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
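/* Illustrative sketch (hypothetical caller, never compiled): how the
   queue primitives fit together when expanding a postincrement such as
   `a[i++]'.  VAR is assumed to be a pseudo holding `i'.  */
#if 0
static void
example_postincrement (var)
     rtx var;
{
  register rtx q, use;

  /* Queue "var = var + 1" instead of emitting it immediately.  */
  q = enqueue_insn (var, gen_move_insn (var, plus_constant (var, 1)));

  /* Immediately before putting the pre-increment value into an insn,
     filter it through protect_from_queue.  */
  use = protect_from_queue (q, 0);

  /* ... expand the containing expression using USE ... */

  /* Flush the queued increment.  */
  emit_queue ();
}
#endif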
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */		/* SPEE 900220 */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
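/* Illustrative sketch (hypothetical, never compiled): widening an
   unsigned HImode pseudo into an SImode pseudo.  convert_move picks a
   zero-extend insn when the target has one, else one of the fallback
   paths above.  */
#if 0
static void
example_zero_extend ()
{
  rtx hi = gen_reg_rtx (HImode);
  rtx si = gen_reg_rtx (SImode);

  convert_move (si, hi, 1);	/* UNSIGNEDP == 1: zero-extension.  */
}
#endif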
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
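/* Worked example of the CONST_INT widening above (hypothetical values):
   widening the QImode constant 0xff (val = 255, width = 8).
   Zero-extending:  val &= (1 << 8) - 1  keeps 255, giving GEN_INT (255).
   Sign-extending:  bit 7 is set, so val |= -1 << 8 yields -1, giving
   GEN_INT (-1) -- the same bit pattern reinterpreted as signed.  */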
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);
  data.to_readonly = RTX_UNCHANGING_P (to);
  data.from_readonly = RTX_UNCHANGING_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
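/* Worked example (hypothetical 32-bit target, MOVE_MAX 4, word-aligned
   operands): L = 7 bytes gives one SImode move (7/4 = 1, remainder 3),
   one HImode move (3/2 = 1, remainder 1), and one QImode move, so
   n_insns == 3.  */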
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PVPROTO((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx_MEM (mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;
      RTX_UNCHANGING_P (to1) = data->to_readonly;

      from1
	= (data->autinc_from
	   ? gen_rtx_MEM (mode, data->from_addr)
	   : copy_rtx (change_address (data->from, mode,
				       plus_constant (data->from_addr,
						      data->offset))));
      MEM_IN_STRUCT_P (from1) = data->from_struct;
      RTX_UNCHANGING_P (from1) = data->from_readonly;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}

/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  rtx retval = 0;

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= (GET_MODE_MASK (mode) >> 1)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return 0;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      retval
	= emit_library_call_value (memcpy_libfunc, NULL_RTX, 0,
				   ptr_mode, 3, XEXP (x, 0), Pmode,
				   XEXP (y, 0), Pmode,
				   convert_to_mode (TYPE_MODE (sizetype), size,
						    TREE_UNSIGNED (sizetype)),
				   TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
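/* Typical call (hypothetical DEST/SRC MEMs): copying a 16-byte BLKmode
   temporary with known 4-byte alignment:

     emit_block_move (dest, src, GEN_INT (16), 4);

   With the default MOVE_RATIO on a movstr-less 32-bit target, the
   constant size needs only 16/4 = 4 moves, which is below the ratio,
   so this expands inline to four SImode moves.  */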
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
  rtx pat, last;
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (change_address (x, mode, NULL),
		      gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}

/* Emit code to move a block Y to a block X, where X is non-consecutive
   registers represented by a PARALLEL.  */

void
emit_group_load (x, y)
     rtx x, y;
{
  rtx target_reg, source;
  int i;

  if (GET_CODE (x) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (x, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (x, 0); i++)
    {
      rtx element = XVECEXP (x, 0, i);

      target_reg = XEXP (element, 0);

      if (GET_CODE (y) == MEM)
	source = change_address (y, GET_MODE (target_reg),
				 plus_constant (XEXP (y, 0),
						INTVAL (XEXP (element, 1))));
      else if (XEXP (element, 1) == const0_rtx)
	{
	  if (GET_MODE (target_reg) == GET_MODE (y))
	    source = y;
	  /* Allow for the target_reg to be smaller than the input register
	     to allow for AIX with 4 DF arguments after a single SI arg.  The
	     last DF argument will only load 1 word into the integer registers,
	     but load a DF value into the float registers.  */
	  else if ((GET_MODE_SIZE (GET_MODE (target_reg))
		    <= GET_MODE_SIZE (GET_MODE (y)))
		   && GET_MODE (target_reg) == word_mode)
	    /* This might be a const_double, so we can't just use SUBREG.  */
	    source = operand_subword (y, 0, 0, VOIDmode);
	  else if (GET_MODE_SIZE (GET_MODE (target_reg))
		   == GET_MODE_SIZE (GET_MODE (y)))
	    source = gen_lowpart (GET_MODE (target_reg), y);
	  else
	    abort ();
	}
      else
	abort ();

      emit_move_insn (target_reg, source);
    }
}

/* Emit code to move a block Y to a block X, where Y is non-consecutive
   registers represented by a PARALLEL.  */

void
emit_group_store (x, y)
     rtx x, y;
{
  rtx source_reg, target;
  int i;

  if (GET_CODE (y) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (y, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (y, 0); i++)
    {
      rtx element = XVECEXP (y, 0, i);

      source_reg = XEXP (element, 0);

      if (GET_CODE (x) == MEM)
	target = change_address (x, GET_MODE (source_reg),
				 plus_constant (XEXP (x, 0),
						INTVAL (XEXP (element, 1))));
      else if (XEXP (element, 1) == const0_rtx)
	{
	  target = x;
	  if (GET_MODE (target) != GET_MODE (source_reg))
	    target = gen_lowpart (GET_MODE (source_reg), target);
	}
      else
	abort ();

      emit_move_insn (target, source_reg);
    }
}
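/* Illustration: such a PARALLEL describes where each piece of the value
   lives.  For a hypothetical ABI that returns a 16-byte structure half
   in an integer register and half in a float register, it might look like

     (parallel [(expr_list (reg:DI 3) (const_int 0))
		(expr_list (reg:DF 33) (const_int 8))])

   where each CONST_INT is the byte offset of that piece within the
   whole value; emit_group_load/emit_group_store walk these entries.  */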
1964 /* Add a USE expression for REG to the (possibly empty) list pointed
1965 to by CALL_FUSAGE. REG must denote a hard register. */
1967 void
1968 use_reg (call_fusage, reg)
1969 rtx *call_fusage, reg;
1971 if (GET_CODE (reg) != REG
1972 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
1973 abort();
1975 *call_fusage
1976 = gen_rtx_EXPR_LIST (VOIDmode,
1977 gen_rtx_USE (VOIDmode, reg), *call_fusage);
1980 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1981 starting at REGNO. All of these registers must be hard registers. */
1983 void
1984 use_regs (call_fusage, regno, nregs)
1985 rtx *call_fusage;
1986 int regno;
1987 int nregs;
1989 int i;
1991 if (regno + nregs > FIRST_PSEUDO_REGISTER)
1992 abort ();
1994 for (i = 0; i < nregs; i++)
1995 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
1998 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
1999 PARALLEL REGS. This is for calls that pass values in multiple
2000 non-contiguous locations. The Irix 6 ABI has examples of this. */
2002 void
2003 use_group_regs (call_fusage, regs)
2004 rtx *call_fusage;
2005 rtx regs;
2007 int i;
2009 for (i = 0; i < XVECLEN (regs, 0); i++)
2011 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2013 /* A NULL entry means the parameter goes both on the stack and in
2014 registers. This can also be a MEM for targets that pass values
2015 partially on the stack and partially in registers. */
2016 if (reg != 0 && GET_CODE (reg) == REG)
2017 use_reg (call_fusage, reg);
2021 /* Generate several move instructions to clear LEN bytes of block TO.
2022 (A MEM rtx with BLKmode). The caller must pass TO through
2023 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
2024 we can assume. */
2026 static void
2027 clear_by_pieces (to, len, align)
2028 rtx to;
2029 int len, align;
2031 struct clear_by_pieces data;
2032 rtx to_addr = XEXP (to, 0);
2033 int max_size = MOVE_MAX + 1;
2035 data.offset = 0;
2036 data.to_addr = to_addr;
2037 data.to = to;
2038 data.autinc_to
2039 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2040 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2042 data.explicit_inc_to = 0;
2043 data.reverse
2044 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2045 if (data.reverse) data.offset = len;
2046 data.len = len;
2048 data.to_struct = MEM_IN_STRUCT_P (to);
2050 /* If copying requires more than two move insns,
2051 copy addresses to registers (to make displacements shorter)
2052 and use post-increment if available. */
2053 if (!data.autinc_to
2054 && move_by_pieces_ninsns (len, align) > 2)
2056 #ifdef HAVE_PRE_DECREMENT
2057 if (data.reverse && ! data.autinc_to)
2059 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2060 data.autinc_to = 1;
2061 data.explicit_inc_to = -1;
2063 #endif
2064 #ifdef HAVE_POST_INCREMENT
2065 if (! data.reverse && ! data.autinc_to)
2067 data.to_addr = copy_addr_to_reg (to_addr);
2068 data.autinc_to = 1;
2069 data.explicit_inc_to = 1;
2071 #endif
2072 if (!data.autinc_to && CONSTANT_P (to_addr))
2073 data.to_addr = copy_addr_to_reg (to_addr);
2076 if (! SLOW_UNALIGNED_ACCESS
2077 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2078 align = MOVE_MAX;
2080 /* First move what we can in the largest integer mode, then go to
2081 successively smaller modes. */
2083 while (max_size > 1)
2085 enum machine_mode mode = VOIDmode, tmode;
2086 enum insn_code icode;
2088 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2089 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2090 if (GET_MODE_SIZE (tmode) < max_size)
2091 mode = tmode;
2093 if (mode == VOIDmode)
2094 break;
2096 icode = mov_optab->handlers[(int) mode].insn_code;
2097 if (icode != CODE_FOR_nothing
2098 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2099 GET_MODE_SIZE (mode)))
2100 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2102 max_size = GET_MODE_SIZE (mode);
2105 /* The code above should have handled everything. */
2106 if (data.len != 0)
2107 abort ();
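/* Worked example (illustrative): clearing LEN == 7 bytes on a 32-bit
   target (MOVE_MAX == 4) with ALIGN >= 4.  The loop above first emits
   one SImode store of zero (4 bytes), then MAX_SIZE drops to 4 and one
   HImode store clears 2 more bytes, then one QImode store clears the
   last byte, leaving data.len == 0.  */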
2110 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2111 with move instructions for mode MODE. GENFUN is the gen_... function
2112 to make a move insn for that mode. DATA has all the other info. */
2114 static void
2115 clear_by_pieces_1 (genfun, mode, data)
2116 rtx (*genfun) PVPROTO((rtx, ...));
2117 enum machine_mode mode;
2118 struct clear_by_pieces *data;
2120 register int size = GET_MODE_SIZE (mode);
2121 register rtx to1;
2123 while (data->len >= size)
2125 if (data->reverse) data->offset -= size;
2127 to1 = (data->autinc_to
2128 ? gen_rtx_MEM (mode, data->to_addr)
2129 : copy_rtx (change_address (data->to, mode,
2130 plus_constant (data->to_addr,
2131 data->offset))));
2132 MEM_IN_STRUCT_P (to1) = data->to_struct;
2134 #ifdef HAVE_PRE_DECREMENT
2135 if (data->explicit_inc_to < 0)
2136 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2137 #endif
2139 emit_insn ((*genfun) (to1, const0_rtx));
2140 #ifdef HAVE_POST_INCREMENT
2141 if (data->explicit_inc_to > 0)
2142 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2143 #endif
2145 if (! data->reverse) data->offset += size;
2147 data->len -= size;
2151 /* Write zeros through the storage of OBJECT.
2152 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2153 the maximum alignment we can assume it has, measured in bytes.
2155 If we call a function that returns the length of the block, return it. */
2157 rtx
2158 clear_storage (object, size, align)
2159 rtx object;
2160 rtx size;
2161 int align;
2163 rtx retval = 0;
2165 if (GET_MODE (object) == BLKmode)
2167 object = protect_from_queue (object, 1);
2168 size = protect_from_queue (size, 0);
2170 if (GET_CODE (size) == CONST_INT
2171 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2172 clear_by_pieces (object, INTVAL (size), align);
2174 else
2176 /* Try the most limited insn first, because there's no point
2177 including more than one in the machine description unless
2178 the more limited one has some advantage. */
2180 rtx opalign = GEN_INT (align);
2181 enum machine_mode mode;
2183 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2184 mode = GET_MODE_WIDER_MODE (mode))
2186 enum insn_code code = clrstr_optab[(int) mode];
2188 if (code != CODE_FOR_nothing
2189 /* We don't need MODE to be narrower than
2190 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2191 the mode mask, as it is returned by the macro, it will
2192 definitely be less than the actual mode mask. */
2193 && ((GET_CODE (size) == CONST_INT
2194 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2195 <= (GET_MODE_MASK (mode) >> 1)))
2196 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2197 && (insn_operand_predicate[(int) code][0] == 0
2198 || (*insn_operand_predicate[(int) code][0]) (object,
2199 BLKmode))
2200 && (insn_operand_predicate[(int) code][2] == 0
2201 || (*insn_operand_predicate[(int) code][2]) (opalign,
2202 VOIDmode)))
2204 rtx op1;
2205 rtx last = get_last_insn ();
2206 rtx pat;
2208 op1 = convert_to_mode (mode, size, 1);
2209 if (insn_operand_predicate[(int) code][1] != 0
2210 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2211 mode))
2212 op1 = copy_to_mode_reg (mode, op1);
2214 pat = GEN_FCN ((int) code) (object, op1, opalign);
2215 if (pat)
2217 emit_insn (pat);
2218 return 0;
2220 else
2221 delete_insns_since (last);
2226 #ifdef TARGET_MEM_FUNCTIONS
2227 retval
2228 = emit_library_call_value (memset_libfunc, NULL_RTX, 0,
2229 ptr_mode, 3,
2230 XEXP (object, 0), Pmode,
2231 const0_rtx,
2232 TYPE_MODE (integer_type_node),
2233 convert_to_mode
2234 (TYPE_MODE (sizetype), size,
2235 TREE_UNSIGNED (sizetype)),
2236 TYPE_MODE (sizetype));
2237 #else
2238 emit_library_call (bzero_libfunc, 0,
2239 VOIDmode, 2,
2240 XEXP (object, 0), Pmode,
2241 convert_to_mode
2242 (TYPE_MODE (integer_type_node), size,
2243 TREE_UNSIGNED (integer_type_node)),
2244 TYPE_MODE (integer_type_node));
2245 #endif
2248 else
2249 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2251 return retval;
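/* A minimal usage sketch (illustrative), zeroing a 32-byte BLKmode
   object BLK with known 4-byte alignment; BLK is assumed to be a
   (mem:BLK ...) rtx obtained elsewhere:
     clear_storage (blk, GEN_INT (32), 4);
   Depending on MOVE_RATIO, a small constant size like this is cleared
   inline by clear_by_pieces; otherwise a clrstr pattern or the
   memset/bzero library call above is used.  */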
2254 /* Generate code to copy Y into X.
2255 Both Y and X must have the same mode, except that
2256 Y can be a constant with VOIDmode.
2257 This mode cannot be BLKmode; use emit_block_move for that.
2259 Return the last instruction emitted. */
2261 rtx
2262 emit_move_insn (x, y)
2263 rtx x, y;
2265 enum machine_mode mode = GET_MODE (x);
2267 x = protect_from_queue (x, 1);
2268 y = protect_from_queue (y, 0);
2270 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2271 abort ();
2273 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2274 y = force_const_mem (mode, y);
2276 /* If X or Y are memory references, verify that their addresses are valid
2277 for the machine. */
2278 if (GET_CODE (x) == MEM
2279 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2280 && ! push_operand (x, GET_MODE (x)))
2281 || (flag_force_addr
2282 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2283 x = change_address (x, VOIDmode, XEXP (x, 0));
2285 if (GET_CODE (y) == MEM
2286 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2287 || (flag_force_addr
2288 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2289 y = change_address (y, VOIDmode, XEXP (y, 0));
2291 if (mode == BLKmode)
2292 abort ();
2294 return emit_move_insn_1 (x, y);
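/* Usage sketch (illustrative): copying an SImode value SRC into a
   fresh pseudo.
     rtx dst = gen_reg_rtx (SImode);
     emit_move_insn (dst, src);
   A constant source also works, e.g. emit_move_insn (dst, GEN_INT (42)),
   since constants that are not legitimate on the target are forced
   into memory first.  */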
2297 /* Low level part of emit_move_insn.
2298 Called just like emit_move_insn, but assumes X and Y
2299 are basically valid. */
2301 rtx
2302 emit_move_insn_1 (x, y)
2303 rtx x, y;
2305 enum machine_mode mode = GET_MODE (x);
2306 enum machine_mode submode;
2307 enum mode_class class = GET_MODE_CLASS (mode);
2308 int i;
2310 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2311 return
2312 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2314 /* Expand complex moves by moving real part and imag part, if possible. */
2315 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2316 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2317 * BITS_PER_UNIT),
2318 (class == MODE_COMPLEX_INT
2319 ? MODE_INT : MODE_FLOAT),
2320 0))
2321 && (mov_optab->handlers[(int) submode].insn_code
2322 != CODE_FOR_nothing))
2324 /* Don't split destination if it is a stack push. */
2325 int stack = push_operand (x, GET_MODE (x));
2326 rtx insns;
2328 /* If this is a stack, push the highpart first, so it
2329 will be in the argument order.
2331 In that case, change_address is used only to convert
2332 the mode, not to change the address. */
2333 if (stack)
2335 /* Note that the real part always precedes the imag part in memory
2336 regardless of machine's endianness. */
2337 #ifdef STACK_GROWS_DOWNWARD
2338 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2339 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2340 gen_imagpart (submode, y)));
2341 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2342 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2343 gen_realpart (submode, y)));
2344 #else
2345 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2346 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2347 gen_realpart (submode, y)));
2348 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2349 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2350 gen_imagpart (submode, y)));
2351 #endif
2353 else
2355 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2356 (gen_realpart (submode, x), gen_realpart (submode, y)));
2357 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2358 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2361 return get_last_insn ();
2364 /* This will handle any multi-word mode that lacks a move_insn pattern.
2365 However, you will get better code if you define such patterns,
2366 even if they must turn into multiple assembler instructions. */
2367 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2369 rtx last_insn = 0;
2370 rtx insns;
2372 #ifdef PUSH_ROUNDING
2374 /* If X is a push on the stack, do the push now and replace
2375 X with a reference to the stack pointer. */
2376 if (push_operand (x, GET_MODE (x)))
2378 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2379 x = change_address (x, VOIDmode, stack_pointer_rtx);
2381 #endif
2383 /* Show the output dies here. */
2384 if (x != y)
2385 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2387 for (i = 0;
2388 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2389 i++)
2391 rtx xpart = operand_subword (x, i, 1, mode);
2392 rtx ypart = operand_subword (y, i, 1, mode);
2394 /* If we can't get a part of Y, put Y into memory if it is a
2395 constant. Otherwise, force it into a register. If we still
2396 can't get a part of Y, abort. */
2397 if (ypart == 0 && CONSTANT_P (y))
2399 y = force_const_mem (mode, y);
2400 ypart = operand_subword (y, i, 1, mode);
2402 else if (ypart == 0)
2403 ypart = operand_subword_force (y, i, mode);
2405 if (xpart == 0 || ypart == 0)
2406 abort ();
2408 last_insn = emit_move_insn (xpart, ypart);
2411 return last_insn;
2413 else
2414 abort ();
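/* Worked example (illustrative): on a 32-bit target with no movdi
   pattern, a DImode copy reaches the multi-word case above; the
   destination is first clobbered (when X and Y differ) to show it
   dies, then operand_subword extracts word 0 and word 1 of each
   operand and two word-sized moves are emitted via emit_move_insn.  */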
2417 /* Pushing data onto the stack. */
2419 /* Push a block of length SIZE (perhaps variable)
2420 and return an rtx to address the beginning of the block.
2421 Note that it is not possible for the value returned to be a QUEUED.
2422 The value may be virtual_outgoing_args_rtx.
2424 EXTRA is the number of bytes of padding to push in addition to SIZE.
2425 BELOW nonzero means this padding comes at low addresses;
2426 otherwise, the padding comes at high addresses. */
2428 rtx
2429 push_block (size, extra, below)
2430 rtx size;
2431 int extra, below;
2433 register rtx temp;
2435 size = convert_modes (Pmode, ptr_mode, size, 1);
2436 if (CONSTANT_P (size))
2437 anti_adjust_stack (plus_constant (size, extra));
2438 else if (GET_CODE (size) == REG && extra == 0)
2439 anti_adjust_stack (size);
2440 else
2442 rtx temp = copy_to_mode_reg (Pmode, size);
2443 if (extra != 0)
2444 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2445 temp, 0, OPTAB_LIB_WIDEN);
2446 anti_adjust_stack (temp);
2449 #ifdef STACK_GROWS_DOWNWARD
2450 temp = virtual_outgoing_args_rtx;
2451 if (extra != 0 && below)
2452 temp = plus_constant (temp, extra);
2453 #else
2454 if (GET_CODE (size) == CONST_INT)
2455 temp = plus_constant (virtual_outgoing_args_rtx,
2456 - INTVAL (size) - (below ? 0 : extra));
2457 else if (extra != 0 && !below)
2458 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2459 negate_rtx (Pmode, plus_constant (size, extra)));
2460 else
2461 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2462 negate_rtx (Pmode, size));
2463 #endif
2465 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
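/* Example (illustrative), assuming a downward-growing stack:
     push_block (GEN_INT (24), 8, 1)
   adjusts the stack pointer by 24 + 8 = 32 bytes and, since BELOW is
   nonzero, returns virtual_outgoing_args_rtx + 8 as the address of the
   24-byte block, leaving the 8 bytes of padding below it.  */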
2468 rtx
2469 gen_push_operand ()
2471 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2474 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2475 block of SIZE bytes. */
2477 static rtx
2478 get_push_address (size)
2479 int size;
2481 register rtx temp;
2483 if (STACK_PUSH_CODE == POST_DEC)
2484 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2485 else if (STACK_PUSH_CODE == POST_INC)
2486 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2487 else
2488 temp = stack_pointer_rtx;
2490 return copy_to_reg (temp);
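/* Illustration: with STACK_PUSH_CODE == POST_DEC the stack pointer has
   already moved past the data once SIZE bytes are pushed, so the block
   begins at sp + SIZE; POST_INC is the mirror image, and with the
   pre-modify codes the stack pointer itself addresses the block.  */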
2493 /* Generate code to push X onto the stack, assuming it has mode MODE and
2494 type TYPE.
2495 MODE is redundant except when X is a CONST_INT (since they don't
2496 carry mode info).
2497 SIZE is an rtx for the size of data to be copied (in bytes),
2498 needed only if X is BLKmode.
2500 ALIGN (in bytes) is the maximum alignment we can assume.
2502 If PARTIAL and REG are both nonzero, then copy that many of the first
2503 words of X into registers starting with REG, and push the rest of X.
2504 The amount of space pushed is decreased by PARTIAL words,
2505 rounded *down* to a multiple of PARM_BOUNDARY.
2506 REG must be a hard register in this case.
2507 If REG is zero but PARTIAL is not, take all the other actions for an
2508 argument partially in registers, but do not actually load any
2509 registers.
2511 EXTRA is the amount in bytes of extra space to leave next to this arg.
2512 This is ignored if an argument block has already been allocated.
2514 On a machine that lacks real push insns, ARGS_ADDR is the address of
2515 the bottom of the argument block for this call. We use indexing off there
2516 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2517 argument block has not been preallocated.
2519 ARGS_SO_FAR is the size of args previously pushed for this call.
2521 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2522 for arguments passed in registers. If nonzero, it will be the number
2523 of bytes required. */
2525 void
2526 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2527 args_addr, args_so_far, reg_parm_stack_space)
2528 register rtx x;
2529 enum machine_mode mode;
2530 tree type;
2531 rtx size;
2532 int align;
2533 int partial;
2534 rtx reg;
2535 int extra;
2536 rtx args_addr;
2537 rtx args_so_far;
2538 int reg_parm_stack_space;
2540 rtx xinner;
2541 enum direction stack_direction
2542 #ifdef STACK_GROWS_DOWNWARD
2543 = downward;
2544 #else
2545 = upward;
2546 #endif
2548 /* Decide where to pad the argument: `downward' for below,
2549 `upward' for above, or `none' for don't pad it.
2550 Default is below for small data on big-endian machines; else above. */
2551 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2553 /* Invert direction if stack is post-update. */
2554 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2555 if (where_pad != none)
2556 where_pad = (where_pad == downward ? upward : downward);
2558 xinner = x = protect_from_queue (x, 0);
2560 if (mode == BLKmode)
2562 /* Copy a block into the stack, entirely or partially. */
2564 register rtx temp;
2565 int used = partial * UNITS_PER_WORD;
2566 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2567 int skip;
2569 if (size == 0)
2570 abort ();
2572 used -= offset;
2574 /* USED is now the # of bytes we need not copy to the stack
2575 because registers will take care of them. */
2577 if (partial != 0)
2578 xinner = change_address (xinner, BLKmode,
2579 plus_constant (XEXP (xinner, 0), used));
2581 /* If the partial register-part of the arg counts in its stack size,
2582 skip the part of stack space corresponding to the registers.
2583 Otherwise, start copying to the beginning of the stack space,
2584 by setting SKIP to 0. */
2585 skip = (reg_parm_stack_space == 0) ? 0 : used;
2587 #ifdef PUSH_ROUNDING
2588 /* Do it with several push insns if that doesn't take lots of insns
2589 and if there is no difficulty with push insns that skip bytes
2590 on the stack for alignment purposes. */
2591 if (args_addr == 0
2592 && GET_CODE (size) == CONST_INT
2593 && skip == 0
2594 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2595 < MOVE_RATIO)
2596 /* Here we avoid the case of a structure whose weak alignment
2597 forces many pushes of a small amount of data,
2598 and such small pushes do rounding that causes trouble. */
2599 && ((! SLOW_UNALIGNED_ACCESS)
2600 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2601 || PUSH_ROUNDING (align) == align)
2602 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2604 /* Push padding now if padding above and stack grows down,
2605 or if padding below and stack grows up.
2606 But if space already allocated, this has already been done. */
2607 if (extra && args_addr == 0
2608 && where_pad != none && where_pad != stack_direction)
2609 anti_adjust_stack (GEN_INT (extra));
2611 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2612 INTVAL (size) - used, align);
2614 if (current_function_check_memory_usage && ! in_check_memory_usage)
2616 rtx temp;
2618 in_check_memory_usage = 1;
2619 temp = get_push_address (INTVAL (size) - used);
2620 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2621 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2622 temp, ptr_mode,
2623 XEXP (xinner, 0), ptr_mode,
2624 GEN_INT (INTVAL (size) - used),
2625 TYPE_MODE (sizetype));
2626 else
2627 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2628 temp, ptr_mode,
2629 GEN_INT (INTVAL (size) - used),
2630 TYPE_MODE (sizetype),
2631 GEN_INT (MEMORY_USE_RW),
2632 TYPE_MODE (integer_type_node));
2633 in_check_memory_usage = 0;
2636 else
2637 #endif /* PUSH_ROUNDING */
2639 /* Otherwise make space on the stack and copy the data
2640 to the address of that space. */
2642 /* Deduct words put into registers from the size we must copy. */
2643 if (partial != 0)
2645 if (GET_CODE (size) == CONST_INT)
2646 size = GEN_INT (INTVAL (size) - used);
2647 else
2648 size = expand_binop (GET_MODE (size), sub_optab, size,
2649 GEN_INT (used), NULL_RTX, 0,
2650 OPTAB_LIB_WIDEN);
2653 /* Get the address of the stack space.
2654 In this case, we do not deal with EXTRA separately.
2655 A single stack adjust will do. */
2656 if (! args_addr)
2658 temp = push_block (size, extra, where_pad == downward);
2659 extra = 0;
2661 else if (GET_CODE (args_so_far) == CONST_INT)
2662 temp = memory_address (BLKmode,
2663 plus_constant (args_addr,
2664 skip + INTVAL (args_so_far)));
2665 else
2666 temp = memory_address (BLKmode,
2667 plus_constant (gen_rtx_PLUS (Pmode,
2668 args_addr,
2669 args_so_far),
2670 skip));
2671 if (current_function_check_memory_usage && ! in_check_memory_usage)
2673 rtx target;
2675 in_check_memory_usage = 1;
2676 target = copy_to_reg (temp);
2677 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2678 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2679 target, ptr_mode,
2680 XEXP (xinner, 0), ptr_mode,
2681 size, TYPE_MODE (sizetype));
2682 else
2683 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2684 target, ptr_mode,
2685 size, TYPE_MODE (sizetype),
2686 GEN_INT (MEMORY_USE_RW),
2687 TYPE_MODE (integer_type_node));
2688 in_check_memory_usage = 0;
2691 /* TEMP is the address of the block. Copy the data there. */
2692 if (GET_CODE (size) == CONST_INT
2693 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2694 < MOVE_RATIO))
2696 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
2697 INTVAL (size), align);
2698 goto ret;
2700 else
2702 rtx opalign = GEN_INT (align);
2703 enum machine_mode mode;
2704 rtx target = gen_rtx_MEM (BLKmode, temp);
2706 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2707 mode != VOIDmode;
2708 mode = GET_MODE_WIDER_MODE (mode))
2710 enum insn_code code = movstr_optab[(int) mode];
2712 if (code != CODE_FOR_nothing
2713 && ((GET_CODE (size) == CONST_INT
2714 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2715 <= (GET_MODE_MASK (mode) >> 1)))
2716 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2717 && (insn_operand_predicate[(int) code][0] == 0
2718 || ((*insn_operand_predicate[(int) code][0])
2719 (target, BLKmode)))
2720 && (insn_operand_predicate[(int) code][1] == 0
2721 || ((*insn_operand_predicate[(int) code][1])
2722 (xinner, BLKmode)))
2723 && (insn_operand_predicate[(int) code][3] == 0
2724 || ((*insn_operand_predicate[(int) code][3])
2725 (opalign, VOIDmode))))
2727 rtx op2 = convert_to_mode (mode, size, 1);
2728 rtx last = get_last_insn ();
2729 rtx pat;
2731 if (insn_operand_predicate[(int) code][2] != 0
2732 && ! ((*insn_operand_predicate[(int) code][2])
2733 (op2, mode)))
2734 op2 = copy_to_mode_reg (mode, op2);
2736 pat = GEN_FCN ((int) code) (target, xinner,
2737 op2, opalign);
2738 if (pat)
2740 emit_insn (pat);
2741 goto ret;
2743 else
2744 delete_insns_since (last);
2749 #ifndef ACCUMULATE_OUTGOING_ARGS
2750 /* If the source is referenced relative to the stack pointer,
2751 copy it to another register to stabilize it. We do not need
2752 to do this if we know that we won't be changing sp. */
2754 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2755 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2756 temp = copy_to_reg (temp);
2757 #endif
2759 /* Make inhibit_defer_pop nonzero around the library call
2760 to force it to pop the bcopy-arguments right away. */
2761 NO_DEFER_POP;
2762 #ifdef TARGET_MEM_FUNCTIONS
2763 emit_library_call (memcpy_libfunc, 0,
2764 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2765 convert_to_mode (TYPE_MODE (sizetype),
2766 size, TREE_UNSIGNED (sizetype)),
2767 TYPE_MODE (sizetype));
2768 #else
2769 emit_library_call (bcopy_libfunc, 0,
2770 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2771 convert_to_mode (TYPE_MODE (integer_type_node),
2772 size,
2773 TREE_UNSIGNED (integer_type_node)),
2774 TYPE_MODE (integer_type_node));
2775 #endif
2776 OK_DEFER_POP;
2779 else if (partial > 0)
2781 /* Scalar partly in registers. */
2783 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2784 int i;
2785 int not_stack;
2786 /* # words of start of argument
2787 that we must make space for but need not store. */
2788 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2789 int args_offset = INTVAL (args_so_far);
2790 int skip;
2792 /* Push padding now if padding above and stack grows down,
2793 or if padding below and stack grows up.
2794 But if space already allocated, this has already been done. */
2795 if (extra && args_addr == 0
2796 && where_pad != none && where_pad != stack_direction)
2797 anti_adjust_stack (GEN_INT (extra));
2799 /* If we make space by pushing it, we might as well push
2800 the real data. Otherwise, we can leave OFFSET nonzero
2801 and leave the space uninitialized. */
2802 if (args_addr == 0)
2803 offset = 0;
2805 /* Now NOT_STACK gets the number of words that we don't need to
2806 allocate on the stack. */
2807 not_stack = partial - offset;
2809 /* If the partial register-part of the arg counts in its stack size,
2810 skip the part of stack space corresponding to the registers.
2811 Otherwise, start copying to the beginning of the stack space,
2812 by setting SKIP to 0. */
2813 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
2815 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2816 x = validize_mem (force_const_mem (mode, x));
2818 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2819 SUBREGs of such registers are not allowed. */
2820 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2821 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2822 x = copy_to_reg (x);
2824 /* Loop over all the words allocated on the stack for this arg. */
2825 /* We can do it by words, because any scalar bigger than a word
2826 has a size a multiple of a word. */
2827 #ifndef PUSH_ARGS_REVERSED
2828 for (i = not_stack; i < size; i++)
2829 #else
2830 for (i = size - 1; i >= not_stack; i--)
2831 #endif
2832 if (i >= not_stack + offset)
2833 emit_push_insn (operand_subword_force (x, i, mode),
2834 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2835 0, args_addr,
2836 GEN_INT (args_offset + ((i - not_stack + skip)
2837 * UNITS_PER_WORD)),
2838 reg_parm_stack_space);
2840 else
2842 rtx addr;
2843 rtx target = NULL_RTX;
2845 /* Push padding now if padding above and stack grows down,
2846 or if padding below and stack grows up.
2847 But if space already allocated, this has already been done. */
2848 if (extra && args_addr == 0
2849 && where_pad != none && where_pad != stack_direction)
2850 anti_adjust_stack (GEN_INT (extra));
2852 #ifdef PUSH_ROUNDING
2853 if (args_addr == 0)
2854 addr = gen_push_operand ();
2855 else
2856 #endif
2858 if (GET_CODE (args_so_far) == CONST_INT)
2859 addr
2860 = memory_address (mode,
2861 plus_constant (args_addr,
2862 INTVAL (args_so_far)));
2863 else
2864 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
2865 args_so_far));
2866 target = addr;
2869 emit_move_insn (gen_rtx_MEM (mode, addr), x);
2871 if (current_function_check_memory_usage && ! in_check_memory_usage)
2873 in_check_memory_usage = 1;
2874 if (target == 0)
2875 target = get_push_address (GET_MODE_SIZE (mode));
2877 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2878 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2879 target, ptr_mode,
2880 XEXP (x, 0), ptr_mode,
2881 GEN_INT (GET_MODE_SIZE (mode)),
2882 TYPE_MODE (sizetype));
2883 else
2884 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2885 target, ptr_mode,
2886 GEN_INT (GET_MODE_SIZE (mode)),
2887 TYPE_MODE (sizetype),
2888 GEN_INT (MEMORY_USE_RW),
2889 TYPE_MODE (integer_type_node));
2890 in_check_memory_usage = 0;
2894 ret:
2895 /* If part should go in registers, copy that part
2896 into the appropriate registers. Do this now, at the end,
2897 since mem-to-mem copies above may do function calls. */
2898 if (partial > 0 && reg != 0)
2900 /* Handle calls that pass values in multiple non-contiguous locations.
2901 The Irix 6 ABI has examples of this. */
2902 if (GET_CODE (reg) == PARALLEL)
2903 emit_group_load (reg, x);
2904 else
2905 move_block_to_reg (REGNO (reg), x, partial, mode);
2908 if (extra && args_addr == 0 && where_pad == stack_direction)
2909 anti_adjust_stack (GEN_INT (extra));
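/* A minimal usage sketch (illustrative): pushing an SImode value VAL
   as an argument on a machine with push insns, with no partial-register
   part, no padding, and no preallocated argument block:
     emit_push_insn (val, SImode, integer_type_node, NULL_RTX,
                     GET_MODE_ALIGNMENT (SImode) / BITS_PER_UNIT,
                     0, NULL_RTX, 0, NULL_RTX, const0_rtx, 0);
   Real callers (expand_call and friends) compute most of these
   parameters from the target's argument-passing macros.  */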
2912 /* Expand an assignment that stores the value of FROM into TO.
2913 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2914 (This may contain a QUEUED rtx;
2915 if the value is constant, this rtx is a constant.)
2916 Otherwise, the returned value is NULL_RTX.
2918 SUGGEST_REG is no longer actually used.
2919 It used to mean, copy the value through a register
2920 and return that register, if that is possible.
2921 We now use WANT_VALUE to decide whether to do this. */
2923 rtx
2924 expand_assignment (to, from, want_value, suggest_reg)
2925 tree to, from;
2926 int want_value;
2927 int suggest_reg;
2929 register rtx to_rtx = 0;
2930 rtx result;
2932 /* Don't crash if the lhs of the assignment was erroneous. */
2934 if (TREE_CODE (to) == ERROR_MARK)
2936 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2937 return want_value ? result : NULL_RTX;
2940 if (output_bytecode)
2942 tree dest_innermost;
2944 bc_expand_expr (from);
2945 bc_emit_instruction (duplicate);
2947 dest_innermost = bc_expand_address (to);
2949 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2950 take care of it here. */
2952 bc_store_memory (TREE_TYPE (to), dest_innermost);
2953 return NULL;
2956 /* Assignment of a structure component needs special treatment
2957 if the structure component's rtx is not simply a MEM.
2958 Assignment of an array element at a constant index, and assignment of
2959 an array element in an unaligned packed structure field, have the same
2960 problem. */
2962 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
2963 || TREE_CODE (to) == ARRAY_REF)
2965 enum machine_mode mode1;
2966 int bitsize;
2967 int bitpos;
2968 tree offset;
2969 int unsignedp;
2970 int volatilep = 0;
2971 tree tem;
2972 int alignment;
2974 push_temp_slots ();
2975 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
2976 &unsignedp, &volatilep, &alignment);
2978 /* If we are going to use store_bit_field and extract_bit_field,
2979 make sure to_rtx will be safe for multiple use. */
2981 if (mode1 == VOIDmode && want_value)
2982 tem = stabilize_reference (tem);
2984 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
2985 if (offset != 0)
2987 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2989 if (GET_CODE (to_rtx) != MEM)
2990 abort ();
2991 to_rtx = change_address (to_rtx, VOIDmode,
2992 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
2993 force_reg (ptr_mode,
2994 offset_rtx)));
2997 if (volatilep)
2999 if (GET_CODE (to_rtx) == MEM)
3001 /* When the offset is zero, to_rtx is the address of the
3002 structure we are storing into, and hence may be shared.
3003 We must make a new MEM before setting the volatile bit. */
3004 if (offset == 0)
3005 to_rtx = copy_rtx (to_rtx);
3007 MEM_VOLATILE_P (to_rtx) = 1;
3009 #if 0 /* This was turned off because, when a field is volatile
3010 in an object which is not volatile, the object may be in a register,
3011 and then we would abort over here. */
3012 else
3013 abort ();
3014 #endif
3017 if (TREE_CODE (to) == COMPONENT_REF
3018 && TREE_READONLY (TREE_OPERAND (to, 1)))
3020 if (offset == 0)
3021 to_rtx = copy_rtx (to_rtx);
3023 RTX_UNCHANGING_P (to_rtx) = 1;
3026 /* Check the access. */
3027 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3029 rtx to_addr;
3030 int size;
3031 int best_mode_size;
3032 enum machine_mode best_mode;
3034 best_mode = get_best_mode (bitsize, bitpos,
3035 TYPE_ALIGN (TREE_TYPE (tem)),
3036 mode1, volatilep);
3037 if (best_mode == VOIDmode)
3038 best_mode = QImode;
3040 best_mode_size = GET_MODE_BITSIZE (best_mode);
3041 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3042 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3043 size *= GET_MODE_SIZE (best_mode);
3045 /* Check the access right of the pointer. */
3046 if (size)
3047 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3048 to_addr, ptr_mode,
3049 GEN_INT (size), TYPE_MODE (sizetype),
3050 GEN_INT (MEMORY_USE_WO),
3051 TYPE_MODE (integer_type_node));
3054 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3055 (want_value
3056 /* Spurious cast makes HPUX compiler happy. */
3057 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3058 : VOIDmode),
3059 unsignedp,
3060 /* Required alignment of containing datum. */
3061 alignment,
3062 int_size_in_bytes (TREE_TYPE (tem)));
3063 preserve_temp_slots (result);
3064 free_temp_slots ();
3065 pop_temp_slots ();
3067 /* If the value is meaningful, convert RESULT to the proper mode.
3068 Otherwise, return nothing. */
3069 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3070 TYPE_MODE (TREE_TYPE (from)),
3071 result,
3072 TREE_UNSIGNED (TREE_TYPE (to)))
3073 : NULL_RTX);
3076 /* If the rhs is a function call and its value is not an aggregate,
3077 call the function before we start to compute the lhs.
3078 This is needed for correct code for cases such as
3079 val = setjmp (buf) on machines where reference to val
3080 requires loading up part of an address in a separate insn.
3082 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3083 a promoted variable where the zero- or sign-extension needs to be done.
3084 Handling this in the normal way is safe because no computation is done
3085 before the call. */
3086 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3087 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3088 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3090 rtx value;
3092 push_temp_slots ();
3093 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3094 if (to_rtx == 0)
3095 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3097 /* Handle calls that return values in multiple non-contiguous locations.
3098 The Irix 6 ABI has examples of this. */
3099 if (GET_CODE (to_rtx) == PARALLEL)
3100 emit_group_load (to_rtx, value);
3101 else if (GET_MODE (to_rtx) == BLKmode)
3102 emit_block_move (to_rtx, value, expr_size (from),
3103 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3104 else
3105 emit_move_insn (to_rtx, value);
3106 preserve_temp_slots (to_rtx);
3107 free_temp_slots ();
3108 pop_temp_slots ();
3109 return want_value ? to_rtx : NULL_RTX;
3112 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3113 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3115 if (to_rtx == 0)
3116 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3118 /* Don't move directly into a return register. */
3119 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3121 rtx temp;
3123 push_temp_slots ();
3124 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3125 emit_move_insn (to_rtx, temp);
3126 preserve_temp_slots (to_rtx);
3127 free_temp_slots ();
3128 pop_temp_slots ();
3129 return want_value ? to_rtx : NULL_RTX;
3132 /* In case we are returning the contents of an object which overlaps
3133 the place the value is being stored, use a safe function when copying
3134 a value through a pointer into a structure value return block. */
3135 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3136 && current_function_returns_struct
3137 && !current_function_returns_pcc_struct)
3139 rtx from_rtx, size;
3141 push_temp_slots ();
3142 size = expr_size (from);
3143 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3144 EXPAND_MEMORY_USE_DONT);
3146 /* Copy the rights of the bitmap. */
3147 if (current_function_check_memory_usage)
3148 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3149 XEXP (to_rtx, 0), ptr_mode,
3150 XEXP (from_rtx, 0), ptr_mode,
3151 convert_to_mode (TYPE_MODE (sizetype),
3152 size, TREE_UNSIGNED (sizetype)),
3153 TYPE_MODE (sizetype));
3155 #ifdef TARGET_MEM_FUNCTIONS
3156 emit_library_call (memcpy_libfunc, 0,
3157 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3158 XEXP (from_rtx, 0), Pmode,
3159 convert_to_mode (TYPE_MODE (sizetype),
3160 size, TREE_UNSIGNED (sizetype)),
3161 TYPE_MODE (sizetype));
3162 #else
3163 emit_library_call (bcopy_libfunc, 0,
3164 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3165 XEXP (to_rtx, 0), Pmode,
3166 convert_to_mode (TYPE_MODE (integer_type_node),
3167 size, TREE_UNSIGNED (integer_type_node)),
3168 TYPE_MODE (integer_type_node));
3169 #endif
3171 preserve_temp_slots (to_rtx);
3172 free_temp_slots ();
3173 pop_temp_slots ();
3174 return want_value ? to_rtx : NULL_RTX;
3177 /* Compute FROM and store the value in the rtx we got. */
3179 push_temp_slots ();
3180 result = store_expr (from, to_rtx, want_value);
3181 preserve_temp_slots (result);
3182 free_temp_slots ();
3183 pop_temp_slots ();
3184 return want_value ? result : NULL_RTX;
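/* Usage sketch (illustrative): a front end expanding the statement
   `x = y + 1;' would do roughly
     expand_assignment (x_tree, build (PLUS_EXPR, type, y_tree,
                                       integer_one_node),
                        0, 0);
   passing WANT_VALUE as nonzero instead when the assignment itself is
   used as a value, as in `z = (x = y + 1);'.  X_TREE, Y_TREE and TYPE
   are placeholders for the front end's trees.  */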
3187 /* Generate code for computing expression EXP,
3188 and storing the value into TARGET.
3189 TARGET may contain a QUEUED rtx.
3191 If WANT_VALUE is nonzero, return a copy of the value
3192 not in TARGET, so that we can be sure to use the proper
3193 value in a containing expression even if TARGET has something
3194 else stored in it. If possible, we copy the value through a pseudo
3195 and return that pseudo. Or, if the value is constant, we try to
3196 return the constant. In some cases, we return a pseudo
3197 copied *from* TARGET.
3199 If the mode is BLKmode then we may return TARGET itself.
3200 It turns out that in BLKmode it doesn't cause a problem,
3201 because C has no operators that could combine two different
3202 assignments into the same BLKmode object with different values
3203 with no sequence point. Will other languages need this to
3204 be more thorough?
3206 If WANT_VALUE is 0, we return NULL, to make sure
3207 to catch quickly any cases where the caller uses the value
3208 and fails to set WANT_VALUE. */
3210 rtx
3211 store_expr (exp, target, want_value)
3212 register tree exp;
3213 register rtx target;
3214 int want_value;
3216 register rtx temp;
3217 int dont_return_target = 0;
3219 if (TREE_CODE (exp) == COMPOUND_EXPR)
3221 /* Perform first part of compound expression, then assign from second
3222 part. */
3223 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3224 emit_queue ();
3225 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3227 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3229 /* For a conditional expression, get a safe form of the target. Then
3230 test the condition, doing the appropriate assignment on either
3231 side. This avoids the creation of unnecessary temporaries.
3232 For non-BLKmode, it is more efficient not to do this. */
3234 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3236 emit_queue ();
3237 target = protect_from_queue (target, 1);
3239 do_pending_stack_adjust ();
3240 NO_DEFER_POP;
3241 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3242 start_cleanup_deferral ();
3243 store_expr (TREE_OPERAND (exp, 1), target, 0);
3244 end_cleanup_deferral ();
3245 emit_queue ();
3246 emit_jump_insn (gen_jump (lab2));
3247 emit_barrier ();
3248 emit_label (lab1);
3249 start_cleanup_deferral ();
3250 store_expr (TREE_OPERAND (exp, 2), target, 0);
3251 end_cleanup_deferral ();
3252 emit_queue ();
3253 emit_label (lab2);
3254 OK_DEFER_POP;
3256 return want_value ? target : NULL_RTX;
3258 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3259 && GET_MODE (target) != BLKmode)
3260 /* If target is in memory and caller wants value in a register instead,
3261 arrange that. Pass TARGET as target for expand_expr so that,
3262 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3263 We know expand_expr will not use the target in that case.
3264 Don't do this if TARGET is volatile because we are supposed
3265 to write it and then read it. */
3267 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3268 GET_MODE (target), 0);
3269 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3270 temp = copy_to_reg (temp);
3271 dont_return_target = 1;
3273 else if (queued_subexp_p (target))
3274 /* If target contains a postincrement, let's not risk
3275 using it as the place to generate the rhs. */
3277 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3279 /* Expand EXP into a new pseudo. */
3280 temp = gen_reg_rtx (GET_MODE (target));
3281 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3283 else
3284 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3286 /* If target is volatile, ANSI requires accessing the value
3287 *from* the target, if it is accessed. So make that happen.
3288 In no case return the target itself. */
3289 if (! MEM_VOLATILE_P (target) && want_value)
3290 dont_return_target = 1;
3292 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3293 /* If this is a scalar in a register that is stored in a wider mode
3294 than the declared mode, compute the result into its declared mode
3295 and then convert to the wider mode. Our value is the computed
3296 expression. */
3298 /* If we don't want a value, we can do the conversion inside EXP,
3299 which will often result in some optimizations. Do the conversion
3300 in two steps: first change the signedness, if needed, then
3301 the extension. But don't do this if the type of EXP is a subtype
3302 of something else since then the conversion might involve
3303 more than just converting modes. */
3304 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3305 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3307 if (TREE_UNSIGNED (TREE_TYPE (exp))
3308 != SUBREG_PROMOTED_UNSIGNED_P (target))
3309 exp
3310 = convert
3311 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3312 TREE_TYPE (exp)),
3313 exp);
3315 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3316 SUBREG_PROMOTED_UNSIGNED_P (target)),
3317 exp);
3320 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3322 /* If TEMP is a volatile MEM and we want a result value, make
3323 the access now so it gets done only once. Likewise if
3324 it contains TARGET. */
3325 if (GET_CODE (temp) == MEM && want_value
3326 && (MEM_VOLATILE_P (temp)
3327 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3328 temp = copy_to_reg (temp);
3330 /* If TEMP is a VOIDmode constant, use convert_modes to make
3331 sure that we properly convert it. */
3332 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3333 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3334 TYPE_MODE (TREE_TYPE (exp)), temp,
3335 SUBREG_PROMOTED_UNSIGNED_P (target));
3337 convert_move (SUBREG_REG (target), temp,
3338 SUBREG_PROMOTED_UNSIGNED_P (target));
3339 return want_value ? temp : NULL_RTX;
3341 else
3343 temp = expand_expr (exp, target, GET_MODE (target), 0);
3344 /* Return TARGET if it's a specified hardware register.
3345 If TARGET is a volatile mem ref, either return TARGET
3346 or return a reg copied *from* TARGET; ANSI requires this.
3348 Otherwise, if TEMP is not TARGET, return TEMP
3349 if it is constant (for efficiency),
3350 or if we really want the correct value. */
3351 if (!(target && GET_CODE (target) == REG
3352 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3353 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3354 && ! rtx_equal_p (temp, target)
3355 && (CONSTANT_P (temp) || want_value))
3356 dont_return_target = 1;
3359 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3360 the same as that of TARGET, adjust the constant. This is needed, for
3361 example, in case it is a CONST_DOUBLE and we want only a word-sized
3362 value. */
3363 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3364 && TREE_CODE (exp) != ERROR_MARK
3365 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3366 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3367 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3369 if (current_function_check_memory_usage
3370 && GET_CODE (target) == MEM
3371 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3373 if (GET_CODE (temp) == MEM)
3374 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3375 XEXP (target, 0), ptr_mode,
3376 XEXP (temp, 0), ptr_mode,
3377 expr_size (exp), TYPE_MODE (sizetype));
3378 else
3379 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3380 XEXP (target, 0), ptr_mode,
3381 expr_size (exp), TYPE_MODE (sizetype),
3382 GEN_INT (MEMORY_USE_WO),
3383 TYPE_MODE (integer_type_node));
3386 /* If value was not generated in the target, store it there.
3387 Convert the value to TARGET's type first if necessary. */
3389 if (! rtx_equal_p (temp, target) && TREE_CODE (exp) != ERROR_MARK)
3391 target = protect_from_queue (target, 1);
3392 if (GET_MODE (temp) != GET_MODE (target)
3393 && GET_MODE (temp) != VOIDmode)
3395 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3396 if (dont_return_target)
3398 /* In this case, we will return TEMP,
3399 so make sure it has the proper mode.
3400 But don't forget to store the value into TARGET. */
3401 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3402 emit_move_insn (target, temp);
3404 else
3405 convert_move (target, temp, unsignedp);
3408 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3410 /* Handle copying a string constant into an array.
3411 The string constant may be shorter than the array.
3412 So copy just the string's actual length, and clear the rest. */
3413 rtx size;
3414 rtx addr;
3416 /* Get the size of the data type of the string,
3417 which is actually the size of the target. */
3418 size = expr_size (exp);
3419 if (GET_CODE (size) == CONST_INT
3420 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3421 emit_block_move (target, temp, size,
3422 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3423 else
3425 /* Compute the size of the data to copy from the string. */
3426 tree copy_size
3427 = size_binop (MIN_EXPR,
3428 make_tree (sizetype, size),
3429 convert (sizetype,
3430 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3431 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3432 VOIDmode, 0);
3433 rtx label = 0;
3435 /* Copy that much. */
3436 emit_block_move (target, temp, copy_size_rtx,
3437 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3439 /* Figure out how much is left in TARGET that we have to clear.
3440 Do all calculations in ptr_mode. */
3442 addr = XEXP (target, 0);
3443 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3445 if (GET_CODE (copy_size_rtx) == CONST_INT)
3447 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3448 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3450 else
3452 addr = force_reg (ptr_mode, addr);
3453 addr = expand_binop (ptr_mode, add_optab, addr,
3454 copy_size_rtx, NULL_RTX, 0,
3455 OPTAB_LIB_WIDEN);
3457 size = expand_binop (ptr_mode, sub_optab, size,
3458 copy_size_rtx, NULL_RTX, 0,
3459 OPTAB_LIB_WIDEN);
3461 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3462 GET_MODE (size), 0, 0);
3463 label = gen_label_rtx ();
3464 emit_jump_insn (gen_blt (label));
3467 if (size != const0_rtx)
3469 /* Be sure we can write on ADDR. */
3470 if (current_function_check_memory_usage)
3471 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3472 addr, ptr_mode,
3473 size, TYPE_MODE (sizetype),
3474 GEN_INT (MEMORY_USE_WO),
3475 TYPE_MODE (integer_type_node));
3476 #ifdef TARGET_MEM_FUNCTIONS
3477 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3478 addr, ptr_mode,
3479 const0_rtx, TYPE_MODE (integer_type_node),
3480 convert_to_mode (TYPE_MODE (sizetype),
3481 size,
3482 TREE_UNSIGNED (sizetype)),
3483 TYPE_MODE (sizetype));
3484 #else
3485 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3486 addr, ptr_mode,
3487 convert_to_mode (TYPE_MODE (integer_type_node),
3488 size,
3489 TREE_UNSIGNED (integer_type_node)),
3490 TYPE_MODE (integer_type_node));
3491 #endif
3494 if (label)
3495 emit_label (label);
3498 /* Handle calls that return values in multiple non-contiguous locations.
3499 The Irix 6 ABI has examples of this. */
3500 else if (GET_CODE (target) == PARALLEL)
3501 emit_group_load (target, temp);
3502 else if (GET_MODE (temp) == BLKmode)
3503 emit_block_move (target, temp, expr_size (exp),
3504 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3505 else
3506 emit_move_insn (target, temp);
3509 /* If we don't want a value, return NULL_RTX. */
3510 if (! want_value)
3511 return NULL_RTX;
3513 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3514 ??? The latter test doesn't seem to make sense. */
3515 else if (dont_return_target && GET_CODE (temp) != MEM)
3516 return temp;
3518 /* Return TARGET itself if it is a hard register. */
3519 else if (want_value && GET_MODE (target) != BLKmode
3520 && ! (GET_CODE (target) == REG
3521 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3522 return copy_to_reg (target);
3524 else
3525 return target;
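/* Usage sketch (illustrative): expanding an initializer directly into
   a variable's home, discarding the value:
     store_expr (init, DECL_RTL (decl), 0);
   INIT and DECL are placeholders for a front end's trees; passing
   WANT_VALUE == 0 lets store_expr skip producing a usable copy of the
   stored value.  */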
3528 /* Return 1 if EXP just contains zeros. */
3530 static int
3531 is_zeros_p (exp)
3532 tree exp;
3534 tree elt;
3536 switch (TREE_CODE (exp))
3538 case CONVERT_EXPR:
3539 case NOP_EXPR:
3540 case NON_LVALUE_EXPR:
3541 return is_zeros_p (TREE_OPERAND (exp, 0));
3543 case INTEGER_CST:
3544 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3546 case COMPLEX_CST:
3547 return
3548 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3550 case REAL_CST:
3551 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
3553 case CONSTRUCTOR:
3554 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3555 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3556 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3557 if (! is_zeros_p (TREE_VALUE (elt)))
3558 return 0;
3560 return 1;
3562 default:
3563 return 0;
3567 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
3569 static int
3570 mostly_zeros_p (exp)
3571 tree exp;
3573 if (TREE_CODE (exp) == CONSTRUCTOR)
3575 int elts = 0, zeros = 0;
3576 tree elt = CONSTRUCTOR_ELTS (exp);
3577 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3579 /* If there are no ranges of true bits, it is all zero. */
3580 return elt == NULL_TREE;
3582 for (; elt; elt = TREE_CHAIN (elt))
3584 /* We do not handle the case where the index is a RANGE_EXPR,
3585 so the statistic will be somewhat inaccurate.
3586 We do make a more accurate count in store_constructor itself,
3587 and since this function is used only for nested array elements,
3588 this should be close enough. */
3589 if (mostly_zeros_p (TREE_VALUE (elt)))
3590 zeros++;
3591 elts++;
3594 return 4 * zeros >= 3 * elts;
3597 return is_zeros_p (exp);
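/* Worked example (illustrative): a constructor with 16 elements of
   which 13 are zero satisfies 4 * 13 >= 3 * 16 (52 >= 48), so
   mostly_zeros_p returns 1; store_constructor will then clear the
   whole object once and store only the three nonzero elements.  */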
3600 /* Helper function for store_constructor.
3601 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3602 TYPE is the type of the CONSTRUCTOR, not the element type.
3603 ALIGN and CLEARED are as for store_constructor.
3605 This provides a recursive shortcut back to store_constructor when it isn't
3606 necessary to go through store_field. This is so that we can pass through
3607 the cleared field to let store_constructor know that we may not have to
3608 clear a substructure if the outer structure has already been cleared. */
3610 static void
3611 store_constructor_field (target, bitsize, bitpos,
3612 mode, exp, type, align, cleared)
3613 rtx target;
3614 int bitsize, bitpos;
3615 enum machine_mode mode;
3616 tree exp, type;
3617 int align;
3618 int cleared;
3620 if (TREE_CODE (exp) == CONSTRUCTOR
3621 && bitpos % BITS_PER_UNIT == 0
3622 /* If we have a non-zero bitpos for a register target, then we just
3623 let store_field do the bitfield handling. This is unlikely to
3624 generate unnecessary clear instructions anyway. */
3625 && (bitpos == 0 || GET_CODE (target) == MEM))
3627 if (bitpos != 0)
3628 target = change_address (target, VOIDmode,
3629 plus_constant (XEXP (target, 0),
3630 bitpos / BITS_PER_UNIT));
3631 store_constructor (exp, target, align, cleared);
3633 else
3634 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0,
3635 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT,
3636 int_size_in_bytes (type));
3639 /* Store the value of constructor EXP into the rtx TARGET.
3640 TARGET is either a REG or a MEM.
3641 ALIGN is the maximum known alignment for TARGET, in bits.
3642 CLEARED is true if TARGET is known to have been zero'd. */
3644 static void
3645 store_constructor (exp, target, align, cleared)
3646 tree exp;
3647 rtx target;
3648 int align, cleared;
3650 tree type = TREE_TYPE (exp);
3652 /* We know our target cannot conflict, since safe_from_p has been called. */
3653 #if 0
3654 /* Don't try copying piece by piece into a hard register
3655 since that is vulnerable to being clobbered by EXP.
3656 Instead, construct in a pseudo register and then copy it all. */
3657 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3659 rtx temp = gen_reg_rtx (GET_MODE (target));
3660 store_constructor (exp, temp, align, 0);
3661 emit_move_insn (target, temp);
3662 return;
3664 #endif
3666 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3667 || TREE_CODE (type) == QUAL_UNION_TYPE)
3669 register tree elt;
3671 /* Inform later passes that the whole union value is dead. */
3672 if (TREE_CODE (type) == UNION_TYPE
3673 || TREE_CODE (type) == QUAL_UNION_TYPE)
3674 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3676 /* If we are building a static constructor into a register,
3677 set the initial value as zero so we can fold the value into
3678 a constant. But if more than one register is involved,
3679 this probably loses. */
3680 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3681 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3683 if (! cleared)
3684 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
3686 cleared = 1;
3689 /* If the constructor has fewer fields than the structure
3690 or if we are initializing the structure to mostly zeros,
3691 clear the whole structure first. */
3692 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3693 != list_length (TYPE_FIELDS (type)))
3694 || mostly_zeros_p (exp))
3696 if (! cleared)
3697 clear_storage (target, expr_size (exp),
3698 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
3700 cleared = 1;
3702 else
3703 /* Inform later passes that the old value is dead. */
3704 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3706 /* Store each element of the constructor into
3707 the corresponding field of TARGET. */
3709 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3711 register tree field = TREE_PURPOSE (elt);
3712 register enum machine_mode mode;
3713 int bitsize;
3714 int bitpos = 0;
3715 int unsignedp;
3716 tree pos, constant = 0, offset = 0;
3717 rtx to_rtx = target;
3719 /* Just ignore missing fields.
3720 We cleared the whole structure, above,
3721 if any fields are missing. */
3722 if (field == 0)
3723 continue;
3725 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3726 continue;
3728 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3729 unsignedp = TREE_UNSIGNED (field);
3730 mode = DECL_MODE (field);
3731 if (DECL_BIT_FIELD (field))
3732 mode = VOIDmode;
3734 pos = DECL_FIELD_BITPOS (field);
3735 if (TREE_CODE (pos) == INTEGER_CST)
3736 constant = pos;
3737 else if (TREE_CODE (pos) == PLUS_EXPR
3738 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3739 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3740 else
3741 offset = pos;
3743 if (constant)
3744 bitpos = TREE_INT_CST_LOW (constant);
3746 if (offset)
3748 rtx offset_rtx;
3750 if (contains_placeholder_p (offset))
3751 offset = build (WITH_RECORD_EXPR, sizetype,
3752 offset, make_tree (TREE_TYPE (exp), target));
3754 offset = size_binop (FLOOR_DIV_EXPR, offset,
3755 size_int (BITS_PER_UNIT));
3757 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3758 if (GET_CODE (to_rtx) != MEM)
3759 abort ();
3761 to_rtx
3762 = change_address (to_rtx, VOIDmode,
3763 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3764 force_reg (ptr_mode,
3765 offset_rtx)));
3768 if (TREE_READONLY (field))
3770 if (GET_CODE (to_rtx) == MEM)
3771 to_rtx = copy_rtx (to_rtx);
3773 RTX_UNCHANGING_P (to_rtx) = 1;
3776 store_constructor_field (to_rtx, bitsize, bitpos, mode,
3777 TREE_VALUE (elt), type,
3778 MIN (align,
3779 DECL_ALIGN (TREE_PURPOSE (elt))),
3780 cleared);
3783 else if (TREE_CODE (type) == ARRAY_TYPE)
3785 register tree elt;
3786 register int i;
3787 int need_to_clear;
3788 tree domain = TYPE_DOMAIN (type);
3789 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3790 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3791 tree elttype = TREE_TYPE (type);
3793 /* If the constructor has fewer elements than the array,
3794 clear the whole array first. Similarly if this is a
3795 static constructor of a non-BLKmode object. */
3796 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3797 need_to_clear = 1;
3798 else
3800 HOST_WIDE_INT count = 0, zero_count = 0;
3801 need_to_clear = 0;
3802 /* This loop is a more accurate version of the loop in
3803 mostly_zeros_p (it handles RANGE_EXPR in an index).
3804 It is also needed to check for missing elements. */
3805 for (elt = CONSTRUCTOR_ELTS (exp);
3806 elt != NULL_TREE;
3807 elt = TREE_CHAIN (elt))
3809 tree index = TREE_PURPOSE (elt);
3810 HOST_WIDE_INT this_node_count;
3811 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3813 tree lo_index = TREE_OPERAND (index, 0);
3814 tree hi_index = TREE_OPERAND (index, 1);
3815 if (TREE_CODE (lo_index) != INTEGER_CST
3816 || TREE_CODE (hi_index) != INTEGER_CST)
3818 need_to_clear = 1;
3819 break;
3821 this_node_count = TREE_INT_CST_LOW (hi_index)
3822 - TREE_INT_CST_LOW (lo_index) + 1;
3824 else
3825 this_node_count = 1;
3826 count += this_node_count;
3827 if (mostly_zeros_p (TREE_VALUE (elt)))
3828 zero_count += this_node_count;
3830 /* Clear the entire array first if there are any missing elements,
3831 or if the incidence of zero elements is >= 75%. */
3832 if (count < maxelt - minelt + 1
3833 || 4 * zero_count >= 3 * count)
3834 need_to_clear = 1;
3836 if (need_to_clear)
3838 if (! cleared)
3839 clear_storage (target, expr_size (exp),
3840 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
3841 cleared = 1;
3843 else
3844 /* Inform later passes that the old value is dead. */
3845 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3847 /* Store each element of the constructor into
3848 the corresponding element of TARGET, determined
3849 by counting the elements. */
3850 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3851 elt;
3852 elt = TREE_CHAIN (elt), i++)
3854 register enum machine_mode mode;
3855 int bitsize;
3856 int bitpos;
3857 int unsignedp;
3858 tree value = TREE_VALUE (elt);
3859 int align = TYPE_ALIGN (TREE_TYPE (value));
3860 tree index = TREE_PURPOSE (elt);
3861 rtx xtarget = target;
3863 if (cleared && is_zeros_p (value))
3864 continue;
3866 mode = TYPE_MODE (elttype);
3867 bitsize = GET_MODE_BITSIZE (mode);
3868 unsignedp = TREE_UNSIGNED (elttype);
3870 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3872 tree lo_index = TREE_OPERAND (index, 0);
3873 tree hi_index = TREE_OPERAND (index, 1);
3874 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3875 struct nesting *loop;
3876 HOST_WIDE_INT lo, hi, count;
3877 tree position;
3879 /* If the range is constant and "small", unroll the loop. */
3880 if (TREE_CODE (lo_index) == INTEGER_CST
3881 && TREE_CODE (hi_index) == INTEGER_CST
3882 && (lo = TREE_INT_CST_LOW (lo_index),
3883 hi = TREE_INT_CST_LOW (hi_index),
3884 count = hi - lo + 1,
3885 (GET_CODE (target) != MEM
3886 || count <= 2
3887 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3888 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3889 <= 40 * 8))))
3891 lo -= minelt; hi -= minelt;
3892 for (; lo <= hi; lo++)
3894 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3895 store_constructor_field (target, bitsize, bitpos, mode,
3896 value, type, align, cleared);
3899 else
3901 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3902 loop_top = gen_label_rtx ();
3903 loop_end = gen_label_rtx ();
3905 unsignedp = TREE_UNSIGNED (domain);
3907 index = build_decl (VAR_DECL, NULL_TREE, domain);
3909 DECL_RTL (index) = index_r
3910 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3911 &unsignedp, 0));
3913 if (TREE_CODE (value) == SAVE_EXPR
3914 && SAVE_EXPR_RTL (value) == 0)
3916 /* Make sure value gets expanded once before the
3917 loop. */
3918 expand_expr (value, const0_rtx, VOIDmode, 0);
3919 emit_queue ();
3921 store_expr (lo_index, index_r, 0);
3922 loop = expand_start_loop (0);
3924 /* Assign value to element index. */
3925 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3926 size_int (BITS_PER_UNIT));
3927 position = size_binop (MULT_EXPR,
3928 size_binop (MINUS_EXPR, index,
3929 TYPE_MIN_VALUE (domain)),
3930 position);
3931 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3932 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
3933 xtarget = change_address (target, mode, addr);
3934 if (TREE_CODE (value) == CONSTRUCTOR)
3935 store_constructor (value, xtarget, align, cleared);
3936 else
3937 store_expr (value, xtarget, 0);
3939 expand_exit_loop_if_false (loop,
3940 build (LT_EXPR, integer_type_node,
3941 index, hi_index));
3943 expand_increment (build (PREINCREMENT_EXPR,
3944 TREE_TYPE (index),
3945 index, integer_one_node), 0, 0);
3946 expand_end_loop ();
3947 emit_label (loop_end);
3949 /* Needed by stupid register allocation, to extend the
3950 lifetime of pseudo-regs used by target past the end
3951 of the loop. */
3952 emit_insn (gen_rtx_USE (GET_MODE (target), target));
3955 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3956 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3958 rtx pos_rtx, addr;
3959 tree position;
3961 if (index == 0)
3962 index = size_int (i);
3964 if (minelt)
3965 index = size_binop (MINUS_EXPR, index,
3966 TYPE_MIN_VALUE (domain));
3967 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3968 size_int (BITS_PER_UNIT));
3969 position = size_binop (MULT_EXPR, index, position);
3970 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3971 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
3972 xtarget = change_address (target, mode, addr);
3973 store_expr (value, xtarget, 0);
3975 else
3977 if (index != 0)
3978 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3979 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3980 else
3981 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3982 store_constructor_field (target, bitsize, bitpos, mode, value,
3983 type, align, cleared);
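/* Editorial sketch, not part of the original source: the byte-offset
arithmetic used just above for a non-constant index, restated with
plain integers. BITS_PER_UNIT is taken to be 8 and all names here
are hypothetical. */
#if 0
static long
toy_element_byte_offset (long index, long min_index, long elt_size_bits)
{
  /* Mirrors: position = (TYPE_SIZE (elttype) / BITS_PER_UNIT)
     * (index - TYPE_MIN_VALUE (domain)), fed to change_address.  */
  return (index - min_index) * (elt_size_bits / 8);
}
#endif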
3987 /* set constructor assignments */
3988 else if (TREE_CODE (type) == SET_TYPE)
3990 tree elt = CONSTRUCTOR_ELTS (exp);
3991 rtx xtarget = XEXP (target, 0);
3992 int set_word_size = TYPE_ALIGN (type);
3993 int nbytes = int_size_in_bytes (type), nbits;
3994 tree domain = TYPE_DOMAIN (type);
3995 tree domain_min, domain_max, bitlength;
3997 /* The default implementation strategy is to extract the constant
3998 parts of the constructor, use that to initialize the target,
3999 and then "or" in whatever non-constant ranges we need in addition.
4001 If a large set is all zero or all ones, it is
4002 probably better to set it using memset (if available) or bzero.
4003 Also, if a large set has just a single range, it may be
4004 better to first clear the whole set (using
4005 bzero/memset), and then set the bits we want. */
4007 /* Check for all zeros. */
4008 if (elt == NULL_TREE)
4010 if (!cleared)
4011 clear_storage (target, expr_size (exp),
4012 TYPE_ALIGN (type) / BITS_PER_UNIT);
4013 return;
4016 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4017 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4018 bitlength = size_binop (PLUS_EXPR,
4019 size_binop (MINUS_EXPR, domain_max, domain_min),
4020 size_one_node);
4022 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4023 abort ();
4024 nbits = TREE_INT_CST_LOW (bitlength);
4026 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4027 are "complicated" (more than one range), initialize (the
4028 constant parts) by copying from a constant. */
4029 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4030 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4032 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4033 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4034 char *bit_buffer = (char *) alloca (nbits);
4035 HOST_WIDE_INT word = 0;
4036 int bit_pos = 0;
4037 int ibit = 0;
4038 int offset = 0; /* In bytes from beginning of set. */
4039 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4040 for (;;)
4042 if (bit_buffer[ibit])
4044 if (BYTES_BIG_ENDIAN)
4045 word |= (1 << (set_word_size - 1 - bit_pos));
4046 else
4047 word |= 1 << bit_pos;
4049 bit_pos++; ibit++;
4050 if (bit_pos >= set_word_size || ibit == nbits)
4052 if (word != 0 || ! cleared)
4054 rtx datum = GEN_INT (word);
4055 rtx to_rtx;
4056 /* The assumption here is that it is safe to use
4057 XEXP if the set is multi-word, but not if
4058 it's single-word. */
4059 if (GET_CODE (target) == MEM)
4061 to_rtx = plus_constant (XEXP (target, 0), offset);
4062 to_rtx = change_address (target, mode, to_rtx);
4064 else if (offset == 0)
4065 to_rtx = target;
4066 else
4067 abort ();
4068 emit_move_insn (to_rtx, datum);
4070 if (ibit == nbits)
4071 break;
4072 word = 0;
4073 bit_pos = 0;
4074 offset += set_word_size / BITS_PER_UNIT;
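/* Editorial note: with set_word_size == 8, set element 0 lands on
mask 0x80 in the BYTES_BIG_ENDIAN branch above
(1 << (set_word_size - 1 - bit_pos)) and on mask 0x01 otherwise
(1 << bit_pos). */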
4078 else if (!cleared)
4080 /* Don't bother clearing storage if the set is all ones. */
4081 if (TREE_CHAIN (elt) != NULL_TREE
4082 || (TREE_PURPOSE (elt) == NULL_TREE
4083 ? nbits != 1
4084 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4085 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4086 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4087 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4088 != nbits))))
4089 clear_storage (target, expr_size (exp),
4090 TYPE_ALIGN (type) / BITS_PER_UNIT);
4093 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4095 /* start of range of element or NULL */
4096 tree startbit = TREE_PURPOSE (elt);
4097 /* end of range of element, or element value */
4098 tree endbit = TREE_VALUE (elt);
4099 HOST_WIDE_INT startb, endb;
4100 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4102 bitlength_rtx = expand_expr (bitlength,
4103 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4105 /* handle non-range tuple element like [ expr ] */
4106 if (startbit == NULL_TREE)
4108 startbit = save_expr (endbit);
4109 endbit = startbit;
4111 startbit = convert (sizetype, startbit);
4112 endbit = convert (sizetype, endbit);
4113 if (! integer_zerop (domain_min))
4115 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4116 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4118 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4119 EXPAND_CONST_ADDRESS);
4120 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4121 EXPAND_CONST_ADDRESS);
4123 if (REG_P (target))
4125 targetx = assign_stack_temp (GET_MODE (target),
4126 GET_MODE_SIZE (GET_MODE (target)),
4127 0);
4128 emit_move_insn (targetx, target);
4130 else if (GET_CODE (target) == MEM)
4131 targetx = target;
4132 else
4133 abort ();
4135 #ifdef TARGET_MEM_FUNCTIONS
4136 /* Optimization: If startbit and endbit are
4137 constants divisible by BITS_PER_UNIT,
4138 call memset instead. */
4139 if (TREE_CODE (startbit) == INTEGER_CST
4140 && TREE_CODE (endbit) == INTEGER_CST
4141 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4142 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4144 emit_library_call (memset_libfunc, 0,
4145 VOIDmode, 3,
4146 plus_constant (XEXP (targetx, 0),
4147 startb / BITS_PER_UNIT),
4148 Pmode,
4149 constm1_rtx, TYPE_MODE (integer_type_node),
4150 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4151 TYPE_MODE (sizetype));
4153 else
4154 #endif
4156 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4157 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4158 bitlength_rtx, TYPE_MODE (sizetype),
4159 startbit_rtx, TYPE_MODE (sizetype),
4160 endbit_rtx, TYPE_MODE (sizetype));
4162 if (REG_P (target))
4163 emit_move_insn (target, targetx);
4167 else
4168 abort ();
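/* Editorial sketch, not part of the original source: the byte-aligned
memset special case above, restated in ordinary C. When the bit
range [startb, endb) of a SET_TYPE constructor falls on byte
boundaries, whole bytes are set at once instead of calling the
bit-level __setbits helper. Names are hypothetical. */
#if 0
#include <string.h>

static void
toy_set_bit_range (unsigned char *set, long startb, long endb)
{
  /* Assumes startb % 8 == 0 && endb % 8 == 0, as the code checks
     against BITS_PER_UNIT.  */
  memset (set + startb / 8, -1, (size_t) ((endb - startb) / 8));
}
#endif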
4171 /* Store the value of EXP (an expression tree)
4172 into a subfield of TARGET which has mode MODE and occupies
4173 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4174 If MODE is VOIDmode, it means that we are storing into a bit-field.
4176 If VALUE_MODE is VOIDmode, return nothing in particular.
4177 UNSIGNEDP is not used in this case.
4179 Otherwise, return an rtx for the value stored. This rtx
4180 has mode VALUE_MODE if that is convenient to do.
4181 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4183 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4184 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
4186 static rtx
4187 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4188 unsignedp, align, total_size)
4189 rtx target;
4190 int bitsize, bitpos;
4191 enum machine_mode mode;
4192 tree exp;
4193 enum machine_mode value_mode;
4194 int unsignedp;
4195 int align;
4196 int total_size;
4198 HOST_WIDE_INT width_mask = 0;
4200 if (TREE_CODE (exp) == ERROR_MARK)
4201 return const0_rtx;
4203 if (bitsize < HOST_BITS_PER_WIDE_INT)
4204 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4206 /* If we are storing into an unaligned field of an aligned union that is
4207 in a register, we may have the mode of TARGET being an integer mode but
4208 MODE == BLKmode. In that case, get an aligned object whose size and
4209 alignment are the same as TARGET and store TARGET into it (we can avoid
4210 the store if the field being stored is the entire width of TARGET). Then
4211 call ourselves recursively to store the field into a BLKmode version of
4212 that object. Finally, load from the object into TARGET. This is not
4213 very efficient in general, but should only be slightly more expensive
4214 than the otherwise-required unaligned accesses. Perhaps this can be
4215 cleaned up later. */
4217 if (mode == BLKmode
4218 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4220 rtx object = assign_stack_temp (GET_MODE (target),
4221 GET_MODE_SIZE (GET_MODE (target)), 0);
4222 rtx blk_object = copy_rtx (object);
4224 MEM_IN_STRUCT_P (object) = 1;
4225 MEM_IN_STRUCT_P (blk_object) = 1;
4226 PUT_MODE (blk_object, BLKmode);
4228 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4229 emit_move_insn (object, target);
4231 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4232 align, total_size);
4234 /* Even though we aren't returning target, we need to
4235 give it the updated value. */
4236 emit_move_insn (target, object);
4238 return blk_object;
4241 /* If the structure is in a register or if the component
4242 is a bit field, we cannot use addressing to access it.
4243 Use bit-field techniques or SUBREG to store in it. */
4245 if (mode == VOIDmode
4246 || (mode != BLKmode && ! direct_store[(int) mode])
4247 || GET_CODE (target) == REG
4248 || GET_CODE (target) == SUBREG
4249 /* If the field isn't aligned enough to store as an ordinary memref,
4250 store it as a bit field. */
4251 || (SLOW_UNALIGNED_ACCESS
4252 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4253 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4255 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4257 /* If BITSIZE is narrower than the size of the type of EXP
4258 we will be narrowing TEMP. Normally, what's wanted are the
4259 low-order bits. However, if EXP's type is a record and this is
4260 a big-endian machine, we want the upper BITSIZE bits.
4261 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4262 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4263 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4264 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4265 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4266 - bitsize),
4267 temp, 1);
4269 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4270 MODE. */
4271 if (mode != VOIDmode && mode != BLKmode
4272 && mode != TYPE_MODE (TREE_TYPE (exp)))
4273 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4275 /* If the modes of TARGET and TEMP are both BLKmode, both
4276 must be in memory and BITPOS must be aligned on a byte
4277 boundary. If so, we simply do a block copy. */
4278 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4280 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4281 || bitpos % BITS_PER_UNIT != 0)
4282 abort ();
4284 target = change_address (target, VOIDmode,
4285 plus_constant (XEXP (target, 0),
4286 bitpos / BITS_PER_UNIT));
4288 emit_block_move (target, temp,
4289 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4290 / BITS_PER_UNIT),
4291 1);
4293 return value_mode == VOIDmode ? const0_rtx : target;
4296 /* Store the value in the bitfield. */
4297 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4298 if (value_mode != VOIDmode)
4300 /* The caller wants an rtx for the value. */
4301 /* If possible, avoid refetching from the bitfield itself. */
4302 if (width_mask != 0
4303 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4305 tree count;
4306 enum machine_mode tmode;
4308 if (unsignedp)
4309 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4310 tmode = GET_MODE (temp);
4311 if (tmode == VOIDmode)
4312 tmode = value_mode;
4313 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4314 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4315 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4317 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4318 NULL_RTX, value_mode, 0, align,
4319 total_size);
4321 return const0_rtx;
4323 else
4325 rtx addr = XEXP (target, 0);
4326 rtx to_rtx;
4328 /* If a value is wanted, it must be the lhs;
4329 so make the address stable for multiple use. */
4331 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4332 && ! CONSTANT_ADDRESS_P (addr)
4333 /* A frame-pointer reference is already stable. */
4334 && ! (GET_CODE (addr) == PLUS
4335 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4336 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4337 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4338 addr = copy_to_reg (addr);
4340 /* Now build a reference to just the desired component. */
4342 to_rtx = copy_rtx (change_address (target, mode,
4343 plus_constant (addr,
4344 (bitpos
4345 / BITS_PER_UNIT))));
4346 MEM_IN_STRUCT_P (to_rtx) = 1;
4348 return store_expr (exp, to_rtx, value_mode != VOIDmode);
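/* Editorial sketch, not part of the original source: the refetch
avoidance above, restated with plain C integers. An unsigned field
is recovered by masking with WIDTH_MASK; a signed field by a
left/right shift pair that sign-extends from bit BITSIZE - 1.
Assumes bitsize is smaller than the width of long. */
#if 0
static long
toy_refetch_bitfield (long temp, int bitsize, int unsignedp)
{
  int nbits = 8 * (int) sizeof (long);

  if (unsignedp)
    return temp & (((long) 1 << bitsize) - 1);
  /* Relies on arithmetic right shift of signed values, as GCC does
     on its host compilers.  */
  return (temp << (nbits - bitsize)) >> (nbits - bitsize);
}
#endif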
4352 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4353 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4354 ARRAY_REFs and find the ultimate containing object, which we return.
4356 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4357 bit position, and *PUNSIGNEDP to the signedness of the field.
4358 If the position of the field is variable, we store a tree
4359 giving the variable offset (in units) in *POFFSET.
4360 This offset is in addition to the bit position.
4361 If the position is not variable, we store 0 in *POFFSET.
4362 We set *PALIGNMENT to the alignment in bytes of the address that will be
4363 computed. This is the alignment of the thing we return if *POFFSET
4364 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4366 If any of the extraction expressions is volatile,
4367 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4369 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4370 is a mode that can be used to access the field. In that case, *PBITSIZE
4371 is redundant.
4373 If the field describes a variable-sized object, *PMODE is set to
4374 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4375 this case, but the address of the object can be found. */
4377 tree
4378 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4379 punsignedp, pvolatilep, palignment)
4380 tree exp;
4381 int *pbitsize;
4382 int *pbitpos;
4383 tree *poffset;
4384 enum machine_mode *pmode;
4385 int *punsignedp;
4386 int *pvolatilep;
4387 int *palignment;
4389 tree orig_exp = exp;
4390 tree size_tree = 0;
4391 enum machine_mode mode = VOIDmode;
4392 tree offset = integer_zero_node;
4393 int alignment = BIGGEST_ALIGNMENT;
4395 if (TREE_CODE (exp) == COMPONENT_REF)
4397 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4398 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4399 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4400 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4402 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4404 size_tree = TREE_OPERAND (exp, 1);
4405 *punsignedp = TREE_UNSIGNED (exp);
4407 else
4409 mode = TYPE_MODE (TREE_TYPE (exp));
4410 if (mode == BLKmode)
4411 size_tree = TYPE_SIZE (TREE_TYPE (exp));
4413 *pbitsize = GET_MODE_BITSIZE (mode);
4414 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4417 if (size_tree)
4419 if (TREE_CODE (size_tree) != INTEGER_CST)
4420 mode = BLKmode, *pbitsize = -1;
4421 else
4422 *pbitsize = TREE_INT_CST_LOW (size_tree);
4425 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4426 and find the ultimate containing object. */
4428 *pbitpos = 0;
4430 while (1)
4432 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4434 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4435 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4436 : TREE_OPERAND (exp, 2));
4437 tree constant = integer_zero_node, var = pos;
4439 /* If this field hasn't been filled in yet, don't go
4440 past it. This should only happen when folding expressions
4441 made during type construction. */
4442 if (pos == 0)
4443 break;
4445 /* Assume here that the offset is a multiple of a unit.
4446 If not, there should be an explicitly added constant. */
4447 if (TREE_CODE (pos) == PLUS_EXPR
4448 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4449 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4450 else if (TREE_CODE (pos) == INTEGER_CST)
4451 constant = pos, var = integer_zero_node;
4453 *pbitpos += TREE_INT_CST_LOW (constant);
4454 offset = size_binop (PLUS_EXPR, offset,
4455 size_binop (EXACT_DIV_EXPR, var,
4456 size_int (BITS_PER_UNIT)));
4459 else if (TREE_CODE (exp) == ARRAY_REF)
4461 /* This code is based on the code in case ARRAY_REF in expand_expr
4462 below. We assume here that the size of an array element is
4463 always an integral multiple of BITS_PER_UNIT. */
4465 tree index = TREE_OPERAND (exp, 1);
4466 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4467 tree low_bound
4468 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4469 tree index_type = TREE_TYPE (index);
4471 if (! integer_zerop (low_bound))
4472 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4474 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4476 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4477 index);
4478 index_type = TREE_TYPE (index);
4481 index = fold (build (MULT_EXPR, index_type, index,
4482 convert (index_type,
4483 TYPE_SIZE (TREE_TYPE (exp)))));
4485 if (TREE_CODE (index) == INTEGER_CST
4486 && TREE_INT_CST_HIGH (index) == 0)
4487 *pbitpos += TREE_INT_CST_LOW (index);
4488 else
4490 if (contains_placeholder_p (index))
4491 index = build (WITH_RECORD_EXPR, sizetype, index, exp);
4493 offset = size_binop (PLUS_EXPR, offset,
4494 size_binop (FLOOR_DIV_EXPR, index,
4495 size_int (BITS_PER_UNIT)));
4498 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4499 && ! ((TREE_CODE (exp) == NOP_EXPR
4500 || TREE_CODE (exp) == CONVERT_EXPR)
4501 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4502 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4503 != UNION_TYPE))
4504 && (TYPE_MODE (TREE_TYPE (exp))
4505 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4506 break;
4508 /* If any reference in the chain is volatile, the effect is volatile. */
4509 if (TREE_THIS_VOLATILE (exp))
4510 *pvolatilep = 1;
4512 /* If the offset is non-constant already, then we can't assume any
4513 alignment more than the alignment here. */
4514 if (! integer_zerop (offset))
4515 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4517 exp = TREE_OPERAND (exp, 0);
4520 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4521 alignment = MIN (alignment, DECL_ALIGN (exp));
4522 else if (TREE_TYPE (exp) != 0)
4523 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4525 if (integer_zerop (offset))
4526 offset = 0;
4528 if (offset != 0 && contains_placeholder_p (offset))
4529 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4531 *pmode = mode;
4532 *poffset = offset;
4533 *palignment = alignment / BITS_PER_UNIT;
4534 return exp;
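/* Editorial sketch, not part of the original source: for purely
constant positions, the *PBITPOS accumulation above corresponds to
offsetof scaled to bits. The structs and names are hypothetical,
and 8 stands in for BITS_PER_UNIT. */
#if 0
#include <stddef.h>

struct toy_inner { char c; short f; };
struct toy_outer { int pad; struct toy_inner in; };

static int
toy_bitpos_of_f (void)
{
  /* Two COMPONENT_REF steps, each adding its field's bit position.  */
  return (int) (offsetof (struct toy_outer, in) * 8
		+ offsetof (struct toy_inner, f) * 8);
}
#endif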
4537 /* Subroutine of expand_expr: compute memory_usage from modifier. */
4538 static enum memory_use_mode
4539 get_memory_usage_from_modifier (modifier)
4540 enum expand_modifier modifier;
4542 switch (modifier)
4544 case EXPAND_NORMAL:
4545 case EXPAND_SUM:
4546 return MEMORY_USE_RO;
4547 break;
4548 case EXPAND_MEMORY_USE_WO:
4549 return MEMORY_USE_WO;
4550 break;
4551 case EXPAND_MEMORY_USE_RW:
4552 return MEMORY_USE_RW;
4553 break;
4554 case EXPAND_MEMORY_USE_DONT:
4555 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
4556 MEMORY_USE_DONT, because they are modifiers to a call of
4557 expand_expr in the ADDR_EXPR case of expand_expr. */
4558 case EXPAND_CONST_ADDRESS:
4559 case EXPAND_INITIALIZER:
4560 return MEMORY_USE_DONT;
4561 case EXPAND_MEMORY_USE_BAD:
4562 default:
4563 abort ();
4567 /* Given an rtx VALUE that may contain additions and multiplications,
4568 return an equivalent value that just refers to a register or memory.
4569 This is done by generating instructions to perform the arithmetic
4570 and returning a pseudo-register containing the value.
4572 The returned value may be a REG, SUBREG, MEM or constant. */
4574 rtx
4575 force_operand (value, target)
4576 rtx value, target;
4578 register optab binoptab = 0;
4579 /* Use a temporary to force order of execution of calls to
4580 `force_operand'. */
4581 rtx tmp;
4582 register rtx op2;
4583 /* Use subtarget as the target for operand 0 of a binary operation. */
4584 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4586 if (GET_CODE (value) == PLUS)
4587 binoptab = add_optab;
4588 else if (GET_CODE (value) == MINUS)
4589 binoptab = sub_optab;
4590 else if (GET_CODE (value) == MULT)
4592 op2 = XEXP (value, 1);
4593 if (!CONSTANT_P (op2)
4594 && !(GET_CODE (op2) == REG && op2 != subtarget))
4595 subtarget = 0;
4596 tmp = force_operand (XEXP (value, 0), subtarget);
4597 return expand_mult (GET_MODE (value), tmp,
4598 force_operand (op2, NULL_RTX),
4599 target, 0);
4602 if (binoptab)
4604 op2 = XEXP (value, 1);
4605 if (!CONSTANT_P (op2)
4606 && !(GET_CODE (op2) == REG && op2 != subtarget))
4607 subtarget = 0;
4608 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4610 binoptab = add_optab;
4611 op2 = negate_rtx (GET_MODE (value), op2);
4614 /* Check for an addition with OP2 a constant integer and our first
4615 operand a PLUS of a virtual register and something else. In that
4616 case, we want to emit the sum of the virtual register and the
4617 constant first and then add the other value. This allows virtual
4618 register instantiation to simply modify the constant rather than
4619 creating another one around this addition. */
4620 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4621 && GET_CODE (XEXP (value, 0)) == PLUS
4622 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4623 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4624 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4626 rtx temp = expand_binop (GET_MODE (value), binoptab,
4627 XEXP (XEXP (value, 0), 0), op2,
4628 subtarget, 0, OPTAB_LIB_WIDEN);
4629 return expand_binop (GET_MODE (value), binoptab, temp,
4630 force_operand (XEXP (XEXP (value, 0), 1), 0),
4631 target, 0, OPTAB_LIB_WIDEN);
4634 tmp = force_operand (XEXP (value, 0), subtarget);
4635 return expand_binop (GET_MODE (value), binoptab, tmp,
4636 force_operand (op2, NULL_RTX),
4637 target, 0, OPTAB_LIB_WIDEN);
4638 /* We give UNSIGNEDP = 0 to expand_binop
4639 because the only operations we are expanding here are signed ones. */
4641 return value;
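/* Editorial sketch, not part of the original source: the recursion
pattern of force_operand on a toy expression tree. Each operand is
reduced to a value first, then the binary operation is performed,
mirroring the expand_binop calls above. All types and names are
hypothetical. */
#if 0
enum toy_code { TOY_LEAF, TOY_PLUS, TOY_MULT };

struct toy_rtx
{
  enum toy_code code;
  long value;			/* valid for TOY_LEAF */
  struct toy_rtx *op0, *op1;	/* valid for TOY_PLUS, TOY_MULT */
};

static long
toy_force_operand (struct toy_rtx *x)
{
  if (x->code == TOY_LEAF)
    return x->value;
  if (x->code == TOY_PLUS)
    return toy_force_operand (x->op0) + toy_force_operand (x->op1);
  return toy_force_operand (x->op0) * toy_force_operand (x->op1);
}
#endif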
4644 /* Subroutine of expand_expr:
4645 save the non-copied parts (LIST) of an expr (LHS), and return a list
4646 which can restore these values to their previous values,
4647 should something modify their storage. */
4649 static tree
4650 save_noncopied_parts (lhs, list)
4651 tree lhs;
4652 tree list;
4654 tree tail;
4655 tree parts = 0;
4657 for (tail = list; tail; tail = TREE_CHAIN (tail))
4658 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4659 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4660 else
4662 tree part = TREE_VALUE (tail);
4663 tree part_type = TREE_TYPE (part);
4664 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
4665 rtx target = assign_temp (part_type, 0, 1, 1);
4666 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
4667 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
4668 parts = tree_cons (to_be_saved,
4669 build (RTL_EXPR, part_type, NULL_TREE,
4670 (tree) target),
4671 parts);
4672 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4674 return parts;
4677 /* Subroutine of expand_expr:
4678 record the non-copied parts (LIST) of an expr (LHS), and return a list
4679 which specifies the initial values of these parts. */
4681 static tree
4682 init_noncopied_parts (lhs, list)
4683 tree lhs;
4684 tree list;
4686 tree tail;
4687 tree parts = 0;
4689 for (tail = list; tail; tail = TREE_CHAIN (tail))
4690 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4691 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4692 else
4694 tree part = TREE_VALUE (tail);
4695 tree part_type = TREE_TYPE (part);
4696 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
4697 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4699 return parts;
4702 /* Subroutine of expand_expr: return nonzero iff there is no way that
4703 EXP can reference X, which is being modified. TOP_P is nonzero if this
4704 call is going to be used to determine whether we need a temporary
4705 for EXP, as opposed to a recursive call to this function. */
4707 static int
4708 safe_from_p (x, exp, top_p)
4709 rtx x;
4710 tree exp;
4711 int top_p;
4713 rtx exp_rtl = 0;
4714 int i, nops;
4716 if (x == 0
4717 /* If EXP has varying size, we MUST use a target since we currently
4718 have no way of allocating temporaries of variable size
4719 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4720 So we assume here that something at a higher level has prevented a
4721 clash. This is somewhat bogus, but the best we can do. Only
4722 do this when X is BLKmode and when we are at the top level. */
4723 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4724 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
4725 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
4726 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
4727 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
4728 != INTEGER_CST)
4729 && GET_MODE (x) == BLKmode))
4730 return 1;
4732 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4733 find the underlying pseudo. */
4734 if (GET_CODE (x) == SUBREG)
4736 x = SUBREG_REG (x);
4737 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4738 return 0;
4741 /* If X is a location in the outgoing argument area, it is always safe. */
4742 if (GET_CODE (x) == MEM
4743 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4744 || (GET_CODE (XEXP (x, 0)) == PLUS
4745 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
4746 return 1;
4748 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4750 case 'd':
4751 exp_rtl = DECL_RTL (exp);
4752 break;
4754 case 'c':
4755 return 1;
4757 case 'x':
4758 if (TREE_CODE (exp) == TREE_LIST)
4759 return ((TREE_VALUE (exp) == 0
4760 || safe_from_p (x, TREE_VALUE (exp), 0))
4761 && (TREE_CHAIN (exp) == 0
4762 || safe_from_p (x, TREE_CHAIN (exp), 0)));
4763 else
4764 return 0;
4766 case '1':
4767 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
4769 case '2':
4770 case '<':
4771 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
4772 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
4774 case 'e':
4775 case 'r':
4776 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4777 the expression. If it is set, we conflict iff we are that rtx or
4778 both are in memory. Otherwise, we check all operands of the
4779 expression recursively. */
4781 switch (TREE_CODE (exp))
4783 case ADDR_EXPR:
4784 return (staticp (TREE_OPERAND (exp, 0))
4785 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
4786 || TREE_STATIC (exp));
4788 case INDIRECT_REF:
4789 if (GET_CODE (x) == MEM)
4790 return 0;
4791 break;
4793 case CALL_EXPR:
4794 exp_rtl = CALL_EXPR_RTL (exp);
4795 if (exp_rtl == 0)
4797 /* Assume that the call will clobber all hard registers and
4798 all of memory. */
4799 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4800 || GET_CODE (x) == MEM)
4801 return 0;
4804 break;
4806 case RTL_EXPR:
4807 /* If a sequence exists, we would have to scan every instruction
4808 in the sequence to see if it was safe. This is probably not
4809 worthwhile. */
4810 if (RTL_EXPR_SEQUENCE (exp))
4811 return 0;
4813 exp_rtl = RTL_EXPR_RTL (exp);
4814 break;
4816 case WITH_CLEANUP_EXPR:
4817 exp_rtl = RTL_EXPR_RTL (exp);
4818 break;
4820 case CLEANUP_POINT_EXPR:
4821 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
4823 case SAVE_EXPR:
4824 exp_rtl = SAVE_EXPR_RTL (exp);
4825 break;
4827 case BIND_EXPR:
4828 /* The only operand we look at is operand 1. The rest aren't
4829 part of the expression. */
4830 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
4832 case METHOD_CALL_EXPR:
4833 /* This takes an rtx argument, but shouldn't appear here. */
4834 abort ();
4836 default:
4837 break;
4840 /* If we have an rtx, we do not need to scan our operands. */
4841 if (exp_rtl)
4842 break;
4844 nops = tree_code_length[(int) TREE_CODE (exp)];
4845 for (i = 0; i < nops; i++)
4846 if (TREE_OPERAND (exp, i) != 0
4847 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
4848 return 0;
4851 /* If we have an rtl, find any enclosed object. Then see if we conflict
4852 with it. */
4853 if (exp_rtl)
4855 if (GET_CODE (exp_rtl) == SUBREG)
4857 exp_rtl = SUBREG_REG (exp_rtl);
4858 if (GET_CODE (exp_rtl) == REG
4859 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
4860 return 0;
4863 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4864 are memory and EXP is not readonly. */
4865 return ! (rtx_equal_p (x, exp_rtl)
4866 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
4867 && ! TREE_READONLY (exp)));
4870 /* If we reach here, it is safe. */
4871 return 1;
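/* Editorial sketch, not part of the original source: the final
conflict rule above on a toy representation. X is unsafe when it
is the very rtx the expression uses, or when both are memory
references and the expression's location may be written. */
#if 0
struct toy_ref { const void *loc; int is_mem; int readonly; };

static int
toy_safe_from_p (struct toy_ref x, struct toy_ref exp_rtl)
{
  return ! (x.loc == exp_rtl.loc
	    || (x.is_mem && exp_rtl.is_mem && ! exp_rtl.readonly));
}
#endif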
4874 /* Subroutine of expand_expr: return nonzero iff EXP is an
4875 expression whose type is statically determinable. */
4877 static int
4878 fixed_type_p (exp)
4879 tree exp;
4881 if (TREE_CODE (exp) == PARM_DECL
4882 || TREE_CODE (exp) == VAR_DECL
4883 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4884 || TREE_CODE (exp) == COMPONENT_REF
4885 || TREE_CODE (exp) == ARRAY_REF)
4886 return 1;
4887 return 0;
4890 /* Subroutine of expand_expr: return rtx if EXP is a
4891 variable or parameter; else return 0. */
4893 static rtx
4894 var_rtx (exp)
4895 tree exp;
4897 STRIP_NOPS (exp);
4898 switch (TREE_CODE (exp))
4900 case PARM_DECL:
4901 case VAR_DECL:
4902 return DECL_RTL (exp);
4903 default:
4904 return 0;
4908 /* expand_expr: generate code for computing expression EXP.
4909 An rtx for the computed value is returned. The value is never null.
4910 In the case of a void EXP, const0_rtx is returned.
4912 The value may be stored in TARGET if TARGET is nonzero.
4913 TARGET is just a suggestion; callers must assume that
4914 the rtx returned may not be the same as TARGET.
4916 If TARGET is CONST0_RTX, it means that the value will be ignored.
4918 If TMODE is not VOIDmode, it suggests generating the
4919 result in mode TMODE. But this is done only when convenient.
4920 Otherwise, TMODE is ignored and the value is generated in its natural mode.
4921 TMODE is just a suggestion; callers must assume that
4922 the rtx returned may not have mode TMODE.
4924 Note that TARGET may have neither TMODE nor MODE. In that case, it
4925 probably will not be used.
4927 If MODIFIER is EXPAND_SUM then when EXP is an addition
4928 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4929 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4930 products as above, or REG or MEM, or constant.
4931 Ordinarily in such cases we would output mul or add instructions
4932 and then return a pseudo reg containing the sum.
4934 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4935 it also marks a label as absolutely required (it can't be dead).
4936 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4937 This is used for outputting expressions used in initializers.
4939 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4940 with a constant address even if that address is not normally legitimate.
4941 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
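/* Editorial note: a typical call, in the style used throughout this
file, lets expand_expr pick the result location:

	temp = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

As stressed above, TARGET and TMODE are only suggestions; callers
must use the rtx actually returned. */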
4943 rtx
4944 expand_expr (exp, target, tmode, modifier)
4945 register tree exp;
4946 rtx target;
4947 enum machine_mode tmode;
4948 enum expand_modifier modifier;
4950 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4951 This is static so it will be accessible to our recursive callees. */
4952 static tree placeholder_list = 0;
4953 register rtx op0, op1, temp;
4954 tree type = TREE_TYPE (exp);
4955 int unsignedp = TREE_UNSIGNED (type);
4956 register enum machine_mode mode = TYPE_MODE (type);
4957 register enum tree_code code = TREE_CODE (exp);
4958 optab this_optab;
4959 /* Use subtarget as the target for operand 0 of a binary operation. */
4960 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4961 rtx original_target = target;
4962 /* Maybe defer this until sure not doing bytecode? */
4963 int ignore = (target == const0_rtx
4964 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4965 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4966 || code == COND_EXPR)
4967 && TREE_CODE (type) == VOID_TYPE));
4968 tree context;
4969 /* Used by check-memory-usage to make modifier read only. */
4970 enum expand_modifier ro_modifier;
4972 /* Make a read-only version of the modifier. */
4973 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
4974 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
4975 ro_modifier = modifier;
4976 else
4977 ro_modifier = EXPAND_NORMAL;
4979 if (output_bytecode && modifier != EXPAND_INITIALIZER)
4981 bc_expand_expr (exp);
4982 return NULL;
4985 /* Don't use hard regs as subtargets, because the combiner
4986 can only handle pseudo regs. */
4987 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4988 subtarget = 0;
4989 /* Avoid subtargets inside loops,
4990 since they hide some invariant expressions. */
4991 if (preserve_subexpressions_p ())
4992 subtarget = 0;
4994 /* If we are going to ignore this result, we need only do something
4995 if there is a side-effect somewhere in the expression. If there
4996 is, short-circuit the most common cases here. Note that we must
4997 not call expand_expr with anything but const0_rtx in case this
4998 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5000 if (ignore)
5002 if (! TREE_SIDE_EFFECTS (exp))
5003 return const0_rtx;
5005 /* Ensure we reference a volatile object even if value is ignored. */
5006 if (TREE_THIS_VOLATILE (exp)
5007 && TREE_CODE (exp) != FUNCTION_DECL
5008 && mode != VOIDmode && mode != BLKmode)
5010 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5011 if (GET_CODE (temp) == MEM)
5012 temp = copy_to_reg (temp);
5013 return const0_rtx;
5016 if (TREE_CODE_CLASS (code) == '1')
5017 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5018 VOIDmode, ro_modifier);
5019 else if (TREE_CODE_CLASS (code) == '2'
5020 || TREE_CODE_CLASS (code) == '<')
5022 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5023 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5024 return const0_rtx;
5026 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5027 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5028 /* If the second operand has no side effects, just evaluate
5029 the first. */
5030 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5031 VOIDmode, ro_modifier);
5033 target = 0;
5036 /* If we will do cse, generate all results into pseudo registers
5037 since 1) that allows cse to find more things
5038 and 2) otherwise cse could produce an insn the machine
5039 cannot support. */
5041 if (! cse_not_expected && mode != BLKmode && target
5042 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5043 target = subtarget;
5045 switch (code)
5047 case LABEL_DECL:
5049 tree function = decl_function_context (exp);
5050 /* Handle using a label in a containing function. */
5051 if (function != current_function_decl
5052 && function != inline_function_decl && function != 0)
5054 struct function *p = find_function_data (function);
5055 /* Allocate in the memory associated with the function
5056 that the label is in. */
5057 push_obstacks (p->function_obstack,
5058 p->function_maybepermanent_obstack);
5060 p->forced_labels
5061 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5062 p->forced_labels);
5063 p->addresses_labels = 1;
5064 pop_obstacks ();
5066 else
5068 current_function_addresses_labels = 1;
5069 if (modifier == EXPAND_INITIALIZER)
5070 forced_labels
5071 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp), forced_labels);
5074 temp = gen_rtx_MEM (FUNCTION_MODE,
5075 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5076 if (function != current_function_decl
5077 && function != inline_function_decl && function != 0)
5078 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5079 return temp;
5082 case PARM_DECL:
5083 if (DECL_RTL (exp) == 0)
5085 error_with_decl (exp, "prior parameter's size depends on `%s'");
5086 return CONST0_RTX (mode);
5089 /* ... fall through ... */
5091 case VAR_DECL:
5092 /* If a static var's type was incomplete when the decl was written,
5093 but the type is complete now, lay out the decl now. */
5094 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5095 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5097 push_obstacks_nochange ();
5098 end_temporary_allocation ();
5099 layout_decl (exp, 0);
5100 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5101 pop_obstacks ();
5104 /* Although static-storage variables start off initialized, according to
5105 ANSI C, a memcpy could overwrite them with uninitialized values. So
5106 we check them too. This also lets us check for read-only variables
5107 accessed via a non-const declaration, in case it won't be detected
5108 any other way (e.g., in an embedded system or OS kernel without
5109 memory protection).
5111 Aggregates are not checked here; they're handled elsewhere. */
5112 if (current_function_check_memory_usage && code == VAR_DECL
5113 && GET_CODE (DECL_RTL (exp)) == MEM
5114 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5116 enum memory_use_mode memory_usage;
5117 memory_usage = get_memory_usage_from_modifier (modifier);
5119 if (memory_usage != MEMORY_USE_DONT)
5120 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5121 XEXP (DECL_RTL (exp), 0), ptr_mode,
5122 GEN_INT (int_size_in_bytes (type)),
5123 TYPE_MODE (sizetype),
5124 GEN_INT (memory_usage),
5125 TYPE_MODE (integer_type_node));
5128 /* ... fall through ... */
5130 case FUNCTION_DECL:
5131 case RESULT_DECL:
5132 if (DECL_RTL (exp) == 0)
5133 abort ();
5135 /* Ensure variable marked as used even if it doesn't go through
5136 a parser. If it hasn't been used yet, write out an external
5137 definition. */
5138 if (! TREE_USED (exp))
5140 assemble_external (exp);
5141 TREE_USED (exp) = 1;
5144 /* Show we haven't gotten RTL for this yet. */
5145 temp = 0;
5147 /* Handle variables inherited from containing functions. */
5148 context = decl_function_context (exp);
5150 /* We treat inline_function_decl as an alias for the current function
5151 because that is the inline function whose vars, types, etc.
5152 are being merged into the current function.
5153 See expand_inline_function. */
5155 if (context != 0 && context != current_function_decl
5156 && context != inline_function_decl
5157 /* If var is static, we don't need a static chain to access it. */
5158 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5159 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5161 rtx addr;
5163 /* Mark as non-local and addressable. */
5164 DECL_NONLOCAL (exp) = 1;
5165 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5166 abort ();
5167 mark_addressable (exp);
5168 if (GET_CODE (DECL_RTL (exp)) != MEM)
5169 abort ();
5170 addr = XEXP (DECL_RTL (exp), 0);
5171 if (GET_CODE (addr) == MEM)
5172 addr = gen_rtx_MEM (Pmode,
5173 fix_lexical_addr (XEXP (addr, 0), exp));
5174 else
5175 addr = fix_lexical_addr (addr, exp);
5176 temp = change_address (DECL_RTL (exp), mode, addr);
5179 /* This is the case of an array whose size is to be determined
5180 from its initializer, while the initializer is still being parsed.
5181 See expand_decl. */
5183 else if (GET_CODE (DECL_RTL (exp)) == MEM
5184 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5185 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5186 XEXP (DECL_RTL (exp), 0));
5188 /* If DECL_RTL is memory, we are in the normal case and either
5189 the address is not valid or it is not a register and -fforce-addr
5190 is specified, get the address into a register. */
5192 else if (GET_CODE (DECL_RTL (exp)) == MEM
5193 && modifier != EXPAND_CONST_ADDRESS
5194 && modifier != EXPAND_SUM
5195 && modifier != EXPAND_INITIALIZER
5196 && (! memory_address_p (DECL_MODE (exp),
5197 XEXP (DECL_RTL (exp), 0))
5198 || (flag_force_addr
5199 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5200 temp = change_address (DECL_RTL (exp), VOIDmode,
5201 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5203 /* If we got something, return it. But first, set the alignment
5204 if the address is a register. */
5205 if (temp != 0)
5207 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5208 mark_reg_pointer (XEXP (temp, 0),
5209 DECL_ALIGN (exp) / BITS_PER_UNIT);
5211 return temp;
5214 /* If the mode of DECL_RTL does not match that of the decl, it
5215 must be a promoted value. We return a SUBREG of the wanted mode,
5216 but mark it so that we know that it was already extended. */
5218 if (GET_CODE (DECL_RTL (exp)) == REG
5219 && GET_MODE (DECL_RTL (exp)) != mode)
5221 /* Get the signedness used for this variable. Ensure we get the
5222 same mode we got when the variable was declared. */
5223 if (GET_MODE (DECL_RTL (exp))
5224 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5225 abort ();
5227 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5228 SUBREG_PROMOTED_VAR_P (temp) = 1;
5229 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5230 return temp;
5233 return DECL_RTL (exp);
5235 case INTEGER_CST:
5236 return immed_double_const (TREE_INT_CST_LOW (exp),
5237 TREE_INT_CST_HIGH (exp),
5238 mode);
5240 case CONST_DECL:
5241 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5242 EXPAND_MEMORY_USE_BAD);
5244 case REAL_CST:
5245 /* If optimized, generate immediate CONST_DOUBLE
5246 which will be turned into memory by reload if necessary.
5248 We used to force a register so that loop.c could see it. But
5249 this does not allow gen_* patterns to perform optimizations with
5250 the constants. It also produces two insns in cases like "x = 1.0;".
5251 On most machines, floating-point constants are not permitted in
5252 many insns, so we'd end up copying it to a register in any case.
5254 Now, we do the copying in expand_binop, if appropriate. */
5255 return immed_real_const (exp);
5257 case COMPLEX_CST:
5258 case STRING_CST:
5259 if (! TREE_CST_RTL (exp))
5260 output_constant_def (exp);
5262 /* TREE_CST_RTL probably contains a constant address.
5263 On RISC machines where a constant address isn't valid,
5264 make some insns to get that address into a register. */
5265 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5266 && modifier != EXPAND_CONST_ADDRESS
5267 && modifier != EXPAND_INITIALIZER
5268 && modifier != EXPAND_SUM
5269 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5270 || (flag_force_addr
5271 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5272 return change_address (TREE_CST_RTL (exp), VOIDmode,
5273 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5274 return TREE_CST_RTL (exp);
5276 case SAVE_EXPR:
5277 context = decl_function_context (exp);
5279 /* If this SAVE_EXPR was at global context, assume we are an
5280 initialization function and move it into our context. */
5281 if (context == 0)
5282 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5284 /* We treat inline_function_decl as an alias for the current function
5285 because that is the inline function whose vars, types, etc.
5286 are being merged into the current function.
5287 See expand_inline_function. */
5288 if (context == current_function_decl || context == inline_function_decl)
5289 context = 0;
5291 /* If this is non-local, handle it. */
5292 if (context)
5294 /* The following call just exists to abort if the context is
5295 not of a containing function. */
5296 find_function_data (context);
5298 temp = SAVE_EXPR_RTL (exp);
5299 if (temp && GET_CODE (temp) == REG)
5301 put_var_into_stack (exp);
5302 temp = SAVE_EXPR_RTL (exp);
5304 if (temp == 0 || GET_CODE (temp) != MEM)
5305 abort ();
5306 return change_address (temp, mode,
5307 fix_lexical_addr (XEXP (temp, 0), exp));
5309 if (SAVE_EXPR_RTL (exp) == 0)
5311 if (mode == VOIDmode)
5312 temp = const0_rtx;
5313 else
5314 temp = assign_temp (type, 3, 0, 0);
5316 SAVE_EXPR_RTL (exp) = temp;
5317 if (!optimize && GET_CODE (temp) == REG)
5318 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
5319 save_expr_regs);
5321 /* If the mode of TEMP does not match that of the expression, it
5322 must be a promoted value. We pass store_expr a SUBREG of the
5323 wanted mode but mark it so that we know that it was already
5324 extended. Note that `unsignedp' was modified above in
5325 this case. */
5327 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5329 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5330 SUBREG_PROMOTED_VAR_P (temp) = 1;
5331 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5334 if (temp == const0_rtx)
5335 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5336 EXPAND_MEMORY_USE_BAD);
5337 else
5338 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5340 TREE_USED (exp) = 1;
5343 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5344 must be a promoted value. We return a SUBREG of the wanted mode,
5345 but mark it so that we know that it was already extended. */
5347 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5348 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5350 /* Compute the signedness and make the proper SUBREG. */
5351 promote_mode (type, mode, &unsignedp, 0);
5352 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5353 SUBREG_PROMOTED_VAR_P (temp) = 1;
5354 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5355 return temp;
5358 return SAVE_EXPR_RTL (exp);
5360 case UNSAVE_EXPR:
5362 rtx temp;
5363 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5364 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5365 return temp;
5368 case PLACEHOLDER_EXPR:
5370 tree placeholder_expr;
5372 /* If there is an object on the head of the placeholder list,
5373 see if some object in it is of type TYPE or a pointer to it. For
5374 further information, see tree.def. */
5375 for (placeholder_expr = placeholder_list;
5376 placeholder_expr != 0;
5377 placeholder_expr = TREE_CHAIN (placeholder_expr))
5379 tree need_type = TYPE_MAIN_VARIANT (type);
5380 tree object = 0;
5381 tree old_list = placeholder_list;
5382 tree elt;
5384 /* Find the outermost reference that is of the type we want.
5385 If none, see if any object has a type that is a pointer to
5386 the type we want. */
5387 for (elt = TREE_PURPOSE (placeholder_expr);
5388 elt != 0 && object == 0;
5389 elt
5390 = ((TREE_CODE (elt) == COMPOUND_EXPR
5391 || TREE_CODE (elt) == COND_EXPR)
5392 ? TREE_OPERAND (elt, 1)
5393 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5394 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5395 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5396 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5397 ? TREE_OPERAND (elt, 0) : 0))
5398 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5399 object = elt;
5401 for (elt = TREE_PURPOSE (placeholder_expr);
5402 elt != 0 && object == 0;
5403 elt
5404 = ((TREE_CODE (elt) == COMPOUND_EXPR
5405 || TREE_CODE (elt) == COND_EXPR)
5406 ? TREE_OPERAND (elt, 1)
5407 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5408 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5409 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5410 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5411 ? TREE_OPERAND (elt, 0) : 0))
5412 if (POINTER_TYPE_P (TREE_TYPE (elt))
5413 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
5414 == need_type))
5415 object = build1 (INDIRECT_REF, need_type, elt);
5417 if (object != 0)
5419 /* Expand this object skipping the list entries before
5420 it was found in case it is also a PLACEHOLDER_EXPR.
5421 In that case, we want to translate it using subsequent
5422 entries. */
5423 placeholder_list = TREE_CHAIN (placeholder_expr);
5424 temp = expand_expr (object, original_target, tmode,
5425 ro_modifier);
5426 placeholder_list = old_list;
5427 return temp;
5432 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5433 abort ();
5435 case WITH_RECORD_EXPR:
5436 /* Put the object on the placeholder list, expand our first operand,
5437 and pop the list. */
5438 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5439 placeholder_list);
5440 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
5441 tmode, ro_modifier);
5442 placeholder_list = TREE_CHAIN (placeholder_list);
5443 return target;
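/* Editorial note: WITH_RECORD_EXPR is the counterpart of the
PLACEHOLDER_EXPR case above: it pairs an expression with the object
its embedded PLACEHOLDER_EXPRs stand for, pushes that object on
placeholder_list for the recursive expansion, then pops it. */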
5445 case EXIT_EXPR:
5446 expand_exit_loop_if_false (NULL_PTR,
5447 invert_truthvalue (TREE_OPERAND (exp, 0)));
5448 return const0_rtx;
5450 case LOOP_EXPR:
5451 push_temp_slots ();
5452 expand_start_loop (1);
5453 expand_expr_stmt (TREE_OPERAND (exp, 0));
5454 expand_end_loop ();
5455 pop_temp_slots ();
5457 return const0_rtx;
5459 case BIND_EXPR:
5461 tree vars = TREE_OPERAND (exp, 0);
5462 int vars_need_expansion = 0;
5464 /* Need to open a binding contour here because
5465 if there are any cleanups they must be contained here. */
5466 expand_start_bindings (0);
5468 /* Mark the corresponding BLOCK for output in its proper place. */
5469 if (TREE_OPERAND (exp, 2) != 0
5470 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5471 insert_block (TREE_OPERAND (exp, 2));
5473 /* If VARS have not yet been expanded, expand them now. */
5474 while (vars)
5476 if (DECL_RTL (vars) == 0)
5478 vars_need_expansion = 1;
5479 expand_decl (vars);
5481 expand_decl_init (vars);
5482 vars = TREE_CHAIN (vars);
5485 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
5487 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5489 return temp;
5492 case RTL_EXPR:
5493 if (RTL_EXPR_SEQUENCE (exp))
5495 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5496 abort ();
5497 emit_insns (RTL_EXPR_SEQUENCE (exp));
5498 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5500 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
5501 free_temps_for_rtl_expr (exp);
5502 return RTL_EXPR_RTL (exp);
5504 case CONSTRUCTOR:
5505 /* If we don't need the result, just ensure we evaluate any
5506 subexpressions. */
5507 if (ignore)
5509 tree elt;
5510 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
5511 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
5512 EXPAND_MEMORY_USE_BAD);
5513 return const0_rtx;
5516 /* All elts simple constants => refer to a constant in memory. But
5517 if this is a non-BLKmode mode, let it store a field at a time
5518 since that should make a CONST_INT or CONST_DOUBLE when we
5519 fold. Likewise, if we have a target we can use, it is best to
5520 store directly into the target unless the type is large enough
5521 that memcpy will be used. If we are making an initializer and
5522 all operands are constant, put it in memory as well. */
5523 else if ((TREE_STATIC (exp)
5524 && ((mode == BLKmode
5525 && ! (target != 0 && safe_from_p (target, exp, 1)))
5526 || TREE_ADDRESSABLE (exp)
5527 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5528 && (move_by_pieces_ninsns
5529 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5530 TYPE_ALIGN (type) / BITS_PER_UNIT)
5531 >= MOVE_RATIO)
5532 && ! mostly_zeros_p (exp))))
5533 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
5535 rtx constructor = output_constant_def (exp);
5536 if (modifier != EXPAND_CONST_ADDRESS
5537 && modifier != EXPAND_INITIALIZER
5538 && modifier != EXPAND_SUM
5539 && (! memory_address_p (GET_MODE (constructor),
5540 XEXP (constructor, 0))
5541 || (flag_force_addr
5542 && GET_CODE (XEXP (constructor, 0)) != REG)))
5543 constructor = change_address (constructor, VOIDmode,
5544 XEXP (constructor, 0));
5545 return constructor;
5548 else
5550 /* Handle calls that pass values in multiple non-contiguous
5551 locations. The Irix 6 ABI has examples of this. */
5552 if (target == 0 || ! safe_from_p (target, exp, 1)
5553 || GET_CODE (target) == PARALLEL)
5555 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5556 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5557 else
5558 target = assign_temp (type, 0, 1, 1);
5561 if (TREE_READONLY (exp))
5563 if (GET_CODE (target) == MEM)
5564 target = copy_rtx (target);
5566 RTX_UNCHANGING_P (target) = 1;
5569 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0);
5570 return target;
5573 case INDIRECT_REF:
5575 tree exp1 = TREE_OPERAND (exp, 0);
5576 tree exp2;
5578 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5579 op0 = memory_address (mode, op0);
5581 if (current_function_check_memory_usage
5582 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5584 enum memory_use_mode memory_usage;
5585 memory_usage = get_memory_usage_from_modifier (modifier);
5587 if (memory_usage != MEMORY_USE_DONT)
5589 in_check_memory_usage = 1;
5590 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5591 op0, ptr_mode,
5592 GEN_INT (int_size_in_bytes (type)),
5593 TYPE_MODE (sizetype),
5594 GEN_INT (memory_usage),
5595 TYPE_MODE (integer_type_node));
5596 in_check_memory_usage = 0;
5600 temp = gen_rtx_MEM (mode, op0);
5601 /* If address was computed by addition,
5602 mark this as an element of an aggregate. */
5603 if (TREE_CODE (exp1) == PLUS_EXPR
5604 || (TREE_CODE (exp1) == SAVE_EXPR
5605 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
5606 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
5607 /* If the pointer is actually a REFERENCE_TYPE, this could
5608 be pointing into some aggregate too. */
5609 || TREE_CODE (TREE_TYPE (exp1)) == REFERENCE_TYPE
5610 || (TREE_CODE (exp1) == ADDR_EXPR
5611 && (exp2 = TREE_OPERAND (exp1, 0))
5612 && AGGREGATE_TYPE_P (TREE_TYPE (exp2)))
5613 /* This may have been an array reference to the first element
5614 that was optimized away from being an addition. */
5615 || (TREE_CODE (exp1) == NOP_EXPR
5616 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp1, 0)))
5617 == REFERENCE_TYPE)
5618 || ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp1, 0)))
5619 == POINTER_TYPE)
5620 && (AGGREGATE_TYPE_P
5621 (TREE_TYPE (TREE_TYPE
5622 (TREE_OPERAND (exp1, 0)))))))))
5623 MEM_IN_STRUCT_P (temp) = 1;
5624 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
5626 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5627 here, because, in C and C++, the fact that a location is accessed
5628 through a pointer to const does not mean that the value there can
5629 never change. Languages where it can never change should
5630 also set TREE_STATIC. */
5631 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
5632 return temp;
5635 case ARRAY_REF:
5636 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5637 abort ();
5640 tree array = TREE_OPERAND (exp, 0);
5641 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5642 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5643 tree index = TREE_OPERAND (exp, 1);
5644 tree index_type = TREE_TYPE (index);
5645 HOST_WIDE_INT i;
5647 /* Optimize the special case of a zero lower bound.
5649 We convert the low_bound to sizetype to avoid some problems
5650 with constant folding. (E.g. suppose the lower bound is 1,
5651 and its mode is QI. Without the conversion, (ARRAY
5652 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5653 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5655 But sizetype isn't quite right either (especially if
5656 the low bound is negative). FIXME */
5658 if (! integer_zerop (low_bound))
5659 index = fold (build (MINUS_EXPR, index_type, index,
5660 convert (sizetype, low_bound)));
5662 /* Fold an expression like: "foo"[2].
5663 This is not done in fold so it won't happen inside &.
5664 Don't fold if this is for wide characters since it's too
5665 difficult to do correctly and this is a very rare case. */
5667 if (TREE_CODE (array) == STRING_CST
5668 && TREE_CODE (index) == INTEGER_CST
5669 && !TREE_INT_CST_HIGH (index)
5670 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
5671 && GET_MODE_CLASS (mode) == MODE_INT
5672 && GET_MODE_SIZE (mode) == 1)
5673 return GEN_INT (TREE_STRING_POINTER (array)[i]);
5675 /* If this is a constant index into a constant array,
5676 just get the value from the array. Handle both the cases when
5677 we have an explicit constructor and when our operand is a variable
5678 that was declared const. */
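/* E.g., given static const int v[] = { 10, 20, 30 };, a use of
v[1] is replaced here by the constant 20, taken either from an
explicit CONSTRUCTOR or from DECL_INITIAL of the VAR_DECL. */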
5680 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5682 if (TREE_CODE (index) == INTEGER_CST
5683 && TREE_INT_CST_HIGH (index) == 0)
5685 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5687 i = TREE_INT_CST_LOW (index);
5688 while (elem && i--)
5689 elem = TREE_CHAIN (elem);
5690 if (elem)
5691 return expand_expr (fold (TREE_VALUE (elem)), target,
5692 tmode, ro_modifier);
5696 else if (optimize >= 1
5697 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5698 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5699 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5701 if (TREE_CODE (index) == INTEGER_CST)
5703 tree init = DECL_INITIAL (array);
5705 i = TREE_INT_CST_LOW (index);
5706 if (TREE_CODE (init) == CONSTRUCTOR)
5708 tree elem = CONSTRUCTOR_ELTS (init);
5710 while (elem
5711 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
5712 elem = TREE_CHAIN (elem);
5713 if (elem)
5714 return expand_expr (fold (TREE_VALUE (elem)), target,
5715 tmode, ro_modifier);
5717 else if (TREE_CODE (init) == STRING_CST
5718 && TREE_INT_CST_HIGH (index) == 0
5719 && (TREE_INT_CST_LOW (index)
5720 < TREE_STRING_LENGTH (init)))
5721 return (GEN_INT
5722 (TREE_STRING_POINTER
5723 (init)[TREE_INT_CST_LOW (index)]));
5728 /* ... fall through ... */
5730 case COMPONENT_REF:
5731 case BIT_FIELD_REF:
5732 /* If the operand is a CONSTRUCTOR, we can just extract the
5733 appropriate field if it is present. Don't do this if we have
5734 already written the data since we want to refer to that copy
5735 and varasm.c assumes that's what we'll do. */
5736 if (code != ARRAY_REF
5737 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5738 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
5740 tree elt;
5742 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5743 elt = TREE_CHAIN (elt))
5744 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
5745 /* We can normally use the value of the field in the
5746 CONSTRUCTOR. However, if this is a bitfield in
5747 an integral mode that we can fit in a HOST_WIDE_INT,
5748 we must mask only the number of bits in the bitfield,
5749 since this is done implicitly by the constructor. If
5750 the bitfield does not meet either of those conditions,
5751 we can't do this optimization. */
5752 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
5753 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
5754 == MODE_INT)
5755 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
5756 <= HOST_BITS_PER_WIDE_INT))))
5758 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5759 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
5761 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
5763 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
5765 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
5766 op0 = expand_and (op0, op1, target);
5768 else
5770 enum machine_mode imode
5771 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
5772 tree count
5773 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
5774 0);
5776 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
5777 target, 0);
5778 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
5779 target, 0);
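/* The left shift moves the field's sign bit into IMODE's sign
bit, and the arithmetic right shift (unsignedp == 0) brings it
back down, replicating it; together they sign-extend the low
BITSIZE bits, mirroring the mask used in the unsigned branch. */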
5783 return op0;
5788 enum machine_mode mode1;
5789 int bitsize;
5790 int bitpos;
5791 tree offset;
5792 int volatilep = 0;
5793 int alignment;
5794 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
5795 &mode1, &unsignedp, &volatilep,
5796 &alignment);
5798 /* If we got back the original object, something is wrong. Perhaps
5799 we are evaluating an expression too early. In any event, don't
5800 infinitely recurse. */
5801 if (tem == exp)
5802 abort ();
5804 /* If TEM's type is a union of variable size, pass TARGET to the inner
5805 computation, since it will need a temporary and TARGET is known
5806 to be usable as one. This occurs in unchecked conversion in Ada. */
5808 op0 = expand_expr (tem,
5809 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5810 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5811 != INTEGER_CST)
5812 ? target : NULL_RTX),
5813 VOIDmode,
5814 modifier == EXPAND_INITIALIZER
5815 ? modifier : EXPAND_NORMAL);
5817 /* If this is a constant, put it into a register if it is a
5818 legitimate constant and memory if it isn't. */
5819 if (CONSTANT_P (op0))
5821 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
5822 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
5823 op0 = force_reg (mode, op0);
5824 else
5825 op0 = validize_mem (force_const_mem (mode, op0));
5828 if (offset != 0)
5830 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5832 if (GET_CODE (op0) != MEM)
5833 abort ();
5834 op0 = change_address (op0, VOIDmode,
5835 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
5836 force_reg (ptr_mode,
5837 offset_rtx)));
5840 /* Don't forget about volatility even if this is a bitfield. */
5841 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5843 op0 = copy_rtx (op0);
5844 MEM_VOLATILE_P (op0) = 1;
5847 /* Check the access. */
5848 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
5850 enum memory_use_mode memory_usage;
5851 memory_usage = get_memory_usage_from_modifier (modifier);
5853 if (memory_usage != MEMORY_USE_DONT)
5855 rtx to;
5856 int size;
5858 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
5859 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
5861 /* Check the access right of the pointer. */
5862 if (size > BITS_PER_UNIT)
5863 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5864 to, ptr_mode,
5865 GEN_INT (size / BITS_PER_UNIT),
5866 TYPE_MODE (sizetype),
5867 GEN_INT (memory_usage),
5868 TYPE_MODE (integer_type_node));
5872 /* In cases where an aligned union has an unaligned object
5873 as a field, we might be extracting a BLKmode value from
5874 an integer-mode (e.g., SImode) object. Handle this case
5875 by doing the extract into an object as wide as the field
5876 (which we know to be the width of a basic mode), then
5877 storing into memory, and changing the mode to BLKmode.
5878 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5879 EXPAND_INITIALIZER), then we must not copy to a temporary. */
5880 if (mode1 == VOIDmode
5881 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5882 || (modifier != EXPAND_CONST_ADDRESS
5883 && modifier != EXPAND_INITIALIZER
5884 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
5885 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5886 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5887 /* If the field isn't aligned enough to fetch as a memref,
5888 fetch it as a bit field. */
5889 || (SLOW_UNALIGNED_ACCESS
5890 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5891 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
5893 enum machine_mode ext_mode = mode;
5895 if (ext_mode == BLKmode)
5896 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5898 if (ext_mode == BLKmode)
5900 /* In this case, BITPOS must start at a byte boundary and
5901 TARGET, if specified, must be a MEM. */
5902 if (GET_CODE (op0) != MEM
5903 || (target != 0 && GET_CODE (target) != MEM)
5904 || bitpos % BITS_PER_UNIT != 0)
5905 abort ();
5907 op0 = change_address (op0, VOIDmode,
5908 plus_constant (XEXP (op0, 0),
5909 bitpos / BITS_PER_UNIT));
5910 if (target == 0)
5911 target = assign_temp (type, 0, 1, 1);
5913 emit_block_move (target, op0,
5914 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5915 / BITS_PER_UNIT),
5916 1);
5918 return target;
5921 op0 = validize_mem (op0);
5923 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5924 mark_reg_pointer (XEXP (op0, 0), alignment);
5926 op0 = extract_bit_field (op0, bitsize, bitpos,
5927 unsignedp, target, ext_mode, ext_mode,
5928 alignment,
5929 int_size_in_bytes (TREE_TYPE (tem)));
5931 /* If the result is a record type and BITSIZE is narrower than
5932 the mode of OP0, an integral mode, and this is a big endian
5933 machine, we must put the field into the high-order bits. */
5934 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
5935 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
5936 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
5937 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
5938 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
5939 - bitsize),
5940 op0, 1);
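/* E.g., on a hypothetical big-endian target, an 8-bit field
extracted into a 32-bit integral OP0 is shifted left by 24 so
that it occupies the high-order byte, where a BLKmode record
value is expected to live. */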
5942 if (mode == BLKmode)
5944 rtx new = assign_stack_temp (ext_mode,
5945 bitsize / BITS_PER_UNIT, 0);
5947 emit_move_insn (new, op0);
5948 op0 = copy_rtx (new);
5949 PUT_MODE (op0, BLKmode);
5950 MEM_IN_STRUCT_P (op0) = 1;
5953 return op0;
5956 /* If the result is BLKmode, use that to access the object
5957 now as well. */
5958 if (mode == BLKmode)
5959 mode1 = BLKmode;
5961 /* Get a reference to just this component. */
5962 if (modifier == EXPAND_CONST_ADDRESS
5963 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5964 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
5965 (bitpos / BITS_PER_UNIT)));
5966 else
5967 op0 = change_address (op0, mode1,
5968 plus_constant (XEXP (op0, 0),
5969 (bitpos / BITS_PER_UNIT)));
5970 if (GET_CODE (XEXP (op0, 0)) == REG)
5971 mark_reg_pointer (XEXP (op0, 0), alignment);
5973 MEM_IN_STRUCT_P (op0) = 1;
5974 MEM_VOLATILE_P (op0) |= volatilep;
5975 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
5976 || modifier == EXPAND_CONST_ADDRESS
5977 || modifier == EXPAND_INITIALIZER)
5978 return op0;
5979 else if (target == 0)
5980 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5982 convert_move (target, op0, unsignedp);
5983 return target;
5986 /* Intended for a reference to a buffer of a file-object in Pascal.
5987 But it's not certain that a special tree code will really be
5988 necessary for these. INDIRECT_REF might work for them. */
5989 case BUFFER_REF:
5990 abort ();
5992 case IN_EXPR:
5994 /* Pascal set IN expression.
5996 Algorithm:
5997 rlo = set_low - (set_low % bits_per_unit);
5998 the_word = set [ (index - rlo) / bits_per_unit ];
5999 bit_index = index % bits_per_unit;
6000 bitmask = 1 << bit_index;
6001 return !!(the_word & bitmask); */
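/* Worked example, assuming bits_per_unit == 8: with set_low == 3
and index == 11, rlo == 0, so we test bit 11 % 8 == 3 of the
byte set[(11 - 0) / 8] == set[1]. */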
6003 tree set = TREE_OPERAND (exp, 0);
6004 tree index = TREE_OPERAND (exp, 1);
6005 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6006 tree set_type = TREE_TYPE (set);
6007 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6008 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6009 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6010 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6011 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6012 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6013 rtx setaddr = XEXP (setval, 0);
6014 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6015 rtx rlow;
6016 rtx diff, quo, rem, addr, bit, result;
6018 preexpand_calls (exp);
6020 /* If the domain is empty, the answer is no. Likewise if the index
6021 is constant and out of bounds. */
6022 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6023 && TREE_CODE (set_low_bound) == INTEGER_CST
6024 && tree_int_cst_lt (set_high_bound, set_low_bound))
6025 || (TREE_CODE (index) == INTEGER_CST
6026 && TREE_CODE (set_low_bound) == INTEGER_CST
6027 && tree_int_cst_lt (index, set_low_bound))
6028 || (TREE_CODE (set_high_bound) == INTEGER_CST
6029 && TREE_CODE (index) == INTEGER_CST
6030 && tree_int_cst_lt (set_high_bound, index))))
6031 return const0_rtx;
6033 if (target == 0)
6034 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6036 /* If we get here, we have to generate the code for both cases
6037 (in range and out of range). */
6039 op0 = gen_label_rtx ();
6040 op1 = gen_label_rtx ();
6042 if (! (GET_CODE (index_val) == CONST_INT
6043 && GET_CODE (lo_r) == CONST_INT))
6045 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
6046 GET_MODE (index_val), iunsignedp, 0);
6047 emit_jump_insn (gen_blt (op1));
6050 if (! (GET_CODE (index_val) == CONST_INT
6051 && GET_CODE (hi_r) == CONST_INT))
6053 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
6054 GET_MODE (index_val), iunsignedp, 0);
6055 emit_jump_insn (gen_bgt (op1));
6058 /* Calculate the element number of bit zero in the first word
6059 of the set. */
6060 if (GET_CODE (lo_r) == CONST_INT)
6061 rlow = GEN_INT (INTVAL (lo_r)
6062 & ~ (HOST_WIDE_INT) (BITS_PER_UNIT - 1));
6063 else
6064 rlow = expand_binop (index_mode, and_optab, lo_r,
6065 GEN_INT (~ (HOST_WIDE_INT) (BITS_PER_UNIT - 1)),
6066 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6068 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6069 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6071 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6072 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6073 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6074 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6076 addr = memory_address (byte_mode,
6077 expand_binop (index_mode, add_optab, diff,
6078 setaddr, NULL_RTX, iunsignedp,
6079 OPTAB_LIB_WIDEN));
6081 /* Extract the bit we want to examine.  */
6082 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6083 gen_rtx_MEM (byte_mode, addr),
6084 make_tree (TREE_TYPE (index), rem),
6085 NULL_RTX, 1);
6086 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6087 GET_MODE (target) == byte_mode ? target : 0,
6088 1, OPTAB_LIB_WIDEN);
6090 if (result != target)
6091 convert_move (target, result, 1);
6093 /* Output the code to handle the out-of-range case. */
6094 emit_jump (op0);
6095 emit_label (op1);
6096 emit_move_insn (target, const0_rtx);
6097 emit_label (op0);
6098 return target;
6101 case WITH_CLEANUP_EXPR:
6102 if (RTL_EXPR_RTL (exp) == 0)
6104 RTL_EXPR_RTL (exp)
6105 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6106 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6108 /* That's it for this cleanup. */
6109 TREE_OPERAND (exp, 2) = 0;
6111 return RTL_EXPR_RTL (exp);
6113 case CLEANUP_POINT_EXPR:
6115 extern int temp_slot_level;
6116 /* Start a new binding layer that will keep track of all cleanup
6117 actions to be performed. */
6118 expand_start_bindings (0);
6120 target_temp_slot_level = temp_slot_level;
6122 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6123 /* If we're going to use this value, load it up now. */
6124 if (! ignore)
6125 op0 = force_not_mem (op0);
6126 preserve_temp_slots (op0);
6127 expand_end_bindings (NULL_TREE, 0, 0);
6129 return op0;
6131 case CALL_EXPR:
6132 /* Check for a built-in function. */
6133 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6134 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6135 == FUNCTION_DECL)
6136 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6137 return expand_builtin (exp, target, subtarget, tmode, ignore);
6139 /* If this call was expanded already by preexpand_calls,
6140 just return the result we got. */
6141 if (CALL_EXPR_RTL (exp) != 0)
6142 return CALL_EXPR_RTL (exp);
6144 return expand_call (exp, target, ignore);
6146 case NON_LVALUE_EXPR:
6147 case NOP_EXPR:
6148 case CONVERT_EXPR:
6149 case REFERENCE_EXPR:
6150 if (TREE_CODE (type) == UNION_TYPE)
6152 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6153 if (target == 0)
6155 if (mode != BLKmode)
6156 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6157 else
6158 target = assign_temp (type, 0, 1, 1);
6161 if (GET_CODE (target) == MEM)
6162 /* Store data into beginning of memory target. */
6163 store_expr (TREE_OPERAND (exp, 0),
6164 change_address (target, TYPE_MODE (valtype), 0), 0);
6166 else if (GET_CODE (target) == REG)
6167 /* Store this field into a union of the proper type. */
6168 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6169 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6170 VOIDmode, 0, 1,
6171 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
6172 else
6173 abort ();
6175 /* Return the entire union. */
6176 return target;
6179 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6181 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
6182 ro_modifier);
6184 /* If the signedness of the conversion differs and OP0 is
6185 a promoted SUBREG, clear that indication since we now
6186 have to do the proper extension. */
6187 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6188 && GET_CODE (op0) == SUBREG)
6189 SUBREG_PROMOTED_VAR_P (op0) = 0;
6191 return op0;
6194 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6195 if (GET_MODE (op0) == mode)
6196 return op0;
6198 /* If OP0 is a constant, just convert it into the proper mode. */
6199 if (CONSTANT_P (op0))
6200 return
6201 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6202 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6204 if (modifier == EXPAND_INITIALIZER)
6205 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
6207 if (target == 0)
6208 return
6209 convert_to_mode (mode, op0,
6210 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6211 else
6212 convert_move (target, op0,
6213 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6214 return target;
6216 case PLUS_EXPR:
6217 /* We come here from MINUS_EXPR when the second operand is a
6218 constant. */
6219 plus_expr:
6220 this_optab = add_optab;
6222 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6223 something else, make sure we add the register to the constant and
6224 then to the other thing. This case can occur during strength
6225 reduction and doing it this way will produce better code if the
6226 frame pointer or argument pointer is eliminated.
6228 fold-const.c will ensure that the constant is always in the inner
6229 PLUS_EXPR, so the only case we need to do anything about is if
6230 sp, ap, or fp is our second argument, in which case we must swap
6231 the innermost first argument and our second argument. */
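/* E.g., (X + c) + fp is rearranged here into (fp + c) + X, so
that if fp is later eliminated its replacement can be folded
into the constant term. */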
6233 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6234 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6235 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6236 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6237 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6238 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6240 tree t = TREE_OPERAND (exp, 1);
6242 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6243 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6246 /* If the result is to be ptr_mode and we are adding an integer to
6247 something, we might be forming a constant. So try to use
6248 plus_constant. If it produces a sum and we can't accept it,
6249 use force_operand. This allows P = &ARR[const] to generate
6250 efficient code on machines where a SYMBOL_REF is not a valid
6251 address.
6253 If this is an EXPAND_SUM call, always return the sum. */
6254 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
6255 || mode == ptr_mode)
6257 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6258 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6259 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6261 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6262 EXPAND_SUM);
6263 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
6264 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6265 op1 = force_operand (op1, target);
6266 return op1;
6269 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6270 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6271 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6273 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6274 EXPAND_SUM);
6275 if (! CONSTANT_P (op0))
6277 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6278 VOIDmode, modifier);
6279 /* Don't go to both_summands if modifier
6280 says it's not right to return a PLUS. */
6281 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6282 goto binop2;
6283 goto both_summands;
6285 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
6286 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6287 op0 = force_operand (op0, target);
6288 return op0;
6292 /* No sense saving up arithmetic to be done
6293 if it's all in the wrong mode to form part of an address.
6294 And force_operand won't know whether to sign-extend or
6295 zero-extend. */
6296 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6297 || mode != ptr_mode)
6298 goto binop;
6300 preexpand_calls (exp);
6301 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6302 subtarget = 0;
6304 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
6305 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
6307 both_summands:
6308 /* Make sure any term that's a sum with a constant comes last. */
6309 if (GET_CODE (op0) == PLUS
6310 && CONSTANT_P (XEXP (op0, 1)))
6312 temp = op0;
6313 op0 = op1;
6314 op1 = temp;
6316 /* If adding to a sum including a constant,
6317 associate it to put the constant outside. */
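/* E.g., OP0 + (X + c) is reassociated to (OP0 + X) + c, leaving
the constant outermost where plus_constant and address
formation can absorb it. */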
6318 if (GET_CODE (op1) == PLUS
6319 && CONSTANT_P (XEXP (op1, 1)))
6321 rtx constant_term = const0_rtx;
6323 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6324 if (temp != 0)
6325 op0 = temp;
6326 /* Ensure that MULT comes first if there is one. */
6327 else if (GET_CODE (op0) == MULT)
6328 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
6329 else
6330 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
6332 /* Let's also eliminate constants from op0 if possible. */
6333 op0 = eliminate_constant_term (op0, &constant_term);
6335 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6336 their sum should be a constant. Form it into OP1, since the
6337 result we want will then be OP0 + OP1. */
6339 temp = simplify_binary_operation (PLUS, mode, constant_term,
6340 XEXP (op1, 1));
6341 if (temp != 0)
6342 op1 = temp;
6343 else
6344 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
6347 /* Put a constant term last and put a multiplication first. */
6348 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6349 temp = op1, op1 = op0, op0 = temp;
6351 temp = simplify_binary_operation (PLUS, mode, op0, op1);
6352 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
6354 case MINUS_EXPR:
6355 /* For the sake of an initializer, we are allowed to return a
6356 MINUS of two symbolic constants. Here we handle all cases
6357 when both operands are constant. */
6360 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6361 && really_constant_p (TREE_OPERAND (exp, 0))
6362 && really_constant_p (TREE_OPERAND (exp, 1)))
6364 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
6365 VOIDmode, ro_modifier);
6366 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6367 VOIDmode, ro_modifier);
6369 /* If the last operand is a CONST_INT, use plus_constant of
6370 the negated constant. Else make the MINUS. */
6371 if (GET_CODE (op1) == CONST_INT)
6372 return plus_constant (op0, - INTVAL (op1));
6373 else
6374 return gen_rtx_MINUS (mode, op0, op1);
6376 /* Convert A - const to A + (-const). */
6377 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6379 tree negated = fold (build1 (NEGATE_EXPR, type,
6380 TREE_OPERAND (exp, 1)));
6382 /* Deal with the case where we can't negate the constant
6383 in TYPE. */
6384 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6386 tree newtype = signed_type (type);
6387 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6388 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6389 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6391 if (! TREE_OVERFLOW (newneg))
6392 return expand_expr (convert (type,
6393 build (PLUS_EXPR, newtype,
6394 newop0, newneg)),
6395 target, tmode, ro_modifier);
6397 else
6399 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
6400 goto plus_expr;
6403 this_optab = sub_optab;
6404 goto binop;
6406 case MULT_EXPR:
6407 preexpand_calls (exp);
6408 /* If first operand is constant, swap them.
6409 Thus the following special case checks need only
6410 check the second operand. */
6411 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6413 register tree t1 = TREE_OPERAND (exp, 0);
6414 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6415 TREE_OPERAND (exp, 1) = t1;
6418 /* Attempt to return something suitable for generating an
6419 indexed address, for machines that support that. */
6421 if (modifier == EXPAND_SUM && mode == ptr_mode
6422 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6423 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6425 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6426 EXPAND_SUM);
6428 /* Apply distributive law if OP0 is x+c. */
6429 if (GET_CODE (op0) == PLUS
6430 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
6431 return
6432 gen_rtx_PLUS
6433 (mode,
6434 gen_rtx_MULT
6435 (mode, XEXP (op0, 0),
6436 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
6437 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6438 * INTVAL (XEXP (op0, 1))));
6440 if (GET_CODE (op0) != REG)
6441 op0 = force_operand (op0, NULL_RTX);
6442 if (GET_CODE (op0) != REG)
6443 op0 = copy_to_mode_reg (mode, op0);
6445 return
6446 gen_rtx_MULT (mode, op0,
6447 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
6450 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6451 subtarget = 0;
6453 /* Check for multiplying things that have been extended
6454 from a narrower type. If this machine supports multiplying
6455 in that narrower type with a result in the desired type,
6456 do it that way, and avoid the explicit type-conversion. */
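/* E.g., (int) (short) a * (int) (short) b can be done with a
single HImode x HImode -> SImode widening multiply, assuming
the target provides such a pattern, instead of extending both
operands and doing a full SImode multiply. */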
6457 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6458 && TREE_CODE (type) == INTEGER_TYPE
6459 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6460 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6461 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6462 && int_fits_type_p (TREE_OPERAND (exp, 1),
6463 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6464 /* Don't use a widening multiply if a shift will do. */
6465 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
6466 > HOST_BITS_PER_WIDE_INT)
6467 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6468 ||
6469 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6470 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6471 ==
6472 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6473 /* If both operands are extended, they must either both
6474 be zero-extended or both be sign-extended. */
6475 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6476 ==
6477 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6479 enum machine_mode innermode
6480 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
6481 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6482 ? smul_widen_optab : umul_widen_optab);
6483 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6484 ? umul_widen_optab : smul_widen_optab);
6485 if (mode == GET_MODE_WIDER_MODE (innermode))
6487 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6489 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6490 NULL_RTX, VOIDmode, 0);
6491 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6492 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6493 VOIDmode, 0);
6494 else
6495 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6496 NULL_RTX, VOIDmode, 0);
6497 goto binop2;
6499 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6500 && innermode == word_mode)
6502 rtx htem;
6503 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6504 NULL_RTX, VOIDmode, 0);
6505 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6506 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6507 VOIDmode, 0);
6508 else
6509 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6510 NULL_RTX, VOIDmode, 0);
6511 temp = expand_binop (mode, other_optab, op0, op1, target,
6512 unsignedp, OPTAB_LIB_WIDEN);
6513 htem = expand_mult_highpart_adjust (innermode,
6514 gen_highpart (innermode, temp),
6515 op0, op1,
6516 gen_highpart (innermode, temp),
6517 unsignedp);
6518 emit_move_insn (gen_highpart (innermode, temp), htem);
6519 return temp;
6523 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6524 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6525 return expand_mult (mode, op0, op1, target, unsignedp);
6527 case TRUNC_DIV_EXPR:
6528 case FLOOR_DIV_EXPR:
6529 case CEIL_DIV_EXPR:
6530 case ROUND_DIV_EXPR:
6531 case EXACT_DIV_EXPR:
6532 preexpand_calls (exp);
6533 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6534 subtarget = 0;
6535 /* Possible optimization: compute the dividend with EXPAND_SUM;
6536 then, if the divisor is constant, we can optimize the case where
6537 some terms of the dividend have coefficients divisible by it. */
6538 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6539 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6540 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6542 case RDIV_EXPR:
6543 this_optab = flodiv_optab;
6544 goto binop;
6546 case TRUNC_MOD_EXPR:
6547 case FLOOR_MOD_EXPR:
6548 case CEIL_MOD_EXPR:
6549 case ROUND_MOD_EXPR:
6550 preexpand_calls (exp);
6551 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6552 subtarget = 0;
6553 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6554 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6555 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6557 case FIX_ROUND_EXPR:
6558 case FIX_FLOOR_EXPR:
6559 case FIX_CEIL_EXPR:
6560 abort (); /* Not used for C. */
6562 case FIX_TRUNC_EXPR:
6563 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6564 if (target == 0)
6565 target = gen_reg_rtx (mode);
6566 expand_fix (target, op0, unsignedp);
6567 return target;
6569 case FLOAT_EXPR:
6570 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6571 if (target == 0)
6572 target = gen_reg_rtx (mode);
6573 /* expand_float can't figure out what to do if FROM has VOIDmode.
6574 So give it the correct mode. With -O, cse will optimize this. */
6575 if (GET_MODE (op0) == VOIDmode)
6576 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6577 op0);
6578 expand_float (target, op0,
6579 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6580 return target;
6582 case NEGATE_EXPR:
6583 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6584 temp = expand_unop (mode, neg_optab, op0, target, 0);
6585 if (temp == 0)
6586 abort ();
6587 return temp;
6589 case ABS_EXPR:
6590 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6592 /* Handle complex values specially. */
6593 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6594 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6595 return expand_complex_abs (mode, op0, target, unsignedp);
6597 /* Unsigned abs is simply the operand. Testing here means we don't
6598 risk generating incorrect code below. */
6599 if (TREE_UNSIGNED (type))
6600 return op0;
6602 return expand_abs (mode, op0, target, unsignedp,
6603 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
6605 case MAX_EXPR:
6606 case MIN_EXPR:
6607 target = original_target;
6608 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
6609 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
6610 || GET_MODE (target) != mode
6611 || (GET_CODE (target) == REG
6612 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6613 target = gen_reg_rtx (mode);
6614 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6615 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6617 /* First try to do it with a special MIN or MAX instruction.
6618 If that does not win, use a conditional jump to select the proper
6619 value. */
6620 this_optab = (TREE_UNSIGNED (type)
6621 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6622 : (code == MIN_EXPR ? smin_optab : smax_optab));
6624 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6625 OPTAB_WIDEN);
6626 if (temp != 0)
6627 return temp;
6629 /* At this point, a MEM target is no longer useful; we will get better
6630 code without it. */
6632 if (GET_CODE (target) == MEM)
6633 target = gen_reg_rtx (mode);
6635 if (target != op0)
6636 emit_move_insn (target, op0);
6638 op0 = gen_label_rtx ();
6640 /* If this mode is an integer too wide to compare properly,
6641 compare word by word. Rely on cse to optimize constant cases. */
6642 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
6644 if (code == MAX_EXPR)
6645 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6646 target, op1, NULL_RTX, op0);
6647 else
6648 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6649 op1, target, NULL_RTX, op0);
6650 emit_move_insn (target, op1);
6652 else
6654 if (code == MAX_EXPR)
6655 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6656 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6657 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
6658 else
6659 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6660 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6661 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
6662 if (temp == const0_rtx)
6663 emit_move_insn (target, op1);
6664 else if (temp != const_true_rtx)
6666 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6667 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6668 else
6669 abort ();
6670 emit_move_insn (target, op1);
6673 emit_label (op0);
6674 return target;
6676 case BIT_NOT_EXPR:
6677 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6678 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6679 if (temp == 0)
6680 abort ();
6681 return temp;
6683 case FFS_EXPR:
6684 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6685 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6686 if (temp == 0)
6687 abort ();
6688 return temp;
6690 /* ??? Can optimize bitwise operations with one arg constant.
6691 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6692 and (a bitwise1 b) bitwise2 b (etc)
6693 but that is probably not worthwhile. */
6695 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6696 boolean values when we want in all cases to compute both of them. In
6697 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6698 as actual zero-or-1 values and then bitwise anding. In cases where
6699 there cannot be any side effects, better code would be made by
6700 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6701 how to recognize those cases. */
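/* E.g., (a > 0) & (b > 0) as a TRUTH_AND_EXPR evaluates both
comparisons to 0-or-1 values and ANDs them with no branches,
whereas TRUTH_ANDIF_EXPR would branch around the second. */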
6703 case TRUTH_AND_EXPR:
6704 case BIT_AND_EXPR:
6705 this_optab = and_optab;
6706 goto binop;
6708 case TRUTH_OR_EXPR:
6709 case BIT_IOR_EXPR:
6710 this_optab = ior_optab;
6711 goto binop;
6713 case TRUTH_XOR_EXPR:
6714 case BIT_XOR_EXPR:
6715 this_optab = xor_optab;
6716 goto binop;
6718 case LSHIFT_EXPR:
6719 case RSHIFT_EXPR:
6720 case LROTATE_EXPR:
6721 case RROTATE_EXPR:
6722 preexpand_calls (exp);
6723 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6724 subtarget = 0;
6725 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6726 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6727 unsignedp);
6729 /* Could determine the answer when only additive constants differ. Also,
6730 the addition of one can be handled by changing the condition. */
6731 case LT_EXPR:
6732 case LE_EXPR:
6733 case GT_EXPR:
6734 case GE_EXPR:
6735 case EQ_EXPR:
6736 case NE_EXPR:
6737 preexpand_calls (exp);
6738 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6739 if (temp != 0)
6740 return temp;
6742 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
6743 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6744 && original_target
6745 && GET_CODE (original_target) == REG
6746 && (GET_MODE (original_target)
6747 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6749 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6750 VOIDmode, 0);
6752 if (temp != original_target)
6753 temp = copy_to_reg (temp);
6755 op1 = gen_label_rtx ();
6756 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
6757 GET_MODE (temp), unsignedp, 0);
6758 emit_jump_insn (gen_beq (op1));
6759 emit_move_insn (temp, const1_rtx);
6760 emit_label (op1);
6761 return temp;
6764 /* If no set-flag instruction, must generate a conditional
6765 store into a temporary variable. Drop through
6766 and handle this like && and ||. */
6768 case TRUTH_ANDIF_EXPR:
6769 case TRUTH_ORIF_EXPR:
6770 if (! ignore
6771 && (target == 0 || ! safe_from_p (target, exp, 1)
6772 /* Make sure we don't have a hard reg (such as function's return
6773 value) live across basic blocks, if not optimizing. */
6774 || (!optimize && GET_CODE (target) == REG
6775 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
6776 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6778 if (target)
6779 emit_clr_insn (target);
6781 op1 = gen_label_rtx ();
6782 jumpifnot (exp, op1);
6784 if (target)
6785 emit_0_to_1_insn (target);
6787 emit_label (op1);
6788 return ignore ? const0_rtx : target;
6790 case TRUTH_NOT_EXPR:
6791 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6792 /* The parser is careful to generate TRUTH_NOT_EXPR
6793 only with operands that are always zero or one. */
6794 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
6795 target, 1, OPTAB_LIB_WIDEN);
6796 if (temp == 0)
6797 abort ();
6798 return temp;
6800 case COMPOUND_EXPR:
6801 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6802 emit_queue ();
6803 return expand_expr (TREE_OPERAND (exp, 1),
6804 (ignore ? const0_rtx : target),
6805 VOIDmode, 0);
6807 case COND_EXPR:
6808 /* If we would have a "singleton" (see below) were it not for a
6809 conversion in each arm, bring that conversion back out. */
6810 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6811 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
6812 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
6813 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
6815 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
6816 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
6818 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
6819 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6820 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
6821 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
6822 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
6823 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6824 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
6825 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
6826 return expand_expr (build1 (NOP_EXPR, type,
6827 build (COND_EXPR, TREE_TYPE (true),
6828 TREE_OPERAND (exp, 0),
6829 true, false)),
6830 target, tmode, modifier);
6834 /* Note that COND_EXPRs whose type is a structure or union
6835 are required to be constructed to contain assignments of
6836 a temporary variable, so that we can evaluate them here
6837 for side effect only. If type is void, we must do likewise. */
6839 /* If an arm of the branch requires a cleanup,
6840 only that cleanup is performed. */
6842 tree singleton = 0;
6843 tree binary_op = 0, unary_op = 0;
6845 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6846 convert it to our mode, if necessary. */
6847 if (integer_onep (TREE_OPERAND (exp, 1))
6848 && integer_zerop (TREE_OPERAND (exp, 2))
6849 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6851 if (ignore)
6853 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6854 ro_modifier);
6855 return const0_rtx;
6858 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
6859 if (GET_MODE (op0) == mode)
6860 return op0;
6862 if (target == 0)
6863 target = gen_reg_rtx (mode);
6864 convert_move (target, op0, unsignedp);
6865 return target;
6868 /* Check for X ? A + B : A. If we have this, we can copy A to the
6869 output and conditionally add B. Similarly for unary operations.
6870 Don't do this if X has side-effects because those side effects
6871 might affect A or B and the "?" operation is a sequence point in
6872 ANSI. (operand_equal_p tests for side effects.) */
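/* E.g., for x ? i + 3 : i, we copy i to the output and add 3
only on the path where x is true. */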
6874 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6875 && operand_equal_p (TREE_OPERAND (exp, 2),
6876 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6877 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6878 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6879 && operand_equal_p (TREE_OPERAND (exp, 1),
6880 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6881 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6882 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6883 && operand_equal_p (TREE_OPERAND (exp, 2),
6884 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6885 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6886 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6887 && operand_equal_p (TREE_OPERAND (exp, 1),
6888 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6889 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6891 /* If we are not to produce a result, we have no target. Otherwise,
6892 if a target was specified use it; it will not be used as an
6893 intermediate target unless it is safe. If no target, use a
6894 temporary. */
6896 if (ignore)
6897 temp = 0;
6898 else if (original_target
6899 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
6900 || (singleton && GET_CODE (original_target) == REG
6901 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
6902 && original_target == var_rtx (singleton)))
6903 && GET_MODE (original_target) == mode
6904 #ifdef HAVE_conditional_move
6905 && (! can_conditionally_move_p (mode)
6906 || GET_CODE (original_target) == REG
6907 || TREE_ADDRESSABLE (type))
6908 #endif
6909 && ! (GET_CODE (original_target) == MEM
6910 && MEM_VOLATILE_P (original_target)))
6911 temp = original_target;
6912 else if (TREE_ADDRESSABLE (type))
6913 abort ();
6914 else
6915 temp = assign_temp (type, 0, 0, 1);
6917 /* If we had X ? A + C : A, with C a constant power of 2, and we can
6918 do the test of X as a store-flag operation, do this as
6919 A + ((X != 0) << log C). Similarly for other simple binary
6920 operators. If BRANCH_COST is low, do this only for C == 1. */
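/* Worked example: x ? i + 4 : i becomes i + ((x != 0) << 2),
replacing the branch with a store-flag and a shift. */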
6921 if (temp && singleton && binary_op
6922 && (TREE_CODE (binary_op) == PLUS_EXPR
6923 || TREE_CODE (binary_op) == MINUS_EXPR
6924 || TREE_CODE (binary_op) == BIT_IOR_EXPR
6925 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
6926 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
6927 : integer_onep (TREE_OPERAND (binary_op, 1)))
6928 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6930 rtx result;
6931 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6932 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6933 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
6934 : xor_optab);
6936 /* If we had X ? A : A + 1, do this as A + (X == 0).
6938 We have to invert the truth value here and then put it
6939 back later if do_store_flag fails. We cannot simply copy
6940 TREE_OPERAND (exp, 0) to another variable and modify that
6941 because invert_truthvalue can modify the tree pointed to
6942 by its argument. */
6943 if (singleton == TREE_OPERAND (exp, 1))
6944 TREE_OPERAND (exp, 0)
6945 = invert_truthvalue (TREE_OPERAND (exp, 0));
6947 result = do_store_flag (TREE_OPERAND (exp, 0),
6948 (safe_from_p (temp, singleton, 1)
6949 ? temp : NULL_RTX),
6950 mode, BRANCH_COST <= 1);
6952 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
6953 result = expand_shift (LSHIFT_EXPR, mode, result,
6954 build_int_2 (tree_log2
6955 (TREE_OPERAND
6956 (binary_op, 1)),
6957 0),
6958 (safe_from_p (temp, singleton, 1)
6959 ? temp : NULL_RTX), 0);
6961 if (result)
6963 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
6964 return expand_binop (mode, boptab, op1, result, temp,
6965 unsignedp, OPTAB_LIB_WIDEN);
6967 else if (singleton == TREE_OPERAND (exp, 1))
6968 TREE_OPERAND (exp, 0)
6969 = invert_truthvalue (TREE_OPERAND (exp, 0));
6972 do_pending_stack_adjust ();
6973 NO_DEFER_POP;
6974 op0 = gen_label_rtx ();
6976 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6978 if (temp != 0)
6980 /* If the target conflicts with the other operand of the
6981 binary op, we can't use it. Also, we can't use the target
6982 if it is a hard register, because evaluating the condition
6983 might clobber it. */
6984 if ((binary_op
6985 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
6986 || (GET_CODE (temp) == REG
6987 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
6988 temp = gen_reg_rtx (mode);
6989 store_expr (singleton, temp, 0);
6991 else
6992 expand_expr (singleton,
6993 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6994 if (singleton == TREE_OPERAND (exp, 1))
6995 jumpif (TREE_OPERAND (exp, 0), op0);
6996 else
6997 jumpifnot (TREE_OPERAND (exp, 0), op0);
6999 start_cleanup_deferral ();
7000 if (binary_op && temp == 0)
7001 /* Just touch the other operand. */
7002 expand_expr (TREE_OPERAND (binary_op, 1),
7003 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7004 else if (binary_op)
7005 store_expr (build (TREE_CODE (binary_op), type,
7006 make_tree (type, temp),
7007 TREE_OPERAND (binary_op, 1)),
7008 temp, 0);
7009 else
7010 store_expr (build1 (TREE_CODE (unary_op), type,
7011 make_tree (type, temp)),
7012 temp, 0);
7013 op1 = op0;
7015 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7016 comparison operator. If we have one of these cases, set the
7017 output to A, branch on A (cse will merge these two references),
7018 then set the output to FOO. */
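/* E.g., for x != 0 ? x : y, we store x, branch on x itself (cse
merges the two references), and store y on the fall-through
path. */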
7019 else if (temp
7020 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7021 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7022 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7023 TREE_OPERAND (exp, 1), 0)
7024 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7025 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7026 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7028 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7029 temp = gen_reg_rtx (mode);
7030 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7031 jumpif (TREE_OPERAND (exp, 0), op0);
7033 start_cleanup_deferral ();
7034 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7035 op1 = op0;
7037 else if (temp
7038 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7039 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7040 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7041 TREE_OPERAND (exp, 2), 0)
7042 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7043 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7044 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7046 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7047 temp = gen_reg_rtx (mode);
7048 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7049 jumpifnot (TREE_OPERAND (exp, 0), op0);
7051 start_cleanup_deferral ();
7052 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7053 op1 = op0;
7055 else
7057 op1 = gen_label_rtx ();
7058 jumpifnot (TREE_OPERAND (exp, 0), op0);
7060 start_cleanup_deferral ();
7061 if (temp != 0)
7062 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7063 else
7064 expand_expr (TREE_OPERAND (exp, 1),
7065 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7066 end_cleanup_deferral ();
7067 emit_queue ();
7068 emit_jump_insn (gen_jump (op1));
7069 emit_barrier ();
7070 emit_label (op0);
7071 start_cleanup_deferral ();
7072 if (temp != 0)
7073 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7074 else
7075 expand_expr (TREE_OPERAND (exp, 2),
7076 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7079 end_cleanup_deferral ();
7081 emit_queue ();
7082 emit_label (op1);
7083 OK_DEFER_POP;
7085 return temp;
7088 case TARGET_EXPR:
7090 /* Something needs to be initialized, but we didn't know
7091 where that thing was when building the tree. For example,
7092 it could be the return value of a function, or a parameter
7093 to a function which is passed on the stack, or a temporary
7094 variable which must be passed by reference.
7096 We guarantee that the expression will either be constructed
7097 or copied into our original target. */
7099 tree slot = TREE_OPERAND (exp, 0);
7100 tree cleanups = NULL_TREE;
7101 tree exp1;
7102 rtx temp;
7104 if (TREE_CODE (slot) != VAR_DECL)
7105 abort ();
7107 if (! ignore)
7108 target = original_target;
7110 if (target == 0)
7112 if (DECL_RTL (slot) != 0)
7114 target = DECL_RTL (slot);
7115 /* If we have already expanded the slot, don't do
7116 it again. (mrs) */
7117 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7118 return target;
7120 else
7122 target = assign_temp (type, 2, 0, 1);
7123 /* All temp slots at this level must not conflict. */
7124 preserve_temp_slots (target);
7125 DECL_RTL (slot) = target;
7126 if (TREE_ADDRESSABLE (slot))
7128 TREE_ADDRESSABLE (slot) = 0;
7129 mark_addressable (slot);
7132 /* Since SLOT is not known to the called function
7133 to belong to its stack frame, we must build an explicit
7134 cleanup. This case occurs when we must build up a reference
7135 to pass as an argument. In this case,
7136 it is very likely that such a reference need not be
7137 built here. */
7139 if (TREE_OPERAND (exp, 2) == 0)
7140 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
7141 cleanups = TREE_OPERAND (exp, 2);
7144 else
7146 /* This case does occur, when expanding a parameter which
7147 needs to be constructed on the stack. The target
7148 is the actual stack address that we want to initialize.
7149 The function we call will perform the cleanup in this case. */
7151 /* If we have already assigned it space, use that space,
7152 not the target we were passed in, as our target
7153 parameter is only a hint. */
7154 if (DECL_RTL (slot) != 0)
7156 target = DECL_RTL (slot);
7157 /* If we have already expanded the slot, don't do
7158 it again. (mrs) */
7159 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7160 return target;
7162 else
7164 DECL_RTL (slot) = target;
7165 /* If we must have an addressable slot, then make sure that
7166 the RTL that we just stored in slot is OK. */
7167 if (TREE_ADDRESSABLE (slot))
7169 TREE_ADDRESSABLE (slot) = 0;
7170 mark_addressable (slot);
7175 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
7176 /* Mark it as expanded. */
7177 TREE_OPERAND (exp, 1) = NULL_TREE;
7179 TREE_USED (slot) = 1;
7180 store_expr (exp1, target, 0);
7182 expand_decl_cleanup (NULL_TREE, cleanups);
7184 return target;
7187 case INIT_EXPR:
7189 tree lhs = TREE_OPERAND (exp, 0);
7190 tree rhs = TREE_OPERAND (exp, 1);
7191 tree noncopied_parts = 0;
7192 tree lhs_type = TREE_TYPE (lhs);
7194 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7195 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7196 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7197 TYPE_NONCOPIED_PARTS (lhs_type));
7198 while (noncopied_parts != 0)
7200 expand_assignment (TREE_VALUE (noncopied_parts),
7201 TREE_PURPOSE (noncopied_parts), 0, 0);
7202 noncopied_parts = TREE_CHAIN (noncopied_parts);
7204 return temp;
7207 case MODIFY_EXPR:
7209 /* If lhs is complex, expand calls in rhs before computing it.
7210 That's so we don't compute a pointer and save it over a call.
7211 If lhs is simple, compute it first so we can give it as a
7212 target if the rhs is just a call. This avoids an extra temp and copy
7213 and that prevents a partial subsumption which produces bad code.
7214 Actually we could treat component_ref's of vars like vars. */
7216 tree lhs = TREE_OPERAND (exp, 0);
7217 tree rhs = TREE_OPERAND (exp, 1);
7218 tree noncopied_parts = 0;
7219 tree lhs_type = TREE_TYPE (lhs);
7221 temp = 0;
7223 if (TREE_CODE (lhs) != VAR_DECL
7224 && TREE_CODE (lhs) != RESULT_DECL
7225 && TREE_CODE (lhs) != PARM_DECL
7226 && ! (TREE_CODE (lhs) == INDIRECT_REF
7227 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7228 preexpand_calls (exp);
7230 /* Check for |= or &= of a bitfield of size 1 into another bitfield
7231 of size 1. In this case, (unless we need the result of the
7232 assignment) we can do this more efficiently with a
7233 test followed by an assignment, if necessary.
7235 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7236 things change so we do, this code should be enhanced to
7237 support it. */
7238 if (ignore
7239 && TREE_CODE (lhs) == COMPONENT_REF
7240 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7241 || TREE_CODE (rhs) == BIT_AND_EXPR)
7242 && TREE_OPERAND (rhs, 0) == lhs
7243 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7244 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7245 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7247 rtx label = gen_label_rtx ();
7249 do_jump (TREE_OPERAND (rhs, 1),
7250 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7251 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7252 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7253 (TREE_CODE (rhs) == BIT_IOR_EXPR
7254 ? integer_one_node
7255 : integer_zero_node)),
7256 0, 0);
7257 do_pending_stack_adjust ();
7258 emit_label (label);
7259 return const0_rtx;
7262 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7263 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7264 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7265 TYPE_NONCOPIED_PARTS (lhs_type));
7267 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7268 while (noncopied_parts != 0)
7270 expand_assignment (TREE_PURPOSE (noncopied_parts),
7271 TREE_VALUE (noncopied_parts), 0, 0);
7272 noncopied_parts = TREE_CHAIN (noncopied_parts);
7274 return temp;
7277 case PREINCREMENT_EXPR:
7278 case PREDECREMENT_EXPR:
7279 return expand_increment (exp, 0, ignore);
7281 case POSTINCREMENT_EXPR:
7282 case POSTDECREMENT_EXPR:
7283 /* Faster to treat as pre-increment if result is not used. */
7284 return expand_increment (exp, ! ignore, ignore);
7286 case ADDR_EXPR:
7287 /* If nonzero, TEMP will be set to the address of something that might
7288 be a MEM corresponding to a stack slot. */
7289 temp = 0;
7291 /* Are we taking the address of a nested function? */
7292 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7293 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7294 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
7295 && ! TREE_STATIC (exp))
7297 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7298 op0 = force_operand (op0, target);
7300 /* If we are taking the address of something erroneous, just
7301 return a zero. */
7302 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7303 return const0_rtx;
7304 else
7306 /* We make sure to pass const0_rtx down if we came in with
7307 ignore set, to avoid doing the cleanups twice. */
7308 op0 = expand_expr (TREE_OPERAND (exp, 0),
7309 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7310 (modifier == EXPAND_INITIALIZER
7311 ? modifier : EXPAND_CONST_ADDRESS));
7313 /* If we are going to ignore the result, OP0 will have been set
7314 to const0_rtx, so just return it. Don't get confused and
7315 think we are taking the address of the constant. */
7316 if (ignore)
7317 return op0;
7319 op0 = protect_from_queue (op0, 0);
7321 /* We would like the object in memory. If it is a constant, we can
7322 have it be statically allocated into memory. For a non-constant,
7323 we need to allocate some memory and store the value into it. */
7325 if (CONSTANT_P (op0))
7326 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7327 op0);
7328 else if (GET_CODE (op0) == MEM)
7330 mark_temp_addr_taken (op0);
7331 temp = XEXP (op0, 0);
7334 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7335 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7337 /* If this object is in a register, it must not
7338 be BLKmode. */
7339 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7340 rtx memloc = assign_temp (inner_type, 1, 1, 1);
7342 mark_temp_addr_taken (memloc);
7343 emit_move_insn (memloc, op0);
7344 op0 = memloc;
7347 if (GET_CODE (op0) != MEM)
7348 abort ();
7350 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7352 temp = XEXP (op0, 0);
7353 #ifdef POINTERS_EXTEND_UNSIGNED
7354 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7355 && mode == ptr_mode)
7356 temp = convert_memory_address (ptr_mode, temp);
7357 #endif
7358 return temp;
7361 op0 = force_operand (XEXP (op0, 0), target);
7364 if (flag_force_addr && GET_CODE (op0) != REG)
7365 op0 = force_reg (Pmode, op0);
7367 if (GET_CODE (op0) == REG
7368 && ! REG_USERVAR_P (op0))
7369 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
7371 /* If we might have had a temp slot, add an equivalent address
7372 for it. */
7373 if (temp != 0)
7374 update_temp_slot_address (temp, op0);
7376 #ifdef POINTERS_EXTEND_UNSIGNED
7377 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7378 && mode == ptr_mode)
7379 op0 = convert_memory_address (ptr_mode, op0);
7380 #endif
7382 return op0;
7384 case ENTRY_VALUE_EXPR:
7385 abort ();
7387 /* COMPLEX type for Extended Pascal & Fortran */
7388 case COMPLEX_EXPR:
7390 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7391 rtx insns;
7393 /* Get the rtx code of the operands. */
7394 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7395 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7397 if (! target)
7398 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7400 start_sequence ();
7402 /* Move the real (op0) and imaginary (op1) parts to their location. */
7403 emit_move_insn (gen_realpart (mode, target), op0);
7404 emit_move_insn (gen_imagpart (mode, target), op1);
7406 insns = get_insns ();
7407 end_sequence ();
7409 /* Complex construction should appear as a single unit. */
7410 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7411 each with a separate pseudo as destination.
7412 It's not correct for flow to treat them as a unit. */
7413 if (GET_CODE (target) != CONCAT)
7414 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7415 else
7416 emit_insns (insns);
7418 return target;
7421 case REALPART_EXPR:
7422 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7423 return gen_realpart (mode, op0);
7425 case IMAGPART_EXPR:
7426 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7427 return gen_imagpart (mode, op0);
7429 case CONJ_EXPR:
7431 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7432 rtx imag_t;
7433 rtx insns;
7435 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7437 if (! target)
7438 target = gen_reg_rtx (mode);
7440 start_sequence ();
7442 /* Store the realpart and the negated imagpart to target. */
7443 emit_move_insn (gen_realpart (partmode, target),
7444 gen_realpart (partmode, op0));
7446 imag_t = gen_imagpart (partmode, target);
7447 temp = expand_unop (partmode, neg_optab,
7448 gen_imagpart (partmode, op0), imag_t, 0);
7449 if (temp != imag_t)
7450 emit_move_insn (imag_t, temp);
7452 insns = get_insns ();
7453 end_sequence ();
7455 /* Conjugate should appear as a single unit.
7456 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
7457 each with a separate pseudo as destination.
7458 It's not correct for flow to treat them as a unit. */
7459 if (GET_CODE (target) != CONCAT)
7460 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7461 else
7462 emit_insns (insns);
7464 return target;
7467 case TRY_CATCH_EXPR:
7469 tree handler = TREE_OPERAND (exp, 1);
7471 expand_eh_region_start ();
7473 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7475 expand_eh_region_end (handler);
7477 return op0;
7480 case POPDCC_EXPR:
7482 rtx dcc = get_dynamic_cleanup_chain ();
7483 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
7484 return const0_rtx;
7487 case POPDHC_EXPR:
7489 rtx dhc = get_dynamic_handler_chain ();
7490 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
7491 return const0_rtx;
7494 case ERROR_MARK:
7495 op0 = CONST0_RTX (tmode);
7496 if (op0 != 0)
7497 return op0;
7498 return const0_rtx;
7500 default:
7501 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7504 /* Here to do an ordinary binary operator, generating an instruction
7505 from the optab already placed in `this_optab'. */
7506 binop:
7507 preexpand_calls (exp);
7508 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7509 subtarget = 0;
7510 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7511 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7512 binop2:
7513 temp = expand_binop (mode, this_optab, op0, op1, target,
7514 unsignedp, OPTAB_LIB_WIDEN);
7515 if (temp == 0)
7516 abort ();
7517 return temp;
7521 /* Emit bytecode to evaluate the given expression EXP to the stack. */
7523 void
7524 bc_expand_expr (exp)
7525 tree exp;
7527 enum tree_code code;
7528 tree type, arg0;
7529 rtx r;
7530 struct binary_operator *binoptab;
7531 struct unary_operator *unoptab;
7532 struct increment_operator *incroptab;
7533 struct bc_label *lab, *lab1;
7534 enum bytecode_opcode opcode;
7537 code = TREE_CODE (exp);
7539 switch (code)
7541 case PARM_DECL:
7543 if (DECL_RTL (exp) == 0)
7545 error_with_decl (exp, "prior parameter's size depends on `%s'");
7546 return;
7549 bc_load_parmaddr (DECL_RTL (exp));
7550 bc_load_memory (TREE_TYPE (exp), exp);
7552 return;
7554 case VAR_DECL:
7556 if (DECL_RTL (exp) == 0)
7557 abort ();
7559 #if 0
7560 if (BYTECODE_LABEL (DECL_RTL (exp)))
7561 bc_load_externaddr (DECL_RTL (exp));
7562 else
7563 bc_load_localaddr (DECL_RTL (exp));
7564 #endif
7565 if (TREE_PUBLIC (exp))
7566 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
7567 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
7568 else
7569 bc_load_localaddr (DECL_RTL (exp));
7571 bc_load_memory (TREE_TYPE (exp), exp);
7572 return;
7574 case INTEGER_CST:
7576 #ifdef DEBUG_PRINT_CODE
7577 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
7578 #endif
7579 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
7580 ? SImode
7581 : TYPE_MODE (TREE_TYPE (exp)))],
7582 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
7583 return;
7585 case REAL_CST:
7587 #if 0
7588 #ifdef DEBUG_PRINT_CODE
7589 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
7590 #endif
7591 /* FIX THIS: find a better way to pass real_cst's. -bson */
7592 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
7593 (double) TREE_REAL_CST (exp));
7594 #else
7595 abort ();
7596 #endif
7598 return;
7600 case CALL_EXPR:
7602 /* We build a call description vector describing the type of
7603 the return value and of the arguments; this call vector,
7604 together with a pointer to a location for the return value
7605 and the base of the argument list, is passed to the low
7606 level machine dependent call subroutine, which is responsible
7607 for putting the arguments wherever real functions expect
7608 them, as well as getting the return value back. */
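/* As constructed below, the vector is laid out as
   { nargs, return type code, return size,
   arg1 type code, arg1 size, arg2 type code, arg2 size, ... }.  */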
7610 tree calldesc = 0, arg;
7611 int nargs = 0, i;
7612 rtx retval;
7614 /* Push the evaluated args on the evaluation stack in reverse
7615 order. Also make an entry for each arg in the calldesc
7616 vector while we're at it. */
7618 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7620 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
7622 ++nargs;
7623 bc_expand_expr (TREE_VALUE (arg));
7625 calldesc = tree_cons ((tree) 0,
7626 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
7627 calldesc);
7628 calldesc = tree_cons ((tree) 0,
7629 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
7630 calldesc);
7633 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7635 /* Allocate a location for the return value and push its
7636 address on the evaluation stack. Also make an entry
7637 at the front of the calldesc for the return value type. */
7639 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7640 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
7641 bc_load_localaddr (retval);
7643 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
7644 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
7646 /* Prepend the argument count. */
7647 calldesc = tree_cons ((tree) 0,
7648 build_int_2 (nargs, 0),
7649 calldesc);
7651 /* Push the address of the call description vector on the stack. */
7652 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
7653 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
7654 build_index_type (build_int_2 (nargs * 2, 0)));
7655 r = output_constant_def (calldesc);
7656 bc_load_externaddr (r);
7658 /* Push the address of the function to be called. */
7659 bc_expand_expr (TREE_OPERAND (exp, 0));
7661 /* Call the function, popping its address and the calldesc vector
7662 address off the evaluation stack in the process. */
7663 bc_emit_instruction (call);
7665 /* Pop the arguments off the stack. */
7666 bc_adjust_stack (nargs);
7668 /* Load the return value onto the stack. */
7669 bc_load_localaddr (retval);
7670 bc_load_memory (type, TREE_OPERAND (exp, 0));
7672 return;
7674 case SAVE_EXPR:
7676 if (!SAVE_EXPR_RTL (exp))
7678 /* First time around: copy to local variable */
7679 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
7680 TYPE_ALIGN (TREE_TYPE(exp)));
7681 bc_expand_expr (TREE_OPERAND (exp, 0));
7682 bc_emit_instruction (duplicate);
7684 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7685 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7687 else
7689 /* Consecutive reference: use saved copy */
7690 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7691 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7693 return;
7695 #if 0
7696 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
7697 how are they handled instead? */
7698 case LET_STMT:
7700 TREE_USED (exp) = 1;
7701 bc_expand_expr (STMT_BODY (exp));
7702 return;
7703 #endif
7705 case NOP_EXPR:
7706 case CONVERT_EXPR:
7708 bc_expand_expr (TREE_OPERAND (exp, 0));
7709 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
7710 return;
7712 case MODIFY_EXPR:
7714 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
7715 return;
7717 case ADDR_EXPR:
7719 bc_expand_address (TREE_OPERAND (exp, 0));
7720 return;
7722 case INDIRECT_REF:
7724 bc_expand_expr (TREE_OPERAND (exp, 0));
7725 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7726 return;
7728 case ARRAY_REF:
7730 bc_expand_expr (bc_canonicalize_array_ref (exp));
7731 return;
7733 case COMPONENT_REF:
7735 bc_expand_component_address (exp);
7737 /* If we have a bitfield, generate a proper load */
7738 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
7739 return;
7741 case COMPOUND_EXPR:
7743 bc_expand_expr (TREE_OPERAND (exp, 0));
7744 bc_emit_instruction (drop);
7745 bc_expand_expr (TREE_OPERAND (exp, 1));
7746 return;
7748 case COND_EXPR:
7750 bc_expand_expr (TREE_OPERAND (exp, 0));
7751 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7752 lab = bc_get_bytecode_label ();
7753 bc_emit_bytecode (xjumpifnot);
7754 bc_emit_bytecode_labelref (lab);
7756 #ifdef DEBUG_PRINT_CODE
7757 fputc ('\n', stderr);
7758 #endif
7759 bc_expand_expr (TREE_OPERAND (exp, 1));
7760 lab1 = bc_get_bytecode_label ();
7761 bc_emit_bytecode (jump);
7762 bc_emit_bytecode_labelref (lab1);
7764 #ifdef DEBUG_PRINT_CODE
7765 fputc ('\n', stderr);
7766 #endif
7768 bc_emit_bytecode_labeldef (lab);
7769 bc_expand_expr (TREE_OPERAND (exp, 2));
7770 bc_emit_bytecode_labeldef (lab1);
7771 return;
7773 case TRUTH_ANDIF_EXPR:
7775 opcode = xjumpifnot;
7776 goto andorif;
7778 case TRUTH_ORIF_EXPR:
7780 opcode = xjumpif;
7781 goto andorif;
7783 case PLUS_EXPR:
7785 binoptab = optab_plus_expr;
7786 goto binop;
7788 case MINUS_EXPR:
7790 binoptab = optab_minus_expr;
7791 goto binop;
7793 case MULT_EXPR:
7795 binoptab = optab_mult_expr;
7796 goto binop;
7798 case TRUNC_DIV_EXPR:
7799 case FLOOR_DIV_EXPR:
7800 case CEIL_DIV_EXPR:
7801 case ROUND_DIV_EXPR:
7802 case EXACT_DIV_EXPR:
7804 binoptab = optab_trunc_div_expr;
7805 goto binop;
7807 case TRUNC_MOD_EXPR:
7808 case FLOOR_MOD_EXPR:
7809 case CEIL_MOD_EXPR:
7810 case ROUND_MOD_EXPR:
7812 binoptab = optab_trunc_mod_expr;
7813 goto binop;
7815 case FIX_ROUND_EXPR:
7816 case FIX_FLOOR_EXPR:
7817 case FIX_CEIL_EXPR:
7818 abort (); /* Not used for C. */
7820 case FIX_TRUNC_EXPR:
7821 case FLOAT_EXPR:
7822 case MAX_EXPR:
7823 case MIN_EXPR:
7824 case FFS_EXPR:
7825 case LROTATE_EXPR:
7826 case RROTATE_EXPR:
7827 abort (); /* FIXME */
7829 case RDIV_EXPR:
7831 binoptab = optab_rdiv_expr;
7832 goto binop;
7834 case BIT_AND_EXPR:
7836 binoptab = optab_bit_and_expr;
7837 goto binop;
7839 case BIT_IOR_EXPR:
7841 binoptab = optab_bit_ior_expr;
7842 goto binop;
7844 case BIT_XOR_EXPR:
7846 binoptab = optab_bit_xor_expr;
7847 goto binop;
7849 case LSHIFT_EXPR:
7851 binoptab = optab_lshift_expr;
7852 goto binop;
7854 case RSHIFT_EXPR:
7856 binoptab = optab_rshift_expr;
7857 goto binop;
7859 case TRUTH_AND_EXPR:
7861 binoptab = optab_truth_and_expr;
7862 goto binop;
7864 case TRUTH_OR_EXPR:
7866 binoptab = optab_truth_or_expr;
7867 goto binop;
7869 case LT_EXPR:
7871 binoptab = optab_lt_expr;
7872 goto binop;
7874 case LE_EXPR:
7876 binoptab = optab_le_expr;
7877 goto binop;
7879 case GE_EXPR:
7881 binoptab = optab_ge_expr;
7882 goto binop;
7884 case GT_EXPR:
7886 binoptab = optab_gt_expr;
7887 goto binop;
7889 case EQ_EXPR:
7891 binoptab = optab_eq_expr;
7892 goto binop;
7894 case NE_EXPR:
7896 binoptab = optab_ne_expr;
7897 goto binop;
7899 case NEGATE_EXPR:
7901 unoptab = optab_negate_expr;
7902 goto unop;
7904 case BIT_NOT_EXPR:
7906 unoptab = optab_bit_not_expr;
7907 goto unop;
7909 case TRUTH_NOT_EXPR:
7911 unoptab = optab_truth_not_expr;
7912 goto unop;
7914 case PREDECREMENT_EXPR:
7916 incroptab = optab_predecrement_expr;
7917 goto increment;
7919 case PREINCREMENT_EXPR:
7921 incroptab = optab_preincrement_expr;
7922 goto increment;
7924 case POSTDECREMENT_EXPR:
7926 incroptab = optab_postdecrement_expr;
7927 goto increment;
7929 case POSTINCREMENT_EXPR:
7931 incroptab = optab_postincrement_expr;
7932 goto increment;
7934 case CONSTRUCTOR:
7936 bc_expand_constructor (exp);
7937 return;
7939 case ERROR_MARK:
7940 case RTL_EXPR:
7942 return;
7944 case BIND_EXPR:
7946 tree vars = TREE_OPERAND (exp, 0);
7947 int vars_need_expansion = 0;
7949 /* Need to open a binding contour here because
7950 if there are any cleanups they must be contained here. */
7951 expand_start_bindings (0);
7953 /* Mark the corresponding BLOCK for output. */
7954 if (TREE_OPERAND (exp, 2) != 0)
7955 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
7957 /* If VARS have not yet been expanded, expand them now. */
7958 while (vars)
7960 if (DECL_RTL (vars) == 0)
7962 vars_need_expansion = 1;
7963 expand_decl (vars);
7965 expand_decl_init (vars);
7966 vars = TREE_CHAIN (vars);
7969 bc_expand_expr (TREE_OPERAND (exp, 1));
7971 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7973 return;
7976 default:
7977 abort ();
7980 abort ();
7982 binop:
7984 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
7985 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
7986 return;
7989 unop:
7991 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7992 return;
7995 andorif:
7997 bc_expand_expr (TREE_OPERAND (exp, 0));
7998 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7999 lab = bc_get_bytecode_label ();
8001 bc_emit_instruction (duplicate);
8002 bc_emit_bytecode (opcode);
8003 bc_emit_bytecode_labelref (lab);
8005 #ifdef DEBUG_PRINT_CODE
8006 fputc ('\n', stderr);
8007 #endif
8009 bc_emit_instruction (drop);
8011 bc_expand_expr (TREE_OPERAND (exp, 1));
8012 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
8013 bc_emit_bytecode_labeldef (lab);
8014 return;
8017 increment:
8019 type = TREE_TYPE (TREE_OPERAND (exp, 0));
8021 /* Push the quantum. */
8022 bc_expand_expr (TREE_OPERAND (exp, 1));
8024 /* Convert it to the lvalue's type. */
8025 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
8027 /* Push the address of the lvalue */
8028 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
8030 /* Perform actual increment */
8031 bc_expand_increment (incroptab, type);
8032 return;
8035 /* Return the alignment in bits of EXP, a pointer valued expression.
8036 But don't return more than MAX_ALIGN no matter what.
8037 The alignment returned is, by default, the alignment of the thing that
8038 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
8040 Otherwise, look at the expression to see if we can do better, i.e., if the
8041 expression is actually pointing at an object whose alignment is tighter. */
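/* For example, for an argument like "(char *) &some_double" the
   ADDR_EXPR case below lets us report the alignment of SOME_DOUBLE
   itself, even though the char * type only promises byte alignment.  */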
8043 static int
8044 get_pointer_alignment (exp, max_align)
8045 tree exp;
8046 unsigned max_align;
8048 unsigned align, inner;
8050 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
8051 return 0;
8053 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8054 align = MIN (align, max_align);
8056 while (1)
8058 switch (TREE_CODE (exp))
8060 case NOP_EXPR:
8061 case CONVERT_EXPR:
8062 case NON_LVALUE_EXPR:
8063 exp = TREE_OPERAND (exp, 0);
8064 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
8065 return align;
8067 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8068 align = MIN (inner, max_align);
8069 break;
8071 case PLUS_EXPR:
8072 /* If sum of pointer + int, restrict our maximum alignment to that
8073 imposed by the integer. If not, we can't do any better than
8074 ALIGN. */
8075 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
8076 return align;
8078 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
8079 & (max_align - 1))
8080 != 0)
8081 max_align >>= 1;
8083 exp = TREE_OPERAND (exp, 0);
8084 break;
8086 case ADDR_EXPR:
8087 /* See what we are pointing at and look at its alignment. */
8088 exp = TREE_OPERAND (exp, 0);
8089 if (TREE_CODE (exp) == FUNCTION_DECL)
8090 align = FUNCTION_BOUNDARY;
8091 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
8092 align = DECL_ALIGN (exp);
8093 #ifdef CONSTANT_ALIGNMENT
8094 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
8095 align = CONSTANT_ALIGNMENT (exp, align);
8096 #endif
8097 return MIN (align, max_align);
8099 default:
8100 return align;
8105 /* Return the tree node and offset if a given argument corresponds to
8106 a string constant. */
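/* For example, for the argument "foobar" + 2 we return the STRING_CST
   node for "foobar" and set *PTR_OFFSET to 2; for a plain "foobar"
   the offset is zero.  */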
8108 static tree
8109 string_constant (arg, ptr_offset)
8110 tree arg;
8111 tree *ptr_offset;
8113 STRIP_NOPS (arg);
8115 if (TREE_CODE (arg) == ADDR_EXPR
8116 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8118 *ptr_offset = integer_zero_node;
8119 return TREE_OPERAND (arg, 0);
8121 else if (TREE_CODE (arg) == PLUS_EXPR)
8123 tree arg0 = TREE_OPERAND (arg, 0);
8124 tree arg1 = TREE_OPERAND (arg, 1);
8126 STRIP_NOPS (arg0);
8127 STRIP_NOPS (arg1);
8129 if (TREE_CODE (arg0) == ADDR_EXPR
8130 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8132 *ptr_offset = arg1;
8133 return TREE_OPERAND (arg0, 0);
8135 else if (TREE_CODE (arg1) == ADDR_EXPR
8136 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8138 *ptr_offset = arg0;
8139 return TREE_OPERAND (arg1, 0);
8143 return 0;
8146 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
8147 answer, because the string may contain a zero byte in the middle.
8148 TREE_STRING_LENGTH is the size of the character array, not the string.
8150 Unfortunately, string_constant can't access the values of const char
8151 arrays with initializers, so neither can we here. */
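/* For example, c_strlen of "hello" is 5. For "foo\0bar" + i with a
   non-constant I we return 0 (unknown), because the answer depends on
   which side of the embedded zero the offset lands.  */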
8153 static tree
8154 c_strlen (src)
8155 tree src;
8157 tree offset_node;
8158 int offset, max;
8159 char *ptr;
8161 src = string_constant (src, &offset_node);
8162 if (src == 0)
8163 return 0;
8164 max = TREE_STRING_LENGTH (src);
8165 ptr = TREE_STRING_POINTER (src);
8166 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
8168 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
8169 compute the offset to the following null if we don't know where to
8170 start searching for it. */
8171 int i;
8172 for (i = 0; i < max; i++)
8173 if (ptr[i] == 0)
8174 return 0;
8175 /* We don't know the starting offset, but we do know that the string
8176 has no internal zero bytes. We can assume that the offset falls
8177 within the bounds of the string; otherwise, the programmer deserves
8178 what he gets. Subtract the offset from the length of the string,
8179 and return that. */
8180 /* This would perhaps not be valid if we were dealing with named
8181 arrays in addition to literal string constants. */
8182 return size_binop (MINUS_EXPR, size_int (max), offset_node);
8185 /* We have a known offset into the string. Start searching there for
8186 a null character. */
8187 if (offset_node == 0)
8188 offset = 0;
8189 else
8191 /* Did we get a long long offset? If so, punt. */
8192 if (TREE_INT_CST_HIGH (offset_node) != 0)
8193 return 0;
8194 offset = TREE_INT_CST_LOW (offset_node);
8196 /* If the offset is known to be out of bounds, warn, and call strlen at
8197 runtime. */
8198 if (offset < 0 || offset > max)
8200 warning ("offset outside bounds of constant string");
8201 return 0;
8203 /* Use strlen to search for the first zero byte. Since any strings
8204 constructed with build_string will have nulls appended, we win even
8205 if we get handed something like (char[4])"abcd".
8207 Since OFFSET is our starting index into the string, no further
8208 calculation is needed. */
8209 return size_int (strlen (ptr + offset));
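/* Subroutine of expand_builtin for __builtin_return_address and
   __builtin_frame_address (FNDECL_CODE says which). Scan back COUNT
   frames from TEM by following the dynamic chain; e.g.
   __builtin_return_address (1) follows one link and then reads the
   return address slot of that frame (details vary by target).  */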
8212 rtx
8213 expand_builtin_return_addr (fndecl_code, count, tem)
8214 enum built_in_function fndecl_code;
8215 int count;
8216 rtx tem;
8218 int i;
8220 /* Some machines need special handling before we can access
8221 arbitrary frames. For example, on the sparc, we must first flush
8222 all register windows to the stack. */
8223 #ifdef SETUP_FRAME_ADDRESSES
8224 if (count > 0)
8225 SETUP_FRAME_ADDRESSES ();
8226 #endif
8228 /* On the sparc, the return address is not in the frame, it is in a
8229 register. There is no way to access it off of the current frame
8230 pointer, but it can be accessed off the previous frame pointer by
8231 reading the value from the register window save area. */
8232 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
8233 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
8234 count--;
8235 #endif
8237 /* Scan back COUNT frames to the specified frame. */
8238 for (i = 0; i < count; i++)
8240 /* Assume the dynamic chain pointer is in the word that the
8241 frame address points to, unless otherwise specified. */
8242 #ifdef DYNAMIC_CHAIN_ADDRESS
8243 tem = DYNAMIC_CHAIN_ADDRESS (tem);
8244 #endif
8245 tem = memory_address (Pmode, tem);
8246 tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
8249 /* For __builtin_frame_address, return what we've got. */
8250 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
8251 return tem;
8253 /* For __builtin_return_address, Get the return address from that
8254 frame. */
8255 #ifdef RETURN_ADDR_RTX
8256 tem = RETURN_ADDR_RTX (count, tem);
8257 #else
8258 tem = memory_address (Pmode,
8259 plus_constant (tem, GET_MODE_SIZE (Pmode)));
8260 tem = gen_rtx_MEM (Pmode, tem);
8261 #endif
8262 return tem;
8265 /* __builtin_setjmp is passed a pointer to an array of five words (not
8266 all will be used on all machines). It operates similarly to the C
8267 library function of the same name, but is more efficient. Much of
8268 the code below (and for longjmp) is copied from the handling of
8269 non-local gotos.
8271 NOTE: This is intended for use by GNAT and the exception handling
8272 scheme in the compiler and will only work in the method used by
8273 them. */
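/* As the stores below show, the buffer is used roughly as:
   word 0: the frame pointer (BUILTIN_SETJMP_FRAME_VALUE);
   word 1: the address of the receiver label LAB1;
   word 2 and up: the machine-dependent stack save area.  */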
8275 rtx
8276 expand_builtin_setjmp (buf_addr, target)
8277 rtx buf_addr;
8278 rtx target;
8280 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
8281 enum machine_mode sa_mode
8282 =
8283 #ifdef HAVE_save_stack_nonlocal
8284 (HAVE_save_stack_nonlocal
8285 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0] : Pmode)
8286 #else
8287 Pmode
8288 #endif
8289 ;
8290 enum machine_mode value_mode = TYPE_MODE (integer_type_node);
8291 rtx stack_save;
8292 int old_inhibit_defer_pop = inhibit_defer_pop;
8293 int return_pops
8294 = RETURN_POPS_ARGS (get_identifier ("__dummy"),
8295 build_function_type (void_type_node, NULL_TREE),
8296 0);
8297 rtx next_arg_reg;
8298 CUMULATIVE_ARGS args_so_far;
8299 rtx op0;
8300 int i;
8302 #ifdef STACK_SAVEAREA_MODE
8303 sa_mode = STACK_SAVEAREA_MODE (sa_mode, SAVE_NONLOCAL);
8304 #endif
8306 #ifdef POINTERS_EXTEND_UNSIGNED
8307 buf_addr = convert_memory_address (Pmode, buf_addr);
8308 #endif
8310 buf_addr = force_reg (Pmode, buf_addr);
8312 if (target == 0 || GET_CODE (target) != REG
8313 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8314 target = gen_reg_rtx (value_mode);
8316 emit_queue ();
8318 CONST_CALL_P (emit_note (NULL_PTR, NOTE_INSN_SETJMP)) = 1;
8319 current_function_calls_setjmp = 1;
8321 /* We store the frame pointer and the address of lab1 in the buffer
8322 and use the rest of it for the stack save area, which is
8323 machine-dependent. */
8325 #ifndef BUILTIN_SETJMP_FRAME_VALUE
8326 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
8327 #endif
8329 emit_move_insn (gen_rtx_MEM (Pmode, buf_addr), BUILTIN_SETJMP_FRAME_VALUE);
8331 emit_move_insn
8332 (validize_mem (gen_rtx_MEM (Pmode,
8333 plus_constant (buf_addr,
8334 GET_MODE_SIZE (Pmode)))),
8335 gen_rtx_LABEL_REF (Pmode, lab1));
8338 stack_save = gen_rtx_MEM (sa_mode,
8339 plus_constant (buf_addr,
8340 2 * GET_MODE_SIZE (Pmode)));
8341 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8343 #ifdef HAVE_setjmp
8344 if (HAVE_setjmp)
8345 emit_insn (gen_setjmp ());
8346 #endif
8348 /* Set TARGET to zero and branch around the other case. */
8349 emit_move_insn (target, const0_rtx);
8350 emit_jump_insn (gen_jump (lab2));
8351 emit_barrier ();
8352 emit_label (lab1);
8354 /* Note that setjmp clobbers FP when we get here, so we have to make
8355 sure it's marked as used by this function. */
8356 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8358 /* Mark the static chain as clobbered here so life information
8359 doesn't get messed up for it. */
8360 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
8362 /* Now put in the code to restore the frame pointer, and argument
8363 pointer, if needed. The code below is from expand_end_bindings
8364 in stmt.c; see detailed documentation there. */
8365 #ifdef HAVE_nonlocal_goto
8366 if (! HAVE_nonlocal_goto)
8367 #endif
8368 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8370 /* Do we need to do something like:
8372 current_function_has_nonlocal_label = 1;
8374 here? It seems like we might have to, or some subset of that
8375 functionality, but I am unsure. (mrs) */
8377 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8378 if (fixed_regs[ARG_POINTER_REGNUM])
8380 #ifdef ELIMINABLE_REGS
8381 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8383 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8384 if (elim_regs[i].from == ARG_POINTER_REGNUM
8385 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8386 break;
8388 if (i == sizeof elim_regs / sizeof elim_regs [0])
8389 #endif
8391 /* Now restore our arg pointer from the address at which it
8392 was saved in our stack frame.
8393 If there hasn't been space allocated for it yet, make
8394 some now. */
8395 if (arg_pointer_save_area == 0)
8396 arg_pointer_save_area
8397 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8398 emit_move_insn (virtual_incoming_args_rtx,
8399 copy_to_reg (arg_pointer_save_area));
8402 #endif
8404 #ifdef HAVE_nonlocal_goto_receiver
8405 if (HAVE_nonlocal_goto_receiver)
8406 emit_insn (gen_nonlocal_goto_receiver ());
8407 #endif
8408 /* The static chain pointer contains the address of the dummy function.
8409 We need to call it here to handle some PIC cases of restoring a
8410 global pointer. Then return 1. */
8411 op0 = copy_to_mode_reg (Pmode, static_chain_rtx);
8413 /* We can't actually call emit_library_call here, so do everything
8414 it does, which isn't much for a libfunc with no args. */
8415 op0 = memory_address (FUNCTION_MODE, op0);
8417 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE,
8418 gen_rtx_SYMBOL_REF (Pmode, "__dummy"), 1);
8419 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1);
8421 #ifndef ACCUMULATE_OUTGOING_ARGS
8422 #ifdef HAVE_call_pop
8423 if (HAVE_call_pop)
8424 emit_call_insn (gen_call_pop (gen_rtx_MEM (FUNCTION_MODE, op0),
8425 const0_rtx, next_arg_reg,
8426 GEN_INT (return_pops)));
8427 else
8428 #endif
8429 #endif
8431 #ifdef HAVE_call
8432 if (HAVE_call)
8433 emit_call_insn (gen_call (gen_rtx_MEM (FUNCTION_MODE, op0),
8434 const0_rtx, next_arg_reg, const0_rtx));
8435 else
8436 #endif
8437 abort ();
8439 #ifdef HAVE_builtin_setjmp_receiver
8440 if (HAVE_builtin_setjmp_receiver)
8441 emit_insn (gen_builtin_setjmp_receiver ());
8442 #endif
8444 emit_move_insn (target, const1_rtx);
8445 emit_label (lab2);
8446 return target;
8450 /* Expand an expression EXP that calls a built-in function,
8451 with result going to TARGET if that's convenient
8452 (and in mode MODE if that's convenient).
8453 SUBTARGET may be used as the target for computing one of EXP's operands.
8454 IGNORE is nonzero if the value is to be ignored. */
8456 #define CALLED_AS_BUILT_IN(NODE) \
8457 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
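/* For example, an explicit call written as __builtin_strlen is
   expanded inline even when not optimizing (provided the checks in
   its case below pass), whereas a plain strlen call is only expanded
   inline when optimizing.  */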
8459 static rtx
8460 expand_builtin (exp, target, subtarget, mode, ignore)
8461 tree exp;
8462 rtx target;
8463 rtx subtarget;
8464 enum machine_mode mode;
8465 int ignore;
8467 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8468 tree arglist = TREE_OPERAND (exp, 1);
8469 rtx op0;
8470 rtx lab1, insns;
8471 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
8472 optab builtin_optab;
8474 switch (DECL_FUNCTION_CODE (fndecl))
8476 case BUILT_IN_ABS:
8477 case BUILT_IN_LABS:
8478 case BUILT_IN_FABS:
8479 /* build_function_call changes these into ABS_EXPR. */
8480 abort ();
8482 case BUILT_IN_SIN:
8483 case BUILT_IN_COS:
8484 /* Treat these like sqrt, but only if the user asks for them. */
8485 if (! flag_fast_math)
8486 break;
8487 case BUILT_IN_FSQRT:
8488 /* If not optimizing, call the library function. */
8489 if (! optimize)
8490 break;
8492 if (arglist == 0
8493 /* Arg could be wrong type if user redeclared this fcn wrong. */
8494 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
8495 break;
8497 /* Stabilize and compute the argument. */
8498 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8499 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8501 exp = copy_node (exp);
8502 arglist = copy_node (arglist);
8503 TREE_OPERAND (exp, 1) = arglist;
8504 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8506 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8508 /* Make a suitable register to place result in. */
8509 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8511 emit_queue ();
8512 start_sequence ();
8514 switch (DECL_FUNCTION_CODE (fndecl))
8516 case BUILT_IN_SIN:
8517 builtin_optab = sin_optab; break;
8518 case BUILT_IN_COS:
8519 builtin_optab = cos_optab; break;
8520 case BUILT_IN_FSQRT:
8521 builtin_optab = sqrt_optab; break;
8522 default:
8523 abort ();
8526 /* Compute into TARGET.
8527 Set TARGET to wherever the result comes back. */
8528 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8529 builtin_optab, op0, target, 0);
8531 /* If we were unable to expand via the builtin, stop the
8532 sequence (without outputting the insns) and break, causing
8533 a call to the library function. */
8534 if (target == 0)
8536 end_sequence ();
8537 break;
8540 /* Check the results by default. But if flag_fast_math is turned on,
8541 then assume sqrt will always be called with valid arguments. */
8543 if (! flag_fast_math)
8545 /* Don't define the builtin FP instructions
8546 if your machine is not IEEE. */
8547 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8548 abort ();
8550 lab1 = gen_label_rtx ();
8552 /* Test the result; if it is NaN, set errno=EDOM because
8553 the argument was not in the domain. */
8554 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
8555 emit_jump_insn (gen_beq (lab1));
8557 #ifdef TARGET_EDOM
8559 #ifdef GEN_ERRNO_RTX
8560 rtx errno_rtx = GEN_ERRNO_RTX;
8561 #else
8562 rtx errno_rtx
8563 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
8564 #endif
8566 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8568 #else
8569 /* We can't set errno=EDOM directly; let the library call do it.
8570 Pop the arguments right away in case the call gets deleted. */
8571 NO_DEFER_POP;
8572 expand_call (exp, target, 0);
8573 OK_DEFER_POP;
8574 #endif
8576 emit_label (lab1);
8579 /* Output the entire sequence. */
8580 insns = get_insns ();
8581 end_sequence ();
8582 emit_insns (insns);
8584 return target;
8586 /* __builtin_apply_args returns block of memory allocated on
8587 the stack into which is stored the arg pointer, structure
8588 value address, static chain, and all the registers that might
8589 possibly be used in performing a function call. The code is
8590 moved to the start of the function so the incoming values are
8591 saved. */
8592 case BUILT_IN_APPLY_ARGS:
8593 /* Don't do __builtin_apply_args more than once in a function.
8594 Save the result of the first call and reuse it. */
8595 if (apply_args_value != 0)
8596 return apply_args_value;
8598 /* When this function is called, it means that registers must be
8599 saved on entry to this function. So we migrate the
8600 call to the first insn of this function. */
8601 rtx temp;
8602 rtx seq;
8604 start_sequence ();
8605 temp = expand_builtin_apply_args ();
8606 seq = get_insns ();
8607 end_sequence ();
8609 apply_args_value = temp;
8611 /* Put the sequence after the NOTE that starts the function.
8612 If this is inside a SEQUENCE, make the outer-level insn
8613 chain current, so the code is placed at the start of the
8614 function. */
8615 push_topmost_sequence ();
8616 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8617 pop_topmost_sequence ();
8618 return temp;
8621 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8622 FUNCTION with a copy of the parameters described by
8623 ARGUMENTS, and ARGSIZE. It returns a block of memory
8624 allocated on the stack into which is stored all the registers
8625 that might possibly be used for returning the result of a
8626 function. ARGUMENTS is the value returned by
8627 __builtin_apply_args. ARGSIZE is the number of bytes of
8628 arguments that must be copied. ??? How should this value be
8629 computed? We'll also need a safe worst case value for varargs
8630 functions. */
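/* A typical (hypothetical) use, forwarding the incoming arguments to
   another function and returning its result:
   __builtin_return (__builtin_apply (fn, __builtin_apply_args (), 64));
   where 64 is a caller-chosen worst-case argument size.  */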
8631 case BUILT_IN_APPLY:
8632 if (arglist == 0
8633 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8634 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
8635 || TREE_CHAIN (arglist) == 0
8636 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8637 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8638 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8639 return const0_rtx;
8640 else
8642 int i;
8643 tree t;
8644 rtx ops[3];
8646 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8647 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8649 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8652 /* __builtin_return (RESULT) causes the function to return the
8653 value described by RESULT. RESULT is address of the block of
8654 memory returned by __builtin_apply. */
8655 case BUILT_IN_RETURN:
8656 if (arglist
8657 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8658 && POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist))))
8659 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8660 NULL_RTX, VOIDmode, 0));
8661 return const0_rtx;
8663 case BUILT_IN_SAVEREGS:
8664 /* Don't do __builtin_saveregs more than once in a function.
8665 Save the result of the first call and reuse it. */
8666 if (saveregs_value != 0)
8667 return saveregs_value;
8669 /* When this function is called, it means that registers must be
8670 saved on entry to this function. So we migrate the
8671 call to the first insn of this function. */
8672 rtx temp;
8673 rtx seq;
8675 /* Now really call the function. `expand_call' does not call
8676 expand_builtin, so there is no danger of infinite recursion here. */
8677 start_sequence ();
8679 #ifdef EXPAND_BUILTIN_SAVEREGS
8680 /* Do whatever the machine needs done in this case. */
8681 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8682 #else
8683 /* The register where the function returns its value
8684 is likely to have something else in it, such as an argument.
8685 So preserve that register around the call. */
8687 if (value_mode != VOIDmode)
8689 rtx valreg = hard_libcall_value (value_mode);
8690 rtx saved_valreg = gen_reg_rtx (value_mode);
8692 emit_move_insn (saved_valreg, valreg);
8693 temp = expand_call (exp, target, ignore);
8694 emit_move_insn (valreg, saved_valreg);
8696 else
8697 /* Generate the call, putting the value in a pseudo. */
8698 temp = expand_call (exp, target, ignore);
8699 #endif
8701 seq = get_insns ();
8702 end_sequence ();
8704 saveregs_value = temp;
8706 /* Put the sequence after the NOTE that starts the function.
8707 If this is inside a SEQUENCE, make the outer-level insn
8708 chain current, so the code is placed at the start of the
8709 function. */
8710 push_topmost_sequence ();
8711 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8712 pop_topmost_sequence ();
8713 return temp;
8716 /* __builtin_args_info (N) returns word N of the arg space info
8717 for the current function. The number and meanings of words
8718 are controlled by the definition of CUMULATIVE_ARGS. */
8719 case BUILT_IN_ARGS_INFO:
8721 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8722 int i;
8723 int *word_ptr = (int *) &current_function_args_info;
8724 tree type, elts, result;
8726 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8727 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8728 __FILE__, __LINE__);
8730 if (arglist != 0)
8732 tree arg = TREE_VALUE (arglist);
8733 if (TREE_CODE (arg) != INTEGER_CST)
8734 error ("argument of `__builtin_args_info' must be constant");
8735 else
8737 int wordnum = TREE_INT_CST_LOW (arg);
8739 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8740 error ("argument of `__builtin_args_info' out of range");
8741 else
8742 return GEN_INT (word_ptr[wordnum]);
8745 else
8746 error ("missing argument in `__builtin_args_info'");
8748 return const0_rtx;
8750 #if 0
8751 for (i = 0; i < nwords; i++)
8752 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
8754 type = build_array_type (integer_type_node,
8755 build_index_type (build_int_2 (nwords, 0)));
8756 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8757 TREE_CONSTANT (result) = 1;
8758 TREE_STATIC (result) = 1;
8759 result = build (INDIRECT_REF, build_pointer_type (type), result);
8760 TREE_CONSTANT (result) = 1;
8761 return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
8762 #endif
8765 /* Return the address of the first anonymous stack arg. */
8766 case BUILT_IN_NEXT_ARG:
8768 tree fntype = TREE_TYPE (current_function_decl);
8770 if ((TYPE_ARG_TYPES (fntype) == 0
8771 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8772 == void_type_node))
8773 && ! current_function_varargs)
8775 error ("`va_start' used in function with fixed args");
8776 return const0_rtx;
8779 if (arglist)
8781 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8782 tree arg = TREE_VALUE (arglist);
8784 /* Strip off all nops for the sake of the comparison. This
8785 is not quite the same as STRIP_NOPS. It does more.
8786 We must also strip off INDIRECT_REF for C++ reference
8787 parameters. */
8788 while (TREE_CODE (arg) == NOP_EXPR
8789 || TREE_CODE (arg) == CONVERT_EXPR
8790 || TREE_CODE (arg) == NON_LVALUE_EXPR
8791 || TREE_CODE (arg) == INDIRECT_REF)
8792 arg = TREE_OPERAND (arg, 0);
8793 if (arg != last_parm)
8794 warning ("second parameter of `va_start' not last named argument");
8796 else if (! current_function_varargs)
8797 /* Evidently an out of date version of <stdarg.h>; can't validate
8798 va_start's second argument, but can still work as intended. */
8799 warning ("`__builtin_next_arg' called without an argument");
8802 return expand_binop (Pmode, add_optab,
8803 current_function_internal_arg_pointer,
8804 current_function_arg_offset_rtx,
8805 NULL_RTX, 0, OPTAB_LIB_WIDEN);
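/* For example, __builtin_classify_type (1.5) yields real_type_class
   and __builtin_classify_type ("s") yields pointer_type_class, since
   the array argument decays to a pointer.  */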
8807 case BUILT_IN_CLASSIFY_TYPE:
8808 if (arglist != 0)
8810 tree type = TREE_TYPE (TREE_VALUE (arglist));
8811 enum tree_code code = TREE_CODE (type);
8812 if (code == VOID_TYPE)
8813 return GEN_INT (void_type_class);
8814 if (code == INTEGER_TYPE)
8815 return GEN_INT (integer_type_class);
8816 if (code == CHAR_TYPE)
8817 return GEN_INT (char_type_class);
8818 if (code == ENUMERAL_TYPE)
8819 return GEN_INT (enumeral_type_class);
8820 if (code == BOOLEAN_TYPE)
8821 return GEN_INT (boolean_type_class);
8822 if (code == POINTER_TYPE)
8823 return GEN_INT (pointer_type_class);
8824 if (code == REFERENCE_TYPE)
8825 return GEN_INT (reference_type_class);
8826 if (code == OFFSET_TYPE)
8827 return GEN_INT (offset_type_class);
8828 if (code == REAL_TYPE)
8829 return GEN_INT (real_type_class);
8830 if (code == COMPLEX_TYPE)
8831 return GEN_INT (complex_type_class);
8832 if (code == FUNCTION_TYPE)
8833 return GEN_INT (function_type_class);
8834 if (code == METHOD_TYPE)
8835 return GEN_INT (method_type_class);
8836 if (code == RECORD_TYPE)
8837 return GEN_INT (record_type_class);
8838 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8839 return GEN_INT (union_type_class);
8840 if (code == ARRAY_TYPE)
8842 if (TYPE_STRING_FLAG (type))
8843 return GEN_INT (string_type_class);
8844 else
8845 return GEN_INT (array_type_class);
8847 if (code == SET_TYPE)
8848 return GEN_INT (set_type_class);
8849 if (code == FILE_TYPE)
8850 return GEN_INT (file_type_class);
8851 if (code == LANG_TYPE)
8852 return GEN_INT (lang_type_class);
8854 return GEN_INT (no_type_class);
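/* For example, __builtin_constant_p (3) folds to 1 immediately below,
   while for a scalar expression that is not obviously constant we
   emit a CONSTANT_P_RTX and let CSE decide later.  */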
8856 case BUILT_IN_CONSTANT_P:
8857 if (arglist == 0)
8858 return const0_rtx;
8859 else
8861 tree arg = TREE_VALUE (arglist);
8863 /* We return 1 for a numeric type that's known to be a constant
8864 value at compile-time or for an aggregate type that's a
8865 literal constant. */
8866 STRIP_NOPS (arg);
8868 /* If we know this is a constant, emit the constant 1. */
8869 if (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
8870 || (TREE_CODE (arg) == CONSTRUCTOR
8871 && TREE_CONSTANT (arg))
8872 || (TREE_CODE (arg) == ADDR_EXPR
8873 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST))
8874 return const1_rtx;
8876 /* If we aren't going to be running CSE or this expression
8877 has side effects, show we don't know it to be a constant.
8878 Likewise if it's a pointer or aggregate type since in those
8879 case we only want literals, since those are only optimized
8880 when generating RTL, not later. */
8881 else if (TREE_SIDE_EFFECTS (arg) || cse_not_expected
8882 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8883 || POINTER_TYPE_P (TREE_TYPE (arg)))
8884 return const0_rtx;
8886 /* Otherwise, emit (const (constant_p_rtx (ARG))) and let CSE
8887 get a chance to see if it can deduce whether ARG is constant.
8888 We always generate the CONST in ptr_mode since that's
8889 certain to be valid on this machine, then convert it to
8890 whatever we need. */
8891 else
8892 return
8893 convert_to_mode
8894 (mode,
8895 gen_rtx_CONST
8896 (ptr_mode,
8897 gen_rtx_CONSTANT_P_RTX (ptr_mode,
8898 expand_expr (arg, NULL_RTX,
8899 VOIDmode, 0))),
8904 case BUILT_IN_FRAME_ADDRESS:
8905 /* The argument must be a nonnegative integer constant.
8906 It counts the number of frames to scan up the stack.
8907 The value is the address of that frame. */
8908 case BUILT_IN_RETURN_ADDRESS:
8909 /* The argument must be a nonnegative integer constant.
8910 It counts the number of frames to scan up the stack.
8911 The value is the return address saved in that frame. */
8912 if (arglist == 0)
8913 /* Warning about missing arg was already issued. */
8914 return const0_rtx;
8915 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
8916 || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8918 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8919 error ("invalid arg to `__builtin_frame_address'");
8920 else
8921 error ("invalid arg to `__builtin_return_address'");
8922 return const0_rtx;
8924 else
8926 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8927 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8928 hard_frame_pointer_rtx);
8930 /* Some ports cannot access arbitrary stack frames. */
8931 if (tem == NULL)
8933 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8934 warning ("unsupported arg to `__builtin_frame_address'");
8935 else
8936 warning ("unsupported arg to `__builtin_return_address'");
8937 return const0_rtx;
8940 /* For __builtin_frame_address, return what we've got. */
8941 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8942 return tem;
8944 if (GET_CODE (tem) != REG)
8945 tem = copy_to_reg (tem);
8946 return tem;
8949 /* Returns the address of the area where the structure is returned.
8950 0 otherwise. */
8951 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
8952 if (arglist != 0
8953 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
8954 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
8955 return const0_rtx;
8956 else
8957 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
8959 case BUILT_IN_ALLOCA:
8960 if (arglist == 0
8961 /* Arg could be non-integer if user redeclared this fcn wrong. */
8962 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8963 break;
8965 /* Compute the argument. */
8966 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8968 /* Allocate the desired space. */
8969 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
8971 case BUILT_IN_FFS:
8972 /* If not optimizing, call the library function. */
8973 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8974 break;
8976 if (arglist == 0
8977 /* Arg could be non-integer if user redeclared this fcn wrong. */
8978 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8979 break;
8981 /* Compute the argument. */
8982 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8983 /* Compute ffs, into TARGET if possible.
8984 Set TARGET to wherever the result comes back. */
8985 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8986 ffs_optab, op0, target, 1);
8987 if (target == 0)
8988 abort ();
8989 return target;
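/* For strlen, a constant argument is folded at compile time, e.g.
   strlen ("hello") becomes 5 via c_strlen; otherwise we try the
   machine's strlen pattern before falling back to the library.  */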
8991 case BUILT_IN_STRLEN:
8992 /* If not optimizing, call the library function. */
8993 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8994 break;
8996 if (arglist == 0
8997 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8998 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist))))
8999 break;
9000 else
9002 tree src = TREE_VALUE (arglist);
9003 tree len = c_strlen (src);
9005 int align
9006 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9008 rtx result, src_rtx, char_rtx;
9009 enum machine_mode insn_mode = value_mode, char_mode;
9010 enum insn_code icode;
9012 /* If the length is known, just return it. */
9013 if (len != 0)
9014 return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
9016 /* If SRC is not a pointer type, don't do this operation inline. */
9017 if (align == 0)
9018 break;
9020 /* Call a function if we can't compute strlen in the right mode. */
9022 while (insn_mode != VOIDmode)
9024 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
9025 if (icode != CODE_FOR_nothing)
9026 break;
9028 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
9030 if (insn_mode == VOIDmode)
9031 break;
9033 /* Make a place to write the result of the instruction. */
9034 result = target;
9035 if (! (result != 0
9036 && GET_CODE (result) == REG
9037 && GET_MODE (result) == insn_mode
9038 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9039 result = gen_reg_rtx (insn_mode);
9041 /* Make sure the operands are acceptable to the predicates. */
9043 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
9044 result = gen_reg_rtx (insn_mode);
9045 src_rtx = memory_address (BLKmode,
9046 expand_expr (src, NULL_RTX, ptr_mode,
9047 EXPAND_NORMAL));
9049 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
9050 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
9052 /* Check the string is readable and has an end. */
9053 if (current_function_check_memory_usage)
9054 emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
9055 src_rtx, ptr_mode,
9056 GEN_INT (MEMORY_USE_RO),
9057 TYPE_MODE (integer_type_node));
9059 char_rtx = const0_rtx;
9060 char_mode = insn_operand_mode[(int)icode][2];
9061 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
9062 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
9064 emit_insn (GEN_FCN (icode) (result,
9065 gen_rtx_MEM (BLKmode, src_rtx),
9066 char_rtx, GEN_INT (align)));
9068 /* Return the value in the proper mode for this function. */
9069 if (GET_MODE (result) == value_mode)
9070 return result;
9071 else if (target != 0)
9073 convert_move (target, result, 0);
9074 return target;
9076 else
9077 return convert_to_mode (value_mode, result, 0);
9080 case BUILT_IN_STRCPY:
9081 /* If not optimizing, call the library function. */
9082 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9083 break;
9085 if (arglist == 0
9086 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9087 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
9088 || TREE_CHAIN (arglist) == 0
9089 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))))
9090 break;
9091 else
9093 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
9095 if (len == 0)
9096 break;
9098 len = size_binop (PLUS_EXPR, len, integer_one_node);
9100 chainon (arglist, build_tree_list (NULL_TREE, len));
9103 /* Drops in. */
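/* For example, when the source length is known, strcpy (d, "hi")
   arrives at the memcpy code below with the argument list extended to
   (d, "hi", 3).  */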
9104 case BUILT_IN_MEMCPY:
9105 /* If not optimizing, call the library function. */
9106 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9107 break;
9109 if (arglist == 0
9110 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9111 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
9112 || TREE_CHAIN (arglist) == 0
9113 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9114 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9115 || (TREE_CODE (TREE_TYPE (TREE_VALUE
9116 (TREE_CHAIN (TREE_CHAIN (arglist)))))
9117 != INTEGER_TYPE))
9118 break;
9119 else
9121 tree dest = TREE_VALUE (arglist);
9122 tree src = TREE_VALUE (TREE_CHAIN (arglist));
9123 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9124 tree type;
9126 int src_align
9127 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9128 int dest_align
9129 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9130 rtx dest_rtx, dest_mem, src_mem, src_rtx, dest_addr, len_rtx;
9132 /* If either SRC or DEST is not a pointer type, don't do
9133 this operation in-line. */
9134 if (src_align == 0 || dest_align == 0)
9136 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
9137 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9138 break;
9141 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
9142 dest_mem = gen_rtx_MEM (BLKmode,
9143 memory_address (BLKmode, dest_rtx));
9144 /* There could be a void* cast on top of the object. */
9145 while (TREE_CODE (dest) == NOP_EXPR)
9146 dest = TREE_OPERAND (dest, 0);
9147 type = TREE_TYPE (TREE_TYPE (dest));
9148 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
9149 src_rtx = expand_expr (src, NULL_RTX, ptr_mode, EXPAND_SUM);
9150 src_mem = gen_rtx_MEM (BLKmode,
9151 memory_address (BLKmode, src_rtx));
9152 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9154 /* Just copy the rights of SRC to the rights of DEST. */
9155 if (current_function_check_memory_usage)
9156 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
9157 dest_rtx, ptr_mode,
9158 src_rtx, ptr_mode,
9159 len_rtx, TYPE_MODE (sizetype));
9161 /* There could be a void* cast on top of the object. */
9162 while (TREE_CODE (src) == NOP_EXPR)
9163 src = TREE_OPERAND (src, 0);
9164 type = TREE_TYPE (TREE_TYPE (src));
9165 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
9167 /* Copy word part most expediently. */
9168 dest_addr
9169 = emit_block_move (dest_mem, src_mem, len_rtx,
9170 MIN (src_align, dest_align));
9172 if (dest_addr == 0)
9173 dest_addr = force_operand (dest_rtx, NULL_RTX);
9175 return dest_addr;
9178 case BUILT_IN_MEMSET:
9179 /* If not optimizing, call the library function. */
9180 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9181 break;
9183 if (arglist == 0
9184 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9185 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
9186 || TREE_CHAIN (arglist) == 0
9187 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9188 != INTEGER_TYPE)
9189 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9190 || (TREE_CODE (TREE_TYPE
9191 (TREE_VALUE
9192 (TREE_CHAIN (TREE_CHAIN (arglist)))))
9193 != INTEGER_TYPE))
9194 break;
9195 else
9197 tree dest = TREE_VALUE (arglist);
9198 tree val = TREE_VALUE (TREE_CHAIN (arglist));
9199 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9200 tree type;
9202 int dest_align
9203 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9204 rtx dest_rtx, dest_mem, dest_addr, len_rtx;
9206 /* If DEST is not a pointer type, don't do this
9207 operation in-line. */
9208 if (dest_align == 0)
9209 break;
9211 /* If VAL is not 0, don't do this operation in-line. */
9212 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
9213 break;
9215 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
9216 dest_mem = gen_rtx_MEM (BLKmode,
9217 memory_address (BLKmode, dest_rtx));
9218 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9220 /* Just check DST is writable and mark it as readable. */
9221 if (current_function_check_memory_usage)
9222 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
9223 dest_rtx, ptr_mode,
9224 len_rtx, TYPE_MODE (sizetype),
9225 GEN_INT (MEMORY_USE_WO),
9226 TYPE_MODE (integer_type_node));
9229 /* There could be a void* cast on top of the object. */
9230 while (TREE_CODE (dest) == NOP_EXPR)
9231 dest = TREE_OPERAND (dest, 0);
9232 type = TREE_TYPE (TREE_TYPE (dest));
9233 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
9235 dest_addr = clear_storage (dest_mem, len_rtx, dest_align);
9237 if (dest_addr == 0)
9238 dest_addr = force_operand (dest_rtx, NULL_RTX);
9240 return dest_addr;
9243 /* These comparison functions need an instruction that returns an actual
9244 index. An ordinary compare that just sets the condition codes
9245 is not enough. */
9246 #ifdef HAVE_cmpstrsi
9247 case BUILT_IN_STRCMP:
9248 /* If not optimizing, call the library function. */
9249 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9250 break;
9252 /* If we need to check memory accesses, call the library function. */
9253 if (current_function_check_memory_usage)
9254 break;
9256 if (arglist == 0
9257 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9258 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
9259 || TREE_CHAIN (arglist) == 0
9260 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))))
9261 break;
9262 else if (!HAVE_cmpstrsi)
9263 break;
9264 {
9265 tree arg1 = TREE_VALUE (arglist);
9266 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9267 tree offset;
9268 tree len, len2;
9270 len = c_strlen (arg1);
9271 if (len)
9272 len = size_binop (PLUS_EXPR, integer_one_node, len);
9273 len2 = c_strlen (arg2);
9274 if (len2)
9275 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
9277 /* If we don't have a constant length for the first, use the length
9278 of the second, if we know it. We don't require a constant for
9279 this case; some cost analysis could be done if both are available
9280 but neither is constant. For now, assume they're equally cheap.
9282 If both strings have constant lengths, use the smaller. This
9283 could arise if optimization results in strcpy being called with
9284 two fixed strings, or if the code was machine-generated. We should
9285 add some code to the `memcmp' handler below to deal with such
9286 situations, someday. */
9287 if (!len || TREE_CODE (len) != INTEGER_CST)
9288 {
9289 if (len2)
9290 len = len2;
9291 else if (len == 0)
9292 break;
9293 }
9294 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
9295 {
9296 if (tree_int_cst_lt (len2, len))
9297 len = len2;
9298 }

9300 chainon (arglist, build_tree_list (NULL_TREE, len));
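#if 0
	/* Worked illustration of the length choice above, with
	   hypothetical constant operands; the comparison can never
	   look past the first terminating NUL, so the smaller length
	   is always sufficient.  */
	strcmp ("abcde", "xy");	/* len = 5+1, len2 = 2+1; use 3 */
	strcmp ("abc", s);	/* only len = 3+1 is constant; use 4 */
#endif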
9303 /* Drops in. */
9304 case BUILT_IN_MEMCMP:
9305 /* If not optimizing, call the library function. */
9306 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9307 break;
9309 /* If we need to check memory accesses, call the library function. */
9310 if (current_function_check_memory_usage)
9311 break;
9313 if (arglist == 0
9314 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9315 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
9316 || TREE_CHAIN (arglist) == 0
9317 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9318 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9319 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
9320 break;
9321 else if (!HAVE_cmpstrsi)
9322 break;
9323 {
9324 tree arg1 = TREE_VALUE (arglist);
9325 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9326 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9327 rtx result;
9329 int arg1_align
9330 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9331 int arg2_align
9332 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9333 enum machine_mode insn_mode
9334 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
9336 /* If we don't have POINTER_TYPE, call the function. */
9337 if (arg1_align == 0 || arg2_align == 0)
9339 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
9340 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9341 break;
9344 /* Make a place to write the result of the instruction. */
9345 result = target;
9346 if (! (result != 0
9347 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
9348 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9349 result = gen_reg_rtx (insn_mode);
9351 emit_insn (gen_cmpstrsi (result,
9352 gen_rtx_MEM (BLKmode,
9353 expand_expr (arg1, NULL_RTX,
9354 ptr_mode,
9355 EXPAND_NORMAL)),
9356 gen_rtx_MEM (BLKmode,
9357 expand_expr (arg2, NULL_RTX,
9358 ptr_mode,
9359 EXPAND_NORMAL)),
9360 expand_expr (len, NULL_RTX, VOIDmode, 0),
9361 GEN_INT (MIN (arg1_align, arg2_align))));
9363 /* Return the value in the proper mode for this function. */
9364 mode = TYPE_MODE (TREE_TYPE (exp));
9365 if (GET_MODE (result) == mode)
9366 return result;
9367 else if (target != 0)
9369 convert_move (target, result, 0);
9370 return target;
9372 else
9373 return convert_to_mode (mode, result, 0);
9375 #else
9376 case BUILT_IN_STRCMP:
9377 case BUILT_IN_MEMCMP:
9378 break;
9379 #endif
9381 case BUILT_IN_SETJMP:
9382 if (arglist == 0
9383 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist))))
9384 break;
9387 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9388 VOIDmode, 0);
9389 return expand_builtin_setjmp (buf_addr, target);
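#if 0
	/* Minimal usage sketch (hypothetical user code; do_work and
	   handle_unwind are placeholders):  */
	void *jmpbuf[5];	/* five words, as described below */
	if (__builtin_setjmp (jmpbuf) == 0)
	  do_work ();		/* direct return path */
	else
	  handle_unwind ();	/* reached via __builtin_longjmp */
#endif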
9392 /* __builtin_longjmp is passed a pointer to an array of five words
9393 and a value, which is a dummy. It's similar to the C library longjmp
9394 function but works with __builtin_setjmp above. */
9395 case BUILT_IN_LONGJMP:
9396 if (arglist == 0 || TREE_CHAIN (arglist) == 0
9397 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist))))
9398 break;
9401 tree dummy_id = get_identifier ("__dummy");
9402 tree dummy_type = build_function_type (void_type_node, NULL_TREE);
9403 tree dummy_decl = build_decl (FUNCTION_DECL, dummy_id, dummy_type);
9404 #ifdef POINTERS_EXTEND_UNSIGNED
9405 rtx buf_addr
9406 = force_reg (Pmode,
9407 convert_memory_address
9408 (Pmode,
9409 expand_expr (TREE_VALUE (arglist),
9410 NULL_RTX, VOIDmode, 0)));
9411 #else
9412 rtx buf_addr
9413 = force_reg (Pmode, expand_expr (TREE_VALUE (arglist),
9414 NULL_RTX,
9415 VOIDmode, 0));
9416 #endif
9417 rtx fp = gen_rtx_MEM (Pmode, buf_addr);
9418 rtx lab = gen_rtx_MEM (Pmode,
9419 plus_constant (buf_addr,
9420 GET_MODE_SIZE (Pmode)));
9421 enum machine_mode sa_mode
9422 =
9423 #ifdef HAVE_save_stack_nonlocal
9424 (HAVE_save_stack_nonlocal
9425 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
9426 : Pmode)
9427 #else
9428 Pmode
9429 #endif
9430 ;
9431 rtx stack;
9433 #ifdef STACK_SAVEAREA_MODE
9434 sa_mode = STACK_SAVEAREA_MODE (sa_mode, SAVE_NONLOCAL);
9435 #endif
9437 stack = gen_rtx_MEM (sa_mode,
9438 plus_constant (buf_addr,
9439 2 * GET_MODE_SIZE (Pmode)));
9440 DECL_EXTERNAL (dummy_decl) = 1;
9441 TREE_PUBLIC (dummy_decl) = 1;
9442 make_decl_rtl (dummy_decl, NULL_PTR, 1);
9444 /* Expand the second expression just for side-effects. */
9445 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
9446 const0_rtx, VOIDmode, 0);
9448 assemble_external (dummy_decl);
9449 TREE_USED (dummy_decl) = 1;
9451 /* Pick up FP, label, and SP from the block and jump. This code is
9452 from expand_goto in stmt.c; see there for detailed comments. */
9453 #if HAVE_nonlocal_goto
9454 if (HAVE_nonlocal_goto)
9455 emit_insn (gen_nonlocal_goto (fp, lab, stack,
9456 XEXP (DECL_RTL (dummy_decl), 0)));
9457 else
9458 #endif
9459 {
9460 lab = copy_to_reg (lab);
9461 emit_move_insn (hard_frame_pointer_rtx, fp);
9462 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
9464 /* Put in the static chain register the address of the dummy
9465 function. */
9466 emit_move_insn (static_chain_rtx, XEXP (DECL_RTL (dummy_decl), 0));
9467 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
9468 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
9469 emit_insn (gen_rtx_USE (VOIDmode, static_chain_rtx));
9470 emit_indirect_jump (lab);
9471 }

9473 return const0_rtx;
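	/* Sketch of the jump buffer layout consumed above, assuming
	   word offsets in units of GET_MODE_SIZE (Pmode):
	     word 0: saved frame pointer (FP above)
	     word 1: resume label address (LAB above)
	     word 2: saved stack pointer (STACK above, in sa_mode)
	   The remaining words of the five-word buffer are left to
	   the __builtin_setjmp side.  */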
9476 /* Various hooks for the DWARF 2 __throw routine. */
9477 case BUILT_IN_UNWIND_INIT:
9478 expand_builtin_unwind_init ();
9479 return const0_rtx;
9480 case BUILT_IN_FP:
9481 return frame_pointer_rtx;
9482 case BUILT_IN_SP:
9483 return stack_pointer_rtx;
9484 #ifdef DWARF2_UNWIND_INFO
9485 case BUILT_IN_DWARF_FP_REGNUM:
9486 return expand_builtin_dwarf_fp_regnum ();
9487 case BUILT_IN_DWARF_REG_SIZE:
9488 return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
9489 #endif
9490 case BUILT_IN_FROB_RETURN_ADDR:
9491 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
9492 case BUILT_IN_EXTRACT_RETURN_ADDR:
9493 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
9494 case BUILT_IN_SET_RETURN_ADDR_REG:
9495 expand_builtin_set_return_addr_reg (TREE_VALUE (arglist));
9496 return const0_rtx;
9497 case BUILT_IN_EH_STUB:
9498 return expand_builtin_eh_stub ();
9499 case BUILT_IN_SET_EH_REGS:
9500 expand_builtin_set_eh_regs (TREE_VALUE (arglist),
9501 TREE_VALUE (TREE_CHAIN (arglist)));
9502 return const0_rtx;
9504 default: /* just do library call, if unknown builtin */
9505 error ("built-in function `%s' not currently supported",
9506 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9507 }

9509 /* The switch statement above can drop through to cause the function
9510 to be called normally. */
9512 return expand_call (exp, target, ignore);
9513 }
9515 /* Built-in functions to perform an untyped call and return. */
9517 /* For each register that may be used for calling a function, this
9518 gives a mode used to copy the register's value. VOIDmode indicates
9519 the register is not used for calling a function. If the machine
9520 has register windows, this gives only the outbound registers.
9521 INCOMING_REGNO gives the corresponding inbound register. */
9522 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
9524 /* For each register that may be used for returning values, this gives
9525 a mode used to copy the register's value. VOIDmode indicates the
9526 register is not used for returning values. If the machine has
9527 register windows, this gives only the outbound registers.
9528 INCOMING_REGNO gives the corresponding inbound register. */
9529 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
9531 /* For each register that may be used for calling a function, this
9532 gives the offset of that register into the block returned by
9533 __builtin_apply_args. 0 indicates that the register is not
9534 used for calling a function. */
9535 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9537 /* Return the offset of register REGNO into the block returned by
9538 __builtin_apply_args. This is not declared static, since it is
9539 needed in objc-act.c. */
9541 int
9542 apply_args_register_offset (regno)
9543 int regno;
9545 apply_args_size ();
9547 /* Arguments are always put in outgoing registers (in the argument
9548 block) if such make sense. */
9549 #ifdef OUTGOING_REGNO
9550 regno = OUTGOING_REGNO(regno);
9551 #endif
9552 return apply_args_reg_offset[regno];
9555 /* Return the size required for the block returned by __builtin_apply_args,
9556 and initialize apply_args_mode. */
9558 static int
9559 apply_args_size ()
9561 static int size = -1;
9562 int align, regno;
9563 enum machine_mode mode;
9565 /* The values computed by this function never change. */
9566 if (size < 0)
9567 {
9568 /* The first value is the incoming arg-pointer. */
9569 size = GET_MODE_SIZE (Pmode);
9571 /* The second value is the structure value address unless this is
9572 passed as an "invisible" first argument. */
9573 if (struct_value_rtx)
9574 size += GET_MODE_SIZE (Pmode);
9576 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9577 if (FUNCTION_ARG_REGNO_P (regno))
9579 /* Search for the proper mode for copying this register's
9580 value. I'm not sure this is right, but it works so far. */
9581 enum machine_mode best_mode = VOIDmode;
9583 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9584 mode != VOIDmode;
9585 mode = GET_MODE_WIDER_MODE (mode))
9586 if (HARD_REGNO_MODE_OK (regno, mode)
9587 && HARD_REGNO_NREGS (regno, mode) == 1)
9588 best_mode = mode;
9590 if (best_mode == VOIDmode)
9591 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9592 mode != VOIDmode;
9593 mode = GET_MODE_WIDER_MODE (mode))
9594 if (HARD_REGNO_MODE_OK (regno, mode)
9595 && (mov_optab->handlers[(int) mode].insn_code
9596 != CODE_FOR_nothing))
9597 best_mode = mode;
9599 mode = best_mode;
9600 if (mode == VOIDmode)
9601 abort ();
9603 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9604 if (size % align != 0)
9605 size = CEIL (size, align) * align;
9606 apply_args_reg_offset[regno] = size;
9607 size += GET_MODE_SIZE (mode);
9608 apply_args_mode[regno] = mode;
9609 }
9610 else
9611 {
9612 apply_args_mode[regno] = VOIDmode;
9613 apply_args_reg_offset[regno] = 0;
9614 }
9615 }
9616 return size;
9617 }
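/* Worked example of the rounding above, assuming a hypothetical
   32-bit target (Pmode of 4 bytes, no struct_value_rtx) with one
   argument register whose best mode is 8 bytes wide and 8-byte
   aligned: size starts at 4 for the arg-pointer; align is 8, so
   size becomes CEIL (4, 8) * 8 = 8; the register is recorded at
   offset 8, and the final size is 8 + 8 = 16.  */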
9619 /* Return the size required for the block returned by __builtin_apply,
9620 and initialize apply_result_mode. */
9622 static int
9623 apply_result_size ()
9625 static int size = -1;
9626 int align, regno;
9627 enum machine_mode mode;
9629 /* The values computed by this function never change. */
9630 if (size < 0)
9632 size = 0;
9634 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9635 if (FUNCTION_VALUE_REGNO_P (regno))
9637 /* Search for the proper mode for copying this register's
9638 value. I'm not sure this is right, but it works so far. */
9639 enum machine_mode best_mode = VOIDmode;
9641 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9642 mode != TImode;
9643 mode = GET_MODE_WIDER_MODE (mode))
9644 if (HARD_REGNO_MODE_OK (regno, mode))
9645 best_mode = mode;
9647 if (best_mode == VOIDmode)
9648 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9649 mode != VOIDmode;
9650 mode = GET_MODE_WIDER_MODE (mode))
9651 if (HARD_REGNO_MODE_OK (regno, mode)
9652 && (mov_optab->handlers[(int) mode].insn_code
9653 != CODE_FOR_nothing))
9654 best_mode = mode;
9656 mode = best_mode;
9657 if (mode == VOIDmode)
9658 abort ();
9660 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9661 if (size % align != 0)
9662 size = CEIL (size, align) * align;
9663 size += GET_MODE_SIZE (mode);
9664 apply_result_mode[regno] = mode;
9666 else
9667 apply_result_mode[regno] = VOIDmode;
9669 /* Allow targets that use untyped_call and untyped_return to override
9670 the size so that machine-specific information can be stored here. */
9671 #ifdef APPLY_RESULT_SIZE
9672 size = APPLY_RESULT_SIZE;
9673 #endif
9675 return size;
9678 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9679 /* Create a vector describing the result block RESULT. If SAVEP is true,
9680 the result block is used to save the values; otherwise it is used to
9681 restore the values. */
9683 static rtx
9684 result_vector (savep, result)
9685 int savep;
9686 rtx result;
9688 int regno, size, align, nelts;
9689 enum machine_mode mode;
9690 rtx reg, mem;
9691 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9693 size = nelts = 0;
9694 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9695 if ((mode = apply_result_mode[regno]) != VOIDmode)
9697 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9698 if (size % align != 0)
9699 size = CEIL (size, align) * align;
9700 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
9701 mem = change_address (result, mode,
9702 plus_constant (XEXP (result, 0), size));
9703 savevec[nelts++] = (savep
9704 ? gen_rtx_SET (VOIDmode, mem, reg)
9705 : gen_rtx_SET (VOIDmode, reg, mem));
9706 size += GET_MODE_SIZE (mode);
9708 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
9710 #endif /* HAVE_untyped_call or HAVE_untyped_return */
9712 /* Save the state required to perform an untyped call with the same
9713 arguments as were passed to the current function. */
9715 static rtx
9716 expand_builtin_apply_args ()
9718 rtx registers;
9719 int size, align, regno;
9720 enum machine_mode mode;
9722 /* Create a block where the arg-pointer, structure value address,
9723 and argument registers can be saved. */
9724 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9726 /* Walk past the arg-pointer and structure value address. */
9727 size = GET_MODE_SIZE (Pmode);
9728 if (struct_value_rtx)
9729 size += GET_MODE_SIZE (Pmode);
9731 /* Save each register used in calling a function to the block. */
9732 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9733 if ((mode = apply_args_mode[regno]) != VOIDmode)
9735 rtx tem;
9737 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9738 if (size % align != 0)
9739 size = CEIL (size, align) * align;
9741 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
9743 #ifdef STACK_REGS
9744 /* For reg-stack.c's stack register bookkeeping.
9745 Compare with a similar piece of code in function.c. */
9747 emit_insn (gen_rtx_USE (mode, tem));
9748 #endif
9750 emit_move_insn (change_address (registers, mode,
9751 plus_constant (XEXP (registers, 0),
9752 size)),
9753 tem);
9754 size += GET_MODE_SIZE (mode);
9757 /* Save the arg pointer to the block. */
9758 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9759 copy_to_reg (virtual_incoming_args_rtx));
9760 size = GET_MODE_SIZE (Pmode);
9762 /* Save the structure value address unless this is passed as an
9763 "invisible" first argument. */
9764 if (struct_value_incoming_rtx)
9766 emit_move_insn (change_address (registers, Pmode,
9767 plus_constant (XEXP (registers, 0),
9768 size)),
9769 copy_to_reg (struct_value_incoming_rtx));
9770 size += GET_MODE_SIZE (Pmode);
9773 /* Return the address of the block. */
9774 return copy_addr_to_reg (XEXP (registers, 0));
9777 /* Perform an untyped call and save the state required to perform an
9778 untyped return of whatever value was returned by the given function. */
9780 static rtx
9781 expand_builtin_apply (function, arguments, argsize)
9782 rtx function, arguments, argsize;
9784 int size, align, regno;
9785 enum machine_mode mode;
9786 rtx incoming_args, result, reg, dest, call_insn;
9787 rtx old_stack_level = 0;
9788 rtx call_fusage = 0;
9790 /* Create a block where the return registers can be saved. */
9791 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9793 /* ??? The argsize value should be adjusted here. */
9795 /* Fetch the arg pointer from the ARGUMENTS block. */
9796 incoming_args = gen_reg_rtx (Pmode);
9797 emit_move_insn (incoming_args,
9798 gen_rtx_MEM (Pmode, arguments));
9799 #ifndef STACK_GROWS_DOWNWARD
9800 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9801 incoming_args, 0, OPTAB_LIB_WIDEN);
9802 #endif
9804 /* Perform postincrements before actually calling the function. */
9805 emit_queue ();
9807 /* Push a new argument block and copy the arguments. */
9808 do_pending_stack_adjust ();
9810 /* Save the stack using the nonlocal mechanism, if available. */
9811 #ifdef HAVE_save_stack_nonlocal
9812 if (HAVE_save_stack_nonlocal)
9813 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
9814 else
9815 #endif
9816 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9818 /* Push a block of memory onto the stack to store the memory arguments.
9819 Save the address in a register, and copy the memory arguments. ??? I
9820 haven't figured out how the calling convention macros affect this,
9821 but it's likely that the source and/or destination addresses in
9822 the block copy will need updating in machine specific ways. */
9823 dest = allocate_dynamic_stack_space (argsize, 0, 0);
9824 emit_block_move (gen_rtx_MEM (BLKmode, dest),
9825 gen_rtx_MEM (BLKmode, incoming_args),
9826 argsize,
9827 PARM_BOUNDARY / BITS_PER_UNIT);
9829 /* Refer to the argument block. */
9830 apply_args_size ();
9831 arguments = gen_rtx_MEM (BLKmode, arguments);
9833 /* Walk past the arg-pointer and structure value address. */
9834 size = GET_MODE_SIZE (Pmode);
9835 if (struct_value_rtx)
9836 size += GET_MODE_SIZE (Pmode);
9838 /* Restore each of the registers previously saved. Make USE insns
9839 for each of these registers for use in making the call. */
9840 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9841 if ((mode = apply_args_mode[regno]) != VOIDmode)
9843 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9844 if (size % align != 0)
9845 size = CEIL (size, align) * align;
9846 reg = gen_rtx_REG (mode, regno);
9847 emit_move_insn (reg,
9848 change_address (arguments, mode,
9849 plus_constant (XEXP (arguments, 0),
9850 size)));
9852 use_reg (&call_fusage, reg);
9853 size += GET_MODE_SIZE (mode);
9856 /* Restore the structure value address unless this is passed as an
9857 "invisible" first argument. */
9858 size = GET_MODE_SIZE (Pmode);
9859 if (struct_value_rtx)
9861 rtx value = gen_reg_rtx (Pmode);
9862 emit_move_insn (value,
9863 change_address (arguments, Pmode,
9864 plus_constant (XEXP (arguments, 0),
9865 size)));
9866 emit_move_insn (struct_value_rtx, value);
9867 if (GET_CODE (struct_value_rtx) == REG)
9868 use_reg (&call_fusage, struct_value_rtx);
9869 size += GET_MODE_SIZE (Pmode);
9872 /* All arguments and registers used for the call are set up by now! */
9873 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9875 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
9876 and we don't want to load it into a register as an optimization,
9877 because prepare_call_address already did it if it should be done. */
9878 if (GET_CODE (function) != SYMBOL_REF)
9879 function = memory_address (FUNCTION_MODE, function);
9881 /* Generate the actual call instruction and save the return value. */
9882 #ifdef HAVE_untyped_call
9883 if (HAVE_untyped_call)
9884 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
9885 result, result_vector (1, result)));
9886 else
9887 #endif
9888 #ifdef HAVE_call_value
9889 if (HAVE_call_value)
9891 rtx valreg = 0;
9893 /* Locate the unique return register. It is not possible to
9894 express a call that sets more than one return register using
9895 call_value; use untyped_call for that. In fact, untyped_call
9896 only needs to save the return registers in the given block. */
9897 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9898 if ((mode = apply_result_mode[regno]) != VOIDmode)
9900 if (valreg)
9901 abort (); /* HAVE_untyped_call required. */
9902 valreg = gen_rtx_REG (mode, regno);
9905 emit_call_insn (gen_call_value (valreg,
9906 gen_rtx_MEM (FUNCTION_MODE, function),
9907 const0_rtx, NULL_RTX, const0_rtx));
9909 emit_move_insn (change_address (result, GET_MODE (valreg),
9910 XEXP (result, 0)),
9911 valreg);
9913 else
9914 #endif
9915 abort ();
9917 /* Find the CALL insn we just emitted. */
9918 for (call_insn = get_last_insn ();
9919 call_insn && GET_CODE (call_insn) != CALL_INSN;
9920 call_insn = PREV_INSN (call_insn))
9921 ;

9923 if (! call_insn)
9924 abort ();
9926 /* Put the register usage information on the CALL. If there is already
9927 some usage information, put ours at the end. */
9928 if (CALL_INSN_FUNCTION_USAGE (call_insn))
9930 rtx link;
9932 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9933 link = XEXP (link, 1))
9934 ;

9936 XEXP (link, 1) = call_fusage;
9938 else
9939 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
9941 /* Restore the stack. */
9942 #ifdef HAVE_save_stack_nonlocal
9943 if (HAVE_save_stack_nonlocal)
9944 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
9945 else
9946 #endif
9947 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9949 /* Return the address of the result block. */
9950 return copy_addr_to_reg (XEXP (result, 0));
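#if 0
/* The classic forwarding idiom these builtins implement (hypothetical
   user code; target_fn is a placeholder and 128 is a caller-chosen
   upper bound on the argument block size):  */
void *
forwarder ()
{
  void *args = __builtin_apply_args ();
  void *result = __builtin_apply ((void (*) ()) target_fn, args, 128);
  __builtin_return (result);
}
#endif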
9953 /* Perform an untyped return. */
9955 static void
9956 expand_builtin_return (result)
9957 rtx result;
9959 int size, align, regno;
9960 enum machine_mode mode;
9961 rtx reg;
9962 rtx call_fusage = 0;
9964 apply_result_size ();
9965 result = gen_rtx_MEM (BLKmode, result);
9967 #ifdef HAVE_untyped_return
9968 if (HAVE_untyped_return)
9970 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9971 emit_barrier ();
9972 return;
9974 #endif
9976 /* Restore the return value and note that each value is used. */
9977 size = 0;
9978 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9979 if ((mode = apply_result_mode[regno]) != VOIDmode)
9981 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9982 if (size % align != 0)
9983 size = CEIL (size, align) * align;
9984 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
9985 emit_move_insn (reg,
9986 change_address (result, mode,
9987 plus_constant (XEXP (result, 0),
9988 size)));
9990 push_to_sequence (call_fusage);
9991 emit_insn (gen_rtx_USE (VOIDmode, reg));
9992 call_fusage = get_insns ();
9993 end_sequence ();
9994 size += GET_MODE_SIZE (mode);
9997 /* Put the USE insns before the return. */
9998 emit_insns (call_fusage);
10000 /* Return whatever value was restored by jumping directly to the end
10001 of the function. */
10002 expand_null_return ();
10005 /* Expand code for a post- or pre- increment or decrement
10006 and return the RTX for the result.
10007 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
10009 static rtx
10010 expand_increment (exp, post, ignore)
10011 register tree exp;
10012 int post, ignore;
10014 register rtx op0, op1;
10015 register rtx temp, value;
10016 register tree incremented = TREE_OPERAND (exp, 0);
10017 optab this_optab = add_optab;
10018 int icode;
10019 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
10020 int op0_is_copy = 0;
10021 int single_insn = 0;
10022 /* 1 means we can't store into OP0 directly,
10023 because it is a subreg narrower than a word,
10024 and we don't dare clobber the rest of the word. */
10025 int bad_subreg = 0;
10027 if (output_bytecode)
10029 bc_expand_expr (exp);
10030 return NULL_RTX;
10033 /* Stabilize any component ref that might need to be
10034 evaluated more than once below. */
10035 if (!post
10036 || TREE_CODE (incremented) == BIT_FIELD_REF
10037 || (TREE_CODE (incremented) == COMPONENT_REF
10038 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
10039 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
10040 incremented = stabilize_reference (incremented);
10041 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
10042 ones into save exprs so that they don't accidentally get evaluated
10043 more than once by the code below. */
10044 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
10045 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
10046 incremented = save_expr (incremented);
10048 /* Compute the operands as RTX.
10049 Note whether OP0 is the actual lvalue or a copy of it:
10050 I believe it is a copy iff it is a register or subreg
10051 and insns were generated in computing it. */
10053 temp = get_last_insn ();
10054 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
10056 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
10057 in place but instead must do sign- or zero-extension during assignment,
10058 so we copy it into a new register and let the code below use it as
10059 a copy.
10061 Note that we can safely modify this SUBREG since it is known not to be
10062 shared (it was made by the expand_expr call above). */
10064 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
10066 if (post)
10067 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
10068 else
10069 bad_subreg = 1;
10071 else if (GET_CODE (op0) == SUBREG
10072 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
10074 /* We cannot increment this SUBREG in place. If we are
10075 post-incrementing, get a copy of the old value. Otherwise,
10076 just mark that we cannot increment in place. */
10077 if (post)
10078 op0 = copy_to_reg (op0);
10079 else
10080 bad_subreg = 1;
10083 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
10084 && temp != get_last_insn ());
10085 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
10086 EXPAND_MEMORY_USE_BAD);
10088 /* Decide whether incrementing or decrementing. */
10089 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
10090 || TREE_CODE (exp) == PREDECREMENT_EXPR)
10091 this_optab = sub_optab;
10093 /* Convert decrement by a constant into a negative increment. */
10094 if (this_optab == sub_optab
10095 && GET_CODE (op1) == CONST_INT)
10097 op1 = GEN_INT (- INTVAL (op1));
10098 this_optab = add_optab;
10101 /* For a preincrement, see if we can do this with a single instruction. */
10102 if (!post)
10104 icode = (int) this_optab->handlers[(int) mode].insn_code;
10105 if (icode != (int) CODE_FOR_nothing
10106 /* Make sure that OP0 is valid for operands 0 and 1
10107 of the insn we want to queue. */
10108 && (*insn_operand_predicate[icode][0]) (op0, mode)
10109 && (*insn_operand_predicate[icode][1]) (op0, mode)
10110 && (*insn_operand_predicate[icode][2]) (op1, mode))
10111 single_insn = 1;
10114 /* If OP0 is not the actual lvalue, but rather a copy in a register,
10115 then we cannot just increment OP0. We must therefore contrive to
10116 increment the original value. Then, for postincrement, we can return
10117 OP0 since it is a copy of the old value. For preincrement, expand here
10118 unless we can do it with a single insn.
10120 Likewise if storing directly into OP0 would clobber high bits
10121 we need to preserve (bad_subreg). */
10122 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
10124 /* This is the easiest way to increment the value wherever it is.
10125 Problems with multiple evaluation of INCREMENTED are prevented
10126 because either (1) it is a component_ref or preincrement,
10127 in which case it was stabilized above, or (2) it is an array_ref
10128 with constant index in an array in a register, which is
10129 safe to reevaluate. */
10130 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
10131 || TREE_CODE (exp) == PREDECREMENT_EXPR)
10132 ? MINUS_EXPR : PLUS_EXPR),
10133 TREE_TYPE (exp),
10134 incremented,
10135 TREE_OPERAND (exp, 1));
10137 while (TREE_CODE (incremented) == NOP_EXPR
10138 || TREE_CODE (incremented) == CONVERT_EXPR)
10140 newexp = convert (TREE_TYPE (incremented), newexp);
10141 incremented = TREE_OPERAND (incremented, 0);
10144 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
10145 return post ? op0 : temp;
10148 if (post)
10150 /* We have a true reference to the value in OP0.
10151 If there is an insn to add or subtract in this mode, queue it.
10152 Queueing the increment insn avoids the register shuffling
10153 that often results if we must increment now and first save
10154 the old value for subsequent use. */
10156 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
10157 op0 = stabilize (op0);
10158 #endif
10160 icode = (int) this_optab->handlers[(int) mode].insn_code;
10161 if (icode != (int) CODE_FOR_nothing
10162 /* Make sure that OP0 is valid for operands 0 and 1
10163 of the insn we want to queue. */
10164 && (*insn_operand_predicate[icode][0]) (op0, mode)
10165 && (*insn_operand_predicate[icode][1]) (op0, mode))
10167 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10168 op1 = force_reg (mode, op1);
10170 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
10172 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
10174 rtx addr = (general_operand (XEXP (op0, 0), mode)
10175 ? force_reg (Pmode, XEXP (op0, 0))
10176 : copy_to_reg (XEXP (op0, 0)));
10177 rtx temp, result;
10179 op0 = change_address (op0, VOIDmode, addr);
10180 temp = force_reg (GET_MODE (op0), op0);
10181 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10182 op1 = force_reg (mode, op1);
10184 /* The increment queue is LIFO, thus we have to `queue'
10185 the instructions in reverse order. */
10186 enqueue_insn (op0, gen_move_insn (op0, temp));
10187 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
10188 return result;
10192 /* Preincrement, or we can't increment with one simple insn. */
10193 if (post)
10194 /* Save a copy of the value before inc or dec, to return it later. */
10195 temp = value = copy_to_reg (op0);
10196 else
10197 /* Arrange to return the incremented value. */
10198 /* Copy the rtx because expand_binop will protect from the queue,
10199 and the results of that would be invalid for us to return
10200 if our caller does emit_queue before using our result. */
10201 temp = copy_rtx (value = op0);
10203 /* Increment however we can. */
10204 op1 = expand_binop (mode, this_optab, value, op1,
10205 current_function_check_memory_usage ? NULL_RTX : op0,
10206 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
10207 /* Make sure the value is stored into OP0. */
10208 if (op1 != op0)
10209 emit_move_insn (op0, op1);
10211 return temp;
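#if 0
/* User-level sketch of the distinction handled above:  */
int i = 5, a, b;
a = i++;	/* postincrement: the old value is the result; a == 5, i == 6 */
b = ++i;	/* preincrement: the new value is the result; b == 7, i == 7 */
#endif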
10214 /* Expand all function calls contained within EXP, innermost ones first.
10215 But don't look within expressions that have sequence points.
10216 For each CALL_EXPR, record the rtx for its value
10217 in the CALL_EXPR_RTL field. */
10219 static void
10220 preexpand_calls (exp)
10221 tree exp;
10223 register int nops, i;
10224 int type = TREE_CODE_CLASS (TREE_CODE (exp));
10226 if (! do_preexpand_calls)
10227 return;
10229 /* Only expressions and references can contain calls. */
10231 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
10232 return;
10234 switch (TREE_CODE (exp))
10236 case CALL_EXPR:
10237 /* Do nothing if already expanded. */
10238 if (CALL_EXPR_RTL (exp) != 0
10239 /* Do nothing if the call returns a variable-sized object. */
10240 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
10241 /* Do nothing to built-in functions. */
10242 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
10243 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
10244 == FUNCTION_DECL)
10245 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
10246 return;
10248 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
10249 return;
10251 case COMPOUND_EXPR:
10252 case COND_EXPR:
10253 case TRUTH_ANDIF_EXPR:
10254 case TRUTH_ORIF_EXPR:
10255 /* If we find one of these, then we can be sure
10256 the adjust will be done for it (since it makes jumps).
10257 Do it now, so that if this is inside an argument
10258 of a function, we don't get the stack adjustment
10259 after some other args have already been pushed. */
10260 do_pending_stack_adjust ();
10261 return;
10263 case BLOCK:
10264 case RTL_EXPR:
10265 case WITH_CLEANUP_EXPR:
10266 case CLEANUP_POINT_EXPR:
10267 case TRY_CATCH_EXPR:
10268 return;
10270 case SAVE_EXPR:
10271 if (SAVE_EXPR_RTL (exp) != 0)
10272 return;
10274 default:
10275 break;
10278 nops = tree_code_length[(int) TREE_CODE (exp)];
10279 for (i = 0; i < nops; i++)
10280 if (TREE_OPERAND (exp, i) != 0)
10282 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
10283 if (type == 'e' || type == '<' || type == '1' || type == '2'
10284 || type == 'r')
10285 preexpand_calls (TREE_OPERAND (exp, i));
10289 /* At the start of a function, record that we have no previously-pushed
10290 arguments waiting to be popped. */
10292 void
10293 init_pending_stack_adjust ()
10295 pending_stack_adjust = 0;
10298 /* When exiting from function, if safe, clear out any pending stack adjust
10299 so the adjustment won't get done. */
10301 void
10302 clear_pending_stack_adjust ()
10304 #ifdef EXIT_IGNORE_STACK
10305 if (optimize > 0
10306 && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
10307 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
10308 && ! flag_inline_functions)
10309 pending_stack_adjust = 0;
10310 #endif
10313 /* Pop any previously-pushed arguments that have not been popped yet. */
10315 void
10316 do_pending_stack_adjust ()
10318 if (inhibit_defer_pop == 0)
10320 if (pending_stack_adjust != 0)
10321 adjust_stack (GEN_INT (pending_stack_adjust));
10322 pending_stack_adjust = 0;
10326 /* Expand conditional expressions. */
10328 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
10329 LABEL is an rtx of code CODE_LABEL, in this function and all the
10330 functions here. */
10332 void
10333 jumpifnot (exp, label)
10334 tree exp;
10335 rtx label;
10337 do_jump (exp, label, NULL_RTX);
10340 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
10342 void
10343 jumpif (exp, label)
10344 tree exp;
10345 rtx label;
10347 do_jump (exp, NULL_RTX, label);
10350 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
10351 the result is zero, or IF_TRUE_LABEL if the result is one.
10352 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
10353 meaning fall through in that case.
10355 do_jump always does any pending stack adjust except when it does not
10356 actually perform a jump. An example where there is no jump
10357 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
10359 This function is responsible for optimizing cases such as
10360 &&, || and comparison operators in EXP. */
10362 void
10363 do_jump (exp, if_false_label, if_true_label)
10364 tree exp;
10365 rtx if_false_label, if_true_label;
10367 register enum tree_code code = TREE_CODE (exp);
10368 /* Some cases need to create a label to jump to
10369 in order to properly fall through.
10370 These cases set DROP_THROUGH_LABEL nonzero. */
10371 rtx drop_through_label = 0;
10372 rtx temp;
10373 rtx comparison = 0;
10374 int i;
10375 tree type;
10376 enum machine_mode mode;
10378 emit_queue ();
10380 switch (code)
10382 case ERROR_MARK:
10383 break;
10385 case INTEGER_CST:
10386 temp = integer_zerop (exp) ? if_false_label : if_true_label;
10387 if (temp)
10388 emit_jump (temp);
10389 break;
10391 #if 0
10392 /* This is not true with #pragma weak */
10393 case ADDR_EXPR:
10394 /* The address of something can never be zero. */
10395 if (if_true_label)
10396 emit_jump (if_true_label);
10397 break;
10398 #endif
10400 case NOP_EXPR:
10401 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
10402 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
10403 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
10404 goto normal;
10405 case CONVERT_EXPR:
10406 /* If we are narrowing the operand, we have to do the compare in the
10407 narrower mode. */
10408 if ((TYPE_PRECISION (TREE_TYPE (exp))
10409 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10410 goto normal;
10411 case NON_LVALUE_EXPR:
10412 case REFERENCE_EXPR:
10413 case ABS_EXPR:
10414 case NEGATE_EXPR:
10415 case LROTATE_EXPR:
10416 case RROTATE_EXPR:
10417 /* These cannot change zero->non-zero or vice versa. */
10418 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10419 break;
10421 #if 0
10422 /* This is never less insns than evaluating the PLUS_EXPR followed by
10423 a test and can be longer if the test is eliminated. */
10424 case PLUS_EXPR:
10425 /* Reduce to minus. */
10426 exp = build (MINUS_EXPR, TREE_TYPE (exp),
10427 TREE_OPERAND (exp, 0),
10428 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
10429 TREE_OPERAND (exp, 1))));
10430 /* Process as MINUS. */
10431 #endif
10433 case MINUS_EXPR:
10434 /* Non-zero iff operands of minus differ. */
10435 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
10436 TREE_OPERAND (exp, 0),
10437 TREE_OPERAND (exp, 1)),
10438 NE, NE);
10439 break;
10441 case BIT_AND_EXPR:
10442 /* If we are AND'ing with a small constant, do this comparison in the
10443 smallest type that fits. If the machine doesn't have comparisons
10444 that small, it will be converted back to the wider comparison.
10445 This helps if we are testing the sign bit of a narrower object.
10446 combine can't do this for us because it can't know whether a
10447 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
10449 if (! SLOW_BYTE_ACCESS
10450 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10451 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10452 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
10453 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10454 && (type = type_for_mode (mode, 1)) != 0
10455 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10456 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10457 != CODE_FOR_nothing))
10459 do_jump (convert (type, exp), if_false_label, if_true_label);
10460 break;
10462 goto normal;
10464 case TRUTH_NOT_EXPR:
10465 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10466 break;
10468 case TRUTH_ANDIF_EXPR:
10469 if (if_false_label == 0)
10470 if_false_label = drop_through_label = gen_label_rtx ();
10471 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10472 start_cleanup_deferral ();
10473 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10474 end_cleanup_deferral ();
10475 break;
10477 case TRUTH_ORIF_EXPR:
10478 if (if_true_label == 0)
10479 if_true_label = drop_through_label = gen_label_rtx ();
10480 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10481 start_cleanup_deferral ();
10482 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10483 end_cleanup_deferral ();
10484 break;
10486 case COMPOUND_EXPR:
10487 push_temp_slots ();
10488 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10489 preserve_temp_slots (NULL_RTX);
10490 free_temp_slots ();
10491 pop_temp_slots ();
10492 emit_queue ();
10493 do_pending_stack_adjust ();
10494 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10495 break;
10497 case COMPONENT_REF:
10498 case BIT_FIELD_REF:
10499 case ARRAY_REF:
10501 int bitsize, bitpos, unsignedp;
10502 enum machine_mode mode;
10503 tree type;
10504 tree offset;
10505 int volatilep = 0;
10506 int alignment;
10508 /* Get description of this reference. We don't actually care
10509 about the underlying object here. */
10510 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10511 &mode, &unsignedp, &volatilep,
10512 &alignment);
10514 type = type_for_size (bitsize, unsignedp);
10515 if (! SLOW_BYTE_ACCESS
10516 && type != 0 && bitsize >= 0
10517 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10518 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10519 != CODE_FOR_nothing))
10521 do_jump (convert (type, exp), if_false_label, if_true_label);
10522 break;
10524 goto normal;
10527 case COND_EXPR:
10528 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10529 if (integer_onep (TREE_OPERAND (exp, 1))
10530 && integer_zerop (TREE_OPERAND (exp, 2)))
10531 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10533 else if (integer_zerop (TREE_OPERAND (exp, 1))
10534 && integer_onep (TREE_OPERAND (exp, 2)))
10535 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10537 else
10539 register rtx label1 = gen_label_rtx ();
10540 drop_through_label = gen_label_rtx ();
10542 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10544 start_cleanup_deferral ();
10545 /* Now the THEN-expression. */
10546 do_jump (TREE_OPERAND (exp, 1),
10547 if_false_label ? if_false_label : drop_through_label,
10548 if_true_label ? if_true_label : drop_through_label);
10549 /* In case the do_jump just above never jumps. */
10550 do_pending_stack_adjust ();
10551 emit_label (label1);
10553 /* Now the ELSE-expression. */
10554 do_jump (TREE_OPERAND (exp, 2),
10555 if_false_label ? if_false_label : drop_through_label,
10556 if_true_label ? if_true_label : drop_through_label);
10557 end_cleanup_deferral ();
10559 break;
10561 case EQ_EXPR:
10563 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10565 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10566 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10568 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10569 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10571 do_jump
10572 (fold
10573 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10574 fold (build (EQ_EXPR, TREE_TYPE (exp),
10575 fold (build1 (REALPART_EXPR,
10576 TREE_TYPE (inner_type),
10577 exp0)),
10578 fold (build1 (REALPART_EXPR,
10579 TREE_TYPE (inner_type),
10580 exp1)))),
10581 fold (build (EQ_EXPR, TREE_TYPE (exp),
10582 fold (build1 (IMAGPART_EXPR,
10583 TREE_TYPE (inner_type),
10584 exp0)),
10585 fold (build1 (IMAGPART_EXPR,
10586 TREE_TYPE (inner_type),
10587 exp1)))))),
10588 if_false_label, if_true_label);
10591 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10592 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10594 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10595 && !can_compare_p (TYPE_MODE (inner_type)))
10596 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10597 else
10598 comparison = compare (exp, EQ, EQ);
10599 break;
10602 case NE_EXPR:
10604 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10606 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10607 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10609 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10610 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10612 do_jump
10613 (fold
10614 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10615 fold (build (NE_EXPR, TREE_TYPE (exp),
10616 fold (build1 (REALPART_EXPR,
10617 TREE_TYPE (inner_type),
10618 exp0)),
10619 fold (build1 (REALPART_EXPR,
10620 TREE_TYPE (inner_type),
10621 exp1)))),
10622 fold (build (NE_EXPR, TREE_TYPE (exp),
10623 fold (build1 (IMAGPART_EXPR,
10624 TREE_TYPE (inner_type),
10625 exp0)),
10626 fold (build1 (IMAGPART_EXPR,
10627 TREE_TYPE (inner_type),
10628 exp1)))))),
10629 if_false_label, if_true_label);
10632 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10633 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10635 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10636 && !can_compare_p (TYPE_MODE (inner_type)))
10637 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10638 else
10639 comparison = compare (exp, NE, NE);
10640 break;
10643 case LT_EXPR:
10644 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10645 == MODE_INT)
10646 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10647 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10648 else
10649 comparison = compare (exp, LT, LTU);
10650 break;
10652 case LE_EXPR:
10653 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10654 == MODE_INT)
10655 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10656 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10657 else
10658 comparison = compare (exp, LE, LEU);
10659 break;
10661 case GT_EXPR:
10662 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10663 == MODE_INT)
10664 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10665 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10666 else
10667 comparison = compare (exp, GT, GTU);
10668 break;
10670 case GE_EXPR:
10671 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10672 == MODE_INT)
10673 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10674 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10675 else
10676 comparison = compare (exp, GE, GEU);
10677 break;
10679 default:
10680 normal:
10681 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10682 #if 0
10683 /* This is not needed any more and causes poor code since it causes
10684 comparisons and tests from non-SI objects to have different code
10685 sequences. */
10686 /* Copy to register to avoid generating bad insns by cse
10687 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10688 if (!cse_not_expected && GET_CODE (temp) == MEM)
10689 temp = copy_to_reg (temp);
10690 #endif
10691 do_pending_stack_adjust ();
10692 if (GET_CODE (temp) == CONST_INT)
10693 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10694 else if (GET_CODE (temp) == LABEL_REF)
10695 comparison = const_true_rtx;
10696 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10697 && !can_compare_p (GET_MODE (temp)))
10698 /* Note swapping the labels gives us not-equal. */
10699 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10700 else if (GET_MODE (temp) != VOIDmode)
10701 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10702 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10703 GET_MODE (temp), NULL_RTX, 0);
10704 else
10705 abort ();
10708 /* Do any postincrements in the expression that was tested. */
10709 emit_queue ();
10711 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10712 straight into a conditional jump instruction as the jump condition.
10713 Otherwise, all the work has been done already. */
10715 if (comparison == const_true_rtx)
10717 if (if_true_label)
10718 emit_jump (if_true_label);
10720 else if (comparison == const0_rtx)
10722 if (if_false_label)
10723 emit_jump (if_false_label);
10725 else if (comparison)
10726 do_jump_for_compare (comparison, if_false_label, if_true_label);
10728 if (drop_through_label)
10730 /* If do_jump produces code that might be jumped around,
10731 do any stack adjusts from that code, before the place
10732 where control merges in. */
10733 do_pending_stack_adjust ();
10734 emit_label (drop_through_label);
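/* Sketch of the short-circuit lowering above: "if (a && b) f ();"
   is emitted, in effect, as
	if (a == 0) goto Lfalse;
	if (b == 0) goto Lfalse;
	f ();
     Lfalse: ;
   so each operand gets its own conditional jump and no boolean value
   is ever materialized; TRUTH_ORIF_EXPR is the dual, jumping to the
   true label.  */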
10738 /* Given a comparison expression EXP for values too wide to be compared
10739 with one insn, test the comparison and jump to the appropriate label.
10740 The code of EXP is ignored; we always test GT if SWAP is 0,
10741 and LT if SWAP is 1. */
10743 static void
10744 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10745 tree exp;
10746 int swap;
10747 rtx if_false_label, if_true_label;
10749 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10750 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10751 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10752 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10753 rtx drop_through_label = 0;
10754 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10755 int i;
10757 if (! if_true_label || ! if_false_label)
10758 drop_through_label = gen_label_rtx ();
10759 if (! if_true_label)
10760 if_true_label = drop_through_label;
10761 if (! if_false_label)
10762 if_false_label = drop_through_label;
10764 /* Compare a word at a time, high order first. */
10765 for (i = 0; i < nwords; i++)
10767 rtx comp;
10768 rtx op0_word, op1_word;
10770 if (WORDS_BIG_ENDIAN)
10772 op0_word = operand_subword_force (op0, i, mode);
10773 op1_word = operand_subword_force (op1, i, mode);
10775 else
10777 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10778 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10781 /* All but high-order word must be compared as unsigned. */
10782 comp = compare_from_rtx (op0_word, op1_word,
10783 (unsignedp || i > 0) ? GTU : GT,
10784 unsignedp, word_mode, NULL_RTX, 0);
10785 if (comp == const_true_rtx)
10786 emit_jump (if_true_label);
10787 else if (comp != const0_rtx)
10788 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10790 /* Consider lower words only if these are equal. */
10791 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10792 NULL_RTX, 0);
10793 if (comp == const_true_rtx)
10794 emit_jump (if_false_label);
10795 else if (comp != const0_rtx)
10796 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10799 if (if_false_label)
10800 emit_jump (if_false_label);
10801 if (drop_through_label)
10802 emit_label (drop_through_label);
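#if 0
/* C model of the word-wise loop above for a hypothetical two-word
   signed comparison (high word given first):  */
static int
dw_gt (long hi0, unsigned long lo0, long hi1, unsigned long lo1)
{
  if (hi0 > hi1)		/* high words use the signed compare */
    return 1;
  if (hi0 != hi1)		/* lower words matter only on equality */
    return 0;
  return lo0 > lo1;		/* all lower words compare unsigned */
}
#endif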
10805 /* Compare OP0 with OP1, word at a time, in mode MODE.
10806 UNSIGNEDP says to do unsigned comparison.
10807 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10809 void
10810 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10811 enum machine_mode mode;
10812 int unsignedp;
10813 rtx op0, op1;
10814 rtx if_false_label, if_true_label;
10816 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10817 rtx drop_through_label = 0;
10818 int i;
10820 if (! if_true_label || ! if_false_label)
10821 drop_through_label = gen_label_rtx ();
10822 if (! if_true_label)
10823 if_true_label = drop_through_label;
10824 if (! if_false_label)
10825 if_false_label = drop_through_label;
10827 /* Compare a word at a time, high order first. */
10828 for (i = 0; i < nwords; i++)
10830 rtx comp;
10831 rtx op0_word, op1_word;
10833 if (WORDS_BIG_ENDIAN)
10835 op0_word = operand_subword_force (op0, i, mode);
10836 op1_word = operand_subword_force (op1, i, mode);
10838 else
10840 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10841 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10844 /* All but high-order word must be compared as unsigned. */
10845 comp = compare_from_rtx (op0_word, op1_word,
10846 (unsignedp || i > 0) ? GTU : GT,
10847 unsignedp, word_mode, NULL_RTX, 0);
10848 if (comp == const_true_rtx)
10849 emit_jump (if_true_label);
10850 else if (comp != const0_rtx)
10851 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10853 /* Consider lower words only if these are equal. */
10854 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10855 NULL_RTX, 0);
10856 if (comp == const_true_rtx)
10857 emit_jump (if_false_label);
10858 else if (comp != const0_rtx)
10859 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10862 if (if_false_label)
10863 emit_jump (if_false_label);
10864 if (drop_through_label)
10865 emit_label (drop_through_label);
10868 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10869 with one insn, test the comparison and jump to the appropriate label. */
10871 static void
10872 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10873 tree exp;
10874 rtx if_false_label, if_true_label;
10876 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10877 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10878 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10879 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10880 int i;
10881 rtx drop_through_label = 0;
10883 if (! if_false_label)
10884 drop_through_label = if_false_label = gen_label_rtx ();
10886 for (i = 0; i < nwords; i++)
10888 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10889 operand_subword_force (op1, i, mode),
10890 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10891 word_mode, NULL_RTX, 0);
10892 if (comp == const_true_rtx)
10893 emit_jump (if_false_label);
10894 else if (comp != const0_rtx)
10895 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10898 if (if_true_label)
10899 emit_jump (if_true_label);
10900 if (drop_through_label)
10901 emit_label (drop_through_label);
10904 /* Jump according to whether OP0 is 0.
10905 We assume that OP0 has an integer mode that is too wide
10906 for the available compare insns. */
10908 void
10909 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10910 rtx op0;
10911 rtx if_false_label, if_true_label;
10913 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10914 rtx part;
10915 int i;
10916 rtx drop_through_label = 0;
10918 /* The fastest way of doing this comparison on almost any machine is to
10919 "or" all the words and compare the result. If all have to be loaded
10920 from memory and this is a very wide item, it's possible this may
10921 be slower, but that's highly unlikely. */
10923 part = gen_reg_rtx (word_mode);
10924 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10925 for (i = 1; i < nwords && part != 0; i++)
10926 part = expand_binop (word_mode, ior_optab, part,
10927 operand_subword_force (op0, i, GET_MODE (op0)),
10928 part, 1, OPTAB_WIDEN);
10930 if (part != 0)
10932 rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
10933 NULL_RTX, 0);
10935 if (comp == const_true_rtx)
10936 emit_jump (if_false_label);
10937 else if (comp == const0_rtx)
10938 emit_jump (if_true_label);
10939 else
10940 do_jump_for_compare (comp, if_false_label, if_true_label);
10942 return;
10945 /* If we couldn't do the "or" simply, do this with a series of compares. */
10946 if (! if_false_label)
10947 drop_through_label = if_false_label = gen_label_rtx ();
10949 for (i = 0; i < nwords; i++)
10951 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
10952 GET_MODE (op0)),
10953 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10954 if (comp == const_true_rtx)
10955 emit_jump (if_false_label);
10956 else if (comp != const0_rtx)
10957 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10960 if (if_true_label)
10961 emit_jump (if_true_label);
10963 if (drop_through_label)
10964 emit_label (drop_through_label);
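#if 0
/* C model of the "or all the words" zero test above for a
   hypothetical two-word value: one compare of the OR against zero
   replaces a compare-and-branch per word.  */
static int
dw_is_zero (unsigned long hi, unsigned long lo)
{
  return (hi | lo) == 0;
}
#endif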
10967 /* Given a comparison expression in rtl form, output conditional branches to
10968 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
10970 static void
10971 do_jump_for_compare (comparison, if_false_label, if_true_label)
10972 rtx comparison, if_false_label, if_true_label;
10974 if (if_true_label)
10976 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10977 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
10978 else
10979 abort ();
10981 if (if_false_label)
10982 emit_jump (if_false_label);
10984 else if (if_false_label)
10986 rtx insn;
10987 rtx prev = get_last_insn ();
10988 rtx branch = 0;
10990 /* Output the branch with the opposite condition. Then try to invert
10991 what is generated. If more than one insn is a branch, or if the
10992 branch is not the last insn written, abort. If we can't invert
10993 the branch, make a true label, redirect this jump to that,
10994 emit a jump to the false label and define the true label. */
10996 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10997 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
10998 else
10999 abort ();
11001 /* Here we get the first insn that was just emitted. It used to be the
11002 case that, on some machines, emitting the branch would discard
11003 the previous compare insn and emit a replacement. This isn't
11004 done anymore, but abort if we see that PREV is deleted. */
11006 if (prev == 0)
11007 insn = get_insns ();
11008 else if (INSN_DELETED_P (prev))
11009 abort ();
11010 else
11011 insn = NEXT_INSN (prev);
11013 for (; insn; insn = NEXT_INSN (insn))
11014 if (GET_CODE (insn) == JUMP_INSN)
11015 {
11016 if (branch)
11017 abort ();
11018 branch = insn;
11019 }
11021 if (branch != get_last_insn ())
11022 abort ();
11024 JUMP_LABEL (branch) = if_false_label;
11025 if (! invert_jump (branch, if_false_label))
11026 {
11027 if_true_label = gen_label_rtx ();
11028 redirect_jump (branch, if_true_label);
11029 emit_jump (if_false_label);
11030 emit_label (if_true_label);
11031 }
11032 }
11033 }
11035 /* Generate code for a comparison expression EXP
11036 (including code to compute the values to be compared)
11037 and set (CC0) according to the result.
11038 SIGNED_CODE should be the rtx operation for this comparison for
11039 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
11041 We force a stack adjustment unless there are currently
11042 things pushed on the stack that aren't yet used. */
11044 static rtx
11045 compare (exp, signed_code, unsigned_code)
11046 register tree exp;
11047 enum rtx_code signed_code, unsigned_code;
11048 {
11049 register rtx op0
11050 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
11051 register rtx op1
11052 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
11053 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
11054 register enum machine_mode mode = TYPE_MODE (type);
11055 int unsignedp = TREE_UNSIGNED (type);
11056 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
11058 #ifdef HAVE_canonicalize_funcptr_for_compare
11059 /* If function pointers need to be "canonicalized" before they can
11060 be reliably compared, then canonicalize them. */
11061 if (HAVE_canonicalize_funcptr_for_compare
11062 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0)))
11063 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
11064 == FUNCTION_TYPE))
11065 {
11066 rtx new_op0 = gen_reg_rtx (mode);
11068 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
11069 op0 = new_op0;
11070 }
11072 if (HAVE_canonicalize_funcptr_for_compare
11073 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 1)))
11074 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
11075 == FUNCTION_TYPE))
11076 {
11077 rtx new_op1 = gen_reg_rtx (mode);
11079 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
11080 op1 = new_op1;
11081 }
11082 #endif
11084 return compare_from_rtx (op0, op1, code, unsignedp, mode,
11085 ((mode == BLKmode)
11086 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
11087 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
11088 }
11090 /* Like compare but expects the values to compare as two rtx's.
11091 The decision as to signed or unsigned comparison must be made by the caller.
11093 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
11094 compared.
11096 If ALIGN is non-zero, it is the alignment of this type; if zero, the
11097 size of MODE should be used. */
11099 rtx
11100 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
11101 register rtx op0, op1;
11102 enum rtx_code code;
11103 int unsignedp;
11104 enum machine_mode mode;
11105 rtx size;
11106 int align;
11107 {
11108 rtx tem;
11110 /* If one operand is constant, make it the second one. Only do this
11111 if the other operand is not constant as well. */
11113 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
11114 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
11115 {
11116 tem = op0;
11117 op0 = op1;
11118 op1 = tem;
11119 code = swap_condition (code);
11120 }
11122 if (flag_force_mem)
11123 {
11124 op0 = force_not_mem (op0);
11125 op1 = force_not_mem (op1);
11126 }
11128 do_pending_stack_adjust ();
11130 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
11131 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
11132 return tem;
11134 #if 0
11135 /* There's no need to do this now that combine.c can eliminate lots of
11136 sign extensions. This can be less efficient in certain cases on other
11137 machines. */
11139 /* If this is a signed equality comparison, we can do it as an
11140 unsigned comparison since zero-extension is cheaper than sign
11141 extension and comparisons with zero are done as unsigned. This is
11142 the case even on machines that can do fast sign extension, since
11143 zero-extension is easier to combine with other operations than
11144 sign-extension is. If we are comparing against a constant, we must
11145 convert it to what it would look like unsigned. */
11146 if ((code == EQ || code == NE) && ! unsignedp
11147 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
11148 {
11149 if (GET_CODE (op1) == CONST_INT
11150 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
11151 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
11152 unsignedp = 1;
11153 }
11154 #endif
11156 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
11158 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
11159 }
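/* A sketch of what swap_condition must do for the swap above, with a
   reduced, hypothetical comparison enum (the real table covers more
   codes): moving the constant to the second position swaps the
   condition, it does not invert it.  "5 < x" is "x > 5", not "x >= 5".  */
#if 0
enum cmp { LT_C, GT_C, LE_C, GE_C, EQ_C, NE_C };

static enum cmp
swap_cmp (enum cmp c)
{
  switch (c)
    {
    case LT_C: return GT_C;     /* 5 <  x  ==  x >  5 */
    case GT_C: return LT_C;
    case LE_C: return GE_C;     /* 5 <= x  ==  x >= 5 */
    case GE_C: return LE_C;
    default:   return c;        /* EQ and NE are symmetric */
    }
}
#endif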
11161 /* Generate code to calculate EXP using a store-flag instruction
11162 and return an rtx for the result. EXP is either a comparison
11163 or a TRUTH_NOT_EXPR whose operand is a comparison.
11165 If TARGET is nonzero, store the result there if convenient.
11167 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
11168 cheap.
11170 Return zero if there is no suitable set-flag instruction
11171 available on this machine.
11173 Once expand_expr has been called on the arguments of the comparison,
11174 we are committed to doing the store flag, since it is not safe to
11175 re-evaluate the expression. We emit the store-flag insn by calling
11176 emit_store_flag, but only expand the arguments if we have a reason
11177 to believe that emit_store_flag will be successful. If we think that
11178 it will, but it isn't, we have to simulate the store-flag with a
11179 set/jump/set sequence. */
11181 static rtx
11182 do_store_flag (exp, target, mode, only_cheap)
11183 tree exp;
11184 rtx target;
11185 enum machine_mode mode;
11186 int only_cheap;
11187 {
11188 enum rtx_code code;
11189 tree arg0, arg1, type;
11190 tree tem;
11191 enum machine_mode operand_mode;
11192 int invert = 0;
11193 int unsignedp;
11194 rtx op0, op1;
11195 enum insn_code icode;
11196 rtx subtarget = target;
11197 rtx result, label, pattern, jump_pat;
11199 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
11200 result at the end. We can't simply invert the test since it would
11201 have already been inverted if it were valid. This case occurs for
11202 some floating-point comparisons. */
11204 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
11205 invert = 1, exp = TREE_OPERAND (exp, 0);
11207 arg0 = TREE_OPERAND (exp, 0);
11208 arg1 = TREE_OPERAND (exp, 1);
11209 type = TREE_TYPE (arg0);
11210 operand_mode = TYPE_MODE (type);
11211 unsignedp = TREE_UNSIGNED (type);
11213 /* We won't bother with BLKmode store-flag operations because it would mean
11214 passing a lot of information to emit_store_flag. */
11215 if (operand_mode == BLKmode)
11216 return 0;
11218 /* We won't bother with store-flag operations involving function pointers
11219 when function pointers must be canonicalized before comparisons. */
11220 #ifdef HAVE_canonicalize_funcptr_for_compare
11221 if (HAVE_canonicalize_funcptr_for_compare
11222 && ((POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0)))
11223 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
11224 == FUNCTION_TYPE))
11225 || (POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 1)))
11226 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
11227 == FUNCTION_TYPE))))
11228 return 0;
11229 #endif
11231 STRIP_NOPS (arg0);
11232 STRIP_NOPS (arg1);
11234 /* Get the rtx comparison code to use. We know that EXP is a comparison
11235 operation of some type. Some comparisons against 1 and -1 can be
11236 converted to comparisons with zero. Do so here so that the tests
11237 below will be aware that we have a comparison with zero. These
11238 tests will not catch constants in the first operand, but constants
11239 are rarely passed as the first operand. */
11241 switch (TREE_CODE (exp))
11242 {
11243 case EQ_EXPR:
11244 code = EQ;
11245 break;
11246 case NE_EXPR:
11247 code = NE;
11248 break;
11249 case LT_EXPR:
11250 if (integer_onep (arg1))
11251 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11252 else
11253 code = unsignedp ? LTU : LT;
11254 break;
11255 case LE_EXPR:
11256 if (! unsignedp && integer_all_onesp (arg1))
11257 arg1 = integer_zero_node, code = LT;
11258 else
11259 code = unsignedp ? LEU : LE;
11260 break;
11261 case GT_EXPR:
11262 if (! unsignedp && integer_all_onesp (arg1))
11263 arg1 = integer_zero_node, code = GE;
11264 else
11265 code = unsignedp ? GTU : GT;
11266 break;
11267 case GE_EXPR:
11268 if (integer_onep (arg1))
11269 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11270 else
11271 code = unsignedp ? GEU : GE;
11272 break;
11273 default:
11274 abort ();
11275 }
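/* Worked examples of the rewrites in the switch above, for a signed
   int x (sketch only):
     x <  1    becomes  x <= 0    (LT against 1  ->  LE against 0)
     x >= 1    becomes  x >  0    (GE against 1  ->  GT against 0)
     x <= -1   becomes  x <  0    (LE against -1 ->  LT against 0)
     x >  -1   becomes  x >= 0    (GT against -1 ->  GE against 0)
   For unsigned x, "x < 1" likewise becomes the LEU test "x <= 0",
   so the tests below see a comparison against zero.  */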
11277 /* Put a constant second. */
11278 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
11279 {
11280 tem = arg0; arg0 = arg1; arg1 = tem;
11281 code = swap_condition (code);
11282 }
11284 /* If this is an equality or inequality test of a single bit, we can
11285 do this by shifting the bit being tested to the low-order bit and
11286 masking the result with the constant 1. If the condition was EQ,
11287 we xor it with 1. This does not require an scc insn and is faster
11288 than an scc insn even if we have it. */
11290 if ((code == NE || code == EQ)
11291 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
11292 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11293 {
11294 tree inner = TREE_OPERAND (arg0, 0);
11295 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
11296 int ops_unsignedp;
11298 /* If INNER is a right shift of a constant and it plus BITNUM does
11299 not overflow, adjust BITNUM and INNER. */
11301 if (TREE_CODE (inner) == RSHIFT_EXPR
11302 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
11303 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
11304 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
11305 < TYPE_PRECISION (type)))
11306 {
11307 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
11308 inner = TREE_OPERAND (inner, 0);
11309 }
11311 /* If we are going to be able to omit the AND below, we must do our
11312 operations as unsigned. If we must use the AND, we have a choice.
11313 Normally unsigned is faster, but for some machines signed is. */
11314 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
11315 #ifdef LOAD_EXTEND_OP
11316 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
11317 #else
11318 : 1
11319 #endif
11320 );
11322 if (subtarget == 0 || GET_CODE (subtarget) != REG
11323 || GET_MODE (subtarget) != operand_mode
11324 || ! safe_from_p (subtarget, inner, 1))
11325 subtarget = 0;
11327 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
11329 if (bitnum != 0)
11330 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
11331 size_int (bitnum), subtarget, ops_unsignedp);
11333 if (GET_MODE (op0) != mode)
11334 op0 = convert_to_mode (mode, op0, ops_unsignedp);
11336 if ((code == EQ && ! invert) || (code == NE && invert))
11337 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
11338 ops_unsignedp, OPTAB_LIB_WIDEN);
11340 /* Put the AND last so it can combine with more things. */
11341 if (bitnum != TYPE_PRECISION (type) - 1)
11342 op0 = expand_and (op0, const1_rtx, subtarget);
11344 return op0;
11345 }
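/* A plain-C sketch of the single-bit transformation above, with
   hypothetical names.  When N is the most significant bit, the shift
   alone isolates it and the final AND is omitted, as in the code
   above.  */
#if 0
static int
bit_is_set (unsigned int x, int n)      /* (x & (1u << n)) != 0 */
{
  return (x >> n) & 1;                  /* shift bit N down, mask it */
}

static int
bit_is_clear (unsigned int x, int n)    /* (x & (1u << n)) == 0 */
{
  return ((x >> n) & 1) ^ 1;            /* same, then XOR with 1 */
}
#endif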
11347 /* Now see if we are likely to be able to do this. Return if not. */
11348 if (! can_compare_p (operand_mode))
11349 return 0;
11350 icode = setcc_gen_code[(int) code];
11351 if (icode == CODE_FOR_nothing
11352 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
11353 {
11354 /* We can only do this if it is one of the special cases that
11355 can be handled without an scc insn. */
11356 if ((code == LT && integer_zerop (arg1))
11357 || (! only_cheap && code == GE && integer_zerop (arg1)))
11358 ;
11359 else if (BRANCH_COST >= 0
11360 && ! only_cheap && (code == NE || code == EQ)
11361 && TREE_CODE (type) != REAL_TYPE
11362 && ((abs_optab->handlers[(int) operand_mode].insn_code
11363 != CODE_FOR_nothing)
11364 || (ffs_optab->handlers[(int) operand_mode].insn_code
11365 != CODE_FOR_nothing)))
11366 ;
11367 else
11368 return 0;
11369 }
11371 preexpand_calls (exp);
11372 if (subtarget == 0 || GET_CODE (subtarget) != REG
11373 || GET_MODE (subtarget) != operand_mode
11374 || ! safe_from_p (subtarget, arg1, 1))
11375 subtarget = 0;
11377 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
11378 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
11380 if (target == 0)
11381 target = gen_reg_rtx (mode);
11383 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
11384 because, if the emit_store_flag does anything it will succeed and
11385 OP0 and OP1 will not be used subsequently. */
11387 result = emit_store_flag (target, code,
11388 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
11389 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
11390 operand_mode, unsignedp, 1);
11392 if (result)
11393 {
11394 if (invert)
11395 result = expand_binop (mode, xor_optab, result, const1_rtx,
11396 result, 0, OPTAB_LIB_WIDEN);
11397 return result;
11398 }
11400 /* If this failed, we have to do this with set/compare/jump/set code. */
11401 if (GET_CODE (target) != REG
11402 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
11403 target = gen_reg_rtx (GET_MODE (target));
11405 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
11406 result = compare_from_rtx (op0, op1, code, unsignedp,
11407 operand_mode, NULL_RTX, 0);
11408 if (GET_CODE (result) == CONST_INT)
11409 return (((result == const0_rtx && ! invert)
11410 || (result != const0_rtx && invert))
11411 ? const0_rtx : const1_rtx);
11413 label = gen_label_rtx ();
11414 if (bcc_gen_fctn[(int) code] == 0)
11415 abort ();
11417 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
11418 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
11419 emit_label (label);
11421 return target;
11422 }
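/* The set/jump/set sequence emitted above has this C shape (sketch;
   cond stands for the comparison being expanded, invert for the
   TRUTH_NOT_EXPR flag):  */
#if 0
static int
store_flag_fallback (int cond, int invert)
{
  int target = invert ? 0 : 1;  /* set: assume the condition holds */
  if (cond)
    goto done;                  /* jump: it held, keep the value */
  target = invert ? 1 : 0;      /* set: it failed, store the other */
 done:
  return target;
}
#endif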
11424 /* Generate a tablejump instruction (used for switch statements). */
11426 #ifdef HAVE_tablejump
11428 /* INDEX is the value being switched on, with the lowest value
11429 in the table already subtracted.
11430 MODE is its expected mode (needed if INDEX is constant).
11431 RANGE is the length of the jump table.
11432 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11434 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11435 index value is out of range. */
11437 void
11438 do_tablejump (index, mode, range, table_label, default_label)
11439 rtx index, range, table_label, default_label;
11440 enum machine_mode mode;
11441 {
11442 register rtx temp, vector;
11444 /* Do an unsigned comparison (in the proper mode) between the index
11445 expression and the value which represents the length of the range.
11446 Since we just finished subtracting the lower bound of the range
11447 from the index expression, this comparison allows us to simultaneously
11448 check that the original index expression value is both greater than
11449 or equal to the minimum value of the range and less than or equal to
11450 the maximum value of the range. */
11452 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
11453 emit_jump_insn (gen_bgtu (default_label));
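/* The single unsigned comparison just emitted performs the classic
   two-sided range check; in C (sketch, hypothetical names):  */
#if 0
static int
out_of_range (long i, long lo, long hi)
{
  /* After subtracting LO, one unsigned compare tests both bounds:
     i < lo wraps around to a huge unsigned value, and i > hi stays
     above the range length hi - lo.  */
  return (unsigned long) (i - lo) > (unsigned long) (hi - lo);
}
#endif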
11455 /* If index is in range, it must fit in Pmode.
11456 Convert to Pmode so we can index with it. */
11457 if (mode != Pmode)
11458 index = convert_to_mode (Pmode, index, 1);
11460 /* Don't let a MEM slip through, because then INDEX that comes
11461 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11462 and break_out_memory_refs will go to work on it and mess it up. */
11463 #ifdef PIC_CASE_VECTOR_ADDRESS
11464 if (flag_pic && GET_CODE (index) != REG)
11465 index = copy_to_mode_reg (Pmode, index);
11466 #endif
11468 /* If flag_force_addr were to affect this address
11469 it could interfere with the tricky assumptions made
11470 about addresses that contain label-refs,
11471 which may be valid only very near the tablejump itself. */
11472 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11473 GET_MODE_SIZE, because this indicates how large insns are. The other
11474 uses should all be Pmode, because they are addresses. This code
11475 could fail if addresses and insns are not the same size. */
11476 index
11477 = gen_rtx_PLUS (Pmode,
11478 gen_rtx_MULT (Pmode, index,
11479 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11480 gen_rtx_LABEL_REF (Pmode, table_label));
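/* In C terms, the address expression built above is plain indexed
   table addressing (sketch, hypothetical names):  */
#if 0
static void *
table_entry_addr (char *table_base, long idx, long entry_size)
{
  /* table_base plays the role of LABEL_REF (table_label), entry_size
     that of GET_MODE_SIZE (CASE_VECTOR_MODE).  */
  return table_base + idx * entry_size;
}
#endif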
11482 #ifdef PIC_CASE_VECTOR_ADDRESS
11483 if (flag_pic)
11484 index = PIC_CASE_VECTOR_ADDRESS (index);
11485 else
11486 #endif
11487 index = memory_address_noforce (CASE_VECTOR_MODE, index);
11488 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11489 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
11490 RTX_UNCHANGING_P (vector) = 1;
11491 convert_move (temp, vector, 0);
11493 emit_jump_insn (gen_tablejump (temp, table_label));
11495 #ifndef CASE_VECTOR_PC_RELATIVE
11496 /* If we are generating PIC code or if the table is PC-relative, the
11497 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11498 if (! flag_pic)
11499 emit_barrier ();
11500 #endif
11501 }
11503 #endif /* HAVE_tablejump */
11506 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
11507 to that value is on the top of the stack. The resulting type is TYPE, and
11508 the source declaration is DECL. */
11510 void
11511 bc_load_memory (type, decl)
11512 tree type, decl;
11513 {
11514 enum bytecode_opcode opcode;
11517 /* Bit fields are special. We only know about signed and
11518 unsigned ints, and enums. The latter are treated as
11519 signed integers. */
11521 if (DECL_BIT_FIELD (decl))
11522 if (TREE_CODE (type) == ENUMERAL_TYPE
11523 || TREE_CODE (type) == INTEGER_TYPE)
11524 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
11525 else
11526 abort ();
11527 else
11528 /* See corresponding comment in bc_store_memory. */
11529 if (TYPE_MODE (type) == BLKmode
11530 || TYPE_MODE (type) == VOIDmode)
11531 return;
11532 else
11533 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
11535 if (opcode == neverneverland)
11536 abort ();
11538 bc_emit_bytecode (opcode);
11540 #ifdef DEBUG_PRINT_CODE
11541 fputc ('\n', stderr);
11542 #endif
11543 }
11546 /* Store the contents of the second stack slot to the address in the
11547 top stack slot. DECL is the declaration of the destination and is used
11548 to determine whether we're dealing with a bitfield. */
11550 void
11551 bc_store_memory (type, decl)
11552 tree type, decl;
11553 {
11554 enum bytecode_opcode opcode;
11557 if (DECL_BIT_FIELD (decl))
11558 {
11559 if (TREE_CODE (type) == ENUMERAL_TYPE
11560 || TREE_CODE (type) == INTEGER_TYPE)
11561 opcode = sstoreBI;
11562 else
11563 abort ();
11564 }
11565 else
11566 if (TYPE_MODE (type) == BLKmode)
11567 {
11568 /* Copy structure. This expands to a block copy instruction, storeBLK.
11569 In addition to the arguments expected by the other store instructions,
11570 it also expects a type size (SImode) on top of the stack, which is the
11571 structure size in size units (usually bytes). The two first arguments
11572 are already on the stack; so we just put the size on level 1. For some
11573 other languages, the size may be variable, this is why we don't encode
11574 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
11576 bc_expand_expr (TYPE_SIZE (type));
11577 opcode = storeBLK;
11578 }
11579 else
11580 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
11582 if (opcode == neverneverland)
11583 abort ();
11585 bc_emit_bytecode (opcode);
11587 #ifdef DEBUG_PRINT_CODE
11588 fputc ('\n', stderr);
11589 #endif
11590 }
11593 /* Allocate local stack space sufficient to hold a value of the given
11594 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
11595 integral power of 2. A special case is locals of type VOID, which
11596 have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
11597 remapped into the corresponding attribute of SI. */
11599 rtx
11600 bc_allocate_local (size, alignment)
11601 int size, alignment;
11602 {
11603 rtx retval;
11604 int byte_alignment;
11606 if (size < 0)
11607 abort ();
11609 /* Normalize size and alignment */
11610 if (!size)
11611 size = UNITS_PER_WORD;
11613 if (alignment < BITS_PER_UNIT)
11614 byte_alignment = 1 << (INT_ALIGN - 1);
11615 else
11616 /* Align */
11617 byte_alignment = alignment / BITS_PER_UNIT;
11619 if (local_vars_size & (byte_alignment - 1))
11620 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
11622 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11623 local_vars_size += size;
11625 return retval;
11626 }
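/* The adjustment above is the usual round-up-to-alignment step; for
   a power-of-two alignment it is equivalent to (sketch):  */
#if 0
static int
round_up (int offset, int align)        /* align is a power of 2 */
{
  return (offset + align - 1) & ~(align - 1);   /* e.g. 5 -> 8 for align 4 */
}
#endif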
11629 /* Allocate variable-sized local array. Variable-sized arrays are
11630 actually pointers to the address in memory where they are stored. */
11632 rtx
11633 bc_allocate_variable_array (size)
11634 tree size;
11635 {
11636 rtx retval;
11637 const int ptralign = (1 << (PTR_ALIGN - 1));
11639 /* Align pointer */
11640 if (local_vars_size & ptralign)
11641 local_vars_size += ptralign - (local_vars_size & ptralign);
11643 /* Note down local space needed: pointer to block; also return
11644 dummy rtx */
11646 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11647 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
11648 return retval;
11649 }
11652 /* Push the machine address for the given external variable offset. */
11654 void
11655 bc_load_externaddr (externaddr)
11656 rtx externaddr;
11657 {
11658 bc_emit_bytecode (constP);
11659 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
11660 BYTECODE_BC_LABEL (externaddr)->offset);
11662 #ifdef DEBUG_PRINT_CODE
11663 fputc ('\n', stderr);
11664 #endif
11665 }
11668 /* Like above, but expects an IDENTIFIER. */
11670 void
11671 bc_load_externaddr_id (id, offset)
11672 tree id;
11673 int offset;
11674 {
11675 if (!IDENTIFIER_POINTER (id))
11676 abort ();
11678 bc_emit_bytecode (constP);
11679 bc_emit_code_labelref (xstrdup (IDENTIFIER_POINTER (id)), offset);
11681 #ifdef DEBUG_PRINT_CODE
11682 fputc ('\n', stderr);
11683 #endif
11684 }
11687 /* Push the machine address for the given local variable offset. */
11689 void
11690 bc_load_localaddr (localaddr)
11691 rtx localaddr;
11692 {
11693 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
11694 }
11697 /* Push the machine address for the given parameter offset.
11698 NOTE: offset is in bits. */
11700 void
11701 bc_load_parmaddr (parmaddr)
11702 rtx parmaddr;
11703 {
11704 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
11705 / BITS_PER_UNIT));
11706 }
11709 /* Convert a[i] into *(a + i). */
11711 tree
11712 bc_canonicalize_array_ref (exp)
11713 tree exp;
11714 {
11715 tree type = TREE_TYPE (exp);
11716 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
11717 TREE_OPERAND (exp, 0));
11718 tree index = TREE_OPERAND (exp, 1);
11721 /* Convert the integer argument to a type the same size as a pointer
11722 so the multiply won't overflow spuriously. */
11724 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
11725 index = convert (type_for_size (POINTER_SIZE, 0), index);
11727 /* The array address isn't volatile even if the array is.
11728 (Of course this isn't terribly relevant since the bytecode
11729 translator treats nearly everything as volatile anyway.) */
11730 TREE_THIS_VOLATILE (array_adr) = 0;
11732 return build1 (INDIRECT_REF, type,
11733 fold (build (PLUS_EXPR,
11734 TYPE_POINTER_TO (type),
11735 array_adr,
11736 fold (build (MULT_EXPR,
11737 TYPE_POINTER_TO (type),
11738 index,
11739 size_in_bytes (type))))));
11740 }
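/* At the source level, the rewrite above is the identity
   a[i] == *(a + i), with the index widened to pointer precision
   first so the byte offset i * sizeof (*a) cannot overflow in a
   narrower integer type (sketch):  */
#if 0
static int
element (int *a, long i)
{
  return *(int *) ((char *) a + i * sizeof (int));      /* same as a[i] */
}
#endif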
11743 /* Load the address of the component referenced by the given
11744 COMPONENT_REF expression.
11746 Returns innermost lvalue. */
11748 tree
11749 bc_expand_component_address (exp)
11750 tree exp;
11751 {
11752 tree tem, chain;
11753 enum machine_mode mode;
11754 int bitpos = 0;
11755 HOST_WIDE_INT SIval;
11758 tem = TREE_OPERAND (exp, 1);
11759 mode = DECL_MODE (tem);
11762 /* Compute cumulative bit offset for nested component refs
11763 and array refs, and find the ultimate containing object. */
11765 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
11766 {
11767 if (TREE_CODE (tem) == COMPONENT_REF)
11768 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
11769 else
11770 if (TREE_CODE (tem) == ARRAY_REF
11771 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11772 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
11774 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
11775 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
11776 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
11777 else
11778 break;
11779 }
11781 bc_expand_expr (tem);
11784 /* For bitfields also push their offset and size */
11785 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
11786 bc_push_offset_and_size (bitpos, TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 1))));
11787 else
11788 if ((SIval = bitpos / BITS_PER_UNIT))
11789 bc_emit_instruction (addconstPSI, SIval);
11791 return (TREE_OPERAND (exp, 1));
11792 }
11795 /* Emit code to push two SI constants */
11797 void
11798 bc_push_offset_and_size (offset, size)
11799 HOST_WIDE_INT offset, size;
11800 {
11801 bc_emit_instruction (constSI, offset);
11802 bc_emit_instruction (constSI, size);
11803 }
11806 /* Emit byte code to push the address of the given lvalue expression to
11807 the stack. If it's a bit field, we also push offset and size info.
11809 Returns innermost component, which allows us to determine not only
11810 its type, but also whether it's a bitfield. */
11812 tree
11813 bc_expand_address (exp)
11814 tree exp;
11815 {
11816 /* Safeguard */
11817 if (!exp || TREE_CODE (exp) == ERROR_MARK)
11818 return (exp);
11821 switch (TREE_CODE (exp))
11822 {
11823 case ARRAY_REF:
11825 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
11827 case COMPONENT_REF:
11829 return (bc_expand_component_address (exp));
11831 case INDIRECT_REF:
11833 bc_expand_expr (TREE_OPERAND (exp, 0));
11835 /* For variable-sized types: retrieve pointer. Sometimes the
11836 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
11837 also make sure we have an operand, just in case... */
11839 if (TREE_OPERAND (exp, 0)
11840 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
11841 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
11842 bc_emit_instruction (loadP);
11844 /* If packed, also return offset and size */
11845 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
11847 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
11848 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
11850 return (TREE_OPERAND (exp, 0));
11852 case FUNCTION_DECL:
11854 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11855 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
11856 break;
11858 case PARM_DECL:
11860 bc_load_parmaddr (DECL_RTL (exp));
11862 /* For variable-sized types: retrieve pointer */
11863 if (TYPE_SIZE (TREE_TYPE (exp))
11864 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11865 bc_emit_instruction (loadP);
11867 /* If packed, also return offset and size */
11868 if (DECL_BIT_FIELD (exp))
11869 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11870 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11872 break;
11874 case RESULT_DECL:
11876 bc_emit_instruction (returnP);
11877 break;
11879 case VAR_DECL:
11881 #if 0
11882 if (BYTECODE_LABEL (DECL_RTL (exp)))
11883 bc_load_externaddr (DECL_RTL (exp));
11884 #endif
11886 if (DECL_EXTERNAL (exp))
11887 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11888 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
11889 else
11890 bc_load_localaddr (DECL_RTL (exp));
11892 /* For variable-sized types: retrieve pointer */
11893 if (TYPE_SIZE (TREE_TYPE (exp))
11894 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11895 bc_emit_instruction (loadP);
11897 /* If packed, also return offset and size */
11898 if (DECL_BIT_FIELD (exp))
11899 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11900 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11902 break;
11904 case STRING_CST:
11905 {
11906 rtx r;
11908 bc_emit_bytecode (constP);
11909 r = output_constant_def (exp);
11910 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
11912 #ifdef DEBUG_PRINT_CODE
11913 fputc ('\n', stderr);
11914 #endif
11916 break;
11917 }
11918 default:
11920 abort ();
11921 break;
11922 }
11924 /* Most lvalues don't have components. */
11925 return (exp);
11926 }
11929 /* Emit a type code to be used by the runtime support in handling
11930 parameter passing. The type code consists of the machine mode
11931 plus the minimal alignment shifted left 8 bits. */
11933 tree
11934 bc_runtime_type_code (type)
11935 tree type;
11936 {
11937 int val;
11939 switch (TREE_CODE (type))
11940 {
11941 case VOID_TYPE:
11942 case INTEGER_TYPE:
11943 case REAL_TYPE:
11944 case COMPLEX_TYPE:
11945 case ENUMERAL_TYPE:
11946 case POINTER_TYPE:
11947 case RECORD_TYPE:
11949 val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
11950 break;
11952 case ERROR_MARK:
11954 val = 0;
11955 break;
11957 default:
11959 abort ();
11960 }
11961 return build_int_2 (val, 0);
11962 }
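/* Worked example of the encoding above, with hypothetical numbers:
   if (int) TYPE_MODE (type) is 4 and TYPE_ALIGN (type) is 32, the
   resulting code is 4 | (32 << 8) == 0x2004: the machine mode in the
   low byte, the minimal alignment in the bits above it.  */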
11965 /* Generate constructor label */
11967 char *
11968 bc_gen_constr_label ()
11969 {
11970 static int label_counter;
11971 static char label[20];
11973 sprintf (label, "*LR%d", label_counter++);
11975 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
11976 }
11979 /* Evaluate constructor CONSTR and return pointer to it on level one. We
11980 expand the constructor data as static data, and push a pointer to it.
11981 The pointer is put in the pointer table and is retrieved by a constP
11982 bytecode instruction. We then loop and store each constructor member in
11983 the corresponding component. Finally, we return the original pointer on
11984 the stack. */
11986 void
11987 bc_expand_constructor (constr)
11988 tree constr;
11989 {
11990 char *l;
11991 HOST_WIDE_INT ptroffs;
11992 rtx constr_rtx;
11995 /* Literal constructors are handled as constants, whereas
11996 non-literals are evaluated and stored element by element
11997 into the data segment. */
11999 /* Allocate space in proper segment and push pointer to space on stack.
12000 */
12002 l = bc_gen_constr_label ();
12004 if (TREE_CONSTANT (constr))
12005 {
12006 text_section ();
12008 bc_emit_const_labeldef (l);
12009 bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
12010 }
12011 else
12012 {
12013 data_section ();
12015 bc_emit_data_labeldef (l);
12016 bc_output_data_constructor (constr);
12017 }
12020 /* Add reference to pointer table and recall pointer to stack;
12021 this code is common for both types of constructors: literals
12022 and non-literals. */
12024 ptroffs = bc_define_pointer (l);
12025 bc_emit_instruction (constP, ptroffs);
12027 /* This is all that has to be done if it's a literal. */
12028 if (TREE_CONSTANT (constr))
12029 return;
12032 /* At this point, we have the pointer to the structure on top of the stack.
12033 Generate sequences of store_memory calls for the constructor. */
12035 /* constructor type is structure */
12036 if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
12037 {
12038 register tree elt;
12040 /* If the constructor has fewer fields than the structure,
12041 clear the whole structure first. */
12043 if (list_length (CONSTRUCTOR_ELTS (constr))
12044 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
12045 {
12046 bc_emit_instruction (duplicate);
12047 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
12048 bc_emit_instruction (clearBLK);
12049 }
12051 /* Store each element of the constructor into the corresponding
12052 field of TARGET. */
12054 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
12055 {
12056 register tree field = TREE_PURPOSE (elt);
12057 register enum machine_mode mode;
12058 int bitsize;
12059 int bitpos;
12060 int unsignedp;
12062 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
12063 mode = DECL_MODE (field);
12064 unsignedp = TREE_UNSIGNED (field);
12066 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
12068 bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
12069 /* The alignment of TARGET is
12070 at least what its type requires. */
12071 VOIDmode, 0,
12072 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
12073 int_size_in_bytes (TREE_TYPE (constr)));
12074 }
12075 }
12076 else
12078 /* Constructor type is array */
12079 if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
12080 {
12081 register tree elt;
12082 register int i;
12083 tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
12084 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
12085 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
12086 tree elttype = TREE_TYPE (TREE_TYPE (constr));
12088 /* If the constructor has fewer fields than the structure,
12089 clear the whole structure first. */
12091 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
12092 {
12093 bc_emit_instruction (duplicate);
12094 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
12095 bc_emit_instruction (clearBLK);
12096 }
12099 /* Store each element of the constructor into the corresponding
12100 element of TARGET, determined by counting the elements. */
12102 for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
12103 elt;
12104 elt = TREE_CHAIN (elt), i++)
12105 {
12106 register enum machine_mode mode;
12107 int bitsize;
12108 int bitpos;
12109 int unsignedp;
12111 mode = TYPE_MODE (elttype);
12112 bitsize = GET_MODE_BITSIZE (mode);
12113 unsignedp = TREE_UNSIGNED (elttype);
12115 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
12116 /* * TYPE_SIZE_UNIT (elttype) */ );
12118 bc_store_field (elt, bitsize, bitpos, mode,
12119 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
12120 /* The alignment of TARGET is
12121 at least what its type requires. */
12122 VOIDmode, 0,
12123 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
12124 int_size_in_bytes (TREE_TYPE (constr)));
12125 }
12126 }
12127 }
12131 /* Store the value of EXP (an expression tree) into member FIELD of
12132 structure at address on stack, which has type TYPE, mode MODE and
12133 occupies BITSIZE bits, starting BITPOS bits from the beginning of the
12134 structure.
12136 ALIGN is the alignment that TARGET is known to have, measured in bytes.
12137 TOTAL_SIZE is its size in bytes, or -1 if variable. */
12139 void
12140 bc_store_field (field, bitsize, bitpos, mode, exp, type,
12141 value_mode, unsignedp, align, total_size)
12142 int bitsize, bitpos;
12143 enum machine_mode mode;
12144 tree field, exp, type;
12145 enum machine_mode value_mode;
12146 int unsignedp;
12147 int align;
12148 int total_size;
12149 {
12151 /* Expand expression and copy pointer */
12152 bc_expand_expr (exp);
12153 bc_emit_instruction (over);
12156 /* If the component is a bit field, we cannot use addressing to access
12157 it. Use bit-field techniques to store in it. */
12159 if (DECL_BIT_FIELD (field))
12160 {
12161 bc_store_bit_field (bitpos, bitsize, unsignedp);
12162 return;
12163 }
12164 else
12165 /* Not bit field */
12166 {
12167 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
12169 /* Advance pointer to the desired member */
12170 if (offset)
12171 bc_emit_instruction (addconstPSI, offset);
12173 /* Store */
12174 bc_store_memory (type, field);
12175 }
12176 }
12179 /* Store SI/SU in bitfield */
12181 void
12182 bc_store_bit_field (offset, size, unsignedp)
12183 int offset, size, unsignedp;
12184 {
12185 /* Push bitfield offset and size */
12186 bc_push_offset_and_size (offset, size);
12188 /* Store */
12189 bc_emit_instruction (sstoreBI);
12190 }
12193 /* Load SI/SU from bitfield */
12195 void
12196 bc_load_bit_field (offset, size, unsignedp)
12197 int offset, size, unsignedp;
12198 {
12199 /* Push bitfield offset and size */
12200 bc_push_offset_and_size (offset, size);
12202 /* Load: sign-extend if signed, else zero-extend */
12203 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
12204 }
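/* What the two bit-field load opcodes compute, as a C sketch with
   hypothetical names (assumes 0 < size < the number of bits in a
   long):  */
#if 0
static unsigned long
zx_load (unsigned long word, int offset, int size)      /* zxloadBI */
{
  return (word >> offset) & ((1UL << size) - 1);
}

static long
sx_load (unsigned long word, int offset, int size)      /* sxloadBI */
{
  long v = (long) ((word >> offset) & ((1UL << size) - 1));
  long sign = 1L << (size - 1);
  return (v ^ sign) - sign;     /* propagate the field's top bit */
}
#endif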
12207 /* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
12208 (adjust stack pointer upwards), negative means add that number of
12209 levels (adjust the stack pointer downwards). Only positive values
12210 normally make sense. */
12212 void
12213 bc_adjust_stack (nlevels)
12214 int nlevels;
12215 {
12216 switch (nlevels)
12217 {
12218 case 0:
12219 break;
12221 case 2:
12222 bc_emit_instruction (drop);
12224 case 1:
12225 bc_emit_instruction (drop);
12226 break;
12228 default:
12230 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
12231 stack_depth -= nlevels;
12232 }
12234 #if defined (VALIDATE_STACK_FOR_BC)
12235 VALIDATE_STACK_FOR_BC ();
12236 #endif
12237 }