/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include <stdio.h>
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "defaults.h"

#include "bytecode.h"
#include "bc-opcode.h"
#include "bc-typecd.h"
#include "bc-optab.h"
#include "bc-emit.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))
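
/* For example, CEIL (7, 4) is 2: a 7-byte quantity occupies two 4-byte
   words, where plain integer division would give 1.  */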

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* Don't check memory usage, since code is being emitted to check a memory
   usage.  Used when flag_check_memory_usage is true, to avoid infinite
   recursion.  */
static int in_check_memory_usage;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int len;
  int offset;
  int reverse;
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};
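
/* In both structures above, AUTINC_* is nonzero when the corresponding
   address is already auto-incremented (or auto-decremented) by its
   addressing mode, EXPLICIT_INC_* is -1 or 1 when an explicit add insn
   must step the address before or after each piece, and REVERSE means
   the block is processed from its high end downward.  */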

/* Used to generate bytecodes: keep track of size of local variables,
   as well as depth of arithmetic stack.  (Notice that variables are
   stored on the machine's stack, not the arithmetic stack.)  */

static rtx get_push_address PROTO ((int));
extern int local_vars_size;
extern int stack_depth;
extern int max_stack_depth;
extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;

static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
				    struct move_by_pieces *));
static void clear_by_pieces PROTO((rtx, int, int));
static void clear_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
				     struct clear_by_pieces *));
static int is_zeros_p PROTO((tree));
static int mostly_zeros_p PROTO((tree));
static void store_constructor PROTO((tree, rtx, int));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
			      enum machine_mode, int, int, int));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree));
static int fixed_type_p PROTO((tree));
static rtx var_rtx PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx,
				 enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int, int));
void bc_expand_increment PROTO((struct increment_operator *, tree));
rtx bc_allocate_local PROTO((int, int));
void bc_store_memory PROTO((tree, tree));
tree bc_expand_component_address PROTO((tree));
tree bc_expand_address PROTO((tree));
void bc_expand_constructor PROTO((tree));
void bc_adjust_stack PROTO((int));
tree bc_canonicalize_array_ref PROTO((tree));
void bc_load_memory PROTO((tree, tree));
void bc_load_externaddr PROTO((rtx));
void bc_load_externaddr_id PROTO((tree, int));
void bc_load_localaddr PROTO((rtx));
void bc_load_parmaddr PROTO((rtx));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
extern tree truthvalue_conversion PROTO((tree));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif

/* Maps used to convert modes to const, load, and store bytecodes.  */
enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];

/* Initialize maps used to convert modes to const, load, and store
   bytecodes.  */

void
bc_init_mode_to_opcode_maps ()
{
  int mode;

  for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
    mode_to_const_map[mode]
      = mode_to_load_map[mode]
	= mode_to_store_map[mode] = neverneverland;

#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
  mode_to_const_map[(int) SYM] = CONST; \
  mode_to_load_map[(int) SYM] = LOAD; \
  mode_to_store_map[(int) SYM] = STORE;

#include "modemap.def"
#undef DEF_MODEMAP
}
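
/* Each DEF_MODEMAP line in modemap.def thus expands into the three
   assignments above.  A hypothetical entry
     DEF_MODEMAP (SImode, codeSI, ucodeSI, constSI, loadSI, storeSI)
   would record constSI, loadSI and storeSI as the const, load and store
   bytecodes for SImode; see modemap.def for the actual entries.  */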

/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx (REG, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}
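
/* The operands of the QUEUED rtx built above are, in order: the variable
   itself (QUEUED_VAR), the increment insn once it has been emitted
   (QUEUED_INSN), a register holding a pre-increment copy if one is ever
   made (QUEUED_COPY), the insn body to emit (QUEUED_BODY), and the next
   queue entry (QUEUED_NEXT).  */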

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));

	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
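
/* For instance, when something like `*p++' is expanded, the increment of
   P is queued and P's uses are rewritten as a QUEUED rtx; passing that
   rtx through protect_from_queue yields P itself while the increment is
   still pending, or a temporary holding P's pre-increment value once the
   increment insn has been emitted.  */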

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion */			/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
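
/* Example: with 32-bit words, converting an SImode register to a DImode
   register takes the "expanding beyond a word" path above unless the
   target provides a single extendsidi2-style insn: the low word of the
   destination receives the source, and the remaining word is filled
   with zero (unsignedp nonzero) or with copies of the sign bit.  */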

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }
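
/* Worked example: on a host where HOST_WIDE_INT is 32 bits, converting
   the HImode constant -5 to an unsigned 64-bit mode masks VAL down to
   0xfffb and then builds the double-word constant with an explicit zero
   high word; plain gen_lowpart would have sign-extended and produced an
   all-ones high word.  */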

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}

/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
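
/* For instance, with MOVE_MAX == 4 and full alignment, 7 bytes cost one
   SImode move (4 bytes), one HImode move (2 bytes) and one QImode move
   (1 byte), so move_by_pieces_ninsns (7, 4) returns 3.  */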

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

      from1
	= (data->autinc_from
	   ? gen_rtx (MEM, mode, data->from_addr)
	   : copy_rtx (change_address (data->from, mode,
				       plus_constant (data->from_addr,
						      data->offset))));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}

/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  rtx retval = 0;

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= GET_MODE_MASK (mode)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return 0;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      retval
	= emit_library_call_value (memcpy_libfunc, NULL_RTX, 0,
				   ptr_mode, 3, XEXP (x, 0), Pmode,
				   XEXP (y, 0), Pmode,
				   convert_to_mode (TYPE_MODE (sizetype), size,
						    TREE_UNSIGNED (sizetype)),
				   TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
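
/* To recap the fallback chain in emit_block_move: a constant SIZE small
   enough that move_by_pieces_ninsns counts fewer than MOVE_RATIO insns
   is copied inline by move_by_pieces; otherwise each movstrM pattern is
   tried from narrowest to widest mode; if none applies, the copy is done
   by a library call (memcpy or bcopy, depending on TARGET_MEM_FUNCTIONS).  */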

/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
  rtx pat, last;
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (change_address (x, mode, NULL),
		      gen_rtx (REG, mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx (REG, word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}

/* Emit code to move a block Y to a block X, where X is non-consecutive
   registers represented by a PARALLEL.  */

void
emit_group_load (x, y)
     rtx x, y;
{
  rtx target_reg, source;
  int i;

  if (GET_CODE (x) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (x, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (x, 0); i++)
    {
      rtx element = XVECEXP (x, 0, i);

      target_reg = XEXP (element, 0);

      if (GET_CODE (y) == MEM)
	source = change_address (y, GET_MODE (target_reg),
				 plus_constant (XEXP (y, 0),
						INTVAL (XEXP (element, 1))));
      else if (XEXP (element, 1) == const0_rtx)
	{
	  if (GET_MODE (target_reg) == GET_MODE (y))
	    source = y;
	  /* Allow for the target_reg to be smaller than the input register
	     to allow for AIX with 4 DF arguments after a single SI arg.  The
	     last DF argument will only load 1 word into the integer registers,
	     but load a DF value into the float registers.  */
	  else if ((GET_MODE_SIZE (GET_MODE (target_reg))
		    <= GET_MODE_SIZE (GET_MODE (y)))
		   && GET_MODE (target_reg) == word_mode)
	    /* This might be a const_double, so we can't just use SUBREG.  */
	    source = operand_subword (y, 0, 0, VOIDmode);
	  else if (GET_MODE_SIZE (GET_MODE (target_reg))
		   == GET_MODE_SIZE (GET_MODE (y)))
	    source = gen_lowpart (GET_MODE (target_reg), y);
	  else
	    abort ();
	}
      else
	abort ();

      emit_move_insn (target_reg, source);
    }
}

/* Emit code to move a block Y to a block X, where Y is non-consecutive
   registers represented by a PARALLEL.  */

void
emit_group_store (x, y)
     rtx x, y;
{
  rtx source_reg, target;
  int i;

  if (GET_CODE (y) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (y, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (y, 0); i++)
    {
      rtx element = XVECEXP (y, 0, i);

      source_reg = XEXP (element, 0);

      if (GET_CODE (x) == MEM)
	target = change_address (x, GET_MODE (source_reg),
				 plus_constant (XEXP (x, 0),
						INTVAL (XEXP (element, 1))));
      else if (XEXP (element, 1) == const0_rtx)
	{
	  target = x;
	  if (GET_MODE (target) != GET_MODE (source_reg))
	    target = gen_lowpart (GET_MODE (source_reg), target);
	}
      else
	abort ();

      emit_move_insn (target, source_reg);
    }
}

/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx (EXPR_LIST, VOIDmode,
	       gen_rtx (USE, VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
	use_reg (call_fusage, reg);
    }
}
1966 /* Generate several move instructions to clear LEN bytes of block TO.
1967 (A MEM rtx with BLKmode). The caller must pass TO through
1968 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
1969 we can assume. */
1971 static void
1972 clear_by_pieces (to, len, align)
1973 rtx to;
1974 int len, align;
1976 struct clear_by_pieces data;
1977 rtx to_addr = XEXP (to, 0);
1978 int max_size = MOVE_MAX + 1;
1980 data.offset = 0;
1981 data.to_addr = to_addr;
1982 data.to = to;
1983 data.autinc_to
1984 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1985 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1987 data.explicit_inc_to = 0;
1988 data.reverse
1989 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1990 if (data.reverse) data.offset = len;
1991 data.len = len;
1993 data.to_struct = MEM_IN_STRUCT_P (to);
1995 /* If copying requires more than two move insns,
1996 copy addresses to registers (to make displacements shorter)
1997 and use post-increment if available. */
1998 if (!data.autinc_to
1999 && move_by_pieces_ninsns (len, align) > 2)
2001 #ifdef HAVE_PRE_DECREMENT
2002 if (data.reverse && ! data.autinc_to)
2004 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2005 data.autinc_to = 1;
2006 data.explicit_inc_to = -1;
2008 #endif
2009 #ifdef HAVE_POST_INCREMENT
2010 if (! data.reverse && ! data.autinc_to)
2012 data.to_addr = copy_addr_to_reg (to_addr);
2013 data.autinc_to = 1;
2014 data.explicit_inc_to = 1;
2016 #endif
2017 if (!data.autinc_to && CONSTANT_P (to_addr))
2018 data.to_addr = copy_addr_to_reg (to_addr);
2021 if (! SLOW_UNALIGNED_ACCESS
2022 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2023 align = MOVE_MAX;
2025 /* First move what we can in the largest integer mode, then go to
2026 successively smaller modes. */
2028 while (max_size > 1)
2030 enum machine_mode mode = VOIDmode, tmode;
2031 enum insn_code icode;
2033 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2034 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2035 if (GET_MODE_SIZE (tmode) < max_size)
2036 mode = tmode;
2038 if (mode == VOIDmode)
2039 break;
2041 icode = mov_optab->handlers[(int) mode].insn_code;
2042 if (icode != CODE_FOR_nothing
2043 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2044 GET_MODE_SIZE (mode)))
2045 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2047 max_size = GET_MODE_SIZE (mode);
2050 /* The code above should have handled everything. */
2051 if (data.len != 0)
2052 abort ();
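/* Illustrative sketch of the widest-first loop above, kept under #if 0
   so it is never compiled.  It assumes the integer modes are the powers
   of two up to MOVE_MAX (taken to be 4 here), as on most targets.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int len = 11;                 /* bytes left to clear */
  int size = 4;                 /* widest mode size, standing in for MOVE_MAX */

  /* Mirrors clear_by_pieces/clear_by_pieces_1: store zeros in mode SIZE
     while a full chunk fits, then fall back to the next narrower mode.  */
  while (size >= 1)
    {
      while (len >= size)
        {
          printf ("store %d zero byte(s)\n", size);
          len -= size;
        }
      size /= 2;
    }
  return 0;                     /* for len == 11: 4, 4, 2, 1 */
}
#endif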
2055 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2056 with move instructions for mode MODE. GENFUN is the gen_... function
2057 to make a move insn for that mode. DATA has all the other info. */
2059 static void
2060 clear_by_pieces_1 (genfun, mode, data)
2061 rtx (*genfun) ();
2062 enum machine_mode mode;
2063 struct clear_by_pieces *data;
2065 register int size = GET_MODE_SIZE (mode);
2066 register rtx to1;
2068 while (data->len >= size)
2070 if (data->reverse) data->offset -= size;
2072 to1 = (data->autinc_to
2073 ? gen_rtx (MEM, mode, data->to_addr)
2074 : copy_rtx (change_address (data->to, mode,
2075 plus_constant (data->to_addr,
2076 data->offset))));
2077 MEM_IN_STRUCT_P (to1) = data->to_struct;
2079 #ifdef HAVE_PRE_DECREMENT
2080 if (data->explicit_inc_to < 0)
2081 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2082 #endif
2084 emit_insn ((*genfun) (to1, const0_rtx));
2085 #ifdef HAVE_POST_INCREMENT
2086 if (data->explicit_inc_to > 0)
2087 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2088 #endif
2090 if (! data->reverse) data->offset += size;
2092 data->len -= size;
2096 /* Write zeros through the storage of OBJECT.
2097 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2098 the maximum alignment we can assume it has, measured in bytes.
2100 If we call a function that returns the length of the block, return it. */
2102 rtx
2103 clear_storage (object, size, align)
2104 rtx object;
2105 rtx size;
2106 int align;
2108 rtx retval = 0;
2110 if (GET_MODE (object) == BLKmode)
2112 object = protect_from_queue (object, 1);
2113 size = protect_from_queue (size, 0);
2115 if (GET_CODE (size) == CONST_INT
2116 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2117 clear_by_pieces (object, INTVAL (size), align);
2119 else
2121 /* Try the most limited insn first, because there's no point
2122 including more than one in the machine description unless
2123 the more limited one has some advantage. */
2125 rtx opalign = GEN_INT (align);
2126 enum machine_mode mode;
2128 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2129 mode = GET_MODE_WIDER_MODE (mode))
2131 enum insn_code code = clrstr_optab[(int) mode];
2133 if (code != CODE_FOR_nothing
2134 /* We don't need MODE to be narrower than
2135 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2136 the mode mask, as it is returned by the macro, it will
2137 definitely be less than the actual mode mask. */
2138 && ((GET_CODE (size) == CONST_INT
2139 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2140 <= GET_MODE_MASK (mode)))
2141 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2142 && (insn_operand_predicate[(int) code][0] == 0
2143 || (*insn_operand_predicate[(int) code][0]) (object,
2144 BLKmode))
2145 && (insn_operand_predicate[(int) code][2] == 0
2146 || (*insn_operand_predicate[(int) code][2]) (opalign,
2147 VOIDmode)))
2149 rtx op1;
2150 rtx last = get_last_insn ();
2151 rtx pat;
2153 op1 = convert_to_mode (mode, size, 1);
2154 if (insn_operand_predicate[(int) code][1] != 0
2155 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2156 mode))
2157 op1 = copy_to_mode_reg (mode, op1);
2159 pat = GEN_FCN ((int) code) (object, op1, opalign);
2160 if (pat)
2162 emit_insn (pat);
2163 return 0;
2165 else
2166 delete_insns_since (last);
2171 #ifdef TARGET_MEM_FUNCTIONS
2172 retval
2173 = emit_library_call_value (memset_libfunc, NULL_RTX, 0,
2174 ptr_mode, 3,
2175 XEXP (object, 0), Pmode,
2176 const0_rtx,
2177 TYPE_MODE (integer_type_node),
2178 convert_to_mode
2179 (TYPE_MODE (sizetype), size,
2180 TREE_UNSIGNED (sizetype)),
2181 TYPE_MODE (sizetype));
2182 #else
2183 emit_library_call (bzero_libfunc, 0,
2184 VOIDmode, 2,
2185 XEXP (object, 0), Pmode,
2186 convert_to_mode
2187 (TYPE_MODE (integer_type_node), size,
2188 TREE_UNSIGNED (integer_type_node)),
2189 TYPE_MODE (integer_type_node));
2190 #endif
2193 else
2194 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2196 return retval;
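/* A minimal sketch, never compiled, of the strategy order clear_storage
   applies to a BLKmode object.  MOVE_RATIO here and the helper name are
   stand-ins for the real target macros and optabs used above.  */
#if 0
#include <stdio.h>

#define MOVE_RATIO 15           /* hypothetical; really target-defined */

static const char *
pick_clear_strategy (int size_is_const, int ninsns, int have_clrstr)
{
  if (size_is_const && ninsns < MOVE_RATIO)
    return "clear_by_pieces";   /* inline stores of zero */
  if (have_clrstr)
    return "clrstr insn";       /* target block-clear pattern */
  return "memset/bzero call";   /* library fallback */
}

int
main (void)
{
  printf ("%s\n", pick_clear_strategy (1, 4, 1));   /* clear_by_pieces */
  printf ("%s\n", pick_clear_strategy (0, 0, 1));   /* clrstr insn */
  printf ("%s\n", pick_clear_strategy (0, 0, 0));   /* libcall */
  return 0;
}
#endif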
2199 /* Generate code to copy Y into X.
2200 Both Y and X must have the same mode, except that
2201 Y can be a constant with VOIDmode.
2202 This mode cannot be BLKmode; use emit_block_move for that.
2204 Return the last instruction emitted. */
2206 rtx
2207 emit_move_insn (x, y)
2208 rtx x, y;
2210 enum machine_mode mode = GET_MODE (x);
2212 x = protect_from_queue (x, 1);
2213 y = protect_from_queue (y, 0);
2215 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2216 abort ();
2218 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2219 y = force_const_mem (mode, y);
2221 /* If X or Y are memory references, verify that their addresses are valid
2222 for the machine. */
2223 if (GET_CODE (x) == MEM
2224 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2225 && ! push_operand (x, GET_MODE (x)))
2226 || (flag_force_addr
2227 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2228 x = change_address (x, VOIDmode, XEXP (x, 0));
2230 if (GET_CODE (y) == MEM
2231 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2232 || (flag_force_addr
2233 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2234 y = change_address (y, VOIDmode, XEXP (y, 0));
2236 if (mode == BLKmode)
2237 abort ();
2239 return emit_move_insn_1 (x, y);
2242 /* Low level part of emit_move_insn.
2243 Called just like emit_move_insn, but assumes X and Y
2244 are basically valid. */
2246 rtx
2247 emit_move_insn_1 (x, y)
2248 rtx x, y;
2250 enum machine_mode mode = GET_MODE (x);
2251 enum machine_mode submode;
2252 enum mode_class class = GET_MODE_CLASS (mode);
2253 int i;
2255 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2256 return
2257 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2259 /* Expand complex moves by moving real part and imag part, if possible. */
2260 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2261 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2262 * BITS_PER_UNIT),
2263 (class == MODE_COMPLEX_INT
2264 ? MODE_INT : MODE_FLOAT),
2265 0))
2266 && (mov_optab->handlers[(int) submode].insn_code
2267 != CODE_FOR_nothing))
2269 /* Don't split destination if it is a stack push. */
2270 int stack = push_operand (x, GET_MODE (x));
2271 rtx insns;
2273 /* If this is a stack, push the highpart first, so it
2274 will be in the argument order.
2276 In that case, change_address is used only to convert
2277 the mode, not to change the address. */
2278 if (stack)
2280 /* Note that the real part always precedes the imag part in memory
2281 regardless of machine's endianness. */
2282 #ifdef STACK_GROWS_DOWNWARD
2283 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2284 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2285 gen_imagpart (submode, y)));
2286 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2287 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2288 gen_realpart (submode, y)));
2289 #else
2290 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2291 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2292 gen_realpart (submode, y)));
2293 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2294 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2295 gen_imagpart (submode, y)));
2296 #endif
2298 else
2300 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2301 (gen_realpart (submode, x), gen_realpart (submode, y)));
2302 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2303 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2306 return get_last_insn ();
2309 /* This will handle any multi-word mode that lacks a move_insn pattern.
2310 However, you will get better code if you define such patterns,
2311 even if they must turn into multiple assembler instructions. */
2312 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2314 rtx last_insn = 0;
2315 rtx insns;
2317 #ifdef PUSH_ROUNDING
2319 /* If X is a push on the stack, do the push now and replace
2320 X with a reference to the stack pointer. */
2321 if (push_operand (x, GET_MODE (x)))
2323 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2324 x = change_address (x, VOIDmode, stack_pointer_rtx);
2326 #endif
2328 /* Show the output dies here. */
2329 if (x != y)
2330 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
2332 for (i = 0;
2333 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2334 i++)
2336 rtx xpart = operand_subword (x, i, 1, mode);
2337 rtx ypart = operand_subword (y, i, 1, mode);
2339 /* If we can't get a part of Y, put Y into memory if it is a
2340 constant. Otherwise, force it into a register. If we still
2341 can't get a part of Y, abort. */
2342 if (ypart == 0 && CONSTANT_P (y))
2344 y = force_const_mem (mode, y);
2345 ypart = operand_subword (y, i, 1, mode);
2347 else if (ypart == 0)
2348 ypart = operand_subword_force (y, i, mode);
2350 if (xpart == 0 || ypart == 0)
2351 abort ();
2353 last_insn = emit_move_insn (xpart, ypart);
2356 return last_insn;
2358 else
2359 abort ();
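/* Worked example, never compiled: the multi-word fallback above emits
   one move per word, where the word count is
   (mode size + word size - 1) / word size.  The sizes below are
   hypothetical.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int mode_size = 10;           /* e.g. a 10-byte mode */
  int units_per_word = 4;       /* 32-bit words */
  int nwords = (mode_size + units_per_word - 1) / units_per_word;
  int i;

  for (i = 0; i < nwords; i++)
    printf ("move subword %d\n", i);    /* three word moves in all */
  return 0;
}
#endif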
2362 /* Pushing data onto the stack. */
2364 /* Push a block of length SIZE (perhaps variable)
2365 and return an rtx to address the beginning of the block.
2366 Note that it is not possible for the value returned to be a QUEUED.
2367 The value may be virtual_outgoing_args_rtx.
2369 EXTRA is the number of bytes of padding to push in addition to SIZE.
2370 BELOW nonzero means this padding comes at low addresses;
2371 otherwise, the padding comes at high addresses. */
2373 rtx
2374 push_block (size, extra, below)
2375 rtx size;
2376 int extra, below;
2378 register rtx temp;
2380 size = convert_modes (Pmode, ptr_mode, size, 1);
2381 if (CONSTANT_P (size))
2382 anti_adjust_stack (plus_constant (size, extra));
2383 else if (GET_CODE (size) == REG && extra == 0)
2384 anti_adjust_stack (size);
2385 else
2387 rtx temp = copy_to_mode_reg (Pmode, size);
2388 if (extra != 0)
2389 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2390 temp, 0, OPTAB_LIB_WIDEN);
2391 anti_adjust_stack (temp);
2394 #ifdef STACK_GROWS_DOWNWARD
2395 temp = virtual_outgoing_args_rtx;
2396 if (extra != 0 && below)
2397 temp = plus_constant (temp, extra);
2398 #else
2399 if (GET_CODE (size) == CONST_INT)
2400 temp = plus_constant (virtual_outgoing_args_rtx,
2401 - INTVAL (size) - (below ? 0 : extra));
2402 else if (extra != 0 && !below)
2403 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2404 negate_rtx (Pmode, plus_constant (size, extra)));
2405 else
2406 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2407 negate_rtx (Pmode, size));
2408 #endif
2410 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
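/* Worked example, never compiled: where the block returned by
   push_block starts relative to the outgoing-args pointer, for a
   hypothetical pointer value and a constant SIZE.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int outgoing_args = 1000;     /* hypothetical pointer value */
  int size = 16, extra = 4, below = 1;

  /* STACK_GROWS_DOWNWARD: sp has already moved down past the block,
     which begins just above any low-address padding.  */
  printf ("downward stack: block at %d\n",
          outgoing_args + (below ? extra : 0));

  /* Upward-growing stack: point back past the block just pushed.  */
  printf ("upward stack:   block at %d\n",
          outgoing_args - size - (below ? 0 : extra));
  return 0;
}
#endif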
2413 rtx
2414 gen_push_operand ()
2416 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2419 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2420 block of SIZE bytes. */
2422 static rtx
2423 get_push_address (size)
2424 int size;
2426 register rtx temp;
2428 if (STACK_PUSH_CODE == POST_DEC)
2429 temp = gen_rtx (PLUS, Pmode, stack_pointer_rtx, GEN_INT (size));
2430 else if (STACK_PUSH_CODE == POST_INC)
2431 temp = gen_rtx (MINUS, Pmode, stack_pointer_rtx, GEN_INT (size));
2432 else
2433 temp = stack_pointer_rtx;
2435 return force_operand (temp, NULL_RTX);
2438 /* Generate code to push X onto the stack, assuming it has mode MODE and
2439 type TYPE.
2440 MODE is redundant except when X is a CONST_INT (since they don't
2441 carry mode info).
2442 SIZE is an rtx for the size of data to be copied (in bytes),
2443 needed only if X is BLKmode.
2445 ALIGN (in bytes) is the maximum alignment we can assume.
2447 If PARTIAL and REG are both nonzero, then copy that many of the first
2448 words of X into registers starting with REG, and push the rest of X.
2449 The amount of space pushed is decreased by PARTIAL words,
2450 rounded *down* to a multiple of PARM_BOUNDARY.
2451 REG must be a hard register in this case.
2452 If REG is zero but PARTIAL is not, take all other actions for an
2453 argument partially in registers, but do not actually load any
2454 registers.
2456 EXTRA is the amount in bytes of extra space to leave next to this arg.
2457 This is ignored if an argument block has already been allocated.
2459 On a machine that lacks real push insns, ARGS_ADDR is the address of
2460 the bottom of the argument block for this call. We use indexing off there
2461 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2462 argument block has not been preallocated.
2464 ARGS_SO_FAR is the size of args previously pushed for this call. */
2466 void
2467 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2468 args_addr, args_so_far)
2469 register rtx x;
2470 enum machine_mode mode;
2471 tree type;
2472 rtx size;
2473 int align;
2474 int partial;
2475 rtx reg;
2476 int extra;
2477 rtx args_addr;
2478 rtx args_so_far;
2480 rtx xinner;
2481 enum direction stack_direction
2482 #ifdef STACK_GROWS_DOWNWARD
2483 = downward;
2484 #else
2485 = upward;
2486 #endif
2488 /* Decide where to pad the argument: `downward' for below,
2489 `upward' for above, or `none' for don't pad it.
2490 Default is below for small data on big-endian machines; else above. */
2491 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2493 /* Invert direction if stack is post-update. */
2494 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2495 if (where_pad != none)
2496 where_pad = (where_pad == downward ? upward : downward);
2498 xinner = x = protect_from_queue (x, 0);
2500 if (mode == BLKmode)
2502 /* Copy a block into the stack, entirely or partially. */
2504 register rtx temp;
2505 int used = partial * UNITS_PER_WORD;
2506 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2507 int skip;
2509 if (size == 0)
2510 abort ();
2512 used -= offset;
2514 /* USED is now the # of bytes we need not copy to the stack
2515 because registers will take care of them. */
2517 if (partial != 0)
2518 xinner = change_address (xinner, BLKmode,
2519 plus_constant (XEXP (xinner, 0), used));
2521 /* If the partial register-part of the arg counts in its stack size,
2522 skip the part of stack space corresponding to the registers.
2523 Otherwise, start copying to the beginning of the stack space,
2524 by setting SKIP to 0. */
2525 #ifndef REG_PARM_STACK_SPACE
2526 skip = 0;
2527 #else
2528 skip = used;
2529 #endif
2531 #ifdef PUSH_ROUNDING
2532 /* Do it with several push insns if that doesn't take lots of insns
2533 and if there is no difficulty with push insns that skip bytes
2534 on the stack for alignment purposes. */
2535 if (args_addr == 0
2536 && GET_CODE (size) == CONST_INT
2537 && skip == 0
2538 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2539 < MOVE_RATIO)
2540 /* Here we avoid the case of a structure whose weak alignment
2541 forces many pushes of a small amount of data,
2542 and such small pushes do rounding that causes trouble. */
2543 && ((! SLOW_UNALIGNED_ACCESS)
2544 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2545 || PUSH_ROUNDING (align) == align)
2546 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2548 /* Push padding now if padding above and stack grows down,
2549 or if padding below and stack grows up.
2550 But if space already allocated, this has already been done. */
2551 if (extra && args_addr == 0
2552 && where_pad != none && where_pad != stack_direction)
2553 anti_adjust_stack (GEN_INT (extra));
2555 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2556 INTVAL (size) - used, align);
2558 if (flag_check_memory_usage && ! in_check_memory_usage)
2560 rtx temp;
2562 in_check_memory_usage = 1;
2563 temp = get_push_address (INTVAL(size) - used);
2564 if (GET_CODE (x) == MEM && AGGREGATE_TYPE_P (type))
2565 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2566 temp, ptr_mode,
2567 XEXP (xinner, 0), ptr_mode,
2568 GEN_INT (INTVAL(size) - used),
2569 TYPE_MODE (sizetype));
2570 else
2571 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2572 temp, ptr_mode,
2573 GEN_INT (INTVAL(size) - used),
2574 TYPE_MODE (sizetype),
2575 GEN_INT (MEMORY_USE_RW),
2576 TYPE_MODE (integer_type_node));
2577 in_check_memory_usage = 0;
2580 else
2581 #endif /* PUSH_ROUNDING */
2583 /* Otherwise make space on the stack and copy the data
2584 to the address of that space. */
2586 /* Deduct words put into registers from the size we must copy. */
2587 if (partial != 0)
2589 if (GET_CODE (size) == CONST_INT)
2590 size = GEN_INT (INTVAL (size) - used);
2591 else
2592 size = expand_binop (GET_MODE (size), sub_optab, size,
2593 GEN_INT (used), NULL_RTX, 0,
2594 OPTAB_LIB_WIDEN);
2597 /* Get the address of the stack space.
2598 In this case, we do not deal with EXTRA separately.
2599 A single stack adjust will do. */
2600 if (! args_addr)
2602 temp = push_block (size, extra, where_pad == downward);
2603 extra = 0;
2605 else if (GET_CODE (args_so_far) == CONST_INT)
2606 temp = memory_address (BLKmode,
2607 plus_constant (args_addr,
2608 skip + INTVAL (args_so_far)));
2609 else
2610 temp = memory_address (BLKmode,
2611 plus_constant (gen_rtx (PLUS, Pmode,
2612 args_addr, args_so_far),
2613 skip));
2614 if (flag_check_memory_usage && ! in_check_memory_usage)
2616 rtx target;
2618 in_check_memory_usage = 1;
2619 target = copy_to_reg (temp);
2620 if (GET_CODE (x) == MEM && AGGREGATE_TYPE_P (type))
2621 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2622 target, ptr_mode,
2623 XEXP (xinner, 0), ptr_mode,
2624 size, TYPE_MODE (sizetype));
2625 else
2626 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2627 target, ptr_mode,
2628 size, TYPE_MODE (sizetype),
2629 GEN_INT (MEMORY_USE_RW),
2630 TYPE_MODE (integer_type_node));
2631 in_check_memory_usage = 0;
2634 /* TEMP is the address of the block. Copy the data there. */
2635 if (GET_CODE (size) == CONST_INT
2636 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2637 < MOVE_RATIO))
2639 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2640 INTVAL (size), align);
2641 goto ret;
2643 /* Try the most limited insn first, because there's no point
2644 including more than one in the machine description unless
2645 the more limited one has some advantage. */
2646 #ifdef HAVE_movstrqi
2647 if (HAVE_movstrqi
2648 && GET_CODE (size) == CONST_INT
2649 && ((unsigned) INTVAL (size)
2650 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2652 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2653 xinner, size, GEN_INT (align));
2654 if (pat != 0)
2656 emit_insn (pat);
2657 goto ret;
2660 #endif
2661 #ifdef HAVE_movstrhi
2662 if (HAVE_movstrhi
2663 && GET_CODE (size) == CONST_INT
2664 && ((unsigned) INTVAL (size)
2665 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2667 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2668 xinner, size, GEN_INT (align));
2669 if (pat != 0)
2671 emit_insn (pat);
2672 goto ret;
2675 #endif
2676 #ifdef HAVE_movstrsi
2677 if (HAVE_movstrsi)
2679 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2680 xinner, size, GEN_INT (align));
2681 if (pat != 0)
2683 emit_insn (pat);
2684 goto ret;
2687 #endif
2688 #ifdef HAVE_movstrdi
2689 if (HAVE_movstrdi)
2691 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2692 xinner, size, GEN_INT (align));
2693 if (pat != 0)
2695 emit_insn (pat);
2696 goto ret;
2699 #endif
2701 #ifndef ACCUMULATE_OUTGOING_ARGS
2702 /* If the source is referenced relative to the stack pointer,
2703 copy it to another register to stabilize it. We do not need
2704 to do this if we know that we won't be changing sp. */
2706 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2707 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2708 temp = copy_to_reg (temp);
2709 #endif
2711 /* Make inhibit_defer_pop nonzero around the library call
2712 to force it to pop the bcopy-arguments right away. */
2713 NO_DEFER_POP;
2714 #ifdef TARGET_MEM_FUNCTIONS
2715 emit_library_call (memcpy_libfunc, 0,
2716 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2717 convert_to_mode (TYPE_MODE (sizetype),
2718 size, TREE_UNSIGNED (sizetype)),
2719 TYPE_MODE (sizetype));
2720 #else
2721 emit_library_call (bcopy_libfunc, 0,
2722 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2723 convert_to_mode (TYPE_MODE (integer_type_node),
2724 size,
2725 TREE_UNSIGNED (integer_type_node)),
2726 TYPE_MODE (integer_type_node));
2727 #endif
2728 OK_DEFER_POP;
2731 else if (partial > 0)
2733 /* Scalar partly in registers. */
2735 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2736 int i;
2737 int not_stack;
2738 /* # words of start of argument
2739 that we must make space for but need not store. */
2740 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2741 int args_offset = INTVAL (args_so_far);
2742 int skip;
2744 /* Push padding now if padding above and stack grows down,
2745 or if padding below and stack grows up.
2746 But if space already allocated, this has already been done. */
2747 if (extra && args_addr == 0
2748 && where_pad != none && where_pad != stack_direction)
2749 anti_adjust_stack (GEN_INT (extra));
2751 /* If we make space by pushing it, we might as well push
2752 the real data. Otherwise, we can leave OFFSET nonzero
2753 and leave the space uninitialized. */
2754 if (args_addr == 0)
2755 offset = 0;
2757 /* Now NOT_STACK gets the number of words that we don't need to
2758 allocate on the stack. */
2759 not_stack = partial - offset;
2761 /* If the partial register-part of the arg counts in its stack size,
2762 skip the part of stack space corresponding to the registers.
2763 Otherwise, start copying to the beginning of the stack space,
2764 by setting SKIP to 0. */
2765 #ifndef REG_PARM_STACK_SPACE
2766 skip = 0;
2767 #else
2768 skip = not_stack;
2769 #endif
2771 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2772 x = validize_mem (force_const_mem (mode, x));
2774 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2775 SUBREGs of such registers are not allowed. */
2776 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2777 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2778 x = copy_to_reg (x);
2780 /* Loop over all the words allocated on the stack for this arg. */
2781 /* We can do it by words, because any scalar bigger than a word
2782 has a size that is a multiple of a word. */
2783 #ifndef PUSH_ARGS_REVERSED
2784 for (i = not_stack; i < size; i++)
2785 #else
2786 for (i = size - 1; i >= not_stack; i--)
2787 #endif
2788 if (i >= not_stack + offset)
2789 emit_push_insn (operand_subword_force (x, i, mode),
2790 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2791 0, args_addr,
2792 GEN_INT (args_offset + ((i - not_stack + skip)
2793 * UNITS_PER_WORD)));
2795 else
2797 rtx addr;
2798 rtx target = NULL_RTX;
2800 /* Push padding now if padding above and stack grows down,
2801 or if padding below and stack grows up.
2802 But if space already allocated, this has already been done. */
2803 if (extra && args_addr == 0
2804 && where_pad != none && where_pad != stack_direction)
2805 anti_adjust_stack (GEN_INT (extra));
2807 #ifdef PUSH_ROUNDING
2808 if (args_addr == 0)
2809 addr = gen_push_operand ();
2810 else
2811 #endif
2813 if (GET_CODE (args_so_far) == CONST_INT)
2814 addr
2815 = memory_address (mode,
2816 plus_constant (args_addr,
2817 INTVAL (args_so_far)));
2818 else
2819 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2820 args_so_far));
2821 target = addr;
2824 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2826 if (flag_check_memory_usage && ! in_check_memory_usage)
2828 in_check_memory_usage = 1;
2829 if (target == 0)
2830 target = get_push_address (GET_MODE_SIZE (mode));
2832 if (GET_CODE (x) == MEM && AGGREGATE_TYPE_P (type))
2833 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2834 target, ptr_mode,
2835 XEXP (x, 0), ptr_mode,
2836 GEN_INT (GET_MODE_SIZE (mode)),
2837 TYPE_MODE (sizetype));
2838 else
2839 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2840 target, ptr_mode,
2841 GEN_INT (GET_MODE_SIZE (mode)),
2842 TYPE_MODE (sizetype),
2843 GEN_INT (MEMORY_USE_RW),
2844 TYPE_MODE (integer_type_node));
2845 in_check_memory_usage = 0;
2849 ret:
2850 /* If part should go in registers, copy that part
2851 into the appropriate registers. Do this now, at the end,
2852 since mem-to-mem copies above may do function calls. */
2853 if (partial > 0 && reg != 0)
2855 /* Handle calls that pass values in multiple non-contiguous locations.
2856 The Irix 6 ABI has examples of this. */
2857 if (GET_CODE (reg) == PARALLEL)
2858 emit_group_load (reg, x);
2859 else
2860 move_block_to_reg (REGNO (reg), x, partial, mode);
2863 if (extra && args_addr == 0 && where_pad == stack_direction)
2864 anti_adjust_stack (GEN_INT (extra));
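/* Worked example, never compiled: the BLKmode bookkeeping above for an
   argument with PARTIAL == 3 register words, 4-byte words and a
   PARM_BOUNDARY of 64 bits.  The space pushed is decreased by the
   register words, rounded *down* to the parameter boundary.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int units_per_word = 4, parm_boundary_bytes = 8;
  int partial = 3;

  int used = partial * units_per_word;      /* 12 bytes go in registers */
  int offset = used % parm_boundary_bytes;  /* 4 bytes not skipped */
  used -= offset;                           /* stack copy skips 8 bytes */

  printf ("skip %d source bytes; %d bytes live both in regs and stack\n",
          used, offset);
  return 0;
}
#endif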
2867 /* Expand an assignment that stores the value of FROM into TO.
2868 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2869 (This may contain a QUEUED rtx;
2870 if the value is constant, this rtx is a constant.)
2871 Otherwise, the returned value is NULL_RTX.
2873 SUGGEST_REG is no longer actually used.
2874 It used to mean, copy the value through a register
2875 and return that register, if that is possible.
2876 We now use WANT_VALUE to decide whether to do this. */
2878 rtx
2879 expand_assignment (to, from, want_value, suggest_reg)
2880 tree to, from;
2881 int want_value;
2882 int suggest_reg;
2884 register rtx to_rtx = 0;
2885 rtx result;
2887 /* Don't crash if the lhs of the assignment was erroneous. */
2889 if (TREE_CODE (to) == ERROR_MARK)
2891 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2892 return want_value ? result : NULL_RTX;
2895 if (output_bytecode)
2897 tree dest_innermost;
2899 bc_expand_expr (from);
2900 bc_emit_instruction (duplicate);
2902 dest_innermost = bc_expand_address (to);
2904 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2905 take care of it here. */
2907 bc_store_memory (TREE_TYPE (to), dest_innermost);
2908 return NULL;
2911 /* Assignment of a structure component needs special treatment
2912 if the structure component's rtx is not simply a MEM.
2913 Assignment of an array element at a constant index, and assignment of
2914 an array element in an unaligned packed structure field, have the same
2915 problem. */
2917 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
2918 || TREE_CODE (to) == ARRAY_REF)
2920 enum machine_mode mode1;
2921 int bitsize;
2922 int bitpos;
2923 tree offset;
2924 int unsignedp;
2925 int volatilep = 0;
2926 tree tem;
2927 int alignment;
2929 push_temp_slots ();
2930 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
2931 &unsignedp, &volatilep, &alignment);
2933 /* If we are going to use store_bit_field and extract_bit_field,
2934 make sure to_rtx will be safe for multiple use. */
2936 if (mode1 == VOIDmode && want_value)
2937 tem = stabilize_reference (tem);
2939 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
2940 if (offset != 0)
2942 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2944 if (GET_CODE (to_rtx) != MEM)
2945 abort ();
2946 to_rtx = change_address (to_rtx, VOIDmode,
2947 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
2948 force_reg (ptr_mode, offset_rtx)));
2950 if (volatilep)
2952 if (GET_CODE (to_rtx) == MEM)
2954 /* When the offset is zero, to_rtx is the address of the
2955 structure we are storing into, and hence may be shared.
2956 We must make a new MEM before setting the volatile bit. */
2957 if (offset == 0)
2958 to_rtx = copy_rtx (to_rtx);
2960 MEM_VOLATILE_P (to_rtx) = 1;
2962 #if 0 /* This was turned off because, when a field is volatile
2963 in an object which is not volatile, the object may be in a register,
2964 and then we would abort over here. */
2965 else
2966 abort ();
2967 #endif
2970 if (TREE_CODE (to) == COMPONENT_REF
2971 && TREE_READONLY (TREE_OPERAND (to, 1)))
2973 if (offset == 0)
2974 to_rtx = copy_rtx (to_rtx);
2976 RTX_UNCHANGING_P (to_rtx) = 1;
2979 /* Check the access. */
2980 if (flag_check_memory_usage && GET_CODE (to_rtx) == MEM)
2982 rtx to_addr;
2983 int size;
2984 int best_mode_size;
2985 enum machine_mode best_mode;
2987 best_mode = get_best_mode (bitsize, bitpos,
2988 TYPE_ALIGN (TREE_TYPE (tem)),
2989 mode1, volatilep);
2990 if (best_mode == VOIDmode)
2991 best_mode = QImode;
2993 best_mode_size = GET_MODE_BITSIZE (best_mode);
2994 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
2995 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
2996 size *= GET_MODE_SIZE (best_mode);
2998 /* Check the access right of the pointer. */
2999 if (size)
3000 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3001 to_addr, ptr_mode,
3002 GEN_INT (size), TYPE_MODE (sizetype),
3003 GEN_INT (MEMORY_USE_WO),
3004 TYPE_MODE (integer_type_node));
3007 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3008 (want_value
3009 /* Spurious cast makes HPUX compiler happy. */
3010 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3011 : VOIDmode),
3012 unsignedp,
3013 /* Required alignment of containing datum. */
3014 alignment,
3015 int_size_in_bytes (TREE_TYPE (tem)));
3016 preserve_temp_slots (result);
3017 free_temp_slots ();
3018 pop_temp_slots ();
3020 /* If the value is meaningful, convert RESULT to the proper mode.
3021 Otherwise, return nothing. */
3022 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3023 TYPE_MODE (TREE_TYPE (from)),
3024 result,
3025 TREE_UNSIGNED (TREE_TYPE (to)))
3026 : NULL_RTX);
3029 /* If the rhs is a function call and its value is not an aggregate,
3030 call the function before we start to compute the lhs.
3031 This is needed for correct code for cases such as
3032 val = setjmp (buf) on machines where reference to val
3033 requires loading up part of an address in a separate insn.
3035 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3036 a promoted variable where the zero- or sign- extension needs to be done.
3037 Handling this in the normal way is safe because no computation is done
3038 before the call. */
3039 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3040 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3041 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3043 rtx value;
3045 push_temp_slots ();
3046 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3047 if (to_rtx == 0)
3048 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3050 /* Handle calls that return values in multiple non-contiguous locations.
3051 The Irix 6 ABI has examples of this. */
3052 if (GET_CODE (to_rtx) == PARALLEL)
3053 emit_group_load (to_rtx, value);
3054 else if (GET_MODE (to_rtx) == BLKmode)
3055 emit_block_move (to_rtx, value, expr_size (from),
3056 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3057 else
3058 emit_move_insn (to_rtx, value);
3059 preserve_temp_slots (to_rtx);
3060 free_temp_slots ();
3061 pop_temp_slots ();
3062 return want_value ? to_rtx : NULL_RTX;
3065 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3066 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3068 if (to_rtx == 0)
3069 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3071 /* Don't move directly into a return register. */
3072 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3074 rtx temp;
3076 push_temp_slots ();
3077 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3078 emit_move_insn (to_rtx, temp);
3079 preserve_temp_slots (to_rtx);
3080 free_temp_slots ();
3081 pop_temp_slots ();
3082 return want_value ? to_rtx : NULL_RTX;
3085 /* In case we are returning the contents of an object which overlaps
3086 the place the value is being stored, use a safe function when copying
3087 a value through a pointer into a structure value return block. */
3088 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3089 && current_function_returns_struct
3090 && !current_function_returns_pcc_struct)
3092 rtx from_rtx, size;
3094 push_temp_slots ();
3095 size = expr_size (from);
3096 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3097 EXPAND_MEMORY_USE_DONT);
3099 /* Copy the rights of the bitmap. */
3100 if (flag_check_memory_usage)
3101 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3102 XEXP (to_rtx, 0), ptr_mode,
3103 XEXP (from_rtx, 0), ptr_mode,
3104 convert_to_mode (TYPE_MODE (sizetype),
3105 size, TREE_UNSIGNED (sizetype)),
3106 TYPE_MODE (sizetype));
3108 #ifdef TARGET_MEM_FUNCTIONS
3109 emit_library_call (memcpy_libfunc, 0,
3110 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3111 XEXP (from_rtx, 0), Pmode,
3112 convert_to_mode (TYPE_MODE (sizetype),
3113 size, TREE_UNSIGNED (sizetype)),
3114 TYPE_MODE (sizetype));
3115 #else
3116 emit_library_call (bcopy_libfunc, 0,
3117 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3118 XEXP (to_rtx, 0), Pmode,
3119 convert_to_mode (TYPE_MODE (integer_type_node),
3120 size, TREE_UNSIGNED (integer_type_node)),
3121 TYPE_MODE (integer_type_node));
3122 #endif
3124 preserve_temp_slots (to_rtx);
3125 free_temp_slots ();
3126 pop_temp_slots ();
3127 return want_value ? to_rtx : NULL_RTX;
3130 /* Compute FROM and store the value in the rtx we got. */
3132 push_temp_slots ();
3133 result = store_expr (from, to_rtx, want_value);
3134 preserve_temp_slots (result);
3135 free_temp_slots ();
3136 pop_temp_slots ();
3137 return want_value ? result : NULL_RTX;
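/* Worked example, never compiled: the access-size arithmetic in the
   flag_check_memory_usage block above.  A 10-bit field at bit position
   29, accessed in 32-bit units, spans two units, so 8 bytes are
   checked.  */
#if 0
#include <stdio.h>

#define CEIL(x,y) (((x) + (y) - 1) / (y))

int
main (void)
{
  int bitpos = 29, bitsize = 10, best_mode_size = 32;
  int units = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);

  printf ("%d byte(s) checked\n", units * (best_mode_size / 8));  /* 8 */
  return 0;
}
#endif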
3140 /* Generate code for computing expression EXP,
3141 and storing the value into TARGET.
3142 TARGET may contain a QUEUED rtx.
3144 If WANT_VALUE is nonzero, return a copy of the value
3145 not in TARGET, so that we can be sure to use the proper
3146 value in a containing expression even if TARGET has something
3147 else stored in it. If possible, we copy the value through a pseudo
3148 and return that pseudo. Or, if the value is constant, we try to
3149 return the constant. In some cases, we return a pseudo
3150 copied *from* TARGET.
3152 If the mode is BLKmode then we may return TARGET itself.
3153 It turns out that in BLKmode it doesn't cause a problem,
3154 because C has no operators that could combine two different
3155 assignments into the same BLKmode object with different values
3156 with no sequence point. Will other languages need this to
3157 be more thorough?
3159 If WANT_VALUE is 0, we return NULL, to make sure
3160 to catch quickly any cases where the caller uses the value
3161 and fails to set WANT_VALUE. */
3163 rtx
3164 store_expr (exp, target, want_value)
3165 register tree exp;
3166 register rtx target;
3167 int want_value;
3169 register rtx temp;
3170 int dont_return_target = 0;
3172 if (TREE_CODE (exp) == COMPOUND_EXPR)
3174 /* Perform first part of compound expression, then assign from second
3175 part. */
3176 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3177 emit_queue ();
3178 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3180 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3182 /* For conditional expression, get safe form of the target. Then
3183 test the condition, doing the appropriate assignment on either
3184 side. This avoids the creation of unnecessary temporaries.
3185 For non-BLKmode, it is more efficient not to do this. */
3187 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3189 emit_queue ();
3190 target = protect_from_queue (target, 1);
3192 do_pending_stack_adjust ();
3193 NO_DEFER_POP;
3194 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3195 start_cleanup_deferral ();
3196 store_expr (TREE_OPERAND (exp, 1), target, 0);
3197 end_cleanup_deferral ();
3198 emit_queue ();
3199 emit_jump_insn (gen_jump (lab2));
3200 emit_barrier ();
3201 emit_label (lab1);
3202 start_cleanup_deferral ();
3203 store_expr (TREE_OPERAND (exp, 2), target, 0);
3204 end_cleanup_deferral ();
3205 emit_queue ();
3206 emit_label (lab2);
3207 OK_DEFER_POP;
3209 return want_value ? target : NULL_RTX;
3211 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3212 && GET_MODE (target) != BLKmode)
3213 /* If target is in memory and caller wants value in a register instead,
3214 arrange that. Pass TARGET as target for expand_expr so that,
3215 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3216 We know expand_expr will not use the target in that case.
3217 Don't do this if TARGET is volatile because we are supposed
3218 to write it and then read it. */
3220 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3221 GET_MODE (target), 0);
3222 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3223 temp = copy_to_reg (temp);
3224 dont_return_target = 1;
3226 else if (queued_subexp_p (target))
3227 /* If target contains a postincrement, let's not risk
3228 using it as the place to generate the rhs. */
3230 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3232 /* Expand EXP into a new pseudo. */
3233 temp = gen_reg_rtx (GET_MODE (target));
3234 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3236 else
3237 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3239 /* If target is volatile, ANSI requires accessing the value
3240 *from* the target, if it is accessed. So make that happen.
3241 In no case return the target itself. */
3242 if (! MEM_VOLATILE_P (target) && want_value)
3243 dont_return_target = 1;
3245 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3246 /* If this is a scalar in a register that is stored in a wider mode
3247 than the declared mode, compute the result into its declared mode
3248 and then convert to the wider mode. Our value is the computed
3249 expression. */
3251 /* If we don't want a value, we can do the conversion inside EXP,
3252 which will often result in some optimizations. Do the conversion
3253 in two steps: first change the signedness, if needed, then
3254 the extend. But don't do this if the type of EXP is a subtype
3255 of something else since then the conversion might involve
3256 more than just converting modes. */
3257 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3258 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3260 if (TREE_UNSIGNED (TREE_TYPE (exp))
3261 != SUBREG_PROMOTED_UNSIGNED_P (target))
3262 exp
3263 = convert
3264 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3265 TREE_TYPE (exp)),
3266 exp);
3268 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3269 SUBREG_PROMOTED_UNSIGNED_P (target)),
3270 exp);
3273 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3275 /* If TEMP is a volatile MEM and we want a result value, make
3276 the access now so it gets done only once. Likewise if
3277 it contains TARGET. */
3278 if (GET_CODE (temp) == MEM && want_value
3279 && (MEM_VOLATILE_P (temp)
3280 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3281 temp = copy_to_reg (temp);
3283 /* If TEMP is a VOIDmode constant, use convert_modes to make
3284 sure that we properly convert it. */
3285 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3286 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3287 TYPE_MODE (TREE_TYPE (exp)), temp,
3288 SUBREG_PROMOTED_UNSIGNED_P (target));
3290 convert_move (SUBREG_REG (target), temp,
3291 SUBREG_PROMOTED_UNSIGNED_P (target));
3292 return want_value ? temp : NULL_RTX;
3294 else
3296 temp = expand_expr (exp, target, GET_MODE (target), 0);
3297 /* Return TARGET if it's a specified hardware register.
3298 If TARGET is a volatile mem ref, either return TARGET
3299 or return a reg copied *from* TARGET; ANSI requires this.
3301 Otherwise, if TEMP is not TARGET, return TEMP
3302 if it is constant (for efficiency),
3303 or if we really want the correct value. */
3304 if (!(target && GET_CODE (target) == REG
3305 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3306 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3307 && ! rtx_equal_p (temp, target)
3308 && (CONSTANT_P (temp) || want_value))
3309 dont_return_target = 1;
3312 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3313 the same as that of TARGET, adjust the constant. This is needed, for
3314 example, in case it is a CONST_DOUBLE and we want only a word-sized
3315 value. */
3316 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3317 && TREE_CODE (exp) != ERROR_MARK
3318 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3319 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3320 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3322 if (flag_check_memory_usage
3323 && GET_CODE (target) == MEM
3324 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3326 if (GET_CODE (temp) == MEM)
3327 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3328 XEXP (target, 0), ptr_mode,
3329 XEXP (temp, 0), ptr_mode,
3330 expr_size (exp), TYPE_MODE (sizetype));
3331 else
3332 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3333 XEXP (target, 0), ptr_mode,
3334 expr_size (exp), TYPE_MODE (sizetype),
3335 GEN_INT (MEMORY_USE_WO),
3336 TYPE_MODE (integer_type_node));
3339 /* If value was not generated in the target, store it there.
3340 Convert the value to TARGET's type first if necessary. */
3342 if (! rtx_equal_p (temp, target) && TREE_CODE (exp) != ERROR_MARK)
3344 target = protect_from_queue (target, 1);
3345 if (GET_MODE (temp) != GET_MODE (target)
3346 && GET_MODE (temp) != VOIDmode)
3348 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3349 if (dont_return_target)
3351 /* In this case, we will return TEMP,
3352 so make sure it has the proper mode.
3353 But don't forget to store the value into TARGET. */
3354 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3355 emit_move_insn (target, temp);
3357 else
3358 convert_move (target, temp, unsignedp);
3361 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3363 /* Handle copying a string constant into an array.
3364 The string constant may be shorter than the array.
3365 So copy just the string's actual length, and clear the rest. */
3366 rtx size;
3367 rtx addr;
3369 /* Get the size of the data type of the string,
3370 which is actually the size of the target. */
3371 size = expr_size (exp);
3372 if (GET_CODE (size) == CONST_INT
3373 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3374 emit_block_move (target, temp, size,
3375 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3376 else
3378 /* Compute the size of the data to copy from the string. */
3379 tree copy_size
3380 = size_binop (MIN_EXPR,
3381 make_tree (sizetype, size),
3382 convert (sizetype,
3383 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3384 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3385 VOIDmode, 0);
3386 rtx label = 0;
3388 /* Copy that much. */
3389 emit_block_move (target, temp, copy_size_rtx,
3390 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3392 /* Figure out how much is left in TARGET that we have to clear.
3393 Do all calculations in ptr_mode. */
3395 addr = XEXP (target, 0);
3396 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3398 if (GET_CODE (copy_size_rtx) == CONST_INT)
3400 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3401 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3403 else
3405 addr = force_reg (ptr_mode, addr);
3406 addr = expand_binop (ptr_mode, add_optab, addr,
3407 copy_size_rtx, NULL_RTX, 0,
3408 OPTAB_LIB_WIDEN);
3410 size = expand_binop (ptr_mode, sub_optab, size,
3411 copy_size_rtx, NULL_RTX, 0,
3412 OPTAB_LIB_WIDEN);
3414 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3415 GET_MODE (size), 0, 0);
3416 label = gen_label_rtx ();
3417 emit_jump_insn (gen_blt (label));
3420 if (size != const0_rtx)
3422 /* Be sure we can write on ADDR. */
3423 if (flag_check_memory_usage)
3424 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3425 addr, ptr_mode,
3426 size, TYPE_MODE (sizetype),
3427 GEN_INT (MEMORY_USE_WO),
3428 TYPE_MODE (integer_type_node));
3429 #ifdef TARGET_MEM_FUNCTIONS
3430 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3431 addr, ptr_mode,
3432 const0_rtx, TYPE_MODE (integer_type_node),
3433 convert_to_mode (TYPE_MODE (sizetype),
3434 size,
3435 TREE_UNSIGNED (sizetype)),
3436 TYPE_MODE (sizetype));
3437 #else
3438 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3439 addr, ptr_mode,
3440 convert_to_mode (TYPE_MODE (integer_type_node),
3441 size,
3442 TREE_UNSIGNED (integer_type_node)),
3443 TYPE_MODE (integer_type_node));
3444 #endif
3447 if (label)
3448 emit_label (label);
3451 /* Handle calls that return values in multiple non-contiguous locations.
3452 The Irix 6 ABI has examples of this. */
3453 else if (GET_CODE (target) == PARALLEL)
3454 emit_group_load (target, temp);
3455 else if (GET_MODE (temp) == BLKmode)
3456 emit_block_move (target, temp, expr_size (exp),
3457 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3458 else
3459 emit_move_insn (target, temp);
3462 /* If we don't want a value, return NULL_RTX. */
3463 if (! want_value)
3464 return NULL_RTX;
3466 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3467 ??? The latter test doesn't seem to make sense. */
3468 else if (dont_return_target && GET_CODE (temp) != MEM)
3469 return temp;
3471 /* Return TARGET itself if it is a hard register. */
3472 else if (want_value && GET_MODE (target) != BLKmode
3473 && ! (GET_CODE (target) == REG
3474 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3475 return copy_to_reg (target);
3477 else
3478 return target;
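/* Worked example, never compiled: the STRING_CST branch above copies
   MIN (target size, string length) bytes and clears whatever is left.
   Storing a 3-byte string (terminator included) into a char[8] copies
   3 bytes and zeros 5.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int target_size = 8, string_len = 3;
  int copy = string_len < target_size ? string_len : target_size;

  printf ("copy %d byte(s), zero %d byte(s)\n", copy, target_size - copy);
  return 0;
}
#endif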
3481 /* Return 1 if EXP just contains zeros. */
3483 static int
3484 is_zeros_p (exp)
3485 tree exp;
3487 tree elt;
3489 switch (TREE_CODE (exp))
3491 case CONVERT_EXPR:
3492 case NOP_EXPR:
3493 case NON_LVALUE_EXPR:
3494 return is_zeros_p (TREE_OPERAND (exp, 0));
3496 case INTEGER_CST:
3497 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3499 case COMPLEX_CST:
3500 return
3501 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3503 case REAL_CST:
3504 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
3506 case CONSTRUCTOR:
3507 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3508 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3509 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3510 if (! is_zeros_p (TREE_VALUE (elt)))
3511 return 0;
3513 return 1;
3515 default:
3516 return 0;
3520 /* Return 1 if EXP contains mostly (3/4) zeros. */
3522 static int
3523 mostly_zeros_p (exp)
3524 tree exp;
3526 if (TREE_CODE (exp) == CONSTRUCTOR)
3528 int elts = 0, zeros = 0;
3529 tree elt = CONSTRUCTOR_ELTS (exp);
3530 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3532 /* If there are no ranges of true bits, it is all zero. */
3533 return elt == NULL_TREE;
3535 for (; elt; elt = TREE_CHAIN (elt))
3537 /* We do not handle the case where the index is a RANGE_EXPR,
3538 so the statistic will be somewhat inaccurate.
3539 We do make a more accurate count in store_constructor itself,
3540 so since this function is only used for nested array elements,
3541 this should be close enough. */
3542 if (mostly_zeros_p (TREE_VALUE (elt)))
3543 zeros++;
3544 elts++;
3547 return 4 * zeros >= 3 * elts;
3550 return is_zeros_p (exp);
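/* Illustrative check, never compiled: the 3/4 threshold above is
   computed without division, as 4 * zeros >= 3 * elts.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int elts = 8, zeros = 6;      /* 6 of 8 elements are zero */

  printf ("mostly zeros: %s\n",
          4 * zeros >= 3 * elts ? "yes" : "no");    /* 24 >= 24: yes */
  return 0;
}
#endif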
3553 /* Helper function for store_constructor.
3554 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3555 TYPE is the type of the CONSTRUCTOR, not the element type.
3556 CLEARED is as for store_constructor.
3558 This provides a recursive shortcut back to store_constructor when it isn't
3559 necessary to go through store_field. This is so that we can pass through
3560 the cleared field to let store_constructor know that we may not have to
3561 clear a substructure if the outer structure has already been cleared. */
3563 static void
3564 store_constructor_field (target, bitsize, bitpos,
3565 mode, exp, type, cleared)
3566 rtx target;
3567 int bitsize, bitpos;
3568 enum machine_mode mode;
3569 tree exp, type;
3570 int cleared;
3572 if (TREE_CODE (exp) == CONSTRUCTOR
3573 && bitpos % BITS_PER_UNIT == 0
3574 /* If we have a non-zero bitpos for a register target, then we just
3575 let store_field do the bitfield handling. This is unlikely to
3576 generate unnecessary clear instructions anyway. */
3577 && (bitpos == 0 || GET_CODE (target) == MEM))
3579 if (bitpos != 0)
3580 target = change_address (target, VOIDmode,
3581 plus_constant (XEXP (target, 0),
3582 bitpos / BITS_PER_UNIT));
3583 store_constructor (exp, target, cleared);
3585 else
3586 store_field (target, bitsize, bitpos, mode, exp,
3587 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3588 int_size_in_bytes (type));
3591 /* Store the value of constructor EXP into the rtx TARGET.
3592 TARGET is either a REG or a MEM.
3593 CLEARED is true if TARGET is known to have been zero'd. */
3595 static void
3596 store_constructor (exp, target, cleared)
3597 tree exp;
3598 rtx target;
3599 int cleared;
3601 tree type = TREE_TYPE (exp);
3603 /* We know our target cannot conflict, since safe_from_p has been called. */
3604 #if 0
3605 /* Don't try copying piece by piece into a hard register
3606 since that is vulnerable to being clobbered by EXP.
3607 Instead, construct in a pseudo register and then copy it all. */
3608 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3610 rtx temp = gen_reg_rtx (GET_MODE (target));
3611 store_constructor (exp, temp, 0);
3612 emit_move_insn (target, temp);
3613 return;
3615 #endif
3617 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3618 || TREE_CODE (type) == QUAL_UNION_TYPE)
3620 register tree elt;
3622 /* Inform later passes that the whole union value is dead. */
3623 if (TREE_CODE (type) == UNION_TYPE
3624 || TREE_CODE (type) == QUAL_UNION_TYPE)
3625 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3627 /* If we are building a static constructor into a register,
3628 set the initial value as zero so we can fold the value into
3629 a constant. But if more than one register is involved,
3630 this probably loses. */
3631 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3632 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3634 if (! cleared)
3635 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
3637 cleared = 1;
3640 /* If the constructor has fewer fields than the structure
3641 or if we are initializing the structure to mostly zeros,
3642 clear the whole structure first. */
3643 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3644 != list_length (TYPE_FIELDS (type)))
3645 || mostly_zeros_p (exp))
3647 if (! cleared)
3648 clear_storage (target, expr_size (exp),
3649 TYPE_ALIGN (type) / BITS_PER_UNIT);
3651 cleared = 1;
3653 else
3654 /* Inform later passes that the old value is dead. */
3655 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3657 /* Store each element of the constructor into
3658 the corresponding field of TARGET. */
3660 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3662 register tree field = TREE_PURPOSE (elt);
3663 register enum machine_mode mode;
3664 int bitsize;
3665 int bitpos = 0;
3666 int unsignedp;
3667 tree pos, constant = 0, offset = 0;
3668 rtx to_rtx = target;
3670 /* Just ignore missing fields.
3671 We cleared the whole structure, above,
3672 if any fields are missing. */
3673 if (field == 0)
3674 continue;
3676 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3677 continue;
3679 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3680 unsignedp = TREE_UNSIGNED (field);
3681 mode = DECL_MODE (field);
3682 if (DECL_BIT_FIELD (field))
3683 mode = VOIDmode;
3685 pos = DECL_FIELD_BITPOS (field);
3686 if (TREE_CODE (pos) == INTEGER_CST)
3687 constant = pos;
3688 else if (TREE_CODE (pos) == PLUS_EXPR
3689 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3690 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3691 else
3692 offset = pos;
3694 if (constant)
3695 bitpos = TREE_INT_CST_LOW (constant);
3697 if (offset)
3699 rtx offset_rtx;
3701 if (contains_placeholder_p (offset))
3702 offset = build (WITH_RECORD_EXPR, sizetype,
3703 offset, make_tree (TREE_TYPE (exp), target));
3705 offset = size_binop (FLOOR_DIV_EXPR, offset,
3706 size_int (BITS_PER_UNIT));
3708 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3709 if (GET_CODE (to_rtx) != MEM)
3710 abort ();
3712 to_rtx
3713 = change_address (to_rtx, VOIDmode,
3714 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
3715 force_reg (ptr_mode, offset_rtx)));
3717 if (TREE_READONLY (field))
3719 if (GET_CODE (to_rtx) == MEM)
3720 to_rtx = copy_rtx (to_rtx);
3722 RTX_UNCHANGING_P (to_rtx) = 1;
3725 store_constructor_field (to_rtx, bitsize, bitpos,
3726 mode, TREE_VALUE (elt), type, cleared);
3729 else if (TREE_CODE (type) == ARRAY_TYPE)
3731 register tree elt;
3732 register int i;
3733 int need_to_clear;
3734 tree domain = TYPE_DOMAIN (type);
3735 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3736 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3737 tree elttype = TREE_TYPE (type);
3739 /* If the constructor has fewer elements than the array,
3740 clear the whole array first. Similarly if this is a
3741 static constructor of a non-BLKmode object. */
3742 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3743 need_to_clear = 1;
3744 else
3746 HOST_WIDE_INT count = 0, zero_count = 0;
3747 need_to_clear = 0;
3748 /* This loop is a more accurate version of the loop in
3749 mostly_zeros_p (it handles RANGE_EXPR in an index).
3750 It is also needed to check for missing elements. */
3751 for (elt = CONSTRUCTOR_ELTS (exp);
3752 elt != NULL_TREE;
3753 elt = TREE_CHAIN (elt))
3755 tree index = TREE_PURPOSE (elt);
3756 HOST_WIDE_INT this_node_count;
3757 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3759 tree lo_index = TREE_OPERAND (index, 0);
3760 tree hi_index = TREE_OPERAND (index, 1);
3761 if (TREE_CODE (lo_index) != INTEGER_CST
3762 || TREE_CODE (hi_index) != INTEGER_CST)
3764 need_to_clear = 1;
3765 break;
3767 this_node_count = TREE_INT_CST_LOW (hi_index)
3768 - TREE_INT_CST_LOW (lo_index) + 1;
3770 else
3771 this_node_count = 1;
3772 count += this_node_count;
3773 if (mostly_zeros_p (TREE_VALUE (elt)))
3774 zero_count += this_node_count;
3776 /* Clear the entire array first if there are any missing elements,
3777 or if the incidence of zero elements is >= 75%. */
3778 if (count < maxelt - minelt + 1
3779 || 4 * zero_count >= 3 * count)
3780 need_to_clear = 1;
3782 if (need_to_clear)
3784 if (! cleared)
3785 clear_storage (target, expr_size (exp),
3786 TYPE_ALIGN (type) / BITS_PER_UNIT);
3787 cleared = 1;
3789 else
3790 /* Inform later passes that the old value is dead. */
3791 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
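/* Worked example, never compiled: the pre-clearing test above for a
   [0..9] array whose constructor covers only 8 elements, 6 of them
   zero.  Elements are missing, and the zero ratio meets 3/4 anyway,
   so the whole array is cleared first.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int minelt = 0, maxelt = 9;
  int count = 8, zero_count = 6;
  int need_to_clear = count < maxelt - minelt + 1
                      || 4 * zero_count >= 3 * count;

  printf ("need_to_clear = %d\n", need_to_clear);   /* prints 1 */
  return 0;
}
#endif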
3793 /* Store each element of the constructor into
3794 the corresponding element of TARGET, determined
3795 by counting the elements. */
3796 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3797 elt;
3798 elt = TREE_CHAIN (elt), i++)
3800 register enum machine_mode mode;
3801 int bitsize;
3802 int bitpos;
3803 int unsignedp;
3804 tree value = TREE_VALUE (elt);
3805 tree index = TREE_PURPOSE (elt);
3806 rtx xtarget = target;
3808 if (cleared && is_zeros_p (value))
3809 continue;
3811 mode = TYPE_MODE (elttype);
3812 bitsize = GET_MODE_BITSIZE (mode);
3813 unsignedp = TREE_UNSIGNED (elttype);
3815 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3817 tree lo_index = TREE_OPERAND (index, 0);
3818 tree hi_index = TREE_OPERAND (index, 1);
3819 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3820 struct nesting *loop;
3821 HOST_WIDE_INT lo, hi, count;
3822 tree position;
3824 /* If the range is constant and "small", unroll the loop. */
3825 if (TREE_CODE (lo_index) == INTEGER_CST
3826 && TREE_CODE (hi_index) == INTEGER_CST
3827 && (lo = TREE_INT_CST_LOW (lo_index),
3828 hi = TREE_INT_CST_LOW (hi_index),
3829 count = hi - lo + 1,
3830 (GET_CODE (target) != MEM
3831 || count <= 2
3832 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3833 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3834 <= 40 * 8))))
3836 lo -= minelt; hi -= minelt;
3837 for (; lo <= hi; lo++)
3839 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3840 store_constructor_field (target, bitsize, bitpos,
3841 mode, value, type, cleared);
3844 else
3846 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3847 loop_top = gen_label_rtx ();
3848 loop_end = gen_label_rtx ();
3850 unsignedp = TREE_UNSIGNED (domain);
3852 index = build_decl (VAR_DECL, NULL_TREE, domain);
3854 DECL_RTL (index) = index_r
3855 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3856 &unsignedp, 0));
3858 if (TREE_CODE (value) == SAVE_EXPR
3859 && SAVE_EXPR_RTL (value) == 0)
3861 /* Make sure value gets expanded once before the
3862 loop. */
3863 expand_expr (value, const0_rtx, VOIDmode, 0);
3864 emit_queue ();
3866 store_expr (lo_index, index_r, 0);
3867 loop = expand_start_loop (0);
3869 /* Assign value to element index. */
3870 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3871 size_int (BITS_PER_UNIT));
3872 position = size_binop (MULT_EXPR,
3873 size_binop (MINUS_EXPR, index,
3874 TYPE_MIN_VALUE (domain)),
3875 position);
3876 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3877 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3878 xtarget = change_address (target, mode, addr);
3879 if (TREE_CODE (value) == CONSTRUCTOR)
3880 store_constructor (value, xtarget, cleared);
3881 else
3882 store_expr (value, xtarget, 0);
3884 expand_exit_loop_if_false (loop,
3885 build (LT_EXPR, integer_type_node,
3886 index, hi_index));
3888 expand_increment (build (PREINCREMENT_EXPR,
3889 TREE_TYPE (index),
3890 index, integer_one_node), 0, 0);
3891 expand_end_loop ();
3892 emit_label (loop_end);
3894 /* Needed by stupid register allocation, to extend the
3895 lifetime of pseudo-regs used by target past the end
3896 of the loop. */
3897 emit_insn (gen_rtx (USE, GET_MODE (target), target));
3900 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3901 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3903 rtx pos_rtx, addr;
3904 tree position;
3906 if (index == 0)
3907 index = size_int (i);
3909 if (minelt)
3910 index = size_binop (MINUS_EXPR, index,
3911 TYPE_MIN_VALUE (domain));
3912 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3913 size_int (BITS_PER_UNIT));
3914 position = size_binop (MULT_EXPR, index, position);
3915 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3916 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3917 xtarget = change_address (target, mode, addr);
3918 store_expr (value, xtarget, 0);
3920 else
3922 if (index != 0)
3923 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3924 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3925 else
3926 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3927 store_constructor_field (target, bitsize, bitpos,
3928 mode, value, type, cleared);
3932 /* Set constructor assignments.  */
3933 else if (TREE_CODE (type) == SET_TYPE)
3935 tree elt = CONSTRUCTOR_ELTS (exp);
3936 rtx xtarget = XEXP (target, 0);
3937 int set_word_size = TYPE_ALIGN (type);
3938 int nbytes = int_size_in_bytes (type), nbits;
3939 tree domain = TYPE_DOMAIN (type);
3940 tree domain_min, domain_max, bitlength;
3942 /* The default implementation strategy is to extract the constant
3943 parts of the constructor, use that to initialize the target,
3944 and then "or" in whatever non-constant ranges we need in addition.
3946 If a large set is all zero or all ones, it is
3947 probably better to set it using memset (if available) or bzero.
3948 Also, if a large set has just a single range, it may be
3949 better to first clear the whole set (using bzero/memset)
3950 and then set the bits we want. */
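/* For instance, with a 32-bit SET_WORD_SIZE, a set whose only
   members are 3 and 5 packs into the single constant word
   (1 << 3) | (1 << 5) == 0x28 on a little-endian target (the bits
   are numbered from the other end when BYTES_BIG_ENDIAN, as in the
   loop below), and is then initialized with one move insn.  */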
3952 /* Check for all zeros. */
3953 if (elt == NULL_TREE)
3955 if (!cleared)
3956 clear_storage (target, expr_size (exp),
3957 TYPE_ALIGN (type) / BITS_PER_UNIT);
3958 return;
3961 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3962 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3963 bitlength = size_binop (PLUS_EXPR,
3964 size_binop (MINUS_EXPR, domain_max, domain_min),
3965 size_one_node);
3967 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
3968 abort ();
3969 nbits = TREE_INT_CST_LOW (bitlength);
3971 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3972 are "complicated" (more than one range), initialize (the
3973 constant parts) by copying from a constant. */
3974 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
3975 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
3977 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3978 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
3979 char *bit_buffer = (char *) alloca (nbits);
3980 HOST_WIDE_INT word = 0;
3981 int bit_pos = 0;
3982 int ibit = 0;
3983 int offset = 0; /* In bytes from beginning of set. */
3984 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
3985 for (;;)
3987 if (bit_buffer[ibit])
3989 if (BYTES_BIG_ENDIAN)
3990 word |= (1 << (set_word_size - 1 - bit_pos));
3991 else
3992 word |= 1 << bit_pos;
3994 bit_pos++; ibit++;
3995 if (bit_pos >= set_word_size || ibit == nbits)
3997 if (word != 0 || ! cleared)
3999 rtx datum = GEN_INT (word);
4000 rtx to_rtx;
4001 /* The assumption here is that it is safe to use
4002 XEXP if the set is multi-word, but not if
4003 it's single-word. */
4004 if (GET_CODE (target) == MEM)
4006 to_rtx = plus_constant (XEXP (target, 0), offset);
4007 to_rtx = change_address (target, mode, to_rtx);
4009 else if (offset == 0)
4010 to_rtx = target;
4011 else
4012 abort ();
4013 emit_move_insn (to_rtx, datum);
4015 if (ibit == nbits)
4016 break;
4017 word = 0;
4018 bit_pos = 0;
4019 offset += set_word_size / BITS_PER_UNIT;
4023 else if (!cleared)
4025 /* Don't bother clearing storage if the set is all ones. */
4026 if (TREE_CHAIN (elt) != NULL_TREE
4027 || (TREE_PURPOSE (elt) == NULL_TREE
4028 ? nbits != 1
4029 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4030 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4031 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4032 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4033 != nbits))))
4034 clear_storage (target, expr_size (exp),
4035 TYPE_ALIGN (type) / BITS_PER_UNIT);
4038 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4040 /* Start of range of element, or NULL.  */
4041 tree startbit = TREE_PURPOSE (elt);
4042 /* End of range of element, or element value.  */
4043 tree endbit = TREE_VALUE (elt);
4044 HOST_WIDE_INT startb, endb;
4045 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4047 bitlength_rtx = expand_expr (bitlength,
4048 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4050 /* Handle a non-range tuple element like [ expr ].  */
4051 if (startbit == NULL_TREE)
4053 startbit = save_expr (endbit);
4054 endbit = startbit;
4056 startbit = convert (sizetype, startbit);
4057 endbit = convert (sizetype, endbit);
4058 if (! integer_zerop (domain_min))
4060 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4061 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4063 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4064 EXPAND_CONST_ADDRESS);
4065 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4066 EXPAND_CONST_ADDRESS);
4068 if (REG_P (target))
4070 targetx = assign_stack_temp (GET_MODE (target),
4071 GET_MODE_SIZE (GET_MODE (target)),
4073 emit_move_insn (targetx, target);
4075 else if (GET_CODE (target) == MEM)
4076 targetx = target;
4077 else
4078 abort ();
4080 #ifdef TARGET_MEM_FUNCTIONS
4081 /* Optimization: If startbit and endbit are
4082 constants divisible by BITS_PER_UNIT,
4083 call memset instead. */
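/* E.g. with BITS_PER_UNIT == 8, the member range [8 .. 23] gives
   STARTB == 8 and ENDB == 24, both multiples of 8, so this emits
   roughly memset (ptr + 1, -1, 2) -- two whole bytes of ones --
   rather than a call to __setbits.  */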
4084 if (TREE_CODE (startbit) == INTEGER_CST
4085 && TREE_CODE (endbit) == INTEGER_CST
4086 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4087 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4089 emit_library_call (memset_libfunc, 0,
4090 VOIDmode, 3,
4091 plus_constant (XEXP (targetx, 0),
4092 startb / BITS_PER_UNIT),
4093 Pmode,
4094 constm1_rtx, TYPE_MODE (integer_type_node),
4095 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4096 TYPE_MODE (sizetype));
4098 else
4099 #endif
4101 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
4102 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4103 bitlength_rtx, TYPE_MODE (sizetype),
4104 startbit_rtx, TYPE_MODE (sizetype),
4105 endbit_rtx, TYPE_MODE (sizetype));
4107 if (REG_P (target))
4108 emit_move_insn (target, targetx);
4112 else
4113 abort ();
4116 /* Store the value of EXP (an expression tree)
4117 into a subfield of TARGET which has mode MODE and occupies
4118 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4119 If MODE is VOIDmode, it means that we are storing into a bit-field.
4121 If VALUE_MODE is VOIDmode, return nothing in particular.
4122 UNSIGNEDP is not used in this case.
4124 Otherwise, return an rtx for the value stored. This rtx
4125 has mode VALUE_MODE if that is convenient to do.
4126 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4128 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4129 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
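/* As an illustration: storing into B of
   `struct { int a : 3; int b : 5; } x;' would typically reach here
   with BITSIZE == 5, BITPOS == 3 and MODE == VOIDmode (a
   bit-field), while storing a plain `int' member on a 32-bit
   target would use BITSIZE == 32, a byte-aligned BITPOS and
   MODE == SImode.  */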
4131 static rtx
4132 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4133 unsignedp, align, total_size)
4134 rtx target;
4135 int bitsize, bitpos;
4136 enum machine_mode mode;
4137 tree exp;
4138 enum machine_mode value_mode;
4139 int unsignedp;
4140 int align;
4141 int total_size;
4143 HOST_WIDE_INT width_mask = 0;
4145 if (TREE_CODE (exp) == ERROR_MARK)
4146 return const0_rtx;
4148 if (bitsize < HOST_BITS_PER_WIDE_INT)
4149 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4151 /* If we are storing into an unaligned field of an aligned union that is
4152 in a register, we may have the mode of TARGET being an integer mode but
4153 MODE == BLKmode. In that case, get an aligned object whose size and
4154 alignment are the same as TARGET and store TARGET into it (we can avoid
4155 the store if the field being stored is the entire width of TARGET). Then
4156 call ourselves recursively to store the field into a BLKmode version of
4157 that object. Finally, load from the object into TARGET. This is not
4158 very efficient in general, but should only be slightly more expensive
4159 than the otherwise-required unaligned accesses. Perhaps this can be
4160 cleaned up later. */
4162 if (mode == BLKmode
4163 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4165 rtx object = assign_stack_temp (GET_MODE (target),
4166 GET_MODE_SIZE (GET_MODE (target)), 0);
4167 rtx blk_object = copy_rtx (object);
4169 MEM_IN_STRUCT_P (object) = 1;
4170 MEM_IN_STRUCT_P (blk_object) = 1;
4171 PUT_MODE (blk_object, BLKmode);
4173 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4174 emit_move_insn (object, target);
4176 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4177 align, total_size);
4179 /* Even though we aren't returning target, we need to
4180 give it the updated value. */
4181 emit_move_insn (target, object);
4183 return blk_object;
4186 /* If the structure is in a register or if the component
4187 is a bit field, we cannot use addressing to access it.
4188 Use bit-field techniques or SUBREG to store in it. */
4190 if (mode == VOIDmode
4191 || (mode != BLKmode && ! direct_store[(int) mode])
4192 || GET_CODE (target) == REG
4193 || GET_CODE (target) == SUBREG
4194 /* If the field isn't aligned enough to store as an ordinary memref,
4195 store it as a bit field. */
4196 || (SLOW_UNALIGNED_ACCESS
4197 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4198 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4200 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4202 /* If BITSIZE is narrower than the size of the type of EXP
4203 we will be narrowing TEMP. Normally, what's wanted are the
4204 low-order bits. However, if EXP's type is a record and this is
4205 big-endian machine, we want the upper BITSIZE bits. */
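/* Concretely: narrowing a 32-bit TEMP to a 12-bit record field on
   a big-endian machine shifts TEMP right by 32 - 12 == 20, so the
   bits that were at the top become the low-order bits consumed by
   store_bit_field below.  */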
4206 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4207 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4208 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4209 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4210 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4211 - bitsize),
4212 temp, 1);
4214 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4215 MODE. */
4216 if (mode != VOIDmode && mode != BLKmode
4217 && mode != TYPE_MODE (TREE_TYPE (exp)))
4218 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4220 /* If the modes of TARGET and TEMP are both BLKmode, both
4221 must be in memory and BITPOS must be aligned on a byte
4222 boundary. If so, we simply do a block copy. */
4223 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4225 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4226 || bitpos % BITS_PER_UNIT != 0)
4227 abort ();
4229 target = change_address (target, VOIDmode,
4230 plus_constant (XEXP (target, 0),
4231 bitpos / BITS_PER_UNIT));
4233 emit_block_move (target, temp,
4234 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4235 / BITS_PER_UNIT),
4238 return value_mode == VOIDmode ? const0_rtx : target;
4241 /* Store the value in the bitfield. */
4242 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4243 if (value_mode != VOIDmode)
4245 /* The caller wants an rtx for the value. */
4246 /* If possible, avoid refetching from the bitfield itself. */
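/* E.g. after storing an 8-bit field, the value the caller wants
   can usually be rebuilt from TEMP without re-reading memory: an
   unsigned field is just TEMP & 0xff (WIDTH_MASK), and a signed
   one is shifted left and then arithmetically right by
   GET_MODE_BITSIZE (tmode) - 8 to propagate the sign bit.  */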
4247 if (width_mask != 0
4248 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4250 tree count;
4251 enum machine_mode tmode;
4253 if (unsignedp)
4254 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4255 tmode = GET_MODE (temp);
4256 if (tmode == VOIDmode)
4257 tmode = value_mode;
4258 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4259 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4260 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4262 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4263 NULL_RTX, value_mode, 0, align,
4264 total_size);
4266 return const0_rtx;
4268 else
4270 rtx addr = XEXP (target, 0);
4271 rtx to_rtx;
4273 /* If a value is wanted, it must be the lhs;
4274 so make the address stable for multiple use. */
4276 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4277 && ! CONSTANT_ADDRESS_P (addr)
4278 /* A frame-pointer reference is already stable. */
4279 && ! (GET_CODE (addr) == PLUS
4280 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4281 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4282 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4283 addr = copy_to_reg (addr);
4285 /* Now build a reference to just the desired component. */
4287 to_rtx = copy_rtx (change_address (target, mode,
4288 plus_constant (addr,
4289 (bitpos
4290 / BITS_PER_UNIT))));
4291 MEM_IN_STRUCT_P (to_rtx) = 1;
4293 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4297 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4298 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4299 ARRAY_REFs and find the ultimate containing object, which we return.
4301 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4302 bit position, and *PUNSIGNEDP to the signedness of the field.
4303 If the position of the field is variable, we store a tree
4304 giving the variable offset (in units) in *POFFSET.
4305 This offset is in addition to the bit position.
4306 If the position is not variable, we store 0 in *POFFSET.
4307 We set *PALIGNMENT to the alignment in bytes of the address that will be
4308 computed. This is the alignment of the thing we return if *POFFSET
4309 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4311 If any of the extraction expressions is volatile,
4312 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4314 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4315 is a mode that can be used to access the field. In that case, *PBITSIZE
4316 is redundant.
4318 If the field describes a variable-sized object, *PMODE is set to
4319 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4320 this case, but the address of the object can be found. */
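/* For instance, for `s.f' where F is an `int' member 4 bytes into
   S (on a typical 32-bit target), this returns the tree for S with
   *PBITSIZE == 32, *PBITPOS == 32 and *POFFSET == 0; for `a[i]'
   with a variable I, *PBITPOS stays 0 and *POFFSET holds I scaled
   to units.  */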
4322 tree
4323 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4324 punsignedp, pvolatilep, palignment)
4325 tree exp;
4326 int *pbitsize;
4327 int *pbitpos;
4328 tree *poffset;
4329 enum machine_mode *pmode;
4330 int *punsignedp;
4331 int *pvolatilep;
4332 int *palignment;
4334 tree orig_exp = exp;
4335 tree size_tree = 0;
4336 enum machine_mode mode = VOIDmode;
4337 tree offset = integer_zero_node;
4338 int alignment = BIGGEST_ALIGNMENT;
4340 if (TREE_CODE (exp) == COMPONENT_REF)
4342 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4343 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4344 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4345 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4347 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4349 size_tree = TREE_OPERAND (exp, 1);
4350 *punsignedp = TREE_UNSIGNED (exp);
4352 else
4354 mode = TYPE_MODE (TREE_TYPE (exp));
4355 *pbitsize = GET_MODE_BITSIZE (mode);
4356 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4359 if (size_tree)
4361 if (TREE_CODE (size_tree) != INTEGER_CST)
4362 mode = BLKmode, *pbitsize = -1;
4363 else
4364 *pbitsize = TREE_INT_CST_LOW (size_tree);
4367 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4368 and find the ultimate containing object. */
4370 *pbitpos = 0;
4372 while (1)
4374 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4376 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4377 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4378 : TREE_OPERAND (exp, 2));
4379 tree constant = integer_zero_node, var = pos;
4381 /* If this field hasn't been filled in yet, don't go
4382 past it. This should only happen when folding expressions
4383 made during type construction. */
4384 if (pos == 0)
4385 break;
4387 /* Assume here that the offset is a multiple of a unit.
4388 If not, there should be an explicitly added constant. */
4389 if (TREE_CODE (pos) == PLUS_EXPR
4390 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4391 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4392 else if (TREE_CODE (pos) == INTEGER_CST)
4393 constant = pos, var = integer_zero_node;
4395 *pbitpos += TREE_INT_CST_LOW (constant);
4396 offset = size_binop (PLUS_EXPR, offset,
4397 size_binop (EXACT_DIV_EXPR, var,
4398 size_int (BITS_PER_UNIT)));
4401 else if (TREE_CODE (exp) == ARRAY_REF)
4403 /* This code is based on the code in case ARRAY_REF in expand_expr
4404 below. We assume here that the size of an array element is
4405 always an integral multiple of BITS_PER_UNIT. */
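/* E.g. with 32-bit elements, the index is scaled by
   TYPE_SIZE == 32 bits: a constant index of 3 simply adds 96 to
   *PBITPOS, while a variable index I contributes
   I * 32 / BITS_PER_UNIT == I * 4 units to the variable OFFSET
   instead.  */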
4407 tree index = TREE_OPERAND (exp, 1);
4408 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4409 tree low_bound
4410 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4411 tree index_type = TREE_TYPE (index);
4413 if (! integer_zerop (low_bound))
4414 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4416 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4418 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4419 index);
4420 index_type = TREE_TYPE (index);
4423 index = fold (build (MULT_EXPR, index_type, index,
4424 convert (index_type,
4425 TYPE_SIZE (TREE_TYPE (exp)))));
4427 if (TREE_CODE (index) == INTEGER_CST
4428 && TREE_INT_CST_HIGH (index) == 0)
4429 *pbitpos += TREE_INT_CST_LOW (index);
4430 else
4432 offset = size_binop (PLUS_EXPR, offset,
4433 size_binop (FLOOR_DIV_EXPR, index,
4434 size_int (BITS_PER_UNIT)));
4436 if (contains_placeholder_p (offset))
4437 offset = build (WITH_RECORD_EXPR, sizetype, offset, exp);
4440 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4441 && ! ((TREE_CODE (exp) == NOP_EXPR
4442 || TREE_CODE (exp) == CONVERT_EXPR)
4443 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4444 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4445 != UNION_TYPE))
4446 && (TYPE_MODE (TREE_TYPE (exp))
4447 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4448 break;
4450 /* If any reference in the chain is volatile, the effect is volatile. */
4451 if (TREE_THIS_VOLATILE (exp))
4452 *pvolatilep = 1;
4454 /* If the offset is non-constant already, then we can't assume any
4455 alignment more than the alignment here. */
4456 if (! integer_zerop (offset))
4457 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4459 exp = TREE_OPERAND (exp, 0);
4462 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4463 alignment = MIN (alignment, DECL_ALIGN (exp));
4464 else if (TREE_TYPE (exp) != 0)
4465 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4467 if (integer_zerop (offset))
4468 offset = 0;
4470 if (offset != 0 && contains_placeholder_p (offset))
4471 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4473 *pmode = mode;
4474 *poffset = offset;
4475 *palignment = alignment / BITS_PER_UNIT;
4476 return exp;
4479 /* Subroutine of expand_expr: compute memory_usage from modifier. */
4480 static enum memory_use_mode
4481 get_memory_usage_from_modifier (modifier)
4482 enum expand_modifier modifier;
4484 switch (modifier)
4486 case EXPAND_NORMAL:
4487 return MEMORY_USE_RO;
4488 break;
4489 case EXPAND_MEMORY_USE_WO:
4490 return MEMORY_USE_WO;
4491 break;
4492 case EXPAND_MEMORY_USE_RW:
4493 return MEMORY_USE_RW;
4494 break;
4495 case EXPAND_INITIALIZER:
4496 case EXPAND_MEMORY_USE_DONT:
4497 case EXPAND_SUM:
4498 case EXPAND_CONST_ADDRESS:
4499 return MEMORY_USE_DONT;
4500 case EXPAND_MEMORY_USE_BAD:
4501 default:
4502 abort ();
4506 /* Given an rtx VALUE that may contain additions and multiplications,
4507 return an equivalent value that just refers to a register or memory.
4508 This is done by generating instructions to perform the arithmetic
4509 and returning a pseudo-register containing the value.
4511 The returned value may be a REG, SUBREG, MEM or constant. */
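/* For instance, given (plus:SI (mult:SI (reg:SI 100) (const_int 4))
   (reg:SI 101)), this emits a multiply and an add and returns the
   pseudo register holding the sum, so the caller gets a plain
   register rather than an address-style arithmetic expression.  */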
4513 rtx
4514 force_operand (value, target)
4515 rtx value, target;
4517 register optab binoptab = 0;
4518 /* Use a temporary to force order of execution of calls to
4519 `force_operand'. */
4520 rtx tmp;
4521 register rtx op2;
4522 /* Use subtarget as the target for operand 0 of a binary operation. */
4523 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4525 if (GET_CODE (value) == PLUS)
4526 binoptab = add_optab;
4527 else if (GET_CODE (value) == MINUS)
4528 binoptab = sub_optab;
4529 else if (GET_CODE (value) == MULT)
4531 op2 = XEXP (value, 1);
4532 if (!CONSTANT_P (op2)
4533 && !(GET_CODE (op2) == REG && op2 != subtarget))
4534 subtarget = 0;
4535 tmp = force_operand (XEXP (value, 0), subtarget);
4536 return expand_mult (GET_MODE (value), tmp,
4537 force_operand (op2, NULL_RTX),
4538 target, 0);
4541 if (binoptab)
4543 op2 = XEXP (value, 1);
4544 if (!CONSTANT_P (op2)
4545 && !(GET_CODE (op2) == REG && op2 != subtarget))
4546 subtarget = 0;
4547 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4549 binoptab = add_optab;
4550 op2 = negate_rtx (GET_MODE (value), op2);
4553 /* Check for an addition with OP2 a constant integer and our first
4554 operand a PLUS of a virtual register and something else. In that
4555 case, we want to emit the sum of the virtual register and the
4556 constant first and then add the other value. This allows virtual
4557 register instantiation to simply modify the constant rather than
4558 creating another one around this addition. */
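/* I.e. for (plus (plus virtual-stack-vars (reg)) (const_int 8)) we
   add the constant to the virtual register first; instantiation
   can then fold that addition into the frame-pointer offset
   instead of leaving a separate add insn behind.  */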
4559 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4560 && GET_CODE (XEXP (value, 0)) == PLUS
4561 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4562 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4563 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4565 rtx temp = expand_binop (GET_MODE (value), binoptab,
4566 XEXP (XEXP (value, 0), 0), op2,
4567 subtarget, 0, OPTAB_LIB_WIDEN);
4568 return expand_binop (GET_MODE (value), binoptab, temp,
4569 force_operand (XEXP (XEXP (value, 0), 1), 0),
4570 target, 0, OPTAB_LIB_WIDEN);
4573 tmp = force_operand (XEXP (value, 0), subtarget);
4574 return expand_binop (GET_MODE (value), binoptab, tmp,
4575 force_operand (op2, NULL_RTX),
4576 target, 0, OPTAB_LIB_WIDEN);
4577 /* We give UNSIGNEDP = 0 to expand_binop
4578 because the only operations we are expanding here are signed ones. */
4580 return value;
4583 /* Subroutine of expand_expr:
4584 save the non-copied parts (LIST) of an expr (LHS), and return a list
4585 which can restore these values to their previous values,
4586 should something modify their storage. */
4588 static tree
4589 save_noncopied_parts (lhs, list)
4590 tree lhs;
4591 tree list;
4593 tree tail;
4594 tree parts = 0;
4596 for (tail = list; tail; tail = TREE_CHAIN (tail))
4597 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4598 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4599 else
4601 tree part = TREE_VALUE (tail);
4602 tree part_type = TREE_TYPE (part);
4603 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
4604 rtx target = assign_temp (part_type, 0, 1, 1);
4605 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
4606 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
4607 parts = tree_cons (to_be_saved,
4608 build (RTL_EXPR, part_type, NULL_TREE,
4609 (tree) target),
4610 parts);
4611 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4613 return parts;
4616 /* Subroutine of expand_expr:
4617 record the non-copied parts (LIST) of an expr (LHS), and return a list
4618 which specifies the initial values of these parts. */
4620 static tree
4621 init_noncopied_parts (lhs, list)
4622 tree lhs;
4623 tree list;
4625 tree tail;
4626 tree parts = 0;
4628 for (tail = list; tail; tail = TREE_CHAIN (tail))
4629 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4630 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4631 else
4633 tree part = TREE_VALUE (tail);
4634 tree part_type = TREE_TYPE (part);
4635 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
4636 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4638 return parts;
4641 /* Subroutine of expand_expr: return nonzero iff there is no way that
4642 EXP can reference X, which is being modified. */
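/* For example, when expanding an assignment like `x = y + f (x)'
   directly into the rtx for X, safe_from_p reports whether the
   right-hand side might still read X; if it might, the caller has
   to evaluate into a fresh temporary instead of clobbering X
   early.  */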
4644 static int
4645 safe_from_p (x, exp)
4646 rtx x;
4647 tree exp;
4649 rtx exp_rtl = 0;
4650 int i, nops;
4652 if (x == 0
4653 /* If EXP has varying size, we MUST use a target since we currently
4654 have no way of allocating temporaries of variable size
4655 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4656 So we assume here that something at a higher level has prevented a
4657 clash. This is somewhat bogus, but the best we can do. Only
4658 do this when X is BLKmode. */
4659 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4660 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
4661 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
4662 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
4663 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
4664 != INTEGER_CST)
4665 && GET_MODE (x) == BLKmode))
4666 return 1;
4668 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4669 find the underlying pseudo. */
4670 if (GET_CODE (x) == SUBREG)
4672 x = SUBREG_REG (x);
4673 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4674 return 0;
4677 /* If X is a location in the outgoing argument area, it is always safe. */
4678 if (GET_CODE (x) == MEM
4679 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4680 || (GET_CODE (XEXP (x, 0)) == PLUS
4681 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
4682 return 1;
4684 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4686 case 'd':
4687 exp_rtl = DECL_RTL (exp);
4688 break;
4690 case 'c':
4691 return 1;
4693 case 'x':
4694 if (TREE_CODE (exp) == TREE_LIST)
4695 return ((TREE_VALUE (exp) == 0
4696 || safe_from_p (x, TREE_VALUE (exp)))
4697 && (TREE_CHAIN (exp) == 0
4698 || safe_from_p (x, TREE_CHAIN (exp))));
4699 else
4700 return 0;
4702 case '1':
4703 return safe_from_p (x, TREE_OPERAND (exp, 0));
4705 case '2':
4706 case '<':
4707 return (safe_from_p (x, TREE_OPERAND (exp, 0))
4708 && safe_from_p (x, TREE_OPERAND (exp, 1)));
4710 case 'e':
4711 case 'r':
4712 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4713 the expression. If it is set, we conflict iff we are that rtx or
4714 both are in memory. Otherwise, we check all operands of the
4715 expression recursively. */
4717 switch (TREE_CODE (exp))
4719 case ADDR_EXPR:
4720 return (staticp (TREE_OPERAND (exp, 0))
4721 || safe_from_p (x, TREE_OPERAND (exp, 0)));
4723 case INDIRECT_REF:
4724 if (GET_CODE (x) == MEM)
4725 return 0;
4726 break;
4728 case CALL_EXPR:
4729 exp_rtl = CALL_EXPR_RTL (exp);
4730 if (exp_rtl == 0)
4732 /* Assume that the call will clobber all hard registers and
4733 all of memory. */
4734 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4735 || GET_CODE (x) == MEM)
4736 return 0;
4739 break;
4741 case RTL_EXPR:
4742 /* If a sequence exists, we would have to scan every instruction
4743 in the sequence to see if it was safe. This is probably not
4744 worthwhile. */
4745 if (RTL_EXPR_SEQUENCE (exp))
4746 return 0;
4748 exp_rtl = RTL_EXPR_RTL (exp);
4749 break;
4751 case WITH_CLEANUP_EXPR:
4752 exp_rtl = RTL_EXPR_RTL (exp);
4753 break;
4755 case CLEANUP_POINT_EXPR:
4756 return safe_from_p (x, TREE_OPERAND (exp, 0));
4758 case SAVE_EXPR:
4759 exp_rtl = SAVE_EXPR_RTL (exp);
4760 break;
4762 case BIND_EXPR:
4763 /* The only operand we look at is operand 1. The rest aren't
4764 part of the expression. */
4765 return safe_from_p (x, TREE_OPERAND (exp, 1));
4767 case METHOD_CALL_EXPR:
4768 /* This takes a rtx argument, but shouldn't appear here. */
4769 abort ();
4771 default:
4772 break;
4775 /* If we have an rtx, we do not need to scan our operands. */
4776 if (exp_rtl)
4777 break;
4779 nops = tree_code_length[(int) TREE_CODE (exp)];
4780 for (i = 0; i < nops; i++)
4781 if (TREE_OPERAND (exp, i) != 0
4782 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
4783 return 0;
4786 /* If we have an rtl, find any enclosed object. Then see if we conflict
4787 with it. */
4788 if (exp_rtl)
4790 if (GET_CODE (exp_rtl) == SUBREG)
4792 exp_rtl = SUBREG_REG (exp_rtl);
4793 if (GET_CODE (exp_rtl) == REG
4794 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
4795 return 0;
4798 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4799 are memory and EXP is not readonly. */
4800 return ! (rtx_equal_p (x, exp_rtl)
4801 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
4802 && ! TREE_READONLY (exp)));
4805 /* If we reach here, it is safe. */
4806 return 1;
4809 /* Subroutine of expand_expr: return nonzero iff EXP is an
4810 expression whose type is statically determinable. */
4812 static int
4813 fixed_type_p (exp)
4814 tree exp;
4816 if (TREE_CODE (exp) == PARM_DECL
4817 || TREE_CODE (exp) == VAR_DECL
4818 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4819 || TREE_CODE (exp) == COMPONENT_REF
4820 || TREE_CODE (exp) == ARRAY_REF)
4821 return 1;
4822 return 0;
4825 /* Subroutine of expand_expr: return rtx if EXP is a
4826 variable or parameter; else return 0. */
4828 static rtx
4829 var_rtx (exp)
4830 tree exp;
4832 STRIP_NOPS (exp);
4833 switch (TREE_CODE (exp))
4835 case PARM_DECL:
4836 case VAR_DECL:
4837 return DECL_RTL (exp);
4838 default:
4839 return 0;
4843 /* expand_expr: generate code for computing expression EXP.
4844 An rtx for the computed value is returned. The value is never null.
4845 In the case of a void EXP, const0_rtx is returned.
4847 The value may be stored in TARGET if TARGET is nonzero.
4848 TARGET is just a suggestion; callers must assume that
4849 the rtx returned may not be the same as TARGET.
4851 If TARGET is CONST0_RTX, it means that the value will be ignored.
4853 If TMODE is not VOIDmode, it suggests generating the
4854 result in mode TMODE. But this is done only when convenient.
4855 Otherwise, TMODE is ignored and the value is generated in its natural mode.
4856 TMODE is just a suggestion; callers must assume that
4857 the rtx returned may not have mode TMODE.
4859 Note that TARGET may have neither TMODE nor MODE. In that case, it
4860 probably will not be used.
4862 If MODIFIER is EXPAND_SUM then when EXP is an addition
4863 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4864 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4865 products as above, or REG or MEM, or constant.
4866 Ordinarily in such cases we would output mul or add instructions
4867 and then return a pseudo reg containing the sum.
4869 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4870 it also marks a label as absolutely required (it can't be dead).
4871 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4872 This is used for outputting expressions used in initializers.
4874 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4875 with a constant address even if that address is not normally legitimate.
4876 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
4878 rtx
4879 expand_expr (exp, target, tmode, modifier)
4880 register tree exp;
4881 rtx target;
4882 enum machine_mode tmode;
4883 enum expand_modifier modifier;
4885 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4886 This is static so it will be accessible to our recursive callees. */
4887 static tree placeholder_list = 0;
4888 register rtx op0, op1, temp;
4889 tree type = TREE_TYPE (exp);
4890 int unsignedp = TREE_UNSIGNED (type);
4891 register enum machine_mode mode = TYPE_MODE (type);
4892 register enum tree_code code = TREE_CODE (exp);
4893 optab this_optab;
4894 /* Use subtarget as the target for operand 0 of a binary operation. */
4895 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4896 rtx original_target = target;
4897 /* Maybe defer this until sure not doing bytecode? */
4898 int ignore = (target == const0_rtx
4899 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4900 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4901 || code == COND_EXPR)
4902 && TREE_CODE (type) == VOID_TYPE));
4903 tree context;
4904 /* Used by check-memory-usage to make modifier read only. */
4905 enum expand_modifier ro_modifier;
4907 /* Make a read-only version of the modifier. */
4908 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
4909 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
4910 ro_modifier = modifier;
4911 else
4912 ro_modifier = EXPAND_NORMAL;
4914 if (output_bytecode && modifier != EXPAND_INITIALIZER)
4916 bc_expand_expr (exp);
4917 return NULL;
4920 /* Don't use hard regs as subtargets, because the combiner
4921 can only handle pseudo regs. */
4922 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4923 subtarget = 0;
4924 /* Avoid subtargets inside loops,
4925 since they hide some invariant expressions. */
4926 if (preserve_subexpressions_p ())
4927 subtarget = 0;
4929 /* If we are going to ignore this result, we need only do something
4930 if there is a side-effect somewhere in the expression. If there
4931 is, short-circuit the most common cases here. Note that we must
4932 not call expand_expr with anything but const0_rtx in case this
4933 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
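/* E.g. for a statement expression like `x + f (x);' whose value is
   unused, both operands are expanded with const0_rtx as the target
   purely for their side effects, and no addition is emitted.  */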
4935 if (ignore)
4937 if (! TREE_SIDE_EFFECTS (exp))
4938 return const0_rtx;
4940 /* Ensure we reference a volatile object even if value is ignored. */
4941 if (TREE_THIS_VOLATILE (exp)
4942 && TREE_CODE (exp) != FUNCTION_DECL
4943 && mode != VOIDmode && mode != BLKmode)
4945 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
4946 if (GET_CODE (temp) == MEM)
4947 temp = copy_to_reg (temp);
4948 return const0_rtx;
4951 if (TREE_CODE_CLASS (code) == '1')
4952 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4953 VOIDmode, ro_modifier);
4954 else if (TREE_CODE_CLASS (code) == '2'
4955 || TREE_CODE_CLASS (code) == '<')
4957 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
4958 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
4959 return const0_rtx;
4961 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4962 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4963 /* If the second operand has no side effects, just evaluate
4964 the first. */
4965 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4966 VOIDmode, ro_modifier);
4968 target = 0;
4971 /* If will do cse, generate all results into pseudo registers
4972 since 1) that allows cse to find more things
4973 and 2) otherwise cse could produce an insn the machine
4974 cannot support. */
4976 if (! cse_not_expected && mode != BLKmode && target
4977 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4978 target = subtarget;
4980 switch (code)
4982 case LABEL_DECL:
4984 tree function = decl_function_context (exp);
4985 /* Handle using a label in a containing function. */
4986 if (function != current_function_decl
4987 && function != inline_function_decl && function != 0)
4989 struct function *p = find_function_data (function);
4990 /* Allocate in the memory associated with the function
4991 that the label is in. */
4992 push_obstacks (p->function_obstack,
4993 p->function_maybepermanent_obstack);
4995 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4996 label_rtx (exp), p->forced_labels);
4997 pop_obstacks ();
4999 else if (modifier == EXPAND_INITIALIZER)
5000 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
5001 label_rtx (exp), forced_labels);
5002 temp = gen_rtx (MEM, FUNCTION_MODE,
5003 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
5004 if (function != current_function_decl
5005 && function != inline_function_decl && function != 0)
5006 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5007 return temp;
5010 case PARM_DECL:
5011 if (DECL_RTL (exp) == 0)
5013 error_with_decl (exp, "prior parameter's size depends on `%s'");
5014 return CONST0_RTX (mode);
5017 /* ... fall through ... */
5019 case VAR_DECL:
5020 /* If a static var's type was incomplete when the decl was written,
5021 but the type is complete now, lay out the decl now. */
5022 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5023 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5025 push_obstacks_nochange ();
5026 end_temporary_allocation ();
5027 layout_decl (exp, 0);
5028 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5029 pop_obstacks ();
5032 /* Only check automatic variables. Currently, function arguments are
5033 not checked (this can be done at compile-time with prototypes).
5034 Aggregates are not checked. */
5035 if (flag_check_memory_usage && code == VAR_DECL
5036 && GET_CODE (DECL_RTL (exp)) == MEM
5037 && DECL_CONTEXT (exp) != NULL_TREE
5038 && ! TREE_STATIC (exp)
5039 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5041 enum memory_use_mode memory_usage;
5042 memory_usage = get_memory_usage_from_modifier (modifier);
5044 if (memory_usage != MEMORY_USE_DONT)
5045 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5046 XEXP (DECL_RTL (exp), 0), ptr_mode,
5047 GEN_INT (int_size_in_bytes (type)),
5048 TYPE_MODE (sizetype),
5049 GEN_INT (memory_usage),
5050 TYPE_MODE (integer_type_node));
5053 /* ... fall through ... */
5055 case FUNCTION_DECL:
5056 case RESULT_DECL:
5057 if (DECL_RTL (exp) == 0)
5058 abort ();
5060 /* Ensure variable marked as used even if it doesn't go through
5061 a parser. If it hasn't been used yet, write out an external
5062 definition. */
5063 if (! TREE_USED (exp))
5065 assemble_external (exp);
5066 TREE_USED (exp) = 1;
5069 /* Show we haven't gotten RTL for this yet. */
5070 temp = 0;
5072 /* Handle variables inherited from containing functions. */
5073 context = decl_function_context (exp);
5075 /* We treat inline_function_decl as an alias for the current function
5076 because that is the inline function whose vars, types, etc.
5077 are being merged into the current function.
5078 See expand_inline_function. */
5080 if (context != 0 && context != current_function_decl
5081 && context != inline_function_decl
5082 /* If var is static, we don't need a static chain to access it. */
5083 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5084 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5086 rtx addr;
5088 /* Mark as non-local and addressable. */
5089 DECL_NONLOCAL (exp) = 1;
5090 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5091 abort ();
5092 mark_addressable (exp);
5093 if (GET_CODE (DECL_RTL (exp)) != MEM)
5094 abort ();
5095 addr = XEXP (DECL_RTL (exp), 0);
5096 if (GET_CODE (addr) == MEM)
5097 addr = gen_rtx (MEM, Pmode,
5098 fix_lexical_addr (XEXP (addr, 0), exp));
5099 else
5100 addr = fix_lexical_addr (addr, exp);
5101 temp = change_address (DECL_RTL (exp), mode, addr);
5104 /* This is the case of an array whose size is to be determined
5105 from its initializer, while the initializer is still being parsed.
5106 See expand_decl. */
5108 else if (GET_CODE (DECL_RTL (exp)) == MEM
5109 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5110 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5111 XEXP (DECL_RTL (exp), 0));
5113 /* If DECL_RTL is memory, we are in the normal case and either
5114 the address is not valid or it is not a register and -fforce-addr
5115 is specified, get the address into a register. */
5117 else if (GET_CODE (DECL_RTL (exp)) == MEM
5118 && modifier != EXPAND_CONST_ADDRESS
5119 && modifier != EXPAND_SUM
5120 && modifier != EXPAND_INITIALIZER
5121 && (! memory_address_p (DECL_MODE (exp),
5122 XEXP (DECL_RTL (exp), 0))
5123 || (flag_force_addr
5124 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5125 temp = change_address (DECL_RTL (exp), VOIDmode,
5126 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5128 /* If we got something, return it. But first, set the alignment
5129 if the address is a register. */
5130 if (temp != 0)
5132 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5133 mark_reg_pointer (XEXP (temp, 0),
5134 DECL_ALIGN (exp) / BITS_PER_UNIT);
5136 return temp;
5139 /* If the mode of DECL_RTL does not match that of the decl, it
5140 must be a promoted value. We return a SUBREG of the wanted mode,
5141 but mark it so that we know that it was already extended. */
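/* For instance, on a target that promotes QImode locals to SImode
   registers, a `char' variable lives in an SImode pseudo; we hand
   back (subreg:QI (reg:SI N) 0) with SUBREG_PROMOTED_VAR_P set, so
   later code knows the upper bits are already sign- or
   zero-extended.  */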
5143 if (GET_CODE (DECL_RTL (exp)) == REG
5144 && GET_MODE (DECL_RTL (exp)) != mode)
5146 /* Get the signedness used for this variable. Ensure we get the
5147 same mode we got when the variable was declared. */
5148 if (GET_MODE (DECL_RTL (exp))
5149 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5150 abort ();
5152 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
5153 SUBREG_PROMOTED_VAR_P (temp) = 1;
5154 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5155 return temp;
5158 return DECL_RTL (exp);
5160 case INTEGER_CST:
5161 return immed_double_const (TREE_INT_CST_LOW (exp),
5162 TREE_INT_CST_HIGH (exp),
5163 mode);
5165 case CONST_DECL:
5166 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5167 EXPAND_MEMORY_USE_BAD);
5169 case REAL_CST:
5170 /* If optimized, generate immediate CONST_DOUBLE
5171 which will be turned into memory by reload if necessary.
5173 We used to force a register so that loop.c could see it. But
5174 this does not allow gen_* patterns to perform optimizations with
5175 the constants. It also produces two insns in cases like "x = 1.0;".
5176 On most machines, floating-point constants are not permitted in
5177 many insns, so we'd end up copying it to a register in any case.
5179 Now, we do the copying in expand_binop, if appropriate. */
5180 return immed_real_const (exp);
5182 case COMPLEX_CST:
5183 case STRING_CST:
5184 if (! TREE_CST_RTL (exp))
5185 output_constant_def (exp);
5187 /* TREE_CST_RTL probably contains a constant address.
5188 On RISC machines where a constant address isn't valid,
5189 make some insns to get that address into a register. */
5190 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5191 && modifier != EXPAND_CONST_ADDRESS
5192 && modifier != EXPAND_INITIALIZER
5193 && modifier != EXPAND_SUM
5194 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5195 || (flag_force_addr
5196 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5197 return change_address (TREE_CST_RTL (exp), VOIDmode,
5198 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5199 return TREE_CST_RTL (exp);
5201 case SAVE_EXPR:
5202 context = decl_function_context (exp);
5204 /* If this SAVE_EXPR was at global context, assume we are an
5205 initialization function and move it into our context. */
5206 if (context == 0)
5207 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5209 /* We treat inline_function_decl as an alias for the current function
5210 because that is the inline function whose vars, types, etc.
5211 are being merged into the current function.
5212 See expand_inline_function. */
5213 if (context == current_function_decl || context == inline_function_decl)
5214 context = 0;
5216 /* If this is non-local, handle it. */
5217 if (context)
5219 /* The following call just exists to abort if the context is
5220 not of a containing function. */
5221 find_function_data (context);
5223 temp = SAVE_EXPR_RTL (exp);
5224 if (temp && GET_CODE (temp) == REG)
5226 put_var_into_stack (exp);
5227 temp = SAVE_EXPR_RTL (exp);
5229 if (temp == 0 || GET_CODE (temp) != MEM)
5230 abort ();
5231 return change_address (temp, mode,
5232 fix_lexical_addr (XEXP (temp, 0), exp));
5234 if (SAVE_EXPR_RTL (exp) == 0)
5236 if (mode == VOIDmode)
5237 temp = const0_rtx;
5238 else
5239 temp = assign_temp (type, 0, 0, 0);
5241 SAVE_EXPR_RTL (exp) = temp;
5242 if (!optimize && GET_CODE (temp) == REG)
5243 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
5244 save_expr_regs);
5246 /* If the mode of TEMP does not match that of the expression, it
5247 must be a promoted value. We pass store_expr a SUBREG of the
5248 wanted mode but mark it so that we know that it was already
5249 extended. Note that `unsignedp' was modified above in
5250 this case. */
5252 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5254 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5255 SUBREG_PROMOTED_VAR_P (temp) = 1;
5256 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5259 if (temp == const0_rtx)
5260 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5261 EXPAND_MEMORY_USE_BAD);
5262 else
5263 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5266 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5267 must be a promoted value. We return a SUBREG of the wanted mode,
5268 but mark it so that we know that it was already extended. */
5270 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5271 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5273 /* Compute the signedness and make the proper SUBREG. */
5274 promote_mode (type, mode, &unsignedp, 0);
5275 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5276 SUBREG_PROMOTED_VAR_P (temp) = 1;
5277 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5278 return temp;
5281 return SAVE_EXPR_RTL (exp);
5283 case UNSAVE_EXPR:
5285 rtx temp;
5286 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5287 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5288 return temp;
5291 case PLACEHOLDER_EXPR:
5293 tree placeholder_expr;
5295 /* If there is an object on the head of the placeholder list,
5296 see if some object in its references is of type TYPE. For
5297 further information, see tree.def. */
5298 for (placeholder_expr = placeholder_list;
5299 placeholder_expr != 0;
5300 placeholder_expr = TREE_CHAIN (placeholder_expr))
5302 tree need_type = TYPE_MAIN_VARIANT (type);
5303 tree object = 0;
5304 tree old_list = placeholder_list;
5305 tree elt;
5307 /* See if the object is the type that we want. */
5308 if ((TYPE_MAIN_VARIANT (TREE_TYPE
5309 (TREE_PURPOSE (placeholder_expr)))
5310 == need_type))
5311 object = TREE_PURPOSE (placeholder_expr);
5313 /* Find the innermost reference that is of the type we want. */
5314 for (elt = TREE_PURPOSE (placeholder_expr);
5315 elt != 0
5316 && (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5317 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5318 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5319 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e');
5320 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5321 || TREE_CODE (elt) == COND_EXPR)
5322 ? TREE_OPERAND (elt, 1) : TREE_OPERAND (elt, 0)))
5323 if (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5324 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (elt, 0)))
5325 == need_type))
5327 object = TREE_OPERAND (elt, 0);
5328 break;
5331 if (object != 0)
5333 /* Expand this object skipping the list entries before
5334 it was found in case it is also a PLACEHOLDER_EXPR.
5335 In that case, we want to translate it using subsequent
5336 entries. */
5337 placeholder_list = TREE_CHAIN (placeholder_expr);
5338 temp = expand_expr (object, original_target, tmode,
5339 ro_modifier);
5340 placeholder_list = old_list;
5341 return temp;
5346 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5347 abort ();
5349 case WITH_RECORD_EXPR:
5350 /* Put the object on the placeholder list, expand our first operand,
5351 and pop the list. */
5352 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5353 placeholder_list);
5354 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
5355 tmode, ro_modifier);
5356 placeholder_list = TREE_CHAIN (placeholder_list);
5357 return target;
5359 case EXIT_EXPR:
5360 expand_exit_loop_if_false (NULL_PTR,
5361 invert_truthvalue (TREE_OPERAND (exp, 0)));
5362 return const0_rtx;
5364 case LOOP_EXPR:
5365 push_temp_slots ();
5366 expand_start_loop (1);
5367 expand_expr_stmt (TREE_OPERAND (exp, 0));
5368 expand_end_loop ();
5369 pop_temp_slots ();
5371 return const0_rtx;
5373 case BIND_EXPR:
5375 tree vars = TREE_OPERAND (exp, 0);
5376 int vars_need_expansion = 0;
5378 /* Need to open a binding contour here because
5379 if there are any cleanups they must be contained here. */
5380 expand_start_bindings (0);
5382 /* Mark the corresponding BLOCK for output in its proper place. */
5383 if (TREE_OPERAND (exp, 2) != 0
5384 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5385 insert_block (TREE_OPERAND (exp, 2));
5387 /* If VARS have not yet been expanded, expand them now. */
5388 while (vars)
5390 if (DECL_RTL (vars) == 0)
5392 vars_need_expansion = 1;
5393 expand_decl (vars);
5395 expand_decl_init (vars);
5396 vars = TREE_CHAIN (vars);
5399 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
5401 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5403 return temp;
5406 case RTL_EXPR:
5407 if (RTL_EXPR_SEQUENCE (exp))
5409 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5410 abort ();
5411 emit_insns (RTL_EXPR_SEQUENCE (exp));
5412 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5414 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
5415 free_temps_for_rtl_expr (exp);
5416 return RTL_EXPR_RTL (exp);
5418 case CONSTRUCTOR:
5419 /* If we don't need the result, just ensure we evaluate any
5420 subexpressions. */
5421 if (ignore)
5423 tree elt;
5424 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
5425 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
5426 EXPAND_MEMORY_USE_BAD);
5427 return const0_rtx;
5430 /* All elts simple constants => refer to a constant in memory. But
5431 if this is a non-BLKmode mode, let it store a field at a time
5432 since that should make a CONST_INT or CONST_DOUBLE when we
5433 fold. Likewise, if we have a target we can use, it is best to
5434 store directly into the target unless the type is large enough
5435 that memcpy will be used. If we are making an initializer and
5436 all operands are constant, put it in memory as well. */
5437 else if ((TREE_STATIC (exp)
5438 && ((mode == BLKmode
5439 && ! (target != 0 && safe_from_p (target, exp)))
5440 || TREE_ADDRESSABLE (exp)
5441 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5442 && (move_by_pieces_ninsns
5443 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5444 TYPE_ALIGN (type) / BITS_PER_UNIT)
5445 > MOVE_RATIO)
5446 && ! mostly_zeros_p (exp))))
5447 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
5449 rtx constructor = output_constant_def (exp);
5450 if (modifier != EXPAND_CONST_ADDRESS
5451 && modifier != EXPAND_INITIALIZER
5452 && modifier != EXPAND_SUM
5453 && (! memory_address_p (GET_MODE (constructor),
5454 XEXP (constructor, 0))
5455 || (flag_force_addr
5456 && GET_CODE (XEXP (constructor, 0)) != REG)))
5457 constructor = change_address (constructor, VOIDmode,
5458 XEXP (constructor, 0));
5459 return constructor;
5462 else
5464 /* Handle calls that pass values in multiple non-contiguous
5465 locations. The Irix 6 ABI has examples of this. */
5466 if (target == 0 || ! safe_from_p (target, exp)
5467 || GET_CODE (target) == PARALLEL)
5469 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5470 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5471 else
5472 target = assign_temp (type, 0, 1, 1);
5475 if (TREE_READONLY (exp))
5477 if (GET_CODE (target) == MEM)
5478 target = copy_rtx (target);
5480 RTX_UNCHANGING_P (target) = 1;
5483 store_constructor (exp, target, 0);
5484 return target;
5487 case INDIRECT_REF:
5489 tree exp1 = TREE_OPERAND (exp, 0);
5490 tree exp2;
5492 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5493 op0 = memory_address (mode, op0);
5495 if (flag_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5497 enum memory_use_mode memory_usage;
5498 memory_usage = get_memory_usage_from_modifier (modifier);
5500 if (memory_usage != MEMORY_USE_DONT)
5501 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5502 op0, ptr_mode,
5503 GEN_INT (int_size_in_bytes (type)),
5504 TYPE_MODE (sizetype),
5505 GEN_INT (memory_usage),
5506 TYPE_MODE (integer_type_node));
5509 temp = gen_rtx (MEM, mode, op0);
5510 /* If address was computed by addition,
5511 mark this as an element of an aggregate. */
5512 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5513 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5514 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
5515 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
5516 || (TREE_CODE (exp1) == ADDR_EXPR
5517 && (exp2 = TREE_OPERAND (exp1, 0))
5518 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
5519 MEM_IN_STRUCT_P (temp) = 1;
5520 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
5522 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5523 here, because, in C and C++, the fact that a location is accessed
5524 through a pointer to const does not mean that the value there can
5525 never change. Languages where it can never change should
5526 also set TREE_STATIC. */
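/* E.g. in C:
       int x = 1;
       const int *p = &x;
       x = 2;    -- *p observably changes
   so TREE_READONLY on the reference alone must not be taken to
   mean the location never changes.  */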
5527 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
5528 return temp;
5531 case ARRAY_REF:
5532 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5533 abort ();
5536 tree array = TREE_OPERAND (exp, 0);
5537 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5538 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5539 tree index = TREE_OPERAND (exp, 1);
5540 tree index_type = TREE_TYPE (index);
5541 HOST_WIDE_INT i;
5543 /* Optimize the special-case of a zero lower bound.
5545 We convert the low_bound to sizetype to avoid some problems
5546 with constant folding. (E.g. suppose the lower bound is 1,
5547 and its mode is QI. Without the conversion, (ARRAY
5548 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5549 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5551 But sizetype isn't quite right either (especially if
5552 the lowbound is negative). FIXME */
5554 if (! integer_zerop (low_bound))
5555 index = fold (build (MINUS_EXPR, index_type, index,
5556 convert (sizetype, low_bound)));
5558 /* Fold an expression like: "foo"[2].
5559 This is not done in fold so it won't happen inside &.
5560 Don't fold if this is for wide characters since it's too
5561 difficult to do correctly and this is a very rare case. */
5563 if (TREE_CODE (array) == STRING_CST
5564 && TREE_CODE (index) == INTEGER_CST
5565 && !TREE_INT_CST_HIGH (index)
5566 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
5567 && GET_MODE_CLASS (mode) == MODE_INT
5568 && GET_MODE_SIZE (mode) == 1)
5569 return GEN_INT (TREE_STRING_POINTER (array)[i]);
5571 /* If this is a constant index into a constant array,
5572 just get the value from the array. Handle both the cases when
5573 we have an explicit constructor and when our operand is a variable
5574 that was declared const. */
5576 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5578 if (TREE_CODE (index) == INTEGER_CST
5579 && TREE_INT_CST_HIGH (index) == 0)
5581 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5583 i = TREE_INT_CST_LOW (index);
5584 while (elem && i--)
5585 elem = TREE_CHAIN (elem);
5586 if (elem)
5587 return expand_expr (fold (TREE_VALUE (elem)), target,
5588 tmode, ro_modifier);
5592 else if (optimize >= 1
5593 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5594 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5595 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5597 if (TREE_CODE (index) == INTEGER_CST)
5599 tree init = DECL_INITIAL (array);
5601 i = TREE_INT_CST_LOW (index);
5602 if (TREE_CODE (init) == CONSTRUCTOR)
5604 tree elem = CONSTRUCTOR_ELTS (init);
5606 while (elem
5607 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
5608 elem = TREE_CHAIN (elem);
5609 if (elem)
5610 return expand_expr (fold (TREE_VALUE (elem)), target,
5611 tmode, ro_modifier);
5613 else if (TREE_CODE (init) == STRING_CST
5614 && TREE_INT_CST_HIGH (index) == 0
5615 && (TREE_INT_CST_LOW (index)
5616 < TREE_STRING_LENGTH (init)))
5617 return (GEN_INT
5618 (TREE_STRING_POINTER
5619 (init)[TREE_INT_CST_LOW (index)]));
5624 /* ... fall through ... */
5626 case COMPONENT_REF:
5627 case BIT_FIELD_REF:
5628 /* If the operand is a CONSTRUCTOR, we can just extract the
5629 appropriate field if it is present. Don't do this if we have
5630 already written the data since we want to refer to that copy
5631 and varasm.c assumes that's what we'll do. */
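/* Illustrative example (ours): for a two-field struct initialized
   from the constructor { 1, 2 }, a COMPONENT_REF of the second field
   can expand directly to the constant 2, provided no RTL has been
   written out for the constructor yet.  */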
5632 if (code != ARRAY_REF
5633 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5634 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
5636 tree elt;
5638 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5639 elt = TREE_CHAIN (elt))
5640 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
5641 /* We can normally use the value of the field in the
5642 CONSTRUCTOR. However, if this is a bitfield in
5643 an integral mode that we can fit in a HOST_WIDE_INT,
5644 we must mask only the number of bits in the bitfield,
5645 since this is done implicitly by the constructor. If
5646 the bitfield does not meet either of those conditions,
5647 we can't do this optimization. */
5648 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
5649 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
5650 == MODE_INT)
5651 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
5652 <= HOST_BITS_PER_WIDE_INT))))
5654 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5655 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
5657 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
5658 enum machine_mode imode
5659 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
5661 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
5663 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
5664 op0 = expand_and (op0, op1, target);
5666 else
5668 tree count
5669 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize, 0);
5671 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
5672 target, 0);
5673 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
5674 target, 0);
5678 return op0;
5683 enum machine_mode mode1;
5684 int bitsize;
5685 int bitpos;
5686 tree offset;
5687 int volatilep = 0;
5688 int alignment;
5689 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
5690 &mode1, &unsignedp, &volatilep,
5691 &alignment);
5693 /* If we got back the original object, something is wrong. Perhaps
5694 we are evaluating an expression too early. In any event, don't
5695 infinitely recurse. */
5696 if (tem == exp)
5697 abort ();
5699 /* If TEM's type is a union of variable size, pass TARGET to the inner
5700 computation, since it will need a temporary and TARGET is known
5701 to suffice. This occurs in unchecked conversion in Ada. */
5703 op0 = expand_expr (tem,
5704 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5705 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5706 != INTEGER_CST)
5707 ? target : NULL_RTX),
5708 VOIDmode,
5709 modifier == EXPAND_INITIALIZER ? modifier : 0);
5711 /* If this is a constant, put it into a register if it is a
5712 legitimate constant and memory if it isn't. */
5713 if (CONSTANT_P (op0))
5715 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
5716 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
5717 op0 = force_reg (mode, op0);
5718 else
5719 op0 = validize_mem (force_const_mem (mode, op0));
5722 if (offset != 0)
5724 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5726 if (GET_CODE (op0) != MEM)
5727 abort ();
5728 op0 = change_address (op0, VOIDmode,
5729 gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
5730 force_reg (ptr_mode, offset_rtx)));
5733 /* Don't forget about volatility even if this is a bitfield. */
5734 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5736 op0 = copy_rtx (op0);
5737 MEM_VOLATILE_P (op0) = 1;
5740 /* Check the access. */
5741 if (flag_check_memory_usage && GET_CODE (op0) == MEM)
5743 enum memory_use_mode memory_usage;
5744 memory_usage = get_memory_usage_from_modifier (modifier);
5746 if (memory_usage != MEMORY_USE_DONT)
5748 rtx to;
5749 int size;
5751 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
5752 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
5754 /* Check the access rights of the pointer. */
5755 if (size > BITS_PER_UNIT)
5756 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5757 to, ptr_mode,
5758 GEN_INT (size / BITS_PER_UNIT),
5759 TYPE_MODE (sizetype),
5760 GEN_INT (memory_usage),
5761 TYPE_MODE (integer_type_node));
5765 /* In cases where an aligned union has an unaligned object
5766 as a field, we might be extracting a BLKmode value from
5767 an integer-mode (e.g., SImode) object. Handle this case
5768 by doing the extract into an object as wide as the field
5769 (which we know to be the width of a basic mode), then
5770 storing into memory, and changing the mode to BLKmode.
5771 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5772 EXPAND_INITIALIZER), then we must not copy to a temporary. */
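/* Illustrative example (ours): an SImode-aligned union containing a
   3-byte BLKmode record; we fetch a full SImode word with
   extract_bit_field, spill it to a stack temporary, and return that
   temporary with its mode changed to BLKmode.  */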
5773 if (mode1 == VOIDmode
5774 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5775 || (modifier != EXPAND_CONST_ADDRESS
5776 && modifier != EXPAND_INITIALIZER
5777 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
5778 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5779 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5780 /* If the field isn't aligned enough to fetch as a memref,
5781 fetch it as a bit field. */
5782 || (SLOW_UNALIGNED_ACCESS
5783 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5784 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
5786 enum machine_mode ext_mode = mode;
5788 if (ext_mode == BLKmode)
5789 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5791 if (ext_mode == BLKmode)
5793 /* In this case, BITPOS must start at a byte boundary and
5794 TARGET, if specified, must be a MEM. */
5795 if (GET_CODE (op0) != MEM
5796 || (target != 0 && GET_CODE (target) != MEM)
5797 || bitpos % BITS_PER_UNIT != 0)
5798 abort ();
5800 op0 = change_address (op0, VOIDmode,
5801 plus_constant (XEXP (op0, 0),
5802 bitpos / BITS_PER_UNIT));
5803 if (target == 0)
5804 target = assign_temp (type, 0, 1, 1);
5806 emit_block_move (target, op0,
5807 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5808 / BITS_PER_UNIT),
5809 1);
5811 return target;
5814 op0 = validize_mem (op0);
5816 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5817 mark_reg_pointer (XEXP (op0, 0), alignment);
5819 op0 = extract_bit_field (op0, bitsize, bitpos,
5820 unsignedp, target, ext_mode, ext_mode,
5821 alignment,
5822 int_size_in_bytes (TREE_TYPE (tem)));
5824 /* If the result is a record type and BITSIZE is narrower than
5825 the mode of OP0, an integral mode, and this is a big endian
5826 machine, we must put the field into the high-order bits. */
5827 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
5828 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
5829 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
5830 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
5831 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
5832 - bitsize),
5833 op0, 1);
5835 if (mode == BLKmode)
5837 rtx new = assign_stack_temp (ext_mode,
5838 bitsize / BITS_PER_UNIT, 0);
5840 emit_move_insn (new, op0);
5841 op0 = copy_rtx (new);
5842 PUT_MODE (op0, BLKmode);
5843 MEM_IN_STRUCT_P (op0) = 1;
5846 return op0;
5849 /* If the result is BLKmode, use that to access the object
5850 now as well. */
5851 if (mode == BLKmode)
5852 mode1 = BLKmode;
5854 /* Get a reference to just this component. */
5855 if (modifier == EXPAND_CONST_ADDRESS
5856 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5857 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
5858 (bitpos / BITS_PER_UNIT)));
5859 else
5860 op0 = change_address (op0, mode1,
5861 plus_constant (XEXP (op0, 0),
5862 (bitpos / BITS_PER_UNIT)));
5863 if (GET_CODE (XEXP (op0, 0)) == REG)
5864 mark_reg_pointer (XEXP (op0, 0), alignment);
5866 MEM_IN_STRUCT_P (op0) = 1;
5867 MEM_VOLATILE_P (op0) |= volatilep;
5868 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
5869 || modifier == EXPAND_CONST_ADDRESS
5870 || modifier == EXPAND_INITIALIZER)
5871 return op0;
5872 else if (target == 0)
5873 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5875 convert_move (target, op0, unsignedp);
5876 return target;
5879 /* Intended for a reference to a buffer of a file-object in Pascal.
5880 But it's not certain that a special tree code will really be
5881 necessary for these. INDIRECT_REF might work for them. */
5882 case BUFFER_REF:
5883 abort ();
5885 case IN_EXPR:
5887 /* Pascal set IN expression.
5889 Algorithm:
5890 rlo = set_low - (set_low%bits_per_word);
5891 the_word = set [ (index - rlo)/bits_per_word ];
5892 bit_index = index % bits_per_word;
5893 bitmask = 1 << bit_index;
5894 return !!(the_word & bitmask); */
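/* Worked example (ours): with bits_per_word == 8, set_low == 3 and
   index == 11, we get rlo = 0, the_word = set[1] and bit_index = 3,
   so the result tests bit 3 of the set's second byte.  */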
5896 tree set = TREE_OPERAND (exp, 0);
5897 tree index = TREE_OPERAND (exp, 1);
5898 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
5899 tree set_type = TREE_TYPE (set);
5900 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
5901 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
5902 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
5903 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
5904 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
5905 rtx setval = expand_expr (set, 0, VOIDmode, 0);
5906 rtx setaddr = XEXP (setval, 0);
5907 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
5908 rtx rlow;
5909 rtx diff, quo, rem, addr, bit, result;
5911 preexpand_calls (exp);
5913 /* If domain is empty, answer is no. Likewise if index is constant
5914 and out of bounds. */
5915 if ((TREE_CODE (set_high_bound) == INTEGER_CST
5916 && TREE_CODE (set_low_bound) == INTEGER_CST
5917 && tree_int_cst_lt (set_high_bound, set_low_bound)
5918 || (TREE_CODE (index) == INTEGER_CST
5919 && TREE_CODE (set_low_bound) == INTEGER_CST
5920 && tree_int_cst_lt (index, set_low_bound))
5921 || (TREE_CODE (set_high_bound) == INTEGER_CST
5922 && TREE_CODE (index) == INTEGER_CST
5923 && tree_int_cst_lt (set_high_bound, index))))
5924 return const0_rtx;
5926 if (target == 0)
5927 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5929 /* If we get here, we have to generate the code for both cases
5930 (in range and out of range). */
5932 op0 = gen_label_rtx ();
5933 op1 = gen_label_rtx ();
5935 if (! (GET_CODE (index_val) == CONST_INT
5936 && GET_CODE (lo_r) == CONST_INT))
5938 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
5939 GET_MODE (index_val), iunsignedp, 0);
5940 emit_jump_insn (gen_blt (op1));
5943 if (! (GET_CODE (index_val) == CONST_INT
5944 && GET_CODE (hi_r) == CONST_INT))
5946 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
5947 GET_MODE (index_val), iunsignedp, 0);
5948 emit_jump_insn (gen_bgt (op1));
5951 /* Calculate the element number of bit zero in the first word
5952 of the set. */
5953 if (GET_CODE (lo_r) == CONST_INT)
5954 rlow = GEN_INT (INTVAL (lo_r)
5955 & ~ ((HOST_WIDE_INT) (BITS_PER_UNIT - 1)));
5956 else
5957 rlow = expand_binop (index_mode, and_optab, lo_r,
5958 GEN_INT (~ ((HOST_WIDE_INT) (BITS_PER_UNIT - 1))),
5959 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5961 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
5962 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5964 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
5965 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5966 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
5967 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5969 addr = memory_address (byte_mode,
5970 expand_binop (index_mode, add_optab, quo,
5971 setaddr, NULL_RTX, iunsignedp,
5972 OPTAB_LIB_WIDEN));
5974 /* Extract the bit we want to examine.  */
5975 bit = expand_shift (RSHIFT_EXPR, byte_mode,
5976 gen_rtx (MEM, byte_mode, addr),
5977 make_tree (TREE_TYPE (index), rem),
5978 NULL_RTX, 1);
5979 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
5980 GET_MODE (target) == byte_mode ? target : 0,
5981 1, OPTAB_LIB_WIDEN);
5983 if (result != target)
5984 convert_move (target, result, 1);
5986 /* Output the code to handle the out-of-range case. */
5987 emit_jump (op0);
5988 emit_label (op1);
5989 emit_move_insn (target, const0_rtx);
5990 emit_label (op0);
5991 return target;
5994 case WITH_CLEANUP_EXPR:
5995 if (RTL_EXPR_RTL (exp) == 0)
5997 RTL_EXPR_RTL (exp)
5998 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
5999 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6001 /* That's it for this cleanup. */
6002 TREE_OPERAND (exp, 2) = 0;
6004 return RTL_EXPR_RTL (exp);
6006 case CLEANUP_POINT_EXPR:
6008 extern int temp_slot_level;
6009 /* Start a new binding layer that will keep track of all cleanup
6010 actions to be performed. */
6011 expand_start_bindings (0);
6013 target_temp_slot_level = temp_slot_level;
6015 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6016 /* If we're going to use this value, load it up now. */
6017 if (! ignore)
6018 op0 = force_not_mem (op0);
6019 preserve_temp_slots (op0);
6020 expand_end_bindings (NULL_TREE, 0, 0);
6022 return op0;
6024 case CALL_EXPR:
6025 /* Check for a built-in function. */
6026 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6027 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6028 == FUNCTION_DECL)
6029 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6030 return expand_builtin (exp, target, subtarget, tmode, ignore);
6032 /* If this call was expanded already by preexpand_calls,
6033 just return the result we got. */
6034 if (CALL_EXPR_RTL (exp) != 0)
6035 return CALL_EXPR_RTL (exp);
6037 return expand_call (exp, target, ignore);
6039 case NON_LVALUE_EXPR:
6040 case NOP_EXPR:
6041 case CONVERT_EXPR:
6042 case REFERENCE_EXPR:
6043 if (TREE_CODE (type) == UNION_TYPE)
6045 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6046 if (target == 0)
6048 if (mode != BLKmode)
6049 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6050 else
6051 target = assign_temp (type, 0, 1, 1);
6054 if (GET_CODE (target) == MEM)
6055 /* Store data into beginning of memory target. */
6056 store_expr (TREE_OPERAND (exp, 0),
6057 change_address (target, TYPE_MODE (valtype), 0), 0);
6059 else if (GET_CODE (target) == REG)
6060 /* Store this field into a union of the proper type. */
6061 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6062 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6063 VOIDmode, 0, 1,
6064 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
6065 else
6066 abort ();
6068 /* Return the entire union. */
6069 return target;
6072 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6074 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
6075 ro_modifier);
6077 /* If the signedness of the conversion differs and OP0 is
6078 a promoted SUBREG, clear that indication since we now
6079 have to do the proper extension. */
6080 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6081 && GET_CODE (op0) == SUBREG)
6082 SUBREG_PROMOTED_VAR_P (op0) = 0;
6084 return op0;
6087 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6088 if (GET_MODE (op0) == mode)
6089 return op0;
6091 /* If OP0 is a constant, just convert it into the proper mode. */
6092 if (CONSTANT_P (op0))
6093 return
6094 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6095 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6097 if (modifier == EXPAND_INITIALIZER)
6098 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
6100 if (target == 0)
6101 return
6102 convert_to_mode (mode, op0,
6103 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6104 else
6105 convert_move (target, op0,
6106 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6107 return target;
6109 case PLUS_EXPR:
6110 /* We come here from MINUS_EXPR when the second operand is a
6111 constant. */
6112 plus_expr:
6113 this_optab = add_optab;
6115 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6116 something else, make sure we add the register to the constant and
6117 then to the other thing. This case can occur during strength
6118 reduction and doing it this way will produce better code if the
6119 frame pointer or argument pointer is eliminated.
6121 fold-const.c will ensure that the constant is always in the inner
6122 PLUS_EXPR, so the only case we need to do anything about is if
6123 sp, ap, or fp is our second argument, in which case we must swap
6124 the innermost first argument and our second argument. */
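/* Illustrative example (ours): (EXP + 8) + fp is rearranged into
   (fp + 8) + EXP, so that fp + 8 can fold to a single address once
   the frame pointer is eliminated.  */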
6126 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6127 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6128 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6129 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6130 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6131 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6133 tree t = TREE_OPERAND (exp, 1);
6135 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6136 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6139 /* If the result is to be ptr_mode and we are adding an integer to
6140 something, we might be forming a constant. So try to use
6141 plus_constant. If it produces a sum and we can't accept it,
6142 use force_operand. This allows P = &ARR[const] to generate
6143 efficient code on machines where a SYMBOL_REF is not a valid
6144 address.
6146 If this is an EXPAND_SUM call, always return the sum. */
6147 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
6148 || mode == ptr_mode)
6150 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6151 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6152 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6154 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6155 EXPAND_SUM);
6156 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
6157 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6158 op1 = force_operand (op1, target);
6159 return op1;
6162 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6163 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6164 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6166 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6167 EXPAND_SUM);
6168 if (! CONSTANT_P (op0))
6170 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6171 VOIDmode, modifier);
6172 /* Don't go to both_summands if modifier
6173 says it's not right to return a PLUS. */
6174 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6175 goto binop2;
6176 goto both_summands;
6178 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
6179 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6180 op0 = force_operand (op0, target);
6181 return op0;
6185 /* No sense saving up arithmetic to be done
6186 if it's all in the wrong mode to form part of an address.
6187 And force_operand won't know whether to sign-extend or
6188 zero-extend. */
6189 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6190 || mode != ptr_mode)
6191 goto binop;
6193 preexpand_calls (exp);
6194 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6195 subtarget = 0;
6197 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
6198 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
6200 both_summands:
6201 /* Make sure any term that's a sum with a constant comes last. */
6202 if (GET_CODE (op0) == PLUS
6203 && CONSTANT_P (XEXP (op0, 1)))
6205 temp = op0;
6206 op0 = op1;
6207 op1 = temp;
6209 /* If adding to a sum including a constant,
6210 associate it to put the constant outside. */
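/* Worked example (ours): with op0 = x and op1 = (y + 12), we combine
   x with y, pull any constant back out of that sum, and return
   (x + y) + 12 with the constant outermost.  */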
6211 if (GET_CODE (op1) == PLUS
6212 && CONSTANT_P (XEXP (op1, 1)))
6214 rtx constant_term = const0_rtx;
6216 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6217 if (temp != 0)
6218 op0 = temp;
6219 /* Ensure that MULT comes first if there is one. */
6220 else if (GET_CODE (op0) == MULT)
6221 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
6222 else
6223 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
6225 /* Let's also eliminate constants from op0 if possible. */
6226 op0 = eliminate_constant_term (op0, &constant_term);
6228 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6229 their sum should be a constant. Form it into OP1, since the
6230 result we want will then be OP0 + OP1. */
6232 temp = simplify_binary_operation (PLUS, mode, constant_term,
6233 XEXP (op1, 1));
6234 if (temp != 0)
6235 op1 = temp;
6236 else
6237 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
6240 /* Put a constant term last and put a multiplication first. */
6241 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6242 temp = op1, op1 = op0, op0 = temp;
6244 temp = simplify_binary_operation (PLUS, mode, op0, op1);
6245 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
6247 case MINUS_EXPR:
6248 /* For initializers, we are allowed to return a MINUS of two
6249 symbolic constants. Here we handle all cases when both operands
6250 are constant. */
6253 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6254 && really_constant_p (TREE_OPERAND (exp, 0))
6255 && really_constant_p (TREE_OPERAND (exp, 1)))
6257 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
6258 VOIDmode, ro_modifier);
6259 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6260 VOIDmode, ro_modifier);
6262 /* If the last operand is a CONST_INT, use plus_constant of
6263 the negated constant. Else make the MINUS. */
6264 if (GET_CODE (op1) == CONST_INT)
6265 return plus_constant (op0, - INTVAL (op1));
6266 else
6267 return gen_rtx (MINUS, mode, op0, op1);
6269 /* Convert A - const to A + (-const). */
6270 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6272 tree negated = fold (build1 (NEGATE_EXPR, type,
6273 TREE_OPERAND (exp, 1)));
6275 /* Deal with the case where we can't negate the constant
6276 in TYPE. */
6277 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6279 tree newtype = signed_type (type);
6280 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6281 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6282 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6284 if (! TREE_OVERFLOW (newneg))
6285 return expand_expr (convert (type,
6286 build (PLUS_EXPR, newtype,
6287 newop0, newneg)),
6288 target, tmode, ro_modifier);
6290 else
6292 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
6293 goto plus_expr;
6296 this_optab = sub_optab;
6297 goto binop;
6299 case MULT_EXPR:
6300 preexpand_calls (exp);
6301 /* If first operand is constant, swap them.
6302 Thus the following special case checks need only
6303 check the second operand. */
6304 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6306 register tree t1 = TREE_OPERAND (exp, 0);
6307 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6308 TREE_OPERAND (exp, 1) = t1;
6311 /* Attempt to return something suitable for generating an
6312 indexed address, for machines that support that. */
6314 if (modifier == EXPAND_SUM && mode == ptr_mode
6315 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6316 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6318 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6319 EXPAND_SUM);
6321 /* Apply distributive law if OP0 is x+c. */
6322 if (GET_CODE (op0) == PLUS
6323 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
6324 return gen_rtx (PLUS, mode,
6325 gen_rtx (MULT, mode, XEXP (op0, 0),
6326 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
6327 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6328 * INTVAL (XEXP (op0, 1))));
6330 if (GET_CODE (op0) != REG)
6331 op0 = force_operand (op0, NULL_RTX);
6332 if (GET_CODE (op0) != REG)
6333 op0 = copy_to_mode_reg (mode, op0);
6335 return gen_rtx (MULT, mode, op0,
6336 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
6339 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6340 subtarget = 0;
6342 /* Check for multiplying things that have been extended
6343 from a narrower type. If this machine supports multiplying
6344 in that narrower type with a result in the desired type,
6345 do it that way, and avoid the explicit type-conversion. */
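/* Illustrative example (ours): (int) (short) a * (int) (short) b can
   use a mulhisi-style widening multiply directly, instead of
   sign-extending both operands to SImode and doing a full SImode
   multiply.  */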
6346 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6347 && TREE_CODE (type) == INTEGER_TYPE
6348 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6349 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6350 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6351 && int_fits_type_p (TREE_OPERAND (exp, 1),
6352 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6353 /* Don't use a widening multiply if a shift will do. */
6354 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
6355 > HOST_BITS_PER_WIDE_INT)
6356 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6357 ||
6358 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6359 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6360 ==
6361 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6362 /* If both operands are extended, they must either both
6363 be zero-extended or both be sign-extended. */
6364 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6365 ==
6366 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6368 enum machine_mode innermode
6369 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
6370 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6371 ? smul_widen_optab : umul_widen_optab);
6372 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6373 ? umul_widen_optab : smul_widen_optab);
6374 if (mode == GET_MODE_WIDER_MODE (innermode))
6376 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6378 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6379 NULL_RTX, VOIDmode, 0);
6380 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6381 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6382 VOIDmode, 0);
6383 else
6384 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6385 NULL_RTX, VOIDmode, 0);
6386 goto binop2;
6388 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6389 && innermode == word_mode)
6391 rtx htem;
6392 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6393 NULL_RTX, VOIDmode, 0);
6394 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6395 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6396 VOIDmode, 0);
6397 else
6398 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6399 NULL_RTX, VOIDmode, 0);
6400 temp = expand_binop (mode, other_optab, op0, op1, target,
6401 unsignedp, OPTAB_LIB_WIDEN);
6402 htem = expand_mult_highpart_adjust (innermode,
6403 gen_highpart (innermode, temp),
6404 op0, op1,
6405 gen_highpart (innermode, temp),
6406 unsignedp);
6407 emit_move_insn (gen_highpart (innermode, temp), htem);
6408 return temp;
6412 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6413 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6414 return expand_mult (mode, op0, op1, target, unsignedp);
6416 case TRUNC_DIV_EXPR:
6417 case FLOOR_DIV_EXPR:
6418 case CEIL_DIV_EXPR:
6419 case ROUND_DIV_EXPR:
6420 case EXACT_DIV_EXPR:
6421 preexpand_calls (exp);
6422 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6423 subtarget = 0;
6424 /* Possible optimization: compute the dividend with EXPAND_SUM
6425 then if the divisor is constant can optimize the case
6426 where some terms of the dividend have coeffs divisible by it. */
6427 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6428 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6429 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6431 case RDIV_EXPR:
6432 this_optab = flodiv_optab;
6433 goto binop;
6435 case TRUNC_MOD_EXPR:
6436 case FLOOR_MOD_EXPR:
6437 case CEIL_MOD_EXPR:
6438 case ROUND_MOD_EXPR:
6439 preexpand_calls (exp);
6440 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6441 subtarget = 0;
6442 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6443 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6444 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6446 case FIX_ROUND_EXPR:
6447 case FIX_FLOOR_EXPR:
6448 case FIX_CEIL_EXPR:
6449 abort (); /* Not used for C. */
6451 case FIX_TRUNC_EXPR:
6452 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6453 if (target == 0)
6454 target = gen_reg_rtx (mode);
6455 expand_fix (target, op0, unsignedp);
6456 return target;
6458 case FLOAT_EXPR:
6459 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6460 if (target == 0)
6461 target = gen_reg_rtx (mode);
6462 /* expand_float can't figure out what to do if FROM has VOIDmode.
6463 So give it the correct mode. With -O, cse will optimize this. */
6464 if (GET_MODE (op0) == VOIDmode)
6465 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6466 op0);
6467 expand_float (target, op0,
6468 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6469 return target;
6471 case NEGATE_EXPR:
6472 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6473 temp = expand_unop (mode, neg_optab, op0, target, 0);
6474 if (temp == 0)
6475 abort ();
6476 return temp;
6478 case ABS_EXPR:
6479 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6481 /* Handle complex values specially. */
6482 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6483 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6484 return expand_complex_abs (mode, op0, target, unsignedp);
6486 /* Unsigned abs is simply the operand. Testing here means we don't
6487 risk generating incorrect code below. */
6488 if (TREE_UNSIGNED (type))
6489 return op0;
6491 return expand_abs (mode, op0, target, unsignedp,
6492 safe_from_p (target, TREE_OPERAND (exp, 0)));
6494 case MAX_EXPR:
6495 case MIN_EXPR:
6496 target = original_target;
6497 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
6498 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
6499 || GET_MODE (target) != mode
6500 || (GET_CODE (target) == REG
6501 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6502 target = gen_reg_rtx (mode);
6503 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6504 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6506 /* First try to do it with a special MIN or MAX instruction.
6507 If that does not win, use a conditional jump to select the proper
6508 value. */
6509 this_optab = (TREE_UNSIGNED (type)
6510 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6511 : (code == MIN_EXPR ? smin_optab : smax_optab));
6513 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6514 OPTAB_WIDEN);
6515 if (temp != 0)
6516 return temp;
6518 /* At this point, a MEM target is no longer useful; we will get better
6519 code without it. */
6521 if (GET_CODE (target) == MEM)
6522 target = gen_reg_rtx (mode);
6524 if (target != op0)
6525 emit_move_insn (target, op0);
6527 op0 = gen_label_rtx ();
6529 /* If this mode is an integer too wide to compare properly,
6530 compare word by word. Rely on cse to optimize constant cases. */
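/* Illustrative example (ours): a DImode MAX_EXPR on a 32-bit target
   compares the high words and then the low words, one jump per part,
   rather than emitting a single DImode comparison the machine does
   not support.  */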
6531 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
6533 if (code == MAX_EXPR)
6534 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6535 target, op1, NULL_RTX, op0);
6536 else
6537 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6538 op1, target, NULL_RTX, op0);
6539 emit_move_insn (target, op1);
6541 else
6543 if (code == MAX_EXPR)
6544 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6545 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6546 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
6547 else
6548 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6549 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6550 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
6551 if (temp == const0_rtx)
6552 emit_move_insn (target, op1);
6553 else if (temp != const_true_rtx)
6555 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6556 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6557 else
6558 abort ();
6559 emit_move_insn (target, op1);
6562 emit_label (op0);
6563 return target;
6565 case BIT_NOT_EXPR:
6566 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6567 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6568 if (temp == 0)
6569 abort ();
6570 return temp;
6572 case FFS_EXPR:
6573 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6574 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6575 if (temp == 0)
6576 abort ();
6577 return temp;
6579 /* ??? Can optimize bitwise operations with one arg constant.
6580 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6581 and (a bitwise1 b) bitwise2 b (etc)
6582 but that is probably not worthwhile. */
6584 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6585 boolean values when we want in all cases to compute both of them. In
6586 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6587 as actual zero-or-1 values and then bitwise anding. In cases where
6588 there cannot be any side effects, better code would be made by
6589 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6590 how to recognize those cases. */
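/* E.g. (illustrative): "a && b" is normally TRUTH_ANDIF_EXPR and gets
   short-circuit jumps below; a front end that knows both operands are
   cheap and side-effect free could use TRUTH_AND_EXPR instead, which
   computes both as 0-or-1 values and ands them like BIT_AND_EXPR.  */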
6592 case TRUTH_AND_EXPR:
6593 case BIT_AND_EXPR:
6594 this_optab = and_optab;
6595 goto binop;
6597 case TRUTH_OR_EXPR:
6598 case BIT_IOR_EXPR:
6599 this_optab = ior_optab;
6600 goto binop;
6602 case TRUTH_XOR_EXPR:
6603 case BIT_XOR_EXPR:
6604 this_optab = xor_optab;
6605 goto binop;
6607 case LSHIFT_EXPR:
6608 case RSHIFT_EXPR:
6609 case LROTATE_EXPR:
6610 case RROTATE_EXPR:
6611 preexpand_calls (exp);
6612 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6613 subtarget = 0;
6614 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6615 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6616 unsignedp);
6618 /* Could determine the answer when only additive constants differ. Also,
6619 the addition of one can be handled by changing the condition. */
6620 case LT_EXPR:
6621 case LE_EXPR:
6622 case GT_EXPR:
6623 case GE_EXPR:
6624 case EQ_EXPR:
6625 case NE_EXPR:
6626 preexpand_calls (exp);
6627 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6628 if (temp != 0)
6629 return temp;
6631 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
6632 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6633 && original_target
6634 && GET_CODE (original_target) == REG
6635 && (GET_MODE (original_target)
6636 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6638 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6639 VOIDmode, 0);
6641 if (temp != original_target)
6642 temp = copy_to_reg (temp);
6644 op1 = gen_label_rtx ();
6645 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
6646 GET_MODE (temp), unsignedp, 0);
6647 emit_jump_insn (gen_beq (op1));
6648 emit_move_insn (temp, const1_rtx);
6649 emit_label (op1);
6650 return temp;
6653 /* If no set-flag instruction, must generate a conditional
6654 store into a temporary variable. Drop through
6655 and handle this like && and ||. */
6657 case TRUTH_ANDIF_EXPR:
6658 case TRUTH_ORIF_EXPR:
6659 if (! ignore
6660 && (target == 0 || ! safe_from_p (target, exp)
6661 /* Make sure we don't have a hard reg (such as function's return
6662 value) live across basic blocks, if not optimizing. */
6663 || (!optimize && GET_CODE (target) == REG
6664 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
6665 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6667 if (target)
6668 emit_clr_insn (target);
6670 op1 = gen_label_rtx ();
6671 jumpifnot (exp, op1);
6673 if (target)
6674 emit_0_to_1_insn (target);
6676 emit_label (op1);
6677 return ignore ? const0_rtx : target;
6679 case TRUTH_NOT_EXPR:
6680 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6681 /* The parser is careful to generate TRUTH_NOT_EXPR
6682 only with operands that are always zero or one. */
6683 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
6684 target, 1, OPTAB_LIB_WIDEN);
6685 if (temp == 0)
6686 abort ();
6687 return temp;
6689 case COMPOUND_EXPR:
6690 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6691 emit_queue ();
6692 return expand_expr (TREE_OPERAND (exp, 1),
6693 (ignore ? const0_rtx : target),
6694 VOIDmode, 0);
6696 case COND_EXPR:
6697 /* If we would have a "singleton" (see below) were it not for a
6698 conversion in each arm, bring that conversion back out. */
6699 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6700 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
6701 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
6702 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
6704 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
6705 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
6707 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
6708 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6709 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
6710 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
6711 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
6712 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6713 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
6714 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
6715 return expand_expr (build1 (NOP_EXPR, type,
6716 build (COND_EXPR, TREE_TYPE (true),
6717 TREE_OPERAND (exp, 0),
6718 true, false)),
6719 target, tmode, modifier);
6723 /* Note that COND_EXPRs whose type is a structure or union
6724 are required to be constructed to contain assignments of
6725 a temporary variable, so that we can evaluate them here
6726 for side effect only. If type is void, we must do likewise. */
6728 /* If an arm of the branch requires a cleanup,
6729 only that cleanup is performed. */
6731 tree singleton = 0;
6732 tree binary_op = 0, unary_op = 0;
6734 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6735 convert it to our mode, if necessary. */
6736 if (integer_onep (TREE_OPERAND (exp, 1))
6737 && integer_zerop (TREE_OPERAND (exp, 2))
6738 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6740 if (ignore)
6742 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6743 ro_modifier);
6744 return const0_rtx;
6747 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
6748 if (GET_MODE (op0) == mode)
6749 return op0;
6751 if (target == 0)
6752 target = gen_reg_rtx (mode);
6753 convert_move (target, op0, unsignedp);
6754 return target;
6757 /* Check for X ? A + B : A. If we have this, we can copy A to the
6758 output and conditionally add B. Similarly for unary operations.
6759 Don't do this if X has side-effects because those side effects
6760 might affect A or B and the "?" operation is a sequence point in
6761 ANSI. (operand_equal_p tests for side effects.) */
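/* Illustrative example (ours): x ? a + 3 : a copies a into the
   result and conditionally adds 3, rather than evaluating a on both
   arms of a branch.  */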
6763 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6764 && operand_equal_p (TREE_OPERAND (exp, 2),
6765 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6766 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6767 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6768 && operand_equal_p (TREE_OPERAND (exp, 1),
6769 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6770 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6771 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6772 && operand_equal_p (TREE_OPERAND (exp, 2),
6773 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6774 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6775 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6776 && operand_equal_p (TREE_OPERAND (exp, 1),
6777 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6778 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6780 /* If we are not to produce a result, we have no target. Otherwise,
6781 if a target was specified use it; it will not be used as an
6782 intermediate target unless it is safe. If no target, use a
6783 temporary. */
6785 if (ignore)
6786 temp = 0;
6787 else if (original_target
6788 && (safe_from_p (original_target, TREE_OPERAND (exp, 0))
6789 || (singleton && GET_CODE (original_target) == REG
6790 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
6791 && original_target == var_rtx (singleton)))
6792 && GET_MODE (original_target) == mode
6793 && ! (GET_CODE (original_target) == MEM
6794 && MEM_VOLATILE_P (original_target)))
6795 temp = original_target;
6796 else if (TREE_ADDRESSABLE (type))
6797 abort ();
6798 else
6799 temp = assign_temp (type, 0, 0, 1);
6801 /* If we had X ? A + C : A, with C a constant power of 2, and we can
6802 do the test of X as a store-flag operation, do this as
6803 A + ((X != 0) << log C). Similarly for other simple binary
6804 operators. Only do for C == 1 if BRANCH_COST is low. */
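/* Worked example (ours): x ? a + 4 : a, when x can be computed as a
   store-flag and BRANCH_COST >= 3, becomes a + ((x != 0) << 2), with
   no branch at all.  */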
6805 if (temp && singleton && binary_op
6806 && (TREE_CODE (binary_op) == PLUS_EXPR
6807 || TREE_CODE (binary_op) == MINUS_EXPR
6808 || TREE_CODE (binary_op) == BIT_IOR_EXPR
6809 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
6810 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
6811 : integer_onep (TREE_OPERAND (binary_op, 1)))
6812 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6814 rtx result;
6815 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6816 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6817 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
6818 : xor_optab);
6820 /* If we had X ? A : A + 1, do this as A + (X == 0).
6822 We have to invert the truth value here and then put it
6823 back later if do_store_flag fails. We cannot simply copy
6824 TREE_OPERAND (exp, 0) to another variable and modify that
6825 because invert_truthvalue can modify the tree pointed to
6826 by its argument. */
6827 if (singleton == TREE_OPERAND (exp, 1))
6828 TREE_OPERAND (exp, 0)
6829 = invert_truthvalue (TREE_OPERAND (exp, 0));
6831 result = do_store_flag (TREE_OPERAND (exp, 0),
6832 (safe_from_p (temp, singleton)
6833 ? temp : NULL_RTX),
6834 mode, BRANCH_COST <= 1);
6836 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
6837 result = expand_shift (LSHIFT_EXPR, mode, result,
6838 build_int_2 (tree_log2
6839 (TREE_OPERAND
6840 (binary_op, 1)),
6841 0),
6842 (safe_from_p (temp, singleton)
6843 ? temp : NULL_RTX), 0);
6845 if (result)
6847 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
6848 return expand_binop (mode, boptab, op1, result, temp,
6849 unsignedp, OPTAB_LIB_WIDEN);
6851 else if (singleton == TREE_OPERAND (exp, 1))
6852 TREE_OPERAND (exp, 0)
6853 = invert_truthvalue (TREE_OPERAND (exp, 0));
6856 do_pending_stack_adjust ();
6857 NO_DEFER_POP;
6858 op0 = gen_label_rtx ();
6860 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6862 if (temp != 0)
6864 /* If the target conflicts with the other operand of the
6865 binary op, we can't use it. Also, we can't use the target
6866 if it is a hard register, because evaluating the condition
6867 might clobber it. */
6868 if ((binary_op
6869 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
6870 || (GET_CODE (temp) == REG
6871 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
6872 temp = gen_reg_rtx (mode);
6873 store_expr (singleton, temp, 0);
6875 else
6876 expand_expr (singleton,
6877 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6878 if (singleton == TREE_OPERAND (exp, 1))
6879 jumpif (TREE_OPERAND (exp, 0), op0);
6880 else
6881 jumpifnot (TREE_OPERAND (exp, 0), op0);
6883 start_cleanup_deferral ();
6884 if (binary_op && temp == 0)
6885 /* Just touch the other operand. */
6886 expand_expr (TREE_OPERAND (binary_op, 1),
6887 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6888 else if (binary_op)
6889 store_expr (build (TREE_CODE (binary_op), type,
6890 make_tree (type, temp),
6891 TREE_OPERAND (binary_op, 1)),
6892 temp, 0);
6893 else
6894 store_expr (build1 (TREE_CODE (unary_op), type,
6895 make_tree (type, temp)),
6896 temp, 0);
6897 op1 = op0;
6899 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6900 comparison operator. If we have one of these cases, set the
6901 output to A, branch on A (cse will merge these two references),
6902 then set the output to FOO. */
6903 else if (temp
6904 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6905 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6906 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6907 TREE_OPERAND (exp, 1), 0)
6908 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6909 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
6910 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
6912 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6913 temp = gen_reg_rtx (mode);
6914 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6915 jumpif (TREE_OPERAND (exp, 0), op0);
6917 start_cleanup_deferral ();
6918 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6919 op1 = op0;
6921 else if (temp
6922 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6923 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6924 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6925 TREE_OPERAND (exp, 2), 0)
6926 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6927 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
6928 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
6930 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6931 temp = gen_reg_rtx (mode);
6932 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6933 jumpifnot (TREE_OPERAND (exp, 0), op0);
6935 start_cleanup_deferral ();
6936 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6937 op1 = op0;
6939 else
6941 op1 = gen_label_rtx ();
6942 jumpifnot (TREE_OPERAND (exp, 0), op0);
6944 start_cleanup_deferral ();
6945 if (temp != 0)
6946 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6947 else
6948 expand_expr (TREE_OPERAND (exp, 1),
6949 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6950 end_cleanup_deferral ();
6951 emit_queue ();
6952 emit_jump_insn (gen_jump (op1));
6953 emit_barrier ();
6954 emit_label (op0);
6955 start_cleanup_deferral ();
6956 if (temp != 0)
6957 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6958 else
6959 expand_expr (TREE_OPERAND (exp, 2),
6960 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6963 end_cleanup_deferral ();
6965 emit_queue ();
6966 emit_label (op1);
6967 OK_DEFER_POP;
6969 return temp;
6972 case TARGET_EXPR:
6974 /* Something needs to be initialized, but we didn't know
6975 where that thing was when building the tree. For example,
6976 it could be the return value of a function, or a parameter
6977 to a function which is laid down on the stack, or a temporary
6978 variable which must be passed by reference.
6980 We guarantee that the expression will either be constructed
6981 or copied into our original target. */
6983 tree slot = TREE_OPERAND (exp, 0);
6984 tree cleanups = NULL_TREE;
6985 tree exp1;
6986 rtx temp;
6988 if (TREE_CODE (slot) != VAR_DECL)
6989 abort ();
6991 if (! ignore)
6992 target = original_target;
6994 if (target == 0)
6996 if (DECL_RTL (slot) != 0)
6998 target = DECL_RTL (slot);
6999 /* If we have already expanded the slot, don't do
7000 it again. (mrs) */
7001 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7002 return target;
7004 else
7006 target = assign_temp (type, 2, 0, 1);
7007 /* All temp slots at this level must not conflict. */
7008 preserve_temp_slots (target);
7009 DECL_RTL (slot) = target;
7010 if (TREE_ADDRESSABLE (slot))
7012 TREE_ADDRESSABLE (slot) = 0;
7013 mark_addressable (slot);
7016 /* Since SLOT is not known to the called function
7017 to belong to its stack frame, we must build an explicit
7018 cleanup. This case occurs when we must build up a reference
7019 to pass the reference as an argument. In this case,
7020 it is very likely that such a reference need not be
7021 built here. */
7023 if (TREE_OPERAND (exp, 2) == 0)
7024 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
7025 cleanups = TREE_OPERAND (exp, 2);
7028 else
7030 /* This case does occur, when expanding a parameter which
7031 needs to be constructed on the stack. The target
7032 is the actual stack address that we want to initialize.
7033 The function we call will perform the cleanup in this case. */
7035 /* If we have already assigned it space, use that space,
7036 not the target that we were passed in, as our target
7037 parameter is only a hint. */
7038 if (DECL_RTL (slot) != 0)
7040 target = DECL_RTL (slot);
7041 /* If we have already expanded the slot, don't do
7042 it again. (mrs) */
7043 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7044 return target;
7046 else
7048 DECL_RTL (slot) = target;
7049 /* If we must have an addressable slot, then make sure that
7050 the RTL that we just stored in slot is OK. */
7051 if (TREE_ADDRESSABLE (slot))
7053 TREE_ADDRESSABLE (slot) = 0;
7054 mark_addressable (slot);
7059 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
7060 /* Mark it as expanded. */
7061 TREE_OPERAND (exp, 1) = NULL_TREE;
7063 store_expr (exp1, target, 0);
7065 expand_decl_cleanup (NULL_TREE, cleanups);
7067 return target;
7070 case INIT_EXPR:
7072 tree lhs = TREE_OPERAND (exp, 0);
7073 tree rhs = TREE_OPERAND (exp, 1);
7074 tree noncopied_parts = 0;
7075 tree lhs_type = TREE_TYPE (lhs);
7077 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7078 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7079 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7080 TYPE_NONCOPIED_PARTS (lhs_type));
7081 while (noncopied_parts != 0)
7083 expand_assignment (TREE_VALUE (noncopied_parts),
7084 TREE_PURPOSE (noncopied_parts), 0, 0);
7085 noncopied_parts = TREE_CHAIN (noncopied_parts);
7087 return temp;
7090 case MODIFY_EXPR:
7092 /* If lhs is complex, expand calls in rhs before computing it.
7093 That's so we don't compute a pointer and save it over a call.
7094 If lhs is simple, compute it first so we can give it as a
7095 target if the rhs is just a call. This avoids an extra temp and copy
7096 and that prevents a partial-subsumption which makes bad code.
7097 Actually we could treat component_ref's of vars like vars. */
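/* Illustrative example (ours): for x = f () we expand x first and
   offer it as the call's target; for p->q->r = f () we pre-expand
   the call so the computed field address is not held live across
   it.  */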
7099 tree lhs = TREE_OPERAND (exp, 0);
7100 tree rhs = TREE_OPERAND (exp, 1);
7101 tree noncopied_parts = 0;
7102 tree lhs_type = TREE_TYPE (lhs);
7104 temp = 0;
7106 if (TREE_CODE (lhs) != VAR_DECL
7107 && TREE_CODE (lhs) != RESULT_DECL
7108 && TREE_CODE (lhs) != PARM_DECL
7109 && ! (TREE_CODE (lhs) == INDIRECT_REF
7110 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7111 preexpand_calls (exp);
7113 /* Check for |= or &= of a bitfield of size one into another bitfield
7114 of size 1. In this case, (unless we need the result of the
7115 assignment) we can do this more efficiently with a
7116 test followed by an assignment, if necessary.
7118 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7119 things change so we do, this code should be enhanced to
7120 support it. */
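/* Illustrative example (ours): with one-bit fields and the result
   unused, s.a |= s.b; becomes "if (s.b) s.a = 1;" and s.a &= s.b;
   becomes "if (! s.b) s.a = 0;", a test plus a plain store instead
   of a read-modify-write of the destination bitfield.  */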
7121 if (ignore
7122 && TREE_CODE (lhs) == COMPONENT_REF
7123 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7124 || TREE_CODE (rhs) == BIT_AND_EXPR)
7125 && TREE_OPERAND (rhs, 0) == lhs
7126 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7127 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7128 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7130 rtx label = gen_label_rtx ();
7132 do_jump (TREE_OPERAND (rhs, 1),
7133 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7134 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7135 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7136 (TREE_CODE (rhs) == BIT_IOR_EXPR
7137 ? integer_one_node
7138 : integer_zero_node)),
7139 0, 0);
7140 do_pending_stack_adjust ();
7141 emit_label (label);
7142 return const0_rtx;
7145 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7146 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7147 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7148 TYPE_NONCOPIED_PARTS (lhs_type));
7150 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7151 while (noncopied_parts != 0)
7153 expand_assignment (TREE_PURPOSE (noncopied_parts),
7154 TREE_VALUE (noncopied_parts), 0, 0);
7155 noncopied_parts = TREE_CHAIN (noncopied_parts);
7157 return temp;
7160 case PREINCREMENT_EXPR:
7161 case PREDECREMENT_EXPR:
7162 return expand_increment (exp, 0, ignore);
7164 case POSTINCREMENT_EXPR:
7165 case POSTDECREMENT_EXPR:
7166 /* Faster to treat as pre-increment if result is not used. */
7167 return expand_increment (exp, ! ignore, ignore);
7169 case ADDR_EXPR:
7170 /* If nonzero, TEMP will be set to the address of something that might
7171 be a MEM corresponding to a stack slot. */
7172 temp = 0;
7174 /* Are we taking the address of a nested function? */
7175 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7176 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7177 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0)))
7179 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7180 op0 = force_operand (op0, target);
7182 /* If we are taking the address of something erroneous, just
7183 return a zero. */
7184 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7185 return const0_rtx;
7186 else
7188 /* We make sure to pass const0_rtx down if we came in with
7189 ignore set, to avoid running the cleanups twice. */
7190 op0 = expand_expr (TREE_OPERAND (exp, 0),
7191 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7192 (modifier == EXPAND_INITIALIZER
7193 ? modifier : EXPAND_CONST_ADDRESS));
7195 /* If we are going to ignore the result, OP0 will have been set
7196 to const0_rtx, so just return it. Don't get confused and
7197 think we are taking the address of the constant. */
7198 if (ignore)
7199 return op0;
7201 op0 = protect_from_queue (op0, 0);
7203 /* We would like the object in memory. If it is a constant,
7204 we can have it be statically allocated into memory. For
7205 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7206 memory and store the value into it. */
7208 if (CONSTANT_P (op0))
7209 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7210 op0);
7211 else if (GET_CODE (op0) == MEM)
7213 mark_temp_addr_taken (op0);
7214 temp = XEXP (op0, 0);
7217 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7218 || GET_CODE (op0) == CONCAT)
7220 /* If this object is in a register, it must not
7221 be BLKmode. */
7222 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7223 rtx memloc = assign_temp (inner_type, 1, 1, 1);
7225 mark_temp_addr_taken (memloc);
7226 emit_move_insn (memloc, op0);
7227 op0 = memloc;
7230 if (GET_CODE (op0) != MEM)
7231 abort ();
7233 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7235 temp = XEXP (op0, 0);
7236 #ifdef POINTERS_EXTEND_UNSIGNED
7237 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7238 && mode == ptr_mode)
7239 temp = convert_memory_address (ptr_mode, temp);
7240 #endif
7241 return temp;
7244 op0 = force_operand (XEXP (op0, 0), target);
7247 if (flag_force_addr && GET_CODE (op0) != REG)
7248 op0 = force_reg (Pmode, op0);
7250 if (GET_CODE (op0) == REG
7251 && ! REG_USERVAR_P (op0))
7252 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
7254 /* If we might have had a temp slot, add an equivalent address
7255 for it. */
7256 if (temp != 0)
7257 update_temp_slot_address (temp, op0);
7259 #ifdef POINTERS_EXTEND_UNSIGNED
7260 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7261 && mode == ptr_mode)
7262 op0 = convert_memory_address (ptr_mode, op0);
7263 #endif
7265 return op0;
7267 case ENTRY_VALUE_EXPR:
7268 abort ();
7270 /* COMPLEX type for Extended Pascal & Fortran */
7271 case COMPLEX_EXPR:
7273 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7274 rtx insns;
7276 /* Get the rtx code of the operands. */
7277 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7278 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7280 if (! target)
7281 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7283 start_sequence ();
7285 /* Move the real (op0) and imaginary (op1) parts to their location. */
7286 emit_move_insn (gen_realpart (mode, target), op0);
7287 emit_move_insn (gen_imagpart (mode, target), op1);
7289 insns = get_insns ();
7290 end_sequence ();
7292 /* Complex construction should appear as a single unit. */
7293 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7294 each with a separate pseudo as destination.
7295 It's not correct for flow to treat them as a unit. */
7296 if (GET_CODE (target) != CONCAT)
7297 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7298 else
7299 emit_insns (insns);
7301 return target;
7304 case REALPART_EXPR:
7305 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7306 return gen_realpart (mode, op0);
7308 case IMAGPART_EXPR:
7309 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7310 return gen_imagpart (mode, op0);
7312 case CONJ_EXPR:
7314 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7315 rtx imag_t;
7316 rtx insns;
7318 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7320 if (! target)
7321 target = gen_reg_rtx (mode);
7323 start_sequence ();
7325 /* Store the realpart and the negated imagpart to target. */
7326 emit_move_insn (gen_realpart (partmode, target),
7327 gen_realpart (partmode, op0));
7329 imag_t = gen_imagpart (partmode, target);
7330 temp = expand_unop (partmode, neg_optab,
7331 gen_imagpart (partmode, op0), imag_t, 0);
7332 if (temp != imag_t)
7333 emit_move_insn (imag_t, temp);
7335 insns = get_insns ();
7336 end_sequence ();
7338 /* Conjugate should appear as a single unit.
7339 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
7340 each with a separate pseudo as destination.
7341 It's not correct for flow to treat them as a unit. */
7342 if (GET_CODE (target) != CONCAT)
7343 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7344 else
7345 emit_insns (insns);
7347 return target;
7350 case TRY_CATCH_EXPR:
7352 tree handler = TREE_OPERAND (exp, 1);
7354 expand_eh_region_start ();
7356 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7358 expand_eh_region_end (handler);
7360 return op0;
7363 case POPDCC_EXPR:
7365 rtx dcc = get_dynamic_cleanup_chain ();
7366 emit_move_insn (dcc, validize_mem (gen_rtx (MEM, Pmode, dcc)));
7367 return const0_rtx;
7370 case POPDHC_EXPR:
7372 rtx dhc = get_dynamic_handler_chain ();
7373 emit_move_insn (dhc, validize_mem (gen_rtx (MEM, Pmode, dhc)));
7374 return const0_rtx;
7377 case ERROR_MARK:
7378 op0 = CONST0_RTX (tmode);
7379 if (op0 != 0)
7380 return op0;
7381 return const0_rtx;
7383 default:
7384 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7387 /* Here to do an ordinary binary operator, generating an instruction
7388 from the optab already placed in `this_optab'. */
7389 binop:
7390 preexpand_calls (exp);
7391 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
7392 subtarget = 0;
7393 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7394 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7395 binop2:
7396 temp = expand_binop (mode, this_optab, op0, op1, target,
7397 unsignedp, OPTAB_LIB_WIDEN);
7398 if (temp == 0)
7399 abort ();
7400 return temp;
7404 /* Emit bytecode to evaluate the given expression EXP to the stack. */
7406 void
7407 bc_expand_expr (exp)
7408 tree exp;
7410 enum tree_code code;
7411 tree type, arg0;
7412 rtx r;
7413 struct binary_operator *binoptab;
7414 struct unary_operator *unoptab;
7415 struct increment_operator *incroptab;
7416 struct bc_label *lab, *lab1;
7417 enum bytecode_opcode opcode;
7420 code = TREE_CODE (exp);
7422 switch (code)
7424 case PARM_DECL:
7426 if (DECL_RTL (exp) == 0)
7428 error_with_decl (exp, "prior parameter's size depends on `%s'");
7429 return;
7432 bc_load_parmaddr (DECL_RTL (exp));
7433 bc_load_memory (TREE_TYPE (exp), exp);
7435 return;
7437 case VAR_DECL:
7439 if (DECL_RTL (exp) == 0)
7440 abort ();
7442 #if 0
7443 if (BYTECODE_LABEL (DECL_RTL (exp)))
7444 bc_load_externaddr (DECL_RTL (exp));
7445 else
7446 bc_load_localaddr (DECL_RTL (exp));
7447 #endif
7448 if (TREE_PUBLIC (exp))
7449 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
7450 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
7451 else
7452 bc_load_localaddr (DECL_RTL (exp));
7454 bc_load_memory (TREE_TYPE (exp), exp);
7455 return;
7457 case INTEGER_CST:
7459 #ifdef DEBUG_PRINT_CODE
7460 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
7461 #endif
7462 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
7463 ? SImode
7464 : TYPE_MODE (TREE_TYPE (exp)))],
7465 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
7466 return;
7468 case REAL_CST:
7470 #if 0
7471 #ifdef DEBUG_PRINT_CODE
7472 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
7473 #endif
7474 /* FIX THIS: find a better way to pass real_cst's. -bson */
7475 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
7476 (double) TREE_REAL_CST (exp));
7477 #else
7478 abort ();
7479 #endif
7481 return;
7483 case CALL_EXPR:
7485 /* We build a call description vector describing the type of
7486 the return value and of the arguments; this call vector,
7487 together with a pointer to a location for the return value
7488 and the base of the argument list, is passed to the low
7489 level machine dependent call subroutine, which is responsible
7490 for putting the arguments wherever real functions expect
7491 them, as well as getting the return value back. */
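/* As an illustrative sketch (layout inferred from the code below), a
   call f (a, b) produces a calldesc vector of the form
     { nargs, ret_typecode, ret_size,
       typecode (a), size (a), typecode (b), size (b) }.  */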
7493 tree calldesc = 0, arg;
7494 int nargs = 0, i;
7495 rtx retval;
7497 /* Push the evaluated args on the evaluation stack in reverse
7498 order. Also make an entry for each arg in the calldesc
7499 vector while we're at it. */
7501 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7503 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
7505 ++nargs;
7506 bc_expand_expr (TREE_VALUE (arg));
7508 calldesc = tree_cons ((tree) 0,
7509 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
7510 calldesc);
7511 calldesc = tree_cons ((tree) 0,
7512 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
7513 calldesc);
7516 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7518 /* Allocate a location for the return value and push its
7519 address on the evaluation stack. Also make an entry
7520 at the front of the calldesc for the return value type. */
7522 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7523 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
7524 bc_load_localaddr (retval);
7526 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
7527 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
7529 /* Prepend the argument count. */
7530 calldesc = tree_cons ((tree) 0,
7531 build_int_2 (nargs, 0),
7532 calldesc);
7534 /* Push the address of the call description vector on the stack. */
7535 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
7536 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
7537 build_index_type (build_int_2 (nargs * 2, 0)));
7538 r = output_constant_def (calldesc);
7539 bc_load_externaddr (r);
7541 /* Push the address of the function to be called. */
7542 bc_expand_expr (TREE_OPERAND (exp, 0));
7544 /* Call the function, popping its address and the calldesc vector
7545 address off the evaluation stack in the process. */
7546 bc_emit_instruction (call);
7548 /* Pop the arguments off the stack. */
7549 bc_adjust_stack (nargs);
7551 /* Load the return value onto the stack. */
7552 bc_load_localaddr (retval);
7553 bc_load_memory (type, TREE_OPERAND (exp, 0));
7555 return;
7557 case SAVE_EXPR:
7559 if (!SAVE_EXPR_RTL (exp))
7561 /* First time around: copy to local variable */
7562 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
7563 TYPE_ALIGN (TREE_TYPE(exp)));
7564 bc_expand_expr (TREE_OPERAND (exp, 0));
7565 bc_emit_instruction (duplicate);
7567 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7568 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7570 else
7572 /* Consecutive reference: use saved copy */
7573 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7574 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7576 return;
7578 #if 0
7579 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
7580 how are they handled instead? */
7581 case LET_STMT:
7583 TREE_USED (exp) = 1;
7584 bc_expand_expr (STMT_BODY (exp));
7585 return;
7586 #endif
7588 case NOP_EXPR:
7589 case CONVERT_EXPR:
7591 bc_expand_expr (TREE_OPERAND (exp, 0));
7592 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
7593 return;
7595 case MODIFY_EXPR:
7597 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
7598 return;
7600 case ADDR_EXPR:
7602 bc_expand_address (TREE_OPERAND (exp, 0));
7603 return;
7605 case INDIRECT_REF:
7607 bc_expand_expr (TREE_OPERAND (exp, 0));
7608 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7609 return;
7611 case ARRAY_REF:
7613 bc_expand_expr (bc_canonicalize_array_ref (exp));
7614 return;
7616 case COMPONENT_REF:
7618 bc_expand_component_address (exp);
7620 /* If we have a bitfield, generate a proper load */
7621 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
7622 return;
7624 case COMPOUND_EXPR:
7626 bc_expand_expr (TREE_OPERAND (exp, 0));
7627 bc_emit_instruction (drop);
7628 bc_expand_expr (TREE_OPERAND (exp, 1));
7629 return;
7631 case COND_EXPR:
7633 bc_expand_expr (TREE_OPERAND (exp, 0));
7634 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7635 lab = bc_get_bytecode_label ();
7636 bc_emit_bytecode (xjumpifnot);
7637 bc_emit_bytecode_labelref (lab);
7639 #ifdef DEBUG_PRINT_CODE
7640 fputc ('\n', stderr);
7641 #endif
7642 bc_expand_expr (TREE_OPERAND (exp, 1));
7643 lab1 = bc_get_bytecode_label ();
7644 bc_emit_bytecode (jump);
7645 bc_emit_bytecode_labelref (lab1);
7647 #ifdef DEBUG_PRINT_CODE
7648 fputc ('\n', stderr);
7649 #endif
7651 bc_emit_bytecode_labeldef (lab);
7652 bc_expand_expr (TREE_OPERAND (exp, 2));
7653 bc_emit_bytecode_labeldef (lab1);
7654 return;
7656 case TRUTH_ANDIF_EXPR:
7658 opcode = xjumpifnot;
7659 goto andorif;
7661 case TRUTH_ORIF_EXPR:
7663 opcode = xjumpif;
7664 goto andorif;
7666 case PLUS_EXPR:
7668 binoptab = optab_plus_expr;
7669 goto binop;
7671 case MINUS_EXPR:
7673 binoptab = optab_minus_expr;
7674 goto binop;
7676 case MULT_EXPR:
7678 binoptab = optab_mult_expr;
7679 goto binop;
7681 case TRUNC_DIV_EXPR:
7682 case FLOOR_DIV_EXPR:
7683 case CEIL_DIV_EXPR:
7684 case ROUND_DIV_EXPR:
7685 case EXACT_DIV_EXPR:
7687 binoptab = optab_trunc_div_expr;
7688 goto binop;
7690 case TRUNC_MOD_EXPR:
7691 case FLOOR_MOD_EXPR:
7692 case CEIL_MOD_EXPR:
7693 case ROUND_MOD_EXPR:
7695 binoptab = optab_trunc_mod_expr;
7696 goto binop;
7698 case FIX_ROUND_EXPR:
7699 case FIX_FLOOR_EXPR:
7700 case FIX_CEIL_EXPR:
7701 abort (); /* Not used for C. */
7703 case FIX_TRUNC_EXPR:
7704 case FLOAT_EXPR:
7705 case MAX_EXPR:
7706 case MIN_EXPR:
7707 case FFS_EXPR:
7708 case LROTATE_EXPR:
7709 case RROTATE_EXPR:
7710 abort (); /* FIXME */
7712 case RDIV_EXPR:
7714 binoptab = optab_rdiv_expr;
7715 goto binop;
7717 case BIT_AND_EXPR:
7719 binoptab = optab_bit_and_expr;
7720 goto binop;
7722 case BIT_IOR_EXPR:
7724 binoptab = optab_bit_ior_expr;
7725 goto binop;
7727 case BIT_XOR_EXPR:
7729 binoptab = optab_bit_xor_expr;
7730 goto binop;
7732 case LSHIFT_EXPR:
7734 binoptab = optab_lshift_expr;
7735 goto binop;
7737 case RSHIFT_EXPR:
7739 binoptab = optab_rshift_expr;
7740 goto binop;
7742 case TRUTH_AND_EXPR:
7744 binoptab = optab_truth_and_expr;
7745 goto binop;
7747 case TRUTH_OR_EXPR:
7749 binoptab = optab_truth_or_expr;
7750 goto binop;
7752 case LT_EXPR:
7754 binoptab = optab_lt_expr;
7755 goto binop;
7757 case LE_EXPR:
7759 binoptab = optab_le_expr;
7760 goto binop;
7762 case GE_EXPR:
7764 binoptab = optab_ge_expr;
7765 goto binop;
7767 case GT_EXPR:
7769 binoptab = optab_gt_expr;
7770 goto binop;
7772 case EQ_EXPR:
7774 binoptab = optab_eq_expr;
7775 goto binop;
7777 case NE_EXPR:
7779 binoptab = optab_ne_expr;
7780 goto binop;
7782 case NEGATE_EXPR:
7784 unoptab = optab_negate_expr;
7785 goto unop;
7787 case BIT_NOT_EXPR:
7789 unoptab = optab_bit_not_expr;
7790 goto unop;
7792 case TRUTH_NOT_EXPR:
7794 unoptab = optab_truth_not_expr;
7795 goto unop;
7797 case PREDECREMENT_EXPR:
7799 incroptab = optab_predecrement_expr;
7800 goto increment;
7802 case PREINCREMENT_EXPR:
7804 incroptab = optab_preincrement_expr;
7805 goto increment;
7807 case POSTDECREMENT_EXPR:
7809 incroptab = optab_postdecrement_expr;
7810 goto increment;
7812 case POSTINCREMENT_EXPR:
7814 incroptab = optab_postincrement_expr;
7815 goto increment;
7817 case CONSTRUCTOR:
7819 bc_expand_constructor (exp);
7820 return;
7822 case ERROR_MARK:
7823 case RTL_EXPR:
7825 return;
7827 case BIND_EXPR:
7829 tree vars = TREE_OPERAND (exp, 0);
7830 int vars_need_expansion = 0;
7832 /* Need to open a binding contour here because
7833 if there are any cleanups they must be contained here. */
7834 expand_start_bindings (0);
7836 /* Mark the corresponding BLOCK for output. */
7837 if (TREE_OPERAND (exp, 2) != 0)
7838 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
7840 /* If VARS have not yet been expanded, expand them now. */
7841 while (vars)
7843 if (DECL_RTL (vars) == 0)
7845 vars_need_expansion = 1;
7846 expand_decl (vars);
7848 expand_decl_init (vars);
7849 vars = TREE_CHAIN (vars);
7852 bc_expand_expr (TREE_OPERAND (exp, 1));
7854 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7856 return;
7859 default:
7860 abort ();
7863 abort ();
7865 binop:
7867 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
7868 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
7869 return;
7872 unop:
7874 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7875 return;
7878 andorif:
7880 bc_expand_expr (TREE_OPERAND (exp, 0));
7881 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7882 lab = bc_get_bytecode_label ();
7884 bc_emit_instruction (duplicate);
7885 bc_emit_bytecode (opcode);
7886 bc_emit_bytecode_labelref (lab);
7888 #ifdef DEBUG_PRINT_CODE
7889 fputc ('\n', stderr);
7890 #endif
7892 bc_emit_instruction (drop);
7894 bc_expand_expr (TREE_OPERAND (exp, 1));
7895 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
7896 bc_emit_bytecode_labeldef (lab);
7897 return;
7900 increment:
7902 type = TREE_TYPE (TREE_OPERAND (exp, 0));
7904 /* Push the quantum. */
7905 bc_expand_expr (TREE_OPERAND (exp, 1));
7907 /* Convert it to the lvalue's type. */
7908 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
7910 /* Push the address of the lvalue */
7911 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
7913 /* Perform actual increment */
7914 bc_expand_increment (incroptab, type);
7915 return;
7918 /* Return the alignment in bits of EXP, a pointer valued expression.
7919 But don't return more than MAX_ALIGN no matter what.
7920 The alignment returned is, by default, the alignment of the thing that
7921 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7923 Otherwise, look at the expression to see if we can do better, i.e., if the
7924 expression is actually pointing at an object whose alignment is tighter. */
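/* A hedged example of the cases below: for `&v', where the decl V has
   DECL_ALIGN of 32, the ADDR_EXPR case yields 32; for `p + n' with a
   non-constant N, the PLUS_EXPR case gives up and returns the
   alignment implied by the pointer's type alone.  */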
7926 static int
7927 get_pointer_alignment (exp, max_align)
7928 tree exp;
7929 unsigned max_align;
7931 unsigned align, inner;
7933 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7934 return 0;
7936 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7937 align = MIN (align, max_align);
7939 while (1)
7941 switch (TREE_CODE (exp))
7943 case NOP_EXPR:
7944 case CONVERT_EXPR:
7945 case NON_LVALUE_EXPR:
7946 exp = TREE_OPERAND (exp, 0);
7947 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7948 return align;
7949 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7950 align = MIN (inner, max_align);
7951 break;
7953 case PLUS_EXPR:
7954 /* If sum of pointer + int, restrict our maximum alignment to that
7955 imposed by the integer. If not, we can't do any better than
7956 ALIGN. */
7957 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7958 return align;
7960 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7961 & (max_align - 1))
7962 != 0)
7963 max_align >>= 1;
7965 exp = TREE_OPERAND (exp, 0);
7966 break;
7968 case ADDR_EXPR:
7969 /* See what we are pointing at and look at its alignment. */
7970 exp = TREE_OPERAND (exp, 0);
7971 if (TREE_CODE (exp) == FUNCTION_DECL)
7972 align = FUNCTION_BOUNDARY;
7973 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7974 align = DECL_ALIGN (exp);
7975 #ifdef CONSTANT_ALIGNMENT
7976 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7977 align = CONSTANT_ALIGNMENT (exp, align);
7978 #endif
7979 return MIN (align, max_align);
7981 default:
7982 return align;
7987 /* Return the tree node and offset if a given argument corresponds to
7988 a string constant. */
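/* For example, given the argument `"hello" + 2' this returns the
   STRING_CST for "hello" and sets *PTR_OFFSET to 2; for anything that
   is not an (offset from a) string literal it returns 0.  */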
7990 static tree
7991 string_constant (arg, ptr_offset)
7992 tree arg;
7993 tree *ptr_offset;
7995 STRIP_NOPS (arg);
7997 if (TREE_CODE (arg) == ADDR_EXPR
7998 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8000 *ptr_offset = integer_zero_node;
8001 return TREE_OPERAND (arg, 0);
8003 else if (TREE_CODE (arg) == PLUS_EXPR)
8005 tree arg0 = TREE_OPERAND (arg, 0);
8006 tree arg1 = TREE_OPERAND (arg, 1);
8008 STRIP_NOPS (arg0);
8009 STRIP_NOPS (arg1);
8011 if (TREE_CODE (arg0) == ADDR_EXPR
8012 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8014 *ptr_offset = arg1;
8015 return TREE_OPERAND (arg0, 0);
8017 else if (TREE_CODE (arg1) == ADDR_EXPR
8018 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8020 *ptr_offset = arg0;
8021 return TREE_OPERAND (arg1, 0);
8025 return 0;
8028 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
8029 way, because it could contain a zero byte in the middle.
8030 TREE_STRING_LENGTH is the size of the character array, not the string.
8032 Unfortunately, string_constant can't access the values of const char
8033 arrays with initializers, so neither can we here. */
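/* For example, c_strlen of `"hello" + 2' folds to 3 below, while
   c_strlen of `"foo\0bar" + n' with a non-constant N returns 0
   (unknown) because of the embedded zero byte.  */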
8035 static tree
8036 c_strlen (src)
8037 tree src;
8039 tree offset_node;
8040 int offset, max;
8041 char *ptr;
8043 src = string_constant (src, &offset_node);
8044 if (src == 0)
8045 return 0;
8046 max = TREE_STRING_LENGTH (src);
8047 ptr = TREE_STRING_POINTER (src);
8048 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
8050 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
8051 compute the offset to the following null if we don't know where to
8052 start searching for it. */
8053 int i;
8054 for (i = 0; i < max; i++)
8055 if (ptr[i] == 0)
8056 return 0;
8057 /* We don't know the starting offset, but we do know that the string
8058 has no internal zero bytes. We can assume that the offset falls
8059 within the bounds of the string; otherwise, the programmer deserves
8060 what he gets. Subtract the offset from the length of the string,
8061 and return that. */
8062 /* This would perhaps not be valid if we were dealing with named
8063 arrays in addition to literal string constants. */
8064 return size_binop (MINUS_EXPR, size_int (max), offset_node);
8067 /* We have a known offset into the string. Start searching there for
8068 a null character. */
8069 if (offset_node == 0)
8070 offset = 0;
8071 else
8073 /* Did we get a long long offset? If so, punt. */
8074 if (TREE_INT_CST_HIGH (offset_node) != 0)
8075 return 0;
8076 offset = TREE_INT_CST_LOW (offset_node);
8078 /* If the offset is known to be out of bounds, warn, and call strlen at
8079 runtime. */
8080 if (offset < 0 || offset > max)
8082 warning ("offset outside bounds of constant string");
8083 return 0;
8085 /* Use strlen to search for the first zero byte. Since any strings
8086 constructed with build_string will have nulls appended, we win even
8087 if we get handed something like (char[4])"abcd".
8089 Since OFFSET is our starting index into the string, no further
8090 calculation is needed. */
8091 return size_int (strlen (ptr + offset));
8094 rtx
8095 expand_builtin_return_addr (fndecl_code, count, tem)
8096 enum built_in_function fndecl_code;
8097 int count;
8098 rtx tem;
8100 int i;
8102 /* Some machines need special handling before we can access
8103 arbitrary frames. For example, on the sparc, we must first flush
8104 all register windows to the stack. */
8105 #ifdef SETUP_FRAME_ADDRESSES
8106 if (count > 0)
8107 SETUP_FRAME_ADDRESSES ();
8108 #endif
8110 /* On the sparc, the return address is not in the frame, it is in a
8111 register. There is no way to access it off of the current frame
8112 pointer, but it can be accessed off the previous frame pointer by
8113 reading the value from the register window save area. */
8114 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
8115 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
8116 count--;
8117 #endif
8119 /* Scan back COUNT frames to the specified frame. */
8120 for (i = 0; i < count; i++)
8122 /* Assume the dynamic chain pointer is in the word that the
8123 frame address points to, unless otherwise specified. */
8124 #ifdef DYNAMIC_CHAIN_ADDRESS
8125 tem = DYNAMIC_CHAIN_ADDRESS (tem);
8126 #endif
8127 tem = memory_address (Pmode, tem);
8128 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
8131 /* For __builtin_frame_address, return what we've got. */
8132 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
8133 return tem;
8135 /* For __builtin_return_address, get the return address from that
8136 frame. */
8137 #ifdef RETURN_ADDR_RTX
8138 tem = RETURN_ADDR_RTX (count, tem);
8139 #else
8140 tem = memory_address (Pmode,
8141 plus_constant (tem, GET_MODE_SIZE (Pmode)));
8142 tem = gen_rtx (MEM, Pmode, tem);
8143 #endif
8144 return tem;
8147 /* __builtin_setjmp is passed a pointer to an array of five words (not
8148 all will be used on all machines). It operates similarly to the C
8149 library function of the same name, but is more efficient. Much of
8150 the code below (and for longjmp) is copied from the handling of
8151 non-local gotos.
8153 NOTE: This is intended for use by GNAT and the exception handling
8154 scheme in the compiler and will only work in the method used by
8155 them. */
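/* A rough source-level sketch of the intended pairing (buffer layout
   as stored by the code below: word 0 holds the frame pointer, word 1
   the resume label, the remaining words the stack save area).
   DO_NORMAL_PATH and DO_RESUMED_PATH are hypothetical helpers; the
   second is reached via __builtin_longjmp (buf, 1):

	void *buf[5];
	if (__builtin_setjmp (buf) == 0)
	  do_normal_path ();
	else
	  do_resumed_path ();  */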
8157 rtx
8158 expand_builtin_setjmp (buf_addr, target)
8159 rtx buf_addr;
8160 rtx target;
8162 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
8163 enum machine_mode sa_mode = Pmode, value_mode;
8164 rtx stack_save;
8165 int old_inhibit_defer_pop = inhibit_defer_pop;
8166 int return_pops
8167 = RETURN_POPS_ARGS (get_identifier ("__dummy"),
8168 build_function_type (void_type_node, NULL_TREE),
8169 0);
8170 rtx next_arg_reg;
8171 CUMULATIVE_ARGS args_so_far;
8172 rtx op0;
8173 int i;
8175 value_mode = TYPE_MODE (integer_type_node);
8177 #ifdef POINTERS_EXTEND_UNSIGNED
8178 buf_addr = convert_memory_address (Pmode, buf_addr);
8179 #endif
8181 buf_addr = force_reg (Pmode, buf_addr);
8183 if (target == 0 || GET_CODE (target) != REG
8184 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8185 target = gen_reg_rtx (value_mode);
8187 emit_queue ();
8189 CONST_CALL_P (emit_note (NULL_PTR, NOTE_INSN_SETJMP)) = 1;
8190 current_function_calls_setjmp = 1;
8192 /* We store the frame pointer and the address of lab1 in the buffer
8193 and use the rest of it for the stack save area, which is
8194 machine-dependent. */
8195 emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
8196 virtual_stack_vars_rtx);
8197 emit_move_insn
8198 (validize_mem (gen_rtx (MEM, Pmode,
8199 plus_constant (buf_addr,
8200 GET_MODE_SIZE (Pmode)))),
8201 gen_rtx (LABEL_REF, Pmode, lab1));
8203 #ifdef HAVE_save_stack_nonlocal
8204 if (HAVE_save_stack_nonlocal)
8205 sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
8206 #endif
8208 stack_save = gen_rtx (MEM, sa_mode,
8209 plus_constant (buf_addr,
8210 2 * GET_MODE_SIZE (Pmode)));
8211 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8213 #ifdef HAVE_setjmp
8214 if (HAVE_setjmp)
8215 emit_insn (gen_setjmp ());
8216 #endif
8218 /* Set TARGET to zero and branch around the other case. */
8219 emit_move_insn (target, const0_rtx);
8220 emit_jump_insn (gen_jump (lab2));
8221 emit_barrier ();
8222 emit_label (lab1);
8224 /* Note that setjmp clobbers FP when we get here, so we have to make
8225 sure it's marked as used by this function. */
8226 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
8228 /* Mark the static chain as clobbered here so life information
8229 doesn't get messed up for it. */
8230 emit_insn (gen_rtx (CLOBBER, VOIDmode, static_chain_rtx));
8232 /* Now put in the code to restore the frame pointer, and argument
8233 pointer, if needed. The code below is from expand_end_bindings
8234 in stmt.c; see detailed documentation there. */
8235 #ifdef HAVE_nonlocal_goto
8236 if (! HAVE_nonlocal_goto)
8237 #endif
8238 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8240 /* Do we need to do something like:
8242 current_function_has_nonlocal_label = 1;
8244 here? It seems like we might have to, or some subset of that
8245 functionality, but I am unsure. (mrs) */
8247 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8248 if (fixed_regs[ARG_POINTER_REGNUM])
8250 #ifdef ELIMINABLE_REGS
8251 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8253 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8254 if (elim_regs[i].from == ARG_POINTER_REGNUM
8255 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8256 break;
8258 if (i == sizeof elim_regs / sizeof elim_regs [0])
8259 #endif
8261 /* Now restore our arg pointer from the address at which it
8262 was saved in our stack frame.
8263 If there hasn't been space allocated for it yet, make
8264 some now. */
8265 if (arg_pointer_save_area == 0)
8266 arg_pointer_save_area
8267 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8268 emit_move_insn (virtual_incoming_args_rtx,
8269 copy_to_reg (arg_pointer_save_area));
8272 #endif
8274 #ifdef HAVE_nonlocal_goto_receiver
8275 if (HAVE_nonlocal_goto_receiver)
8276 emit_insn (gen_nonlocal_goto_receiver ());
8277 #endif
8278 /* The static chain pointer contains the address of the dummy function.
8279 We need to call it here to handle some PIC cases of restoring a
8280 global pointer. Then return 1. */
8281 op0 = copy_to_mode_reg (Pmode, static_chain_rtx);
8283 /* We can't actually call emit_library_call here, so do everything
8284 it does, which isn't much for a libfunc with no args. */
8285 op0 = memory_address (FUNCTION_MODE, op0);
8287 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE,
8288 gen_rtx (SYMBOL_REF, Pmode, "__dummy"), 1);
8289 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1);
8291 #ifndef ACCUMULATE_OUTGOING_ARGS
8292 #ifdef HAVE_call_pop
8293 if (HAVE_call_pop)
8294 emit_call_insn (gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, op0),
8295 const0_rtx, next_arg_reg,
8296 GEN_INT (return_pops)));
8297 else
8298 #endif
8299 #endif
8301 #ifdef HAVE_call
8302 if (HAVE_call)
8303 emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, op0),
8304 const0_rtx, next_arg_reg, const0_rtx));
8305 else
8306 #endif
8307 abort ();
8309 emit_move_insn (target, const1_rtx);
8310 emit_label (lab2);
8311 return target;
8315 /* Expand an expression EXP that calls a built-in function,
8316 with result going to TARGET if that's convenient
8317 (and in mode MODE if that's convenient).
8318 SUBTARGET may be used as the target for computing one of EXP's operands.
8319 IGNORE is nonzero if the value is to be ignored. */
8321 #define CALLED_AS_BUILT_IN(NODE) \
8322 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
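/* E.g., an ordinary call to strlen qualifies for inline expansion
   below only when optimizing, whereas an explicit __builtin_strlen
   (for which CALLED_AS_BUILT_IN is true) may be expanded even at
   -O0.  */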
8324 static rtx
8325 expand_builtin (exp, target, subtarget, mode, ignore)
8326 tree exp;
8327 rtx target;
8328 rtx subtarget;
8329 enum machine_mode mode;
8330 int ignore;
8332 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8333 tree arglist = TREE_OPERAND (exp, 1);
8334 rtx op0;
8335 rtx lab1, insns;
8336 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
8337 optab builtin_optab;
8339 switch (DECL_FUNCTION_CODE (fndecl))
8341 case BUILT_IN_ABS:
8342 case BUILT_IN_LABS:
8343 case BUILT_IN_FABS:
8344 /* build_function_call changes these into ABS_EXPR. */
8345 abort ();
8347 case BUILT_IN_SIN:
8348 case BUILT_IN_COS:
8349 /* Treat these like sqrt, but only if the user asks for them. */
8350 if (! flag_fast_math)
8351 break;
8352 case BUILT_IN_FSQRT:
8353 /* If not optimizing, call the library function. */
8354 if (! optimize)
8355 break;
8357 if (arglist == 0
8358 /* Arg could be wrong type if user redeclared this fcn wrong. */
8359 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
8360 break;
8362 /* Stabilize and compute the argument. */
8363 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8364 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8366 exp = copy_node (exp);
8367 arglist = copy_node (arglist);
8368 TREE_OPERAND (exp, 1) = arglist;
8369 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8371 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8373 /* Make a suitable register to place result in. */
8374 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8376 emit_queue ();
8377 start_sequence ();
8379 switch (DECL_FUNCTION_CODE (fndecl))
8381 case BUILT_IN_SIN:
8382 builtin_optab = sin_optab; break;
8383 case BUILT_IN_COS:
8384 builtin_optab = cos_optab; break;
8385 case BUILT_IN_FSQRT:
8386 builtin_optab = sqrt_optab; break;
8387 default:
8388 abort ();
8391 /* Compute into TARGET.
8392 Set TARGET to wherever the result comes back. */
8393 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8394 builtin_optab, op0, target, 0);
8396 /* If we were unable to expand via the builtin, stop the
8397 sequence (without outputting the insns) and break, causing
8398 a call to the library function. */
8399 if (target == 0)
8401 end_sequence ();
8402 break;
8405 /* Check the results by default. But if flag_fast_math is turned on,
8406 then assume sqrt will always be called with valid arguments. */
8408 if (! flag_fast_math)
8410 /* Don't define the builtin FP instructions
8411 if your machine is not IEEE. */
8412 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8413 abort ();
8415 lab1 = gen_label_rtx ();
8417 /* Test the result; if it is NaN, set errno=EDOM because
8418 the argument was not in the domain. */
8419 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
8420 emit_jump_insn (gen_beq (lab1));
8422 #ifdef TARGET_EDOM
8424 #ifdef GEN_ERRNO_RTX
8425 rtx errno_rtx = GEN_ERRNO_RTX;
8426 #else
8427 rtx errno_rtx
8428 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
8429 #endif
8431 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8433 #else
8434 /* We can't set errno=EDOM directly; let the library call do it.
8435 Pop the arguments right away in case the call gets deleted. */
8436 NO_DEFER_POP;
8437 expand_call (exp, target, 0);
8438 OK_DEFER_POP;
8439 #endif
8441 emit_label (lab1);
8444 /* Output the entire sequence. */
8445 insns = get_insns ();
8446 end_sequence ();
8447 emit_insns (insns);
8449 return target;
8451 /* __builtin_apply_args returns block of memory allocated on
8452 the stack into which is stored the arg pointer, structure
8453 value address, static chain, and all the registers that might
8454 possibly be used in performing a function call. The code is
8455 moved to the start of the function so the incoming values are
8456 saved. */
8457 case BUILT_IN_APPLY_ARGS:
8458 /* Don't do __builtin_apply_args more than once in a function.
8459 Save the result of the first call and reuse it. */
8460 if (apply_args_value != 0)
8461 return apply_args_value;
8463 /* When this function is called, it means that registers must be
8464 saved on entry to this function. So we migrate the
8465 call to the first insn of this function. */
8466 rtx temp;
8467 rtx seq;
8469 start_sequence ();
8470 temp = expand_builtin_apply_args ();
8471 seq = get_insns ();
8472 end_sequence ();
8474 apply_args_value = temp;
8476 /* Put the sequence after the NOTE that starts the function.
8477 If this is inside a SEQUENCE, make the outer-level insn
8478 chain current, so the code is placed at the start of the
8479 function. */
8480 push_topmost_sequence ();
8481 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8482 pop_topmost_sequence ();
8483 return temp;
8486 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8487 FUNCTION with a copy of the parameters described by
8488 ARGUMENTS, and ARGSIZE. It returns a block of memory
8489 allocated on the stack into which is stored all the registers
8490 that might possibly be used for returning the result of a
8491 function. ARGUMENTS is the value returned by
8492 __builtin_apply_args. ARGSIZE is the number of bytes of
8493 arguments that must be copied. ??? How should this value be
8494 computed? We'll also need a safe worst case value for varargs
8495 functions. */
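/* A hedged usage sketch tying the three builtins together; FN is a
   hypothetical function pointer and 64 an assumed worst-case
   argument-block size, per the open question above:

	void *args = __builtin_apply_args ();
	void *result = __builtin_apply (fn, args, 64);
	__builtin_return (result);  */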
8496 case BUILT_IN_APPLY:
8497 if (arglist == 0
8498 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8499 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8500 || TREE_CHAIN (arglist) == 0
8501 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8502 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8503 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8504 return const0_rtx;
8505 else
8507 int i;
8508 tree t;
8509 rtx ops[3];
8511 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8512 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8514 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8517 /* __builtin_return (RESULT) causes the function to return the
8518 value described by RESULT. RESULT is address of the block of
8519 memory returned by __builtin_apply. */
8520 case BUILT_IN_RETURN:
8521 if (arglist
8522 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8523 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8524 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8525 NULL_RTX, VOIDmode, 0));
8526 return const0_rtx;
8528 case BUILT_IN_SAVEREGS:
8529 /* Don't do __builtin_saveregs more than once in a function.
8530 Save the result of the first call and reuse it. */
8531 if (saveregs_value != 0)
8532 return saveregs_value;
8534 /* When this function is called, it means that registers must be
8535 saved on entry to this function. So we migrate the
8536 call to the first insn of this function. */
8537 rtx temp;
8538 rtx seq;
8540 /* Now really call the function. `expand_call' does not call
8541 expand_builtin, so there is no danger of infinite recursion here. */
8542 start_sequence ();
8544 #ifdef EXPAND_BUILTIN_SAVEREGS
8545 /* Do whatever the machine needs done in this case. */
8546 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8547 #else
8548 /* The register where the function returns its value
8549 is likely to have something else in it, such as an argument.
8550 So preserve that register around the call. */
8552 if (value_mode != VOIDmode)
8554 rtx valreg = hard_libcall_value (value_mode);
8555 rtx saved_valreg = gen_reg_rtx (value_mode);
8557 emit_move_insn (saved_valreg, valreg);
8558 temp = expand_call (exp, target, ignore);
8559 emit_move_insn (valreg, saved_valreg);
8561 else
8562 /* Generate the call, putting the value in a pseudo. */
8563 temp = expand_call (exp, target, ignore);
8564 #endif
8566 seq = get_insns ();
8567 end_sequence ();
8569 saveregs_value = temp;
8571 /* Put the sequence after the NOTE that starts the function.
8572 If this is inside a SEQUENCE, make the outer-level insn
8573 chain current, so the code is placed at the start of the
8574 function. */
8575 push_topmost_sequence ();
8576 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8577 pop_topmost_sequence ();
8578 return temp;
8581 /* __builtin_args_info (N) returns word N of the arg space info
8582 for the current function. The number and meanings of words
8583 is controlled by the definition of CUMULATIVE_ARGS. */
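/* For example, `__builtin_args_info (0)' expands to word 0 of
   current_function_args_info; the argument must be an in-range
   integer constant, as checked below.  */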
8584 case BUILT_IN_ARGS_INFO:
8586 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8587 int i;
8588 int *word_ptr = (int *) &current_function_args_info;
8589 tree type, elts, result;
8591 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8592 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8593 __FILE__, __LINE__);
8595 if (arglist != 0)
8597 tree arg = TREE_VALUE (arglist);
8598 if (TREE_CODE (arg) != INTEGER_CST)
8599 error ("argument of `__builtin_args_info' must be constant");
8600 else
8602 int wordnum = TREE_INT_CST_LOW (arg);
8604 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8605 error ("argument of `__builtin_args_info' out of range");
8606 else
8607 return GEN_INT (word_ptr[wordnum]);
8610 else
8611 error ("missing argument in `__builtin_args_info'");
8613 return const0_rtx;
8615 #if 0
8616 for (i = 0; i < nwords; i++)
8617 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
8619 type = build_array_type (integer_type_node,
8620 build_index_type (build_int_2 (nwords, 0)));
8621 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8622 TREE_CONSTANT (result) = 1;
8623 TREE_STATIC (result) = 1;
8624 result = build (INDIRECT_REF, build_pointer_type (type), result);
8625 TREE_CONSTANT (result) = 1;
8626 return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
8627 #endif
8630 /* Return the address of the first anonymous stack arg. */
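/* E.g., in `void f (int n, ...)', va_start expands to a use of
   __builtin_next_arg (n), which becomes the arg pointer plus the
   offset of the first anonymous argument.  */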
8631 case BUILT_IN_NEXT_ARG:
8633 tree fntype = TREE_TYPE (current_function_decl);
8635 if ((TYPE_ARG_TYPES (fntype) == 0
8636 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8637 == void_type_node))
8638 && ! current_function_varargs)
8640 error ("`va_start' used in function with fixed args");
8641 return const0_rtx;
8644 if (arglist)
8646 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8647 tree arg = TREE_VALUE (arglist);
8649 /* Strip off all nops for the sake of the comparison. This
8650 is not quite the same as STRIP_NOPS. It does more.
8651 We must also strip off INDIRECT_REF for C++ reference
8652 parameters. */
8653 while (TREE_CODE (arg) == NOP_EXPR
8654 || TREE_CODE (arg) == CONVERT_EXPR
8655 || TREE_CODE (arg) == NON_LVALUE_EXPR
8656 || TREE_CODE (arg) == INDIRECT_REF)
8657 arg = TREE_OPERAND (arg, 0);
8658 if (arg != last_parm)
8659 warning ("second parameter of `va_start' not last named argument");
8661 else if (! current_function_varargs)
8662 /* Evidently an out of date version of <stdarg.h>; can't validate
8663 va_start's second argument, but can still work as intended. */
8664 warning ("`__builtin_next_arg' called without an argument");
8667 return expand_binop (Pmode, add_optab,
8668 current_function_internal_arg_pointer,
8669 current_function_arg_offset_rtx,
8670 NULL_RTX, 0, OPTAB_LIB_WIDEN);
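/* __builtin_classify_type maps the type of its operand to a small
   integer; e.g., __builtin_classify_type (1.0) yields
   real_type_class and __builtin_classify_type ((char *) 0) yields
   pointer_type_class.  */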
8672 case BUILT_IN_CLASSIFY_TYPE:
8673 if (arglist != 0)
8675 tree type = TREE_TYPE (TREE_VALUE (arglist));
8676 enum tree_code code = TREE_CODE (type);
8677 if (code == VOID_TYPE)
8678 return GEN_INT (void_type_class);
8679 if (code == INTEGER_TYPE)
8680 return GEN_INT (integer_type_class);
8681 if (code == CHAR_TYPE)
8682 return GEN_INT (char_type_class);
8683 if (code == ENUMERAL_TYPE)
8684 return GEN_INT (enumeral_type_class);
8685 if (code == BOOLEAN_TYPE)
8686 return GEN_INT (boolean_type_class);
8687 if (code == POINTER_TYPE)
8688 return GEN_INT (pointer_type_class);
8689 if (code == REFERENCE_TYPE)
8690 return GEN_INT (reference_type_class);
8691 if (code == OFFSET_TYPE)
8692 return GEN_INT (offset_type_class);
8693 if (code == REAL_TYPE)
8694 return GEN_INT (real_type_class);
8695 if (code == COMPLEX_TYPE)
8696 return GEN_INT (complex_type_class);
8697 if (code == FUNCTION_TYPE)
8698 return GEN_INT (function_type_class);
8699 if (code == METHOD_TYPE)
8700 return GEN_INT (method_type_class);
8701 if (code == RECORD_TYPE)
8702 return GEN_INT (record_type_class);
8703 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8704 return GEN_INT (union_type_class);
8705 if (code == ARRAY_TYPE)
8707 if (TYPE_STRING_FLAG (type))
8708 return GEN_INT (string_type_class);
8709 else
8710 return GEN_INT (array_type_class);
8712 if (code == SET_TYPE)
8713 return GEN_INT (set_type_class);
8714 if (code == FILE_TYPE)
8715 return GEN_INT (file_type_class);
8716 if (code == LANG_TYPE)
8717 return GEN_INT (lang_type_class);
8719 return GEN_INT (no_type_class);
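/* __builtin_constant_p folds to 1 only for constants known here;
   e.g., __builtin_constant_p (3) gives const1_rtx, while
   __builtin_constant_p (x) for a variable X gives const0_rtx.  */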
8721 case BUILT_IN_CONSTANT_P:
8722 if (arglist == 0)
8723 return const0_rtx;
8724 else
8726 tree arg = TREE_VALUE (arglist);
8728 STRIP_NOPS (arg);
8729 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
8730 || (TREE_CODE (arg) == ADDR_EXPR
8731 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8732 ? const1_rtx : const0_rtx);
8735 case BUILT_IN_FRAME_ADDRESS:
8736 /* The argument must be a nonnegative integer constant.
8737 It counts the number of frames to scan up the stack.
8738 The value is the address of that frame. */
8739 case BUILT_IN_RETURN_ADDRESS:
8740 /* The argument must be a nonnegative integer constant.
8741 It counts the number of frames to scan up the stack.
8742 The value is the return address saved in that frame. */
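/* E.g., __builtin_return_address (0) yields the current function's
   own return address, and __builtin_frame_address (1) the caller's
   frame address; both go through expand_builtin_return_addr above.  */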
8743 if (arglist == 0)
8744 /* Warning about missing arg was already issued. */
8745 return const0_rtx;
8746 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
8747 || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8749 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8750 error ("invalid arg to `__builtin_frame_address'");
8751 else
8752 error ("invalid arg to `__builtin_return_address'");
8753 return const0_rtx;
8755 else
8757 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8758 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8759 hard_frame_pointer_rtx);
8761 /* Some ports cannot access arbitrary stack frames. */
8762 if (tem == NULL)
8764 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8765 warning ("unsupported arg to `__builtin_frame_address'");
8766 else
8767 warning ("unsupported arg to `__builtin_return_address'");
8768 return const0_rtx;
8771 /* For __builtin_frame_address, return what we've got. */
8772 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8773 return tem;
8775 if (GET_CODE (tem) != REG)
8776 tem = copy_to_reg (tem);
8777 return tem;
8780 /* Returns the address of the area where the structure is returned.
8781 0 otherwise. */
8782 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
8783 if (arglist != 0
8784 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
8785 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
8786 return const0_rtx;
8787 else
8788 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
8790 case BUILT_IN_ALLOCA:
8791 if (arglist == 0
8792 /* Arg could be non-integer if user redeclared this fcn wrong. */
8793 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8794 break;
8796 /* Compute the argument. */
8797 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8799 /* Allocate the desired space. */
8800 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
8802 case BUILT_IN_FFS:
8803 /* If not optimizing, call the library function. */
8804 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8805 break;
8807 if (arglist == 0
8808 /* Arg could be non-integer if user redeclared this fcn wrong. */
8809 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8810 break;
8812 /* Compute the argument. */
8813 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8814 /* Compute ffs, into TARGET if possible.
8815 Set TARGET to wherever the result comes back. */
8816 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8817 ffs_optab, op0, target, 1);
8818 if (target == 0)
8819 abort ();
8820 return target;
8822 case BUILT_IN_STRLEN:
8823 /* If not optimizing, call the library function. */
8824 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8825 break;
8827 if (arglist == 0
8828 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8829 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8830 break;
8831 else
8833 tree src = TREE_VALUE (arglist);
8834 tree len = c_strlen (src);
8836 int align
8837 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8839 rtx result, src_rtx, char_rtx;
8840 enum machine_mode insn_mode = value_mode, char_mode;
8841 enum insn_code icode;
8843 /* If the length is known, just return it. */
8844 if (len != 0)
8845 return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
8847 /* If SRC is not a pointer type, don't do this operation inline. */
8848 if (align == 0)
8849 break;
8851 /* Call a function if we can't compute strlen in the right mode. */
8853 while (insn_mode != VOIDmode)
8855 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8856 if (icode != CODE_FOR_nothing)
8857 break;
8859 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8861 if (insn_mode == VOIDmode)
8862 break;
8864 /* Make a place to write the result of the instruction. */
8865 result = target;
8866 if (! (result != 0
8867 && GET_CODE (result) == REG
8868 && GET_MODE (result) == insn_mode
8869 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8870 result = gen_reg_rtx (insn_mode);
8872 /* Make sure the operands are acceptable to the predicates. */
8874 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8875 result = gen_reg_rtx (insn_mode);
8876 src_rtx = memory_address (BLKmode,
8877 expand_expr (src, NULL_RTX, ptr_mode,
8878 EXPAND_NORMAL));
8880 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8881 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
8883 /* Check the string is readable and has an end. */
8884 if (flag_check_memory_usage)
8885 emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
8886 src_rtx, ptr_mode,
8887 GEN_INT (MEMORY_USE_RO),
8888 TYPE_MODE (integer_type_node));
8890 char_rtx = const0_rtx;
8891 char_mode = insn_operand_mode[(int)icode][2];
8892 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8893 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
8895 emit_insn (GEN_FCN (icode) (result,
8896 gen_rtx (MEM, BLKmode, src_rtx),
8897 char_rtx, GEN_INT (align)));
8899 /* Return the value in the proper mode for this function. */
8900 if (GET_MODE (result) == value_mode)
8901 return result;
8902 else if (target != 0)
8904 convert_move (target, result, 0);
8905 return target;
8907 else
8908 return convert_to_mode (value_mode, result, 0);
8911 case BUILT_IN_STRCPY:
8912 /* If not optimizing, call the library function. */
8913 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8914 break;
8916 if (arglist == 0
8917 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8918 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8919 || TREE_CHAIN (arglist) == 0
8920 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8921 break;
8922 else
8924 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
8926 if (len == 0)
8927 break;
8929 len = size_binop (PLUS_EXPR, len, integer_one_node);
8931 chainon (arglist, build_tree_list (NULL_TREE, len));
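/* E.g., strcpy (d, "hi") acquires the constant length 3 (including
   the terminator) here and then drops into the memcpy case below as
   if it were memcpy (d, "hi", 3).  */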
8934 /* Drops in. */
8935 case BUILT_IN_MEMCPY:
8936 /* If not optimizing, call the library function. */
8937 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8938 break;
8940 if (arglist == 0
8941 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8942 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8943 || TREE_CHAIN (arglist) == 0
8944 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8945 != POINTER_TYPE)
8946 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8947 || (TREE_CODE (TREE_TYPE (TREE_VALUE
8948 (TREE_CHAIN (TREE_CHAIN (arglist)))))
8949 != INTEGER_TYPE))
8950 break;
8951 else
8953 tree dest = TREE_VALUE (arglist);
8954 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8955 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8956 tree type;
8958 int src_align
8959 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8960 int dest_align
8961 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8962 rtx dest_rtx, dest_mem, src_mem, src_rtx, dest_addr, len_rtx;
8964 /* If either SRC or DEST is not a pointer type, don't do
8965 this operation in-line. */
8966 if (src_align == 0 || dest_align == 0)
8968 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8969 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8970 break;
8973 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8974 dest_mem = gen_rtx (MEM, BLKmode,
8975 memory_address (BLKmode, dest_rtx));
8976 /* There could be a void* cast on top of the object. */
8977 while (TREE_CODE (dest) == NOP_EXPR)
8978 dest = TREE_OPERAND (dest, 0);
8979 type = TREE_TYPE (TREE_TYPE (dest));
8980 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8981 src_rtx = expand_expr (src, NULL_RTX, ptr_mode, EXPAND_SUM);
8982 src_mem = gen_rtx (MEM, BLKmode,
8983 memory_address (BLKmode, src_rtx));
8984 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
8986 /* Just copy the rights of SRC to the rights of DEST. */
8987 if (flag_check_memory_usage)
8988 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
8989 src_rtx, ptr_mode,
8990 dest_rtx, ptr_mode,
8991 len_rtx, TYPE_MODE (sizetype));
8993 /* There could be a void* cast on top of the object. */
8994 while (TREE_CODE (src) == NOP_EXPR)
8995 src = TREE_OPERAND (src, 0);
8996 type = TREE_TYPE (TREE_TYPE (src));
8997 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
8999 /* Copy word part most expediently. */
9000 dest_addr
9001 = emit_block_move (dest_mem, src_mem, len_rtx,
9002 MIN (src_align, dest_align));
9004 if (dest_addr == 0)
9005 dest_addr = force_operand (dest_rtx, NULL_RTX);
9007 return dest_addr;
9010 case BUILT_IN_MEMSET:
9011 /* If not optimizing, call the library function. */
9012 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9013 break;
9015 if (arglist == 0
9016 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9017 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9018 || TREE_CHAIN (arglist) == 0
9019 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9020 != INTEGER_TYPE)
9021 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9022 || (INTEGER_CST
9023 != (TREE_CODE (TREE_TYPE
9024 (TREE_VALUE
9025 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
9026 break;
9027 else
9029 tree dest = TREE_VALUE (arglist);
9030 tree val = TREE_VALUE (TREE_CHAIN (arglist));
9031 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9032 tree type;
9034 int dest_align
9035 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9036 rtx dest_rtx, dest_mem, dest_addr, len_rtx;
9038 /* If DEST is not a pointer type, don't do this
9039 operation in-line. */
9040 if (dest_align == 0)
9041 break;
9043 /* If VAL is not 0, don't do this operation in-line. */
9044 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
9045 break;
9047 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
9048 dest_mem = gen_rtx (MEM, BLKmode,
9049 memory_address (BLKmode, dest_rtx));
9050 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9052 /* Just check DST is writable and mark it as readable. */
9053 if (flag_check_memory_usage)
9054 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
9055 dest_rtx, ptr_mode,
9056 len_rtx, TYPE_MODE (sizetype),
9057 GEN_INT (MEMORY_USE_WO),
9058 TYPE_MODE (integer_type_node));
9061 /* There could be a void* cast on top of the object. */
9062 while (TREE_CODE (dest) == NOP_EXPR)
9063 dest = TREE_OPERAND (dest, 0);
9064 type = TREE_TYPE (TREE_TYPE (dest));
9065 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
9067 dest_addr = clear_storage (dest_mem, len_rtx, dest_align);
9069 if (dest_addr == 0)
9070 dest_addr = force_operand (dest_rtx, NULL_RTX);
9072 return dest_addr;
9075 /* These comparison functions need an instruction that returns an actual
9076 index. An ordinary compare that just sets the condition codes
9077 is not enough. */
9078 #ifdef HAVE_cmpstrsi
9079 case BUILT_IN_STRCMP:
9080 /* If not optimizing, call the library function. */
9081 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9082 break;
9084 /* If we need to check memory accesses, call the library function. */
9085 if (flag_check_memory_usage)
9086 break;
9088 if (arglist == 0
9089 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9090 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9091 || TREE_CHAIN (arglist) == 0
9092 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9093 break;
9094 else if (!HAVE_cmpstrsi)
9095 break;
9097 tree arg1 = TREE_VALUE (arglist);
9098 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9099 tree offset;
9100 tree len, len2;
9102 len = c_strlen (arg1);
9103 if (len)
9104 len = size_binop (PLUS_EXPR, integer_one_node, len);
9105 len2 = c_strlen (arg2);
9106 if (len2)
9107 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
9109 /* If we don't have a constant length for the first, use the length
9110 of the second, if we know it. We don't require a constant for
9111 this case; some cost analysis could be done if both are available
9112 but neither is constant. For now, assume they're equally cheap.
9114 If both strings have constant lengths, use the smaller. This
9115 could arise if optimization results in strcmp being called with
9116 two fixed strings, or if the code was machine-generated. We should
9117 add some code to the `memcmp' handler below to deal with such
9118 situations, someday. */
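/* E.g., for strcmp (s, "ab") only the second length (3, counting the
   terminator) is constant, so it is used; were both constant, the
   smaller would win.  */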
9119 if (!len || TREE_CODE (len) != INTEGER_CST)
9121 if (len2)
9122 len = len2;
9123 else if (len == 0)
9124 break;
9126 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
9128 if (tree_int_cst_lt (len2, len))
9129 len = len2;
9132 chainon (arglist, build_tree_list (NULL_TREE, len));
9135 /* Drops in. */
9136 case BUILT_IN_MEMCMP:
9137 /* If not optimizing, call the library function. */
9138 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9139 break;
9141 /* If we need to check memory accesses, call the library function. */
9142 if (flag_check_memory_usage)
9143 break;
9145 if (arglist == 0
9146 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9147 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9148 || TREE_CHAIN (arglist) == 0
9149 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
9150 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9151 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
9152 break;
9153 else if (!HAVE_cmpstrsi)
9154 break;
9156 tree arg1 = TREE_VALUE (arglist);
9157 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9158 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9159 rtx result;
9161 int arg1_align
9162 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9163 int arg2_align
9164 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9165 enum machine_mode insn_mode
9166 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
9168 /* If we don't have POINTER_TYPE, call the function. */
9169 if (arg1_align == 0 || arg2_align == 0)
9171 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
9172 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9173 break;
9176 /* Make a place to write the result of the instruction. */
9177 result = target;
9178 if (! (result != 0
9179 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
9180 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9181 result = gen_reg_rtx (insn_mode);
9183 emit_insn (gen_cmpstrsi (result,
9184 gen_rtx (MEM, BLKmode,
9185 expand_expr (arg1, NULL_RTX,
9186 ptr_mode,
9187 EXPAND_NORMAL)),
9188 gen_rtx (MEM, BLKmode,
9189 expand_expr (arg2, NULL_RTX,
9190 ptr_mode,
9191 EXPAND_NORMAL)),
9192 expand_expr (len, NULL_RTX, VOIDmode, 0),
9193 GEN_INT (MIN (arg1_align, arg2_align))));
9195 /* Return the value in the proper mode for this function. */
9196 mode = TYPE_MODE (TREE_TYPE (exp));
9197 if (GET_MODE (result) == mode)
9198 return result;
9199 else if (target != 0)
9201 convert_move (target, result, 0);
9202 return target;
9204 else
9205 return convert_to_mode (mode, result, 0);
9207 #else
9208 case BUILT_IN_STRCMP:
9209 case BUILT_IN_MEMCMP:
9210 break;
9211 #endif
9213 case BUILT_IN_SETJMP:
9214 if (arglist == 0
9215 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9216 break;
9219 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9220 VOIDmode, 0);
9221 return expand_builtin_setjmp (buf_addr, target);
9224 /* __builtin_longjmp is passed a pointer to an array of five words
9225 and a value, which is a dummy. It's similar to the C library longjmp
9226 function but works with __builtin_setjmp above. */
9227 case BUILT_IN_LONGJMP:
9228 if (arglist == 0 || TREE_CHAIN (arglist) == 0
9229 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9230 break;
9233 tree dummy_id = get_identifier ("__dummy");
9234 tree dummy_type = build_function_type (void_type_node, NULL_TREE);
9235 tree dummy_decl = build_decl (FUNCTION_DECL, dummy_id, dummy_type);
9236 #ifdef POINTERS_EXTEND_UNSIGNED
9237 rtx buf_addr
9238 = force_reg (Pmode,
9239 convert_memory_address
9240 (Pmode,
9241 expand_expr (TREE_VALUE (arglist),
9242 NULL_RTX, VOIDmode, 0)));
9243 #else
9244 rtx buf_addr
9245 = force_reg (Pmode, expand_expr (TREE_VALUE (arglist),
9246 NULL_RTX,
9247 VOIDmode, 0));
9248 #endif
9249 rtx fp = gen_rtx (MEM, Pmode, buf_addr);
9250 rtx lab = gen_rtx (MEM, Pmode,
9251 plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
9252 enum machine_mode sa_mode
9253 #ifdef HAVE_save_stack_nonlocal
9254 = (HAVE_save_stack_nonlocal
9255 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
9256 : Pmode);
9257 #else
9258 = Pmode;
9259 #endif
9260 rtx stack = gen_rtx (MEM, sa_mode,
9261 plus_constant (buf_addr,
9262 2 * GET_MODE_SIZE (Pmode)));
9264 DECL_EXTERNAL (dummy_decl) = 1;
9265 TREE_PUBLIC (dummy_decl) = 1;
9266 make_decl_rtl (dummy_decl, NULL_PTR, 1);
9268 /* Expand the second expression just for side-effects. */
9269 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
9270 const0_rtx, VOIDmode, 0);
9272 assemble_external (dummy_decl);
9274 /* Pick up FP, label, and SP from the block and jump. This code is
9275 from expand_goto in stmt.c; see there for detailed comments. */
9276 #if HAVE_nonlocal_goto
9277 if (HAVE_nonlocal_goto)
9278 emit_insn (gen_nonlocal_goto (fp, lab, stack,
9279 XEXP (DECL_RTL (dummy_decl), 0)));
9280 else
9281 #endif
9283 lab = copy_to_reg (lab);
9284 emit_move_insn (hard_frame_pointer_rtx, fp);
9285 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
9287 /* Put the address of the dummy function in the static chain
9288 register. */
9289 emit_move_insn (static_chain_rtx, XEXP (DECL_RTL (dummy_decl), 0));
9290 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
9291 emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
9292 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
9293 emit_indirect_jump (lab);
9296 return const0_rtx;
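/* Editorial usage sketch, not part of the original source; BUF must
   point to an array of five words, as the comment above says:

       void *buf[5];

       if (__builtin_setjmp (buf))
         return;
       ...
       __builtin_longjmp (buf, 1);

   The second argument of __builtin_longjmp is a dummy: it is
   expanded above only for its side effects.  */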
9299 /* Various hooks for the DWARF 2 __throw routine. */
9300 case BUILT_IN_UNWIND_INIT:
9301 expand_builtin_unwind_init ();
9302 return const0_rtx;
9303 case BUILT_IN_FP:
9304 return frame_pointer_rtx;
9305 case BUILT_IN_SP:
9306 return stack_pointer_rtx;
9307 #ifdef DWARF2_UNWIND_INFO
9308 case BUILT_IN_DWARF_FP_REGNUM:
9309 return expand_builtin_dwarf_fp_regnum ();
9310 case BUILT_IN_DWARF_REG_SIZE:
9311 return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
9312 #endif
9313 case BUILT_IN_FROB_RETURN_ADDR:
9314 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
9315 case BUILT_IN_EXTRACT_RETURN_ADDR:
9316 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
9317 case BUILT_IN_SET_RETURN_ADDR_REG:
9318 expand_builtin_set_return_addr_reg (TREE_VALUE (arglist));
9319 return const0_rtx;
9320 case BUILT_IN_EH_STUB:
9321 return expand_builtin_eh_stub ();
9322 case BUILT_IN_SET_EH_REGS:
9323 expand_builtin_set_eh_regs (TREE_VALUE (arglist),
9324 TREE_VALUE (TREE_CHAIN (arglist)));
9325 return const0_rtx;
9327 default: /* just do library call, if unknown builtin */
9328 error ("built-in function `%s' not currently supported",
9329 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9332 /* The switch statement above can drop through to cause the function
9333 to be called normally. */
9335 return expand_call (exp, target, ignore);
9338 /* Built-in functions to perform an untyped call and return. */
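/* Editorial sketch of how the three builtins fit together at the
   source level (not part of the original file; `forward_to' and SIZE
   are hypothetical names):

       void
       forward_to (void (*target) ())
       {
         void *args = __builtin_apply_args ();
         void *result = __builtin_apply (target, args, SIZE);
         __builtin_return (result);
       }

   expand_builtin_apply_args, expand_builtin_apply and
   expand_builtin_return below implement these.  */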
9340 /* For each register that may be used for calling a function, this
9341 gives a mode used to copy the register's value. VOIDmode indicates
9342 the register is not used for calling a function. If the machine
9343 has register windows, this gives only the outbound registers.
9344 INCOMING_REGNO gives the corresponding inbound register. */
9345 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
9347 /* For each register that may be used for returning values, this gives
9348 a mode used to copy the register's value. VOIDmode indicates the
9349 register is not used for returning values. If the machine has
9350 register windows, this gives only the outbound registers.
9351 INCOMING_REGNO gives the corresponding inbound register. */
9352 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
9354 /* For each register that may be used for calling a function, this
9355 gives the offset of that register into the block returned by
9356 __builtin_apply_args. 0 indicates that the register is not
9357 used for calling a function. */
9358 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9360 /* Return the offset of register REGNO into the block returned by
9361 __builtin_apply_args. This is not declared static, since it is
9362 needed in objc-act.c. */
9364 int
9365 apply_args_register_offset (regno)
9366 int regno;
9368 apply_args_size ();
9370 /* Arguments are always put in outgoing registers (in the argument
9371 block) when that makes sense. */
9372 #ifdef OUTGOING_REGNO
9373 regno = OUTGOING_REGNO(regno);
9374 #endif
9375 return apply_args_reg_offset[regno];
9378 /* Return the size required for the block returned by __builtin_apply_args,
9379 and initialize apply_args_mode. */
9381 static int
9382 apply_args_size ()
9384 static int size = -1;
9385 int align, regno;
9386 enum machine_mode mode;
9388 /* The values computed by this function never change. */
9389 if (size < 0)
9391 /* The first value is the incoming arg-pointer. */
9392 size = GET_MODE_SIZE (Pmode);
9394 /* The second value is the structure value address unless this is
9395 passed as an "invisible" first argument. */
9396 if (struct_value_rtx)
9397 size += GET_MODE_SIZE (Pmode);
9399 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9400 if (FUNCTION_ARG_REGNO_P (regno))
9402 /* Search for the proper mode for copying this register's
9403 value. I'm not sure this is right, but it works so far. */
9404 enum machine_mode best_mode = VOIDmode;
9406 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9407 mode != VOIDmode;
9408 mode = GET_MODE_WIDER_MODE (mode))
9409 if (HARD_REGNO_MODE_OK (regno, mode)
9410 && HARD_REGNO_NREGS (regno, mode) == 1)
9411 best_mode = mode;
9413 if (best_mode == VOIDmode)
9414 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9415 mode != VOIDmode;
9416 mode = GET_MODE_WIDER_MODE (mode))
9417 if (HARD_REGNO_MODE_OK (regno, mode)
9418 && (mov_optab->handlers[(int) mode].insn_code
9419 != CODE_FOR_nothing))
9420 best_mode = mode;
9422 mode = best_mode;
9423 if (mode == VOIDmode)
9424 abort ();
9426 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9427 if (size % align != 0)
9428 size = CEIL (size, align) * align;
9429 apply_args_reg_offset[regno] = size;
9430 size += GET_MODE_SIZE (mode);
9431 apply_args_mode[regno] = mode;
9433 else
9435 apply_args_mode[regno] = VOIDmode;
9436 apply_args_reg_offset[regno] = 0;
9439 return size;
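/* Editorial note: the rounding above is ordinary alignment
   arithmetic.  If SIZE is 5 when a register whose mode needs 4-byte
   alignment is reached, CEIL (5, 4) * 4 == 8, so that register is
   recorded at offset 8 and SIZE then advances by the mode's size.  */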
9442 /* Return the size required for the block returned by __builtin_apply,
9443 and initialize apply_result_mode. */
9445 static int
9446 apply_result_size ()
9448 static int size = -1;
9449 int align, regno;
9450 enum machine_mode mode;
9452 /* The values computed by this function never change. */
9453 if (size < 0)
9455 size = 0;
9457 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9458 if (FUNCTION_VALUE_REGNO_P (regno))
9460 /* Search for the proper mode for copying this register's
9461 value. I'm not sure this is right, but it works so far. */
9462 enum machine_mode best_mode = VOIDmode;
9464 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9465 mode != TImode;
9466 mode = GET_MODE_WIDER_MODE (mode))
9467 if (HARD_REGNO_MODE_OK (regno, mode))
9468 best_mode = mode;
9470 if (best_mode == VOIDmode)
9471 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9472 mode != VOIDmode;
9473 mode = GET_MODE_WIDER_MODE (mode))
9474 if (HARD_REGNO_MODE_OK (regno, mode)
9475 && (mov_optab->handlers[(int) mode].insn_code
9476 != CODE_FOR_nothing))
9477 best_mode = mode;
9479 mode = best_mode;
9480 if (mode == VOIDmode)
9481 abort ();
9483 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9484 if (size % align != 0)
9485 size = CEIL (size, align) * align;
9486 size += GET_MODE_SIZE (mode);
9487 apply_result_mode[regno] = mode;
9489 else
9490 apply_result_mode[regno] = VOIDmode;
9492 /* Allow targets that use untyped_call and untyped_return to override
9493 the size so that machine-specific information can be stored here. */
9494 #ifdef APPLY_RESULT_SIZE
9495 size = APPLY_RESULT_SIZE;
9496 #endif
9498 return size;
9501 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9502 /* Create a vector describing the result block RESULT. If SAVEP is true,
9503 the result block is used to save the values; otherwise it is used to
9504 restore the values. */
9506 static rtx
9507 result_vector (savep, result)
9508 int savep;
9509 rtx result;
9511 int regno, size, align, nelts;
9512 enum machine_mode mode;
9513 rtx reg, mem;
9514 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9516 size = nelts = 0;
9517 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9518 if ((mode = apply_result_mode[regno]) != VOIDmode)
9520 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9521 if (size % align != 0)
9522 size = CEIL (size, align) * align;
9523 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
9524 mem = change_address (result, mode,
9525 plus_constant (XEXP (result, 0), size));
9526 savevec[nelts++] = (savep
9527 ? gen_rtx (SET, VOIDmode, mem, reg)
9528 : gen_rtx (SET, VOIDmode, reg, mem));
9529 size += GET_MODE_SIZE (mode);
9531 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
9533 #endif /* HAVE_untyped_call or HAVE_untyped_return */
9535 /* Save the state required to perform an untyped call with the same
9536 arguments as were passed to the current function. */
9538 static rtx
9539 expand_builtin_apply_args ()
9541 rtx registers;
9542 int size, align, regno;
9543 enum machine_mode mode;
9545 /* Create a block where the arg-pointer, structure value address,
9546 and argument registers can be saved. */
9547 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9549 /* Walk past the arg-pointer and structure value address. */
9550 size = GET_MODE_SIZE (Pmode);
9551 if (struct_value_rtx)
9552 size += GET_MODE_SIZE (Pmode);
9554 /* Save each register used in calling a function to the block. */
9555 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9556 if ((mode = apply_args_mode[regno]) != VOIDmode)
9558 rtx tem;
9560 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9561 if (size % align != 0)
9562 size = CEIL (size, align) * align;
9564 tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9566 #ifdef STACK_REGS
9567 /* For reg-stack.c's stack register housekeeping.
9568 Compare with a similar piece of code in function.c. */
9570 emit_insn (gen_rtx (USE, mode, tem));
9571 #endif
9573 emit_move_insn (change_address (registers, mode,
9574 plus_constant (XEXP (registers, 0),
9575 size)),
9576 tem);
9577 size += GET_MODE_SIZE (mode);
9580 /* Save the arg pointer to the block. */
9581 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9582 copy_to_reg (virtual_incoming_args_rtx));
9583 size = GET_MODE_SIZE (Pmode);
9585 /* Save the structure value address unless this is passed as an
9586 "invisible" first argument. */
9587 if (struct_value_incoming_rtx)
9589 emit_move_insn (change_address (registers, Pmode,
9590 plus_constant (XEXP (registers, 0),
9591 size)),
9592 copy_to_reg (struct_value_incoming_rtx));
9593 size += GET_MODE_SIZE (Pmode);
9596 /* Return the address of the block. */
9597 return copy_addr_to_reg (XEXP (registers, 0));
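/* Editorial summary of the block built above, with offsets as
   apply_args_size computes them (the two must agree):
       offset 0:  the incoming arg pointer;
       offset GET_MODE_SIZE (Pmode):  the structure value address,
         if one is passed;
       thereafter:  each argument register, aligned to its mode.  */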
9600 /* Perform an untyped call and save the state required to perform an
9601 untyped return of whatever value was returned by the given function. */
9603 static rtx
9604 expand_builtin_apply (function, arguments, argsize)
9605 rtx function, arguments, argsize;
9607 int size, align, regno;
9608 enum machine_mode mode;
9609 rtx incoming_args, result, reg, dest, call_insn;
9610 rtx old_stack_level = 0;
9611 rtx call_fusage = 0;
9613 /* Create a block where the return registers can be saved. */
9614 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9616 /* ??? The argsize value should be adjusted here. */
9618 /* Fetch the arg pointer from the ARGUMENTS block. */
9619 incoming_args = gen_reg_rtx (Pmode);
9620 emit_move_insn (incoming_args,
9621 gen_rtx (MEM, Pmode, arguments));
9622 #ifndef STACK_GROWS_DOWNWARD
9623 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9624 incoming_args, 0, OPTAB_LIB_WIDEN);
9625 #endif
9627 /* Perform postincrements before actually calling the function. */
9628 emit_queue ();
9630 /* Push a new argument block and copy the arguments. */
9631 do_pending_stack_adjust ();
9633 /* Save the stack using the nonlocal mechanism if it is available. */
9634 #ifdef HAVE_save_stack_nonlocal
9635 if (HAVE_save_stack_nonlocal)
9636 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
9637 else
9638 #endif
9639 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9641 /* Push a block of memory onto the stack to store the memory arguments.
9642 Save the address in a register, and copy the memory arguments. ??? I
9643 haven't figured out how the calling convention macros affect this,
9644 but it's likely that the source and/or destination addresses in
9645 the block copy will need updating in machine-specific ways. */
9646 dest = allocate_dynamic_stack_space (argsize, 0, 0);
9647 emit_block_move (gen_rtx (MEM, BLKmode, dest),
9648 gen_rtx (MEM, BLKmode, incoming_args),
9649 argsize,
9650 PARM_BOUNDARY / BITS_PER_UNIT);
9652 /* Refer to the argument block. */
9653 apply_args_size ();
9654 arguments = gen_rtx (MEM, BLKmode, arguments);
9656 /* Walk past the arg-pointer and structure value address. */
9657 size = GET_MODE_SIZE (Pmode);
9658 if (struct_value_rtx)
9659 size += GET_MODE_SIZE (Pmode);
9661 /* Restore each of the registers previously saved. Make USE insns
9662 for each of these registers for use in making the call. */
9663 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9664 if ((mode = apply_args_mode[regno]) != VOIDmode)
9666 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9667 if (size % align != 0)
9668 size = CEIL (size, align) * align;
9669 reg = gen_rtx (REG, mode, regno);
9670 emit_move_insn (reg,
9671 change_address (arguments, mode,
9672 plus_constant (XEXP (arguments, 0),
9673 size)));
9675 use_reg (&call_fusage, reg);
9676 size += GET_MODE_SIZE (mode);
9679 /* Restore the structure value address unless this is passed as an
9680 "invisible" first argument. */
9681 size = GET_MODE_SIZE (Pmode);
9682 if (struct_value_rtx)
9684 rtx value = gen_reg_rtx (Pmode);
9685 emit_move_insn (value,
9686 change_address (arguments, Pmode,
9687 plus_constant (XEXP (arguments, 0),
9688 size)));
9689 emit_move_insn (struct_value_rtx, value);
9690 if (GET_CODE (struct_value_rtx) == REG)
9691 use_reg (&call_fusage, struct_value_rtx);
9692 size += GET_MODE_SIZE (Pmode);
9695 /* All arguments and registers used for the call are set up by now! */
9696 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9698 /* Ensure the address is valid. A SYMBOL_REF is already valid, so no work
9699 is needed, and we don't load it into a register as an optimization,
9700 because prepare_call_address already did that if it should be done. */
9701 if (GET_CODE (function) != SYMBOL_REF)
9702 function = memory_address (FUNCTION_MODE, function);
9704 /* Generate the actual call instruction and save the return value. */
9705 #ifdef HAVE_untyped_call
9706 if (HAVE_untyped_call)
9707 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
9708 result, result_vector (1, result)));
9709 else
9710 #endif
9711 #ifdef HAVE_call_value
9712 if (HAVE_call_value)
9714 rtx valreg = 0;
9716 /* Locate the unique return register. It is not possible to
9717 express a call that sets more than one return register using
9718 call_value; use untyped_call for that. In fact, untyped_call
9719 only needs to save the return registers in the given block. */
9720 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9721 if ((mode = apply_result_mode[regno]) != VOIDmode)
9723 if (valreg)
9724 abort (); /* HAVE_untyped_call required. */
9725 valreg = gen_rtx (REG, mode, regno);
9728 emit_call_insn (gen_call_value (valreg,
9729 gen_rtx (MEM, FUNCTION_MODE, function),
9730 const0_rtx, NULL_RTX, const0_rtx));
9732 emit_move_insn (change_address (result, GET_MODE (valreg),
9733 XEXP (result, 0)),
9734 valreg);
9736 else
9737 #endif
9738 abort ();
9740 /* Find the CALL insn we just emitted. */
9741 for (call_insn = get_last_insn ();
9742 call_insn && GET_CODE (call_insn) != CALL_INSN;
9743 call_insn = PREV_INSN (call_insn))
9746 if (! call_insn)
9747 abort ();
9749 /* Put the register usage information on the CALL. If there is already
9750 some usage information, put ours at the end. */
9751 if (CALL_INSN_FUNCTION_USAGE (call_insn))
9753 rtx link;
9755 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9756 link = XEXP (link, 1))
9759 XEXP (link, 1) = call_fusage;
9761 else
9762 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
9764 /* Restore the stack. */
9765 #ifdef HAVE_save_stack_nonlocal
9766 if (HAVE_save_stack_nonlocal)
9767 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
9768 else
9769 #endif
9770 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9772 /* Return the address of the result block. */
9773 return copy_addr_to_reg (XEXP (result, 0));
9776 /* Perform an untyped return. */
9778 static void
9779 expand_builtin_return (result)
9780 rtx result;
9782 int size, align, regno;
9783 enum machine_mode mode;
9784 rtx reg;
9785 rtx call_fusage = 0;
9787 apply_result_size ();
9788 result = gen_rtx (MEM, BLKmode, result);
9790 #ifdef HAVE_untyped_return
9791 if (HAVE_untyped_return)
9793 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9794 emit_barrier ();
9795 return;
9797 #endif
9799 /* Restore the return value and note that each value is used. */
9800 size = 0;
9801 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9802 if ((mode = apply_result_mode[regno]) != VOIDmode)
9804 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9805 if (size % align != 0)
9806 size = CEIL (size, align) * align;
9807 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9808 emit_move_insn (reg,
9809 change_address (result, mode,
9810 plus_constant (XEXP (result, 0),
9811 size)));
9813 push_to_sequence (call_fusage);
9814 emit_insn (gen_rtx (USE, VOIDmode, reg));
9815 call_fusage = get_insns ();
9816 end_sequence ();
9817 size += GET_MODE_SIZE (mode);
9820 /* Put the USE insns before the return. */
9821 emit_insns (call_fusage);
9823 /* Return whatever values were restored by jumping directly to the end
9824 of the function. */
9825 expand_null_return ();
9828 /* Expand code for a post- or pre- increment or decrement
9829 and return the RTX for the result.
9830 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9832 static rtx
9833 expand_increment (exp, post, ignore)
9834 register tree exp;
9835 int post, ignore;
9837 register rtx op0, op1;
9838 register rtx temp, value;
9839 register tree incremented = TREE_OPERAND (exp, 0);
9840 optab this_optab = add_optab;
9841 int icode;
9842 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9843 int op0_is_copy = 0;
9844 int single_insn = 0;
9845 /* 1 means we can't store into OP0 directly,
9846 because it is a subreg narrower than a word,
9847 and we don't dare clobber the rest of the word. */
9848 int bad_subreg = 0;
9850 if (output_bytecode)
9852 bc_expand_expr (exp);
9853 return NULL_RTX;
9856 /* Stabilize any component ref that might need to be
9857 evaluated more than once below. */
9858 if (!post
9859 || TREE_CODE (incremented) == BIT_FIELD_REF
9860 || (TREE_CODE (incremented) == COMPONENT_REF
9861 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9862 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9863 incremented = stabilize_reference (incremented);
9864 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9865 ones into save exprs so that they don't accidentally get evaluated
9866 more than once by the code below. */
9867 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9868 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9869 incremented = save_expr (incremented);
9871 /* Compute the operands as RTX.
9872 Note whether OP0 is the actual lvalue or a copy of it:
9873 I believe it is a copy iff it is a register or subreg
9874 and insns were generated in computing it. */
9876 temp = get_last_insn ();
9877 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9879 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9880 in place but instead must do sign- or zero-extension during assignment,
9881 so we copy it into a new register and let the code below use it as
9882 a copy.
9884 Note that we can safely modify this SUBREG since it is known not to be
9885 shared (it was made by the expand_expr call above). */
9887 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9889 if (post)
9890 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9891 else
9892 bad_subreg = 1;
9894 else if (GET_CODE (op0) == SUBREG
9895 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9897 /* We cannot increment this SUBREG in place. If we are
9898 post-incrementing, get a copy of the old value. Otherwise,
9899 just mark that we cannot increment in place. */
9900 if (post)
9901 op0 = copy_to_reg (op0);
9902 else
9903 bad_subreg = 1;
9906 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9907 && temp != get_last_insn ());
9908 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9909 EXPAND_MEMORY_USE_BAD);
9911 /* Decide whether incrementing or decrementing. */
9912 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9913 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9914 this_optab = sub_optab;
9916 /* Convert decrement by a constant into a negative increment. */
9917 if (this_optab == sub_optab
9918 && GET_CODE (op1) == CONST_INT)
9920 op1 = GEN_INT (- INTVAL (op1));
9921 this_optab = add_optab;
9924 /* For a preincrement, see if we can do this with a single instruction. */
9925 if (!post)
9927 icode = (int) this_optab->handlers[(int) mode].insn_code;
9928 if (icode != (int) CODE_FOR_nothing
9929 /* Make sure that OP0 is valid for operands 0 and 1
9930 of the insn we want to queue. */
9931 && (*insn_operand_predicate[icode][0]) (op0, mode)
9932 && (*insn_operand_predicate[icode][1]) (op0, mode)
9933 && (*insn_operand_predicate[icode][2]) (op1, mode))
9934 single_insn = 1;
9937 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9938 then we cannot just increment OP0. We must therefore contrive to
9939 increment the original value. Then, for postincrement, we can return
9940 OP0 since it is a copy of the old value. For preincrement, expand here
9941 unless we can do it with a single insn.
9943 Likewise if storing directly into OP0 would clobber high bits
9944 we need to preserve (bad_subreg). */
9945 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9947 /* This is the easiest way to increment the value wherever it is.
9948 Problems with multiple evaluation of INCREMENTED are prevented
9949 because either (1) it is a component_ref or preincrement,
9950 in which case it was stabilized above, or (2) it is an array_ref
9951 with constant index in an array in a register, which is
9952 safe to reevaluate. */
9953 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9954 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9955 ? MINUS_EXPR : PLUS_EXPR),
9956 TREE_TYPE (exp),
9957 incremented,
9958 TREE_OPERAND (exp, 1));
9960 while (TREE_CODE (incremented) == NOP_EXPR
9961 || TREE_CODE (incremented) == CONVERT_EXPR)
9963 newexp = convert (TREE_TYPE (incremented), newexp);
9964 incremented = TREE_OPERAND (incremented, 0);
9967 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9968 return post ? op0 : temp;
9971 if (post)
9973 /* We have a true reference to the value in OP0.
9974 If there is an insn to add or subtract in this mode, queue it.
9975 Queueing the increment insn avoids the register shuffling
9976 that often results if we must increment now and first save
9977 the old value for subsequent use. */
9979 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9980 op0 = stabilize (op0);
9981 #endif
9983 icode = (int) this_optab->handlers[(int) mode].insn_code;
9984 if (icode != (int) CODE_FOR_nothing
9985 /* Make sure that OP0 is valid for operands 0 and 1
9986 of the insn we want to queue. */
9987 && (*insn_operand_predicate[icode][0]) (op0, mode)
9988 && (*insn_operand_predicate[icode][1]) (op0, mode))
9990 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9991 op1 = force_reg (mode, op1);
9993 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9995 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9997 rtx addr = (general_operand (XEXP (op0, 0), mode)
9998 ? force_reg (Pmode, XEXP (op0, 0))
9999 : copy_to_reg (XEXP (op0, 0)));
10000 rtx temp, result;
10002 op0 = change_address (op0, VOIDmode, addr);
10003 temp = force_reg (GET_MODE (op0), op0);
10004 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10005 op1 = force_reg (mode, op1);
10007 /* The increment queue is LIFO, thus we have to `queue'
10008 the instructions in reverse order. */
10009 enqueue_insn (op0, gen_move_insn (op0, temp));
10010 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
10011 return result;
10015 /* Preincrement, or we can't increment with one simple insn. */
10016 if (post)
10017 /* Save a copy of the value before inc or dec, to return it later. */
10018 temp = value = copy_to_reg (op0);
10019 else
10020 /* Arrange to return the incremented value. */
10021 /* Copy the rtx because expand_binop will protect from the queue,
10022 and the results of that would be invalid for us to return
10023 if our caller does emit_queue before using our result. */
10024 temp = copy_rtx (value = op0);
10026 /* Increment however we can. */
10027 op1 = expand_binop (mode, this_optab, value, op1,
10028 flag_check_memory_usage ? NULL_RTX : op0,
10029 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
10030 /* Make sure the value is stored into OP0. */
10031 if (op1 != op0)
10032 emit_move_insn (op0, op1);
10034 return temp;
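/* Editorial example, not part of the original source: for a
   postincrement such as `a[i++]' used as a value, the add insn is
   enqueued rather than emitted, so OP0 still holds the old value
   while the enclosing expression uses it; a later emit_queue flushes
   the increment.  This is the register shuffling that the comment in
   the post case above refers to.  */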
10037 /* Expand all function calls contained within EXP, innermost ones first.
10038 But don't look within expressions that have sequence points.
10039 For each CALL_EXPR, record the rtx for its value
10040 in the CALL_EXPR_RTL field. */
10042 static void
10043 preexpand_calls (exp)
10044 tree exp;
10046 register int nops, i;
10047 int type = TREE_CODE_CLASS (TREE_CODE (exp));
10049 if (! do_preexpand_calls)
10050 return;
10052 /* Only expressions and references can contain calls. */
10054 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
10055 return;
10057 switch (TREE_CODE (exp))
10059 case CALL_EXPR:
10060 /* Do nothing if already expanded. */
10061 if (CALL_EXPR_RTL (exp) != 0
10062 /* Do nothing if the call returns a variable-sized object. */
10063 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
10064 /* Do nothing to built-in functions. */
10065 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
10066 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
10067 == FUNCTION_DECL)
10068 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
10069 return;
10071 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
10072 return;
10074 case COMPOUND_EXPR:
10075 case COND_EXPR:
10076 case TRUTH_ANDIF_EXPR:
10077 case TRUTH_ORIF_EXPR:
10078 /* If we find one of these, then we can be sure
10079 the adjust will be done for it (since it makes jumps).
10080 Do it now, so that if this is inside an argument
10081 of a function, we don't get the stack adjustment
10082 after some other args have already been pushed. */
10083 do_pending_stack_adjust ();
10084 return;
10086 case BLOCK:
10087 case RTL_EXPR:
10088 case WITH_CLEANUP_EXPR:
10089 case CLEANUP_POINT_EXPR:
10090 case TRY_CATCH_EXPR:
10091 return;
10093 case SAVE_EXPR:
10094 if (SAVE_EXPR_RTL (exp) != 0)
10095 return;
10097 default:
10098 break;
10101 nops = tree_code_length[(int) TREE_CODE (exp)];
10102 for (i = 0; i < nops; i++)
10103 if (TREE_OPERAND (exp, i) != 0)
10105 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
10106 if (type == 'e' || type == '<' || type == '1' || type == '2'
10107 || type == 'r')
10108 preexpand_calls (TREE_OPERAND (exp, i));
10112 /* At the start of a function, record that we have no previously-pushed
10113 arguments waiting to be popped. */
10115 void
10116 init_pending_stack_adjust ()
10118 pending_stack_adjust = 0;
10121 /* When exiting from a function, if safe, clear out any pending stack adjust
10122 so the adjustment won't get done. */
10124 void
10125 clear_pending_stack_adjust ()
10127 #ifdef EXIT_IGNORE_STACK
10128 if (optimize > 0
10129 && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
10130 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
10131 && ! flag_inline_functions)
10132 pending_stack_adjust = 0;
10133 #endif
10136 /* Pop any previously-pushed arguments that have not been popped yet. */
10138 void
10139 do_pending_stack_adjust ()
10141 if (inhibit_defer_pop == 0)
10143 if (pending_stack_adjust != 0)
10144 adjust_stack (GEN_INT (pending_stack_adjust));
10145 pending_stack_adjust = 0;
10149 /* Expand conditional expressions. */
10151 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
10152 LABEL is an rtx of code CODE_LABEL, in this function and all the
10153 functions here. */
10155 void
10156 jumpifnot (exp, label)
10157 tree exp;
10158 rtx label;
10160 do_jump (exp, label, NULL_RTX);
10163 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
10165 void
10166 jumpif (exp, label)
10167 tree exp;
10168 rtx label;
10170 do_jump (exp, NULL_RTX, label);
10173 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
10174 the result is zero, or IF_TRUE_LABEL if the result is one.
10175 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
10176 meaning fall through in that case.
10178 do_jump always does any pending stack adjust except when it does not
10179 actually perform a jump. An example where there is no jump
10180 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
10182 This function is responsible for optimizing cases such as
10183 &&, || and comparison operators in EXP. */
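/* Editorial example: for `if (a && b)' the TRUTH_ANDIF_EXPR case
   below jumps to the false label as soon as `a' turns out to be zero
   and only then evaluates `b'; no boolean value is ever
   materialized.  */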
10185 void
10186 do_jump (exp, if_false_label, if_true_label)
10187 tree exp;
10188 rtx if_false_label, if_true_label;
10190 register enum tree_code code = TREE_CODE (exp);
10191 /* Some cases need to create a label to jump to
10192 in order to properly fall through.
10193 These cases set DROP_THROUGH_LABEL nonzero. */
10194 rtx drop_through_label = 0;
10195 rtx temp;
10196 rtx comparison = 0;
10197 int i;
10198 tree type;
10199 enum machine_mode mode;
10201 emit_queue ();
10203 switch (code)
10205 case ERROR_MARK:
10206 break;
10208 case INTEGER_CST:
10209 temp = integer_zerop (exp) ? if_false_label : if_true_label;
10210 if (temp)
10211 emit_jump (temp);
10212 break;
10214 #if 0
10215 /* This is not true with #pragma weak */
10216 case ADDR_EXPR:
10217 /* The address of something can never be zero. */
10218 if (if_true_label)
10219 emit_jump (if_true_label);
10220 break;
10221 #endif
10223 case NOP_EXPR:
10224 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
10225 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
10226 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
10227 goto normal;
10228 case CONVERT_EXPR:
10229 /* If we are narrowing the operand, we have to do the compare in the
10230 narrower mode. */
10231 if ((TYPE_PRECISION (TREE_TYPE (exp))
10232 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10233 goto normal;
10234 case NON_LVALUE_EXPR:
10235 case REFERENCE_EXPR:
10236 case ABS_EXPR:
10237 case NEGATE_EXPR:
10238 case LROTATE_EXPR:
10239 case RROTATE_EXPR:
10240 /* These cannot change zero->non-zero or vice versa. */
10241 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10242 break;
10244 #if 0
10245 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
10246 a test and can be longer if the test is eliminated. */
10247 case PLUS_EXPR:
10248 /* Reduce to minus. */
10249 exp = build (MINUS_EXPR, TREE_TYPE (exp),
10250 TREE_OPERAND (exp, 0),
10251 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
10252 TREE_OPERAND (exp, 1))));
10253 /* Process as MINUS. */
10254 #endif
10256 case MINUS_EXPR:
10257 /* Non-zero iff operands of minus differ. */
10258 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
10259 TREE_OPERAND (exp, 0),
10260 TREE_OPERAND (exp, 1)),
10261 NE, NE);
10262 break;
10264 case BIT_AND_EXPR:
10265 /* If we are AND'ing with a small constant, do this comparison in the
10266 smallest type that fits. If the machine doesn't have comparisons
10267 that small, it will be converted back to the wider comparison.
10268 This helps if we are testing the sign bit of a narrower object.
10269 combine can't do this for us because it can't know whether a
10270 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
10272 if (! SLOW_BYTE_ACCESS
10273 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10274 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10275 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
10276 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10277 && (type = type_for_mode (mode, 1)) != 0
10278 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10279 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10280 != CODE_FOR_nothing))
10282 do_jump (convert (type, exp), if_false_label, if_true_label);
10283 break;
10285 goto normal;
10287 case TRUTH_NOT_EXPR:
10288 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10289 break;
10291 case TRUTH_ANDIF_EXPR:
10292 if (if_false_label == 0)
10293 if_false_label = drop_through_label = gen_label_rtx ();
10294 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10295 start_cleanup_deferral ();
10296 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10297 end_cleanup_deferral ();
10298 break;
10300 case TRUTH_ORIF_EXPR:
10301 if (if_true_label == 0)
10302 if_true_label = drop_through_label = gen_label_rtx ();
10303 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10304 start_cleanup_deferral ();
10305 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10306 end_cleanup_deferral ();
10307 break;
10309 case COMPOUND_EXPR:
10310 push_temp_slots ();
10311 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10312 preserve_temp_slots (NULL_RTX);
10313 free_temp_slots ();
10314 pop_temp_slots ();
10315 emit_queue ();
10316 do_pending_stack_adjust ();
10317 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10318 break;
10320 case COMPONENT_REF:
10321 case BIT_FIELD_REF:
10322 case ARRAY_REF:
10324 int bitsize, bitpos, unsignedp;
10325 enum machine_mode mode;
10326 tree type;
10327 tree offset;
10328 int volatilep = 0;
10329 int alignment;
10331 /* Get description of this reference. We don't actually care
10332 about the underlying object here. */
10333 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10334 &mode, &unsignedp, &volatilep,
10335 &alignment);
10337 type = type_for_size (bitsize, unsignedp);
10338 if (! SLOW_BYTE_ACCESS
10339 && type != 0 && bitsize >= 0
10340 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10341 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10342 != CODE_FOR_nothing))
10344 do_jump (convert (type, exp), if_false_label, if_true_label);
10345 break;
10347 goto normal;
10350 case COND_EXPR:
10351 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10352 if (integer_onep (TREE_OPERAND (exp, 1))
10353 && integer_zerop (TREE_OPERAND (exp, 2)))
10354 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10356 else if (integer_zerop (TREE_OPERAND (exp, 1))
10357 && integer_onep (TREE_OPERAND (exp, 2)))
10358 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10360 else
10362 register rtx label1 = gen_label_rtx ();
10363 drop_through_label = gen_label_rtx ();
10365 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10367 start_cleanup_deferral ();
10368 /* Now the THEN-expression. */
10369 do_jump (TREE_OPERAND (exp, 1),
10370 if_false_label ? if_false_label : drop_through_label,
10371 if_true_label ? if_true_label : drop_through_label);
10372 /* In case the do_jump just above never jumps. */
10373 do_pending_stack_adjust ();
10374 emit_label (label1);
10376 /* Now the ELSE-expression. */
10377 do_jump (TREE_OPERAND (exp, 2),
10378 if_false_label ? if_false_label : drop_through_label,
10379 if_true_label ? if_true_label : drop_through_label);
10380 end_cleanup_deferral ();
10382 break;
10384 case EQ_EXPR:
10386 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10388 if (integer_zerop (TREE_OPERAND (exp, 1)))
10389 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10390 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10391 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10392 do_jump
10393 (fold
10394 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10395 fold (build (EQ_EXPR, TREE_TYPE (exp),
10396 fold (build1 (REALPART_EXPR,
10397 TREE_TYPE (inner_type),
10398 TREE_OPERAND (exp, 0))),
10399 fold (build1 (REALPART_EXPR,
10400 TREE_TYPE (inner_type),
10401 TREE_OPERAND (exp, 1))))),
10402 fold (build (EQ_EXPR, TREE_TYPE (exp),
10403 fold (build1 (IMAGPART_EXPR,
10404 TREE_TYPE (inner_type),
10405 TREE_OPERAND (exp, 0))),
10406 fold (build1 (IMAGPART_EXPR,
10407 TREE_TYPE (inner_type),
10408 TREE_OPERAND (exp, 1))))))),
10409 if_false_label, if_true_label);
10410 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10411 && !can_compare_p (TYPE_MODE (inner_type)))
10412 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10413 else
10414 comparison = compare (exp, EQ, EQ);
10415 break;
10418 case NE_EXPR:
10420 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10422 if (integer_zerop (TREE_OPERAND (exp, 1)))
10423 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10424 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10425 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10426 do_jump
10427 (fold
10428 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10429 fold (build (NE_EXPR, TREE_TYPE (exp),
10430 fold (build1 (REALPART_EXPR,
10431 TREE_TYPE (inner_type),
10432 TREE_OPERAND (exp, 0))),
10433 fold (build1 (REALPART_EXPR,
10434 TREE_TYPE (inner_type),
10435 TREE_OPERAND (exp, 1))))),
10436 fold (build (NE_EXPR, TREE_TYPE (exp),
10437 fold (build1 (IMAGPART_EXPR,
10438 TREE_TYPE (inner_type),
10439 TREE_OPERAND (exp, 0))),
10440 fold (build1 (IMAGPART_EXPR,
10441 TREE_TYPE (inner_type),
10442 TREE_OPERAND (exp, 1))))))),
10443 if_false_label, if_true_label);
10444 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10445 && !can_compare_p (TYPE_MODE (inner_type)))
10446 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10447 else
10448 comparison = compare (exp, NE, NE);
10449 break;
10452 case LT_EXPR:
10453 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10454 == MODE_INT)
10455 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10456 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10457 else
10458 comparison = compare (exp, LT, LTU);
10459 break;
10461 case LE_EXPR:
10462 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10463 == MODE_INT)
10464 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10465 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10466 else
10467 comparison = compare (exp, LE, LEU);
10468 break;
10470 case GT_EXPR:
10471 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10472 == MODE_INT)
10473 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10474 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10475 else
10476 comparison = compare (exp, GT, GTU);
10477 break;
10479 case GE_EXPR:
10480 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10481 == MODE_INT)
10482 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10483 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10484 else
10485 comparison = compare (exp, GE, GEU);
10486 break;
10488 default:
10489 normal:
10490 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10491 #if 0
10492 /* This is not needed any more and causes poor code since it causes
10493 comparisons and tests from non-SI objects to have different code
10494 sequences. */
10495 /* Copy to register to avoid generating bad insns by cse
10496 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10497 if (!cse_not_expected && GET_CODE (temp) == MEM)
10498 temp = copy_to_reg (temp);
10499 #endif
10500 do_pending_stack_adjust ();
10501 if (GET_CODE (temp) == CONST_INT)
10502 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10503 else if (GET_CODE (temp) == LABEL_REF)
10504 comparison = const_true_rtx;
10505 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10506 && !can_compare_p (GET_MODE (temp)))
10507 /* Note swapping the labels gives us not-equal. */
10508 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10509 else if (GET_MODE (temp) != VOIDmode)
10510 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10511 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10512 GET_MODE (temp), NULL_RTX, 0);
10513 else
10514 abort ();
10517 /* Do any postincrements in the expression that was tested. */
10518 emit_queue ();
10520 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10521 straight into a conditional jump instruction as the jump condition.
10522 Otherwise, all the work has been done already. */
10524 if (comparison == const_true_rtx)
10526 if (if_true_label)
10527 emit_jump (if_true_label);
10529 else if (comparison == const0_rtx)
10531 if (if_false_label)
10532 emit_jump (if_false_label);
10534 else if (comparison)
10535 do_jump_for_compare (comparison, if_false_label, if_true_label);
10537 if (drop_through_label)
10539 /* If do_jump produces code that might be jumped around,
10540 do any stack adjusts from that code, before the place
10541 where control merges in. */
10542 do_pending_stack_adjust ();
10543 emit_label (drop_through_label);
10547 /* Given a comparison expression EXP for values too wide to be compared
10548 with one insn, test the comparison and jump to the appropriate label.
10549 The code of EXP is ignored; we always test GT if SWAP is 0,
10550 and LT if SWAP is 1. */
10552 static void
10553 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10554 tree exp;
10555 int swap;
10556 rtx if_false_label, if_true_label;
10558 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10559 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10560 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10561 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10562 rtx drop_through_label = 0;
10563 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10564 int i;
10566 if (! if_true_label || ! if_false_label)
10567 drop_through_label = gen_label_rtx ();
10568 if (! if_true_label)
10569 if_true_label = drop_through_label;
10570 if (! if_false_label)
10571 if_false_label = drop_through_label;
10573 /* Compare a word at a time, high order first. */
10574 for (i = 0; i < nwords; i++)
10576 rtx comp;
10577 rtx op0_word, op1_word;
10579 if (WORDS_BIG_ENDIAN)
10581 op0_word = operand_subword_force (op0, i, mode);
10582 op1_word = operand_subword_force (op1, i, mode);
10584 else
10586 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10587 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10590 /* All but high-order word must be compared as unsigned. */
10591 comp = compare_from_rtx (op0_word, op1_word,
10592 (unsignedp || i > 0) ? GTU : GT,
10593 unsignedp, word_mode, NULL_RTX, 0);
10594 if (comp == const_true_rtx)
10595 emit_jump (if_true_label);
10596 else if (comp != const0_rtx)
10597 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10599 /* Consider lower words only if these are equal. */
10600 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10601 NULL_RTX, 0);
10602 if (comp == const_true_rtx)
10603 emit_jump (if_false_label);
10604 else if (comp != const0_rtx)
10605 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10608 if (if_false_label)
10609 emit_jump (if_false_label);
10610 if (drop_through_label)
10611 emit_label (drop_through_label);
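/* Editorial example: for a signed DImode comparison on a 32-bit
   target, each iteration above emits a GT branch to the true label
   (signed only for the high word) followed by an NE branch to the
   false label, so the low word, compared with GTU, is reached only
   when the high words are equal.  */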
10614 /* Compare OP0 with OP1, word at a time, in mode MODE.
10615 UNSIGNEDP says to do unsigned comparison.
10616 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10618 void
10619 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10620 enum machine_mode mode;
10621 int unsignedp;
10622 rtx op0, op1;
10623 rtx if_false_label, if_true_label;
10625 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10626 rtx drop_through_label = 0;
10627 int i;
10629 if (! if_true_label || ! if_false_label)
10630 drop_through_label = gen_label_rtx ();
10631 if (! if_true_label)
10632 if_true_label = drop_through_label;
10633 if (! if_false_label)
10634 if_false_label = drop_through_label;
10636 /* Compare a word at a time, high order first. */
10637 for (i = 0; i < nwords; i++)
10639 rtx comp;
10640 rtx op0_word, op1_word;
10642 if (WORDS_BIG_ENDIAN)
10644 op0_word = operand_subword_force (op0, i, mode);
10645 op1_word = operand_subword_force (op1, i, mode);
10647 else
10649 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10650 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10653 /* All but high-order word must be compared as unsigned. */
10654 comp = compare_from_rtx (op0_word, op1_word,
10655 (unsignedp || i > 0) ? GTU : GT,
10656 unsignedp, word_mode, NULL_RTX, 0);
10657 if (comp == const_true_rtx)
10658 emit_jump (if_true_label);
10659 else if (comp != const0_rtx)
10660 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10662 /* Consider lower words only if these are equal. */
10663 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10664 NULL_RTX, 0);
10665 if (comp == const_true_rtx)
10666 emit_jump (if_false_label);
10667 else if (comp != const0_rtx)
10668 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10671 if (if_false_label)
10672 emit_jump (if_false_label);
10673 if (drop_through_label)
10674 emit_label (drop_through_label);
10677 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10678 with one insn, test the comparison and jump to the appropriate label. */
10680 static void
10681 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10682 tree exp;
10683 rtx if_false_label, if_true_label;
10685 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10686 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10687 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10688 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10689 int i;
10690 rtx drop_through_label = 0;
10692 if (! if_false_label)
10693 drop_through_label = if_false_label = gen_label_rtx ();
10695 for (i = 0; i < nwords; i++)
10697 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10698 operand_subword_force (op1, i, mode),
10699 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10700 word_mode, NULL_RTX, 0);
10701 if (comp == const_true_rtx)
10702 emit_jump (if_false_label);
10703 else if (comp != const0_rtx)
10704 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10707 if (if_true_label)
10708 emit_jump (if_true_label);
10709 if (drop_through_label)
10710 emit_label (drop_through_label);
10713 /* Jump according to whether OP0 is 0.
10714 We assume that OP0 has an integer mode that is too wide
10715 for the available compare insns. */
10717 static void
10718 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10719 rtx op0;
10720 rtx if_false_label, if_true_label;
10722 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10723 rtx part;
10724 int i;
10725 rtx drop_through_label = 0;
10727 /* The fastest way of doing this comparison on almost any machine is to
10728 "or" all the words and compare the result. If all have to be loaded
10729 from memory and this is a very wide item, it's possible this may
10730 be slower, but that's highly unlikely. */
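  /* Editorial example: testing a DImode value on a 32-bit target thus
     becomes `(word0 | word1) == 0', a single IOR plus one word_mode
     compare in place of two compares and two conditional jumps.  */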
10732 part = gen_reg_rtx (word_mode);
10733 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10734 for (i = 1; i < nwords && part != 0; i++)
10735 part = expand_binop (word_mode, ior_optab, part,
10736 operand_subword_force (op0, i, GET_MODE (op0)),
10737 part, 1, OPTAB_WIDEN);
10739 if (part != 0)
10741 rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
10742 NULL_RTX, 0);
10744 if (comp == const_true_rtx)
10745 emit_jump (if_false_label);
10746 else if (comp == const0_rtx)
10747 emit_jump (if_true_label);
10748 else
10749 do_jump_for_compare (comp, if_false_label, if_true_label);
10751 return;
10754 /* If we couldn't do the "or" simply, do this with a series of compares. */
10755 if (! if_false_label)
10756 drop_through_label = if_false_label = gen_label_rtx ();
10758 for (i = 0; i < nwords; i++)
10760 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
10761 GET_MODE (op0)),
10762 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10763 if (comp == const_true_rtx)
10764 emit_jump (if_false_label);
10765 else if (comp != const0_rtx)
10766 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10769 if (if_true_label)
10770 emit_jump (if_true_label);
10772 if (drop_through_label)
10773 emit_label (drop_through_label);
10776 /* Given a comparison expression in rtl form, output conditional branches to
10777 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
10779 static void
10780 do_jump_for_compare (comparison, if_false_label, if_true_label)
10781 rtx comparison, if_false_label, if_true_label;
10783 if (if_true_label)
10785 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10786 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
10787 else
10788 abort ();
10790 if (if_false_label)
10791 emit_jump (if_false_label);
10793 else if (if_false_label)
10795 rtx insn;
10796 rtx prev = get_last_insn ();
10797 rtx branch = 0;
10799 /* Output the branch with the opposite condition. Then try to invert
10800 what is generated. If more than one insn is a branch, or if the
10801 branch is not the last insn written, abort. If we can't invert
10802 the branch, make a true label, redirect this jump to that,
10803 emit a jump to the false label and define the true label. */
10805 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10806 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
10807 else
10808 abort ();
10810 /* Here we get the first insn that was just emitted. It used to be the
10811 case that, on some machines, emitting the branch would discard
10812 the previous compare insn and emit a replacement. This isn't
10813 done anymore, but abort if we see that PREV is deleted. */
10815 if (prev == 0)
10816 insn = get_insns ();
10817 else if (INSN_DELETED_P (prev))
10818 abort ();
10819 else
10820 insn = NEXT_INSN (prev);
10822 for (; insn; insn = NEXT_INSN (insn))
10823 if (GET_CODE (insn) == JUMP_INSN)
10825 if (branch)
10826 abort ();
10827 branch = insn;
10830 if (branch != get_last_insn ())
10831 abort ();
10833 JUMP_LABEL (branch) = if_false_label;
10834 if (! invert_jump (branch, if_false_label))
10836 if_true_label = gen_label_rtx ();
10837 redirect_jump (branch, if_true_label);
10838 emit_jump (if_false_label);
10839 emit_label (if_true_label);
10844 /* Generate code for a comparison expression EXP
10845 (including code to compute the values to be compared)
10846 and set (CC0) according to the result.
10847 SIGNED_CODE should be the rtx operation for this comparison for
10848 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10850 We force a stack adjustment unless there are currently
10851 things pushed on the stack that aren't yet used. */
10853 static rtx
10854 compare (exp, signed_code, unsigned_code)
10855 register tree exp;
10856 enum rtx_code signed_code, unsigned_code;
10858 register rtx op0
10859 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10860 register rtx op1
10861 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10862 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
10863 register enum machine_mode mode = TYPE_MODE (type);
10864 int unsignedp = TREE_UNSIGNED (type);
10865 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
10867 #ifdef HAVE_canonicalize_funcptr_for_compare
10868 /* If function pointers need to be "canonicalized" before they can
10869 be reliably compared, then canonicalize them. */
10870 if (HAVE_canonicalize_funcptr_for_compare
10871 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10872 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10873 == FUNCTION_TYPE))
10875 rtx new_op0 = gen_reg_rtx (mode);
10877 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10878 op0 = new_op0;
10881 if (HAVE_canonicalize_funcptr_for_compare
10882 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10883 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10884 == FUNCTION_TYPE))
10886 rtx new_op1 = gen_reg_rtx (mode);
10888 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10889 op1 = new_op1;
10891 #endif
10893 return compare_from_rtx (op0, op1, code, unsignedp, mode,
10894 ((mode == BLKmode)
10895 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10896 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
10899 /* Like compare but expects the values to compare as two rtx's.
10900 The decision as to signed or unsigned comparison must be made by the caller.
10902 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10903 compared.
10905 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10906 size of MODE should be used. */
10908 rtx
10909 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10910 register rtx op0, op1;
10911 enum rtx_code code;
10912 int unsignedp;
10913 enum machine_mode mode;
10914 rtx size;
10915 int align;
10917 rtx tem;
10919 /* If one operand is constant, make it the second one. Only do this
10920 if the other operand is not constant as well. */
10922 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10923 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10925 tem = op0;
10926 op0 = op1;
10927 op1 = tem;
10928 code = swap_condition (code);
10931 if (flag_force_mem)
10933 op0 = force_not_mem (op0);
10934 op1 = force_not_mem (op1);
10937 do_pending_stack_adjust ();
10939 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10940 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10941 return tem;
10943 #if 0
10944 /* There's no need to do this now that combine.c can eliminate lots of
10945 sign extensions. This can be less efficient in certain cases on other
10946 machines. */
10948 /* If this is a signed equality comparison, we can do it as an
10949 unsigned comparison since zero-extension is cheaper than sign
10950 extension and comparisons with zero are done as unsigned. This is
10951 the case even on machines that can do fast sign extension, since
10952 zero-extension is easier to combine with other operations than
10953 sign-extension is. If we are comparing against a constant, we must
10954 convert it to what it would look like unsigned. */
10955 if ((code == EQ || code == NE) && ! unsignedp
10956 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10958 if (GET_CODE (op1) == CONST_INT
10959 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10960 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10961 unsignedp = 1;
10963 #endif
10965 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10967 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
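/* Editorial note: the rtx returned above has the shape
   (gt (cc0) (const_int 0)), with the appropriate comparison code;
   do_jump_for_compare indexes bcc_gen_fctn by that code to emit the
   conditional branch.  */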

/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1))
      && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
    {
      tree inner = TREE_OPERAND (arg0, 0);
      HOST_WIDE_INT tem;
      int bitnum;
      int ops_unsignedp;

      tem = INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
                                 NULL_RTX, VOIDmode, 0));
      /* In this case, immed_double_const will sign extend the value to make
         it look the same on the host and target.  We must remove the
         sign-extension before calling exact_log2, since exact_log2 will
         fail for negative values.  */
      if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT
          && BITS_PER_WORD == GET_MODE_BITSIZE (TYPE_MODE (type)))
        /* We don't use the obvious constant shift to generate the mask,
           because that generates compiler warnings when BITS_PER_WORD is
           greater than or equal to HOST_BITS_PER_WIDE_INT, even though this
           code is unreachable in that case.  */
        tem = tem & GET_MODE_MASK (word_mode);
      bitnum = exact_log2 (tem);

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
              < TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (subtarget == 0 || GET_CODE (subtarget) != REG
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
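
/* Illustrative sketch, not compiler code: the single-bit case above
   corresponds at the source level to rewriting

        (x & 0x20) != 0   ==>   (x >> 5) & 1
        (x & 0x20) == 0   ==>   ((x >> 5) & 1) ^ 1

   and when the bit tested is the sign bit the trailing AND is omitted,
   since an unsigned shift already isolates that bit.  The disabled
   function below is a hypothetical demonstration of the identity.  */
#if 0
static int
single_bit_test_example (x)
     unsigned int x;
{
  /* Same value as `(x & 0x20) != 0', with no scc insn required.  */
  return (x >> 5) & 1;
}
#endif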

/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
  emit_jump_insn (gen_bgtu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx (PLUS, Pmode,
                   gen_rtx (MULT, Pmode, index,
                            GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                   gen_rtx (LABEL_REF, Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

#ifndef CASE_VECTOR_PC_RELATIVE
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! flag_pic)
    emit_barrier ();
#endif
}

#endif /* HAVE_tablejump */
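
/* Illustrative sketch, not compiler code: the unsigned comparison
   emitted above folds both range checks into one test.  After the lower
   bound has been subtracted, an index below the range wraps around to a
   huge unsigned value, so a single GTU test against the table length
   catches both out-of-range directions.  The bounds below are made up.  */
#if 0
static void
tablejump_range_check_example (i)
     int i;
{
  /* Equivalent to:  if (i < 3 || i > 9) goto default_case;  */
  if ((unsigned) (i - 3) > (unsigned) (9 - 3))
    goto default_case;

  /* ... dispatch through the jump table would happen here ...  */

 default_case:
  return;
}
#endif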

/* Emit a suitable bytecode to load a value from memory, assuming a pointer
   to that value is on the top of the stack.  The resulting type is TYPE, and
   the source declaration is DECL.  */

void
bc_load_memory (type, decl)
     tree type, decl;
{
  enum bytecode_opcode opcode;

  /* Bit fields are special.  We only know about signed and
     unsigned ints, and enums.  The latter are treated as
     signed integers.  */

  if (DECL_BIT_FIELD (decl))
    if (TREE_CODE (type) == ENUMERAL_TYPE
        || TREE_CODE (type) == INTEGER_TYPE)
      opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
    else
      abort ();
  else
    /* See corresponding comment in bc_store_memory.  */
    if (TYPE_MODE (type) == BLKmode
        || TYPE_MODE (type) == VOIDmode)
      return;
    else
      opcode = mode_to_load_map[(int) TYPE_MODE (type)];

  if (opcode == neverneverland)
    abort ();

  bc_emit_bytecode (opcode);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}

/* Store the contents of the second stack slot to the address in the
   top stack slot.  DECL is the declaration of the destination and is used
   to determine whether we're dealing with a bitfield.  */

void
bc_store_memory (type, decl)
     tree type, decl;
{
  enum bytecode_opcode opcode;

  if (DECL_BIT_FIELD (decl))
    {
      if (TREE_CODE (type) == ENUMERAL_TYPE
          || TREE_CODE (type) == INTEGER_TYPE)
        opcode = sstoreBI;
      else
        abort ();
    }
  else
    if (TYPE_MODE (type) == BLKmode)
      {
        /* Copy structure.  This expands to a block copy instruction, storeBLK.
           In addition to the arguments expected by the other store instructions,
           it also expects a type size (SImode) on top of the stack, which is the
           structure size in size units (usually bytes).  The two first arguments
           are already on the stack; so we just put the size on level 1.  For some
           other languages, the size may be variable, this is why we don't encode
           it as a storeBLK literal, but rather treat it as a full-fledged expression.  */

        bc_expand_expr (TYPE_SIZE (type));
        opcode = storeBLK;
      }
    else
      opcode = mode_to_store_map[(int) TYPE_MODE (type)];

  if (opcode == neverneverland)
    abort ();

  bc_emit_bytecode (opcode);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}

/* Allocate local stack space sufficient to hold a value of the given
   SIZE at alignment boundary ALIGNMENT bits.  ALIGNMENT must be an
   integral power of 2.  A special case is locals of type VOID, which
   have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
   remapped into the corresponding attribute of SI.  */

rtx
bc_allocate_local (size, alignment)
     int size, alignment;
{
  rtx retval;
  int byte_alignment;

  if (size < 0)
    abort ();

  /* Normalize size and alignment  */
  if (!size)
    size = UNITS_PER_WORD;

  if (alignment < BITS_PER_UNIT)
    byte_alignment = 1 << (INT_ALIGN - 1);
  else
    /* Align */
    byte_alignment = alignment / BITS_PER_UNIT;

  if (local_vars_size & (byte_alignment - 1))
    local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));

  retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
  local_vars_size += size;

  return retval;
}
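
/* Illustrative sketch, not compiler code: the adjustment above is the
   standard round-up-to-alignment idiom.  Adding the shortfall only when
   the low bits are set gives the same result as the masking form shown
   in the comment below.  */
#if 0
static int
round_up_example (off, align)
     int off, align;            /* ALIGN must be a power of 2 */
{
  if (off & (align - 1))
    off += align - (off & (align - 1));

  /* Same result as:  off = (off + align - 1) & ~(align - 1);  */
  return off;
}
#endif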

/* Allocate variable-sized local array.  Variable-sized arrays are
   actually pointers to the address in memory where they are stored.  */

rtx
bc_allocate_variable_array (size)
     tree size;
{
  rtx retval;
  const int ptralign = (1 << (PTR_ALIGN - 1));

  /* Align pointer */
  if (local_vars_size & (ptralign - 1))
    local_vars_size += ptralign - (local_vars_size & (ptralign - 1));

  /* Note down local space needed: pointer to block; also return
     dummy rtx */

  retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
  local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
  return retval;
}

/* Push the machine address for the given external variable offset.  */

void
bc_load_externaddr (externaddr)
     rtx externaddr;
{
  bc_emit_bytecode (constP);
  bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
                         BYTECODE_BC_LABEL (externaddr)->offset);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}

/* Like above, but expects an IDENTIFIER.  */

void
bc_load_externaddr_id (id, offset)
     tree id;
     int offset;
{
  if (!IDENTIFIER_POINTER (id))
    abort ();

  bc_emit_bytecode (constP);
  bc_emit_code_labelref (xstrdup (IDENTIFIER_POINTER (id)), offset);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}

/* Push the machine address for the given local variable offset.  */

void
bc_load_localaddr (localaddr)
     rtx localaddr;
{
  bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
}

/* Push the machine address for the given parameter offset.
   NOTE: offset is in bits.  */

void
bc_load_parmaddr (parmaddr)
     rtx parmaddr;
{
  bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
                              / BITS_PER_UNIT));
}

/* Convert a[i] into *(a + i).  */

tree
bc_canonicalize_array_ref (exp)
     tree exp;
{
  tree type = TREE_TYPE (exp);
  tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
                           TREE_OPERAND (exp, 0));
  tree index = TREE_OPERAND (exp, 1);

  /* Convert the integer argument to a type the same size as a pointer
     so the multiply won't overflow spuriously.  */

  if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
    index = convert (type_for_size (POINTER_SIZE, 0), index);

  /* The array address isn't volatile even if the array is.
     (Of course this isn't terribly relevant since the bytecode
     translator treats nearly everything as volatile anyway.)  */
  TREE_THIS_VOLATILE (array_adr) = 0;

  return build1 (INDIRECT_REF, type,
                 fold (build (PLUS_EXPR,
                              TYPE_POINTER_TO (type),
                              array_adr,
                              fold (build (MULT_EXPR,
                                           TYPE_POINTER_TO (type),
                                           index,
                                           size_in_bytes (type))))));
}
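
/* Illustrative sketch, not compiler code: the tree rewrite above is the
   source-level identity

        a[i]  ==>  *(a + i)

   with the index first widened to pointer precision so that the implied
   `i * sizeof *a' scaling cannot overflow in a narrower type.  The
   disabled function below is a hypothetical demonstration.  */
#if 0
static int
array_ref_example (a, i)
     int *a;
     long i;
{
  /* Both expressions denote the same element.  */
  return *(int *) ((char *) a + i * sizeof *a) == a[i];
}
#endif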

/* Load the address of the component referenced by the given
   COMPONENT_REF expression.

   Returns innermost lvalue.  */

tree
bc_expand_component_address (exp)
     tree exp;
{
  tree tem;
  enum machine_mode mode;
  int bitpos = 0;
  HOST_WIDE_INT SIval;

  tem = TREE_OPERAND (exp, 1);
  mode = DECL_MODE (tem);

  /* Compute cumulative bit offset for nested component refs
     and array refs, and find the ultimate containing object.  */

  for (tem = exp;; tem = TREE_OPERAND (tem, 0))
    {
      if (TREE_CODE (tem) == COMPONENT_REF)
        bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
      else
        if (TREE_CODE (tem) == ARRAY_REF
            && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
            && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)

          bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
                     * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
                     /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
        else
          break;
    }

  bc_expand_expr (tem);

  /* For bitfields also push their offset and size */
  if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
    bc_push_offset_and_size (bitpos,
                             TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 1))));
  else
    if ((SIval = bitpos / BITS_PER_UNIT))
      bc_emit_instruction (addconstPSI, SIval);

  return (TREE_OPERAND (exp, 1));
}

/* Emit code to push two SI constants */

void
bc_push_offset_and_size (offset, size)
     HOST_WIDE_INT offset, size;
{
  bc_emit_instruction (constSI, offset);
  bc_emit_instruction (constSI, size);
}

/* Emit byte code to push the address of the given lvalue expression to
   the stack.  If it's a bit field, we also push offset and size info.

   Returns innermost component, which allows us to determine not only
   its type, but also whether it's a bitfield.  */

tree
bc_expand_address (exp)
     tree exp;
{
  /* Safeguard */
  if (!exp || TREE_CODE (exp) == ERROR_MARK)
    return (exp);

  switch (TREE_CODE (exp))
    {
    case ARRAY_REF:

      return (bc_expand_address (bc_canonicalize_array_ref (exp)));

    case COMPONENT_REF:

      return (bc_expand_component_address (exp));

    case INDIRECT_REF:

      bc_expand_expr (TREE_OPERAND (exp, 0));

      /* For variable-sized types: retrieve pointer.  Sometimes the
         TYPE_SIZE tree is NULL.  Is this a bug or a feature?  Let's
         also make sure we have an operand, just in case...  */

      if (TREE_OPERAND (exp, 0)
          && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
        bc_emit_instruction (loadP);

      /* If packed, also return offset and size */
      if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
        bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
                                 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));

      return (TREE_OPERAND (exp, 0));

    case FUNCTION_DECL:

      bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
                             BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
      break;

    case PARM_DECL:

      bc_load_parmaddr (DECL_RTL (exp));

      /* For variable-sized types: retrieve pointer */
      if (TYPE_SIZE (TREE_TYPE (exp))
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
        bc_emit_instruction (loadP);

      /* If packed, also return offset and size */
      if (DECL_BIT_FIELD (exp))
        bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
                                 TREE_INT_CST_LOW (DECL_SIZE (exp)));

      break;

    case RESULT_DECL:

      bc_emit_instruction (returnP);
      break;

    case VAR_DECL:

#if 0
      if (BYTECODE_LABEL (DECL_RTL (exp)))
        bc_load_externaddr (DECL_RTL (exp));
#endif

      if (DECL_EXTERNAL (exp))
        bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
                               (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
      else
        bc_load_localaddr (DECL_RTL (exp));

      /* For variable-sized types: retrieve pointer */
      if (TYPE_SIZE (TREE_TYPE (exp))
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
        bc_emit_instruction (loadP);

      /* If packed, also return offset and size */
      if (DECL_BIT_FIELD (exp))
        bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
                                 TREE_INT_CST_LOW (DECL_SIZE (exp)));

      break;

    case STRING_CST:
      {
        rtx r;

        bc_emit_bytecode (constP);
        r = output_constant_def (exp);
        bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);

#ifdef DEBUG_PRINT_CODE
        fputc ('\n', stderr);
#endif
      }
      break;

    default:

      abort ();
      break;
    }

  /* Most lvalues don't have components.  */
  return (exp);
}

/* Emit a type code to be used by the runtime support in handling
   parameter passing.  The type code consists of the machine mode
   plus the minimal alignment shifted left 8 bits.  */

tree
bc_runtime_type_code (type)
     tree type;
{
  int val;

  switch (TREE_CODE (type))
    {
    case VOID_TYPE:
    case INTEGER_TYPE:
    case REAL_TYPE:
    case COMPLEX_TYPE:
    case ENUMERAL_TYPE:
    case POINTER_TYPE:
    case RECORD_TYPE:

      val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
      break;

    case ERROR_MARK:

      val = 0;
      break;

    default:

      abort ();
    }
  return build_int_2 (val, 0);
}
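
/* Illustrative sketch, not compiler code: the encoding above packs the
   machine mode enumerator into the low 8 bits of the type code and the
   alignment above them, so decoding is a mask and a shift.  The sample
   values below are made up.  */
#if 0
static void
type_code_example ()
{
  int mode = 5, align = 32;             /* hypothetical sample values */
  int val = mode | align << 8;          /* encode, as above */
  int decoded_mode = val & 0xff;        /* == 5 */
  int decoded_align = val >> 8;         /* == 32 */
}
#endif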

/* Generate constructor label */

char *
bc_gen_constr_label ()
{
  static int label_counter;
  static char label[20];

  sprintf (label, "*LR%d", label_counter++);

  return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
}

/* Evaluate constructor CONSTR and return pointer to it on level one.  We
   expand the constructor data as static data, and push a pointer to it.
   The pointer is put in the pointer table and is retrieved by a constP
   bytecode instruction.  We then loop and store each constructor member in
   the corresponding component.  Finally, we return the original pointer on
   the stack.  */

void
bc_expand_constructor (constr)
     tree constr;
{
  char *l;
  HOST_WIDE_INT ptroffs;

  /* Literal constructors are handled as constants, whereas
     non-literals are evaluated and stored element by element
     into the data segment.  */

  /* Allocate space in proper segment and push pointer to space on stack.  */

  l = bc_gen_constr_label ();

  if (TREE_CONSTANT (constr))
    {
      text_section ();

      bc_emit_const_labeldef (l);
      bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
    }
  else
    {
      data_section ();

      bc_emit_data_labeldef (l);
      bc_output_data_constructor (constr);
    }

  /* Add reference to pointer table and recall pointer to stack;
     this code is common for both types of constructors: literals
     and non-literals.  */

  ptroffs = bc_define_pointer (l);
  bc_emit_instruction (constP, ptroffs);

  /* This is all that has to be done if it's a literal.  */
  if (TREE_CONSTANT (constr))
    return;

  /* At this point, we have the pointer to the structure on top of the stack.
     Generate sequences of store_memory calls for the constructor.  */

  /* constructor type is structure */
  if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
    {
      register tree elt;

      /* If the constructor has fewer fields than the structure,
         clear the whole structure first.  */

      if (list_length (CONSTRUCTOR_ELTS (constr))
          != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
        {
          bc_emit_instruction (duplicate);
          bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
          bc_emit_instruction (clearBLK);
        }

      /* Store each element of the constructor into the corresponding
         field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
        {
          register tree field = TREE_PURPOSE (elt);
          register enum machine_mode mode;
          int bitsize;
          int bitpos;
          int unsignedp;

          bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
          mode = DECL_MODE (field);
          unsignedp = TREE_UNSIGNED (field);

          bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));

          bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
                          /* The alignment of TARGET is
                             at least what its type requires.  */
                          VOIDmode, 0,
                          TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
                          int_size_in_bytes (TREE_TYPE (constr)));
        }
    }
  else
    {
      /* Constructor type is array */
      if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
        {
          register tree elt;
          register int i;
          tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
          int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
          int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
          tree elttype = TREE_TYPE (TREE_TYPE (constr));

          /* If the constructor has fewer elements than the array,
             clear the whole array first.  */

          if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
            {
              bc_emit_instruction (duplicate);
              bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
              bc_emit_instruction (clearBLK);
            }

          /* Store each element of the constructor into the corresponding
             element of TARGET, determined by counting the elements.  */

          for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
               elt;
               elt = TREE_CHAIN (elt), i++)
            {
              register enum machine_mode mode;
              int bitsize;
              int bitpos;
              int unsignedp;

              mode = TYPE_MODE (elttype);
              bitsize = GET_MODE_BITSIZE (mode);
              unsignedp = TREE_UNSIGNED (elttype);

              bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
                        /* * TYPE_SIZE_UNIT (elttype) */ );

              bc_store_field (elt, bitsize, bitpos, mode,
                              TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
                              /* The alignment of TARGET is
                                 at least what its type requires.  */
                              VOIDmode, 0,
                              TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
                              int_size_in_bytes (TREE_TYPE (constr)));
            }
        }
    }
}
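
/* Illustrative sketch, not compiler code: the clear-then-store scheme
   above matches the C rule that members omitted from an initializer are
   zero.  Conceptually, the object is block-cleared first and only the
   listed members are stored afterwards.  */
#if 0
struct point { int x, y, z; };

static struct point
constructor_example ()
{
  /* z is unmentioned: clearBLK zeroes the object, then x and y are
     stored into their fields.  */
  struct point p = { 1, 2 };
  return p;
}
#endif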

/* Store the value of EXP (an expression tree) into member FIELD of
   structure at address on stack, which has type TYPE, mode MODE and
   occupies BITSIZE bits, starting BITPOS bits from the beginning of the
   structure.

   ALIGN is the alignment that TARGET is known to have, measured in bytes.
   TOTAL_SIZE is its size in bytes, or -1 if variable.  */

void
bc_store_field (field, bitsize, bitpos, mode, exp, type,
                value_mode, unsignedp, align, total_size)
     int bitsize, bitpos;
     enum machine_mode mode;
     tree field, exp, type;
     enum machine_mode value_mode;
     int unsignedp;
     int align;
     int total_size;
{
  /* Expand expression and copy pointer */
  bc_expand_expr (exp);
  bc_emit_instruction (over);

  /* If the component is a bit field, we cannot use addressing to access
     it.  Use bit-field techniques to store in it.  */

  if (DECL_BIT_FIELD (field))
    {
      bc_store_bit_field (bitpos, bitsize, unsignedp);
      return;
    }
  else
    /* Not bit field */
    {
      HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;

      /* Advance pointer to the desired member */
      if (offset)
        bc_emit_instruction (addconstPSI, offset);

      /* Store */
      bc_store_memory (type, field);
    }
}

/* Store SI/SU in bitfield */

void
bc_store_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
  /* Push bitfield offset and size */
  bc_push_offset_and_size (offset, size);

  /* Store */
  bc_emit_instruction (sstoreBI);
}

/* Load SI/SU from bitfield */

void
bc_load_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
  /* Push bitfield offset and size */
  bc_push_offset_and_size (offset, size);

  /* Load: sign-extend if signed, else zero-extend */
  bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
}

/* Adjust interpreter stack by NLEVELS.  Positive means drop NLEVELS
   (adjust stack pointer upwards), negative means add that number of
   levels (adjust the stack pointer downwards).  Only positive values
   normally make sense.  */

void
bc_adjust_stack (nlevels)
     int nlevels;
{
  switch (nlevels)
    {
    case 0:
      break;

    case 2:
      bc_emit_instruction (drop);
      /* Fall through: a second drop pops the remaining level.  */

    case 1:
      bc_emit_instruction (drop);
      break;

    default:

      bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
      stack_depth -= nlevels;
    }

#if defined (VALIDATE_STACK_FOR_BC)
  VALIDATE_STACK_FOR_BC ();
#endif
}