/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "defaults.h"
#include "toplev.h"
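
/* Round a count X up to a whole number of Y-sized units: the ceiling
   of X divided by Y.  */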
#define CEIL(x,y) (((x) + (y) - 1) / (y))

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Under some ABIs, it is the caller's responsibility to pop arguments
   pushed for function calls.  A naive implementation would simply pop
   the arguments immediately after each call.  However, if several
   function calls are made in a row, it is typically cheaper to pop
   all the arguments after all of the calls are complete since a
   single pop instruction can be used.  Therefore, GCC attempts to
   defer popping the arguments until absolutely necessary.  (For
   example, at the end of a conditional, the arguments must be popped,
   since code outside the conditional won't know whether or not the
   arguments need to be popped.)

   When INHIBIT_DEFER_POP is non-zero, however, the compiler does not
   attempt to defer pops.  Instead, the stack is popped immediately
   after each call.  Rather than setting this variable directly, use
   NO_DEFER_POP and OK_DEFER_POP.  */
int inhibit_defer_pop;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* Don't check memory usage, since code is being emitted to check memory
   usage.  Used when current_function_check_memory_usage is true, to avoid
   infinite recursion.  */
static int in_check_memory_usage;

/* Postincrements that still need to be expanded.  */
static rtx pending_chain;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
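/* TO/FROM are the destination and source; TO_ADDR/FROM_ADDR their
   addresses.  AUTINC_* is nonzero when the corresponding address is an
   autoincrement expression; EXPLICIT_INC_* is -1 or 1 when
   move_by_pieces_1 must itself emit the pre-decrement or post-increment
   adds.  TO_STRUCT/FROM_STRUCT record MEM_IN_STRUCT_P of the operands.
   LEN is the number of bytes left to move, OFFSET the current
   displacement, and REVERSE nonzero when moving from the high end
   downward.  */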
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int len;
  int offset;
  int reverse;
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};

extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;

static rtx get_push_address PROTO ((int));

static rtx enqueue_insn PROTO((rtx, rtx));
static void init_queue PROTO((void));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
                                    struct move_by_pieces *));
static void clear_by_pieces PROTO((rtx, int, int));
static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
                                     struct clear_by_pieces *));
static int is_zeros_p PROTO((tree));
static int mostly_zeros_p PROTO((tree));
static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
                                           tree, tree, int));
static void store_constructor PROTO((tree, rtx, int));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
                              enum machine_mode, int, int,
                              int, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PROTO((enum expand_modifier));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree, int));
static int fixed_type_p PROTO((tree));
static rtx var_rtx PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx get_memory_rtx PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx,
                                 enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int, int));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns \
                                       (SIZE, ALIGN) < MOVE_RATIO)
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif

/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  char *free_point;

  start_sequence ();

  /* Since we are on the permanent obstack, we must be sure we save this
     spot AFTER we call start_sequence, since it will reuse the rtl it
     makes.  */
  free_point = (char *) oballoc (0);

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
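
  /* Make a dummy SET insn whose source and destination we overwrite below,
     so that recog can tell us whether each (register, memory) pairing is
     a valid load or store for the mode.  */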
  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            reg = gen_rtx_REG (mode, regno);

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  end_sequence ();
  obfree (free_point);
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  p->pending_chain = pending_chain;
  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_chain = NULL_RTX;
  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_chain = p->pending_chain;
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var),
                                  var, NULL_RTX, NULL_RTX, body,
                                  pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
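
/* For example, expanding VAR++ enqueues the increment and yields a QUEUED
   rtx; a later use of VAR's old value must pass that rtx through
   protect_from_queue, which returns either VAR itself (if the increment
   insn has not been emitted yet) or a temporary holding the pre-increment
   value.  */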

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
         use of autoincrement.  Make a copy of the contents of the memory
         location rather than a copy of the address, but not if the value is
         of mode BLKmode.  Don't modify X in place since it might be
         shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          register rtx y = XEXP (x, 0);
          register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

          RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
          MEM_COPY_ATTRIBUTES (new, x);
          MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);

          if (QUEUED_INSN (y))
            {
              register rtx temp = gen_reg_rtx (GET_MODE (new));
              emit_insn_before (gen_move_insn (temp, new),
                                QUEUED_INSN (y));
              return temp;
            }
          return new;
        }
      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        {
          rtx tem = protect_from_queue (XEXP (x, 0), 0);
          if (tem != XEXP (x, 0))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = tem;
            }
        }
      else if (code == PLUS || code == MULT)
        {
          rtx new0 = protect_from_queue (XEXP (x, 0), 0);
          rtx new1 = protect_from_queue (XEXP (x, 1), 0);
          if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = new0;
              XEXP (x, 1) = new1;
            }
        }
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
              || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
        {
          QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
          emit_insn (QUEUED_BODY (p));
        }
      else
        QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
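
/* The overall strategy below: use a direct conversion insn when the
   target provides one; otherwise convert via word_mode, via a library
   call (for floating modes), or with explicit shift sequences.  */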

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
        {
          /* Try converting directly if the insn is supported.  */
          if ((code = can_extend_p (to_mode, from_mode, 0))
              != CODE_FOR_nothing)
            {
              emit_unop_insn (code, to, from, UNKNOWN);
              return;
            }
        }

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
          return;
        }
#endif

      libcall = (rtx) 0;
      switch (from_mode)
        {
        case SFmode:
          switch (to_mode)
            {
            case DFmode:
              libcall = extendsfdf2_libfunc;
              break;

            case XFmode:
              libcall = extendsfxf2_libfunc;
              break;

            case TFmode:
              libcall = extendsftf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case DFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncdfsf2_libfunc;
              break;

            case XFmode:
              libcall = extenddfxf2_libfunc;
              break;

            case TFmode:
              libcall = extenddftf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case XFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncxfsf2_libfunc;
              break;

            case DFmode:
              libcall = truncxfdf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case TFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = trunctfsf2_libfunc;
              break;

            case DFmode:
              libcall = trunctfdf2_libfunc;
              break;

            default:
              break;
            }
          break;

        default:
          break;
        }

      if (libcall == (rtx) 0)
        /* This conversion is not implemented yet.  */
        abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
                                       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (GET_CODE (to) == REG)
            emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
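
  /* PQImode, PSImode and PDImode below are partial integer modes, which
     some targets use for pointers narrower than the corresponding full
     integer mode; converting to or from them requires the special insns
     tested for here.  */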
  /* Handle pointer conversion.  */			/* SPEE 900220 */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
        from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
        {
          emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
        {
          from = convert_to_mode (QImode, from, unsignedp);
          from_mode = QImode;
        }
      else
        {
#ifdef HAVE_extendpqiqi2
          if (HAVE_extendpqiqi2)
            {
              emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpqiqi2 */
          abort ();
        }
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
        {
          emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
        {
          from = convert_to_mode (SImode, from, unsignedp);
          from_mode = SImode;
        }
      else
        {
#ifdef HAVE_extendpsisi2
          if (HAVE_extendpsisi2)
            {
              emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpsisi2 */
          abort ();
        }
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
        from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
        {
          emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
        {
          from = convert_to_mode (DImode, from, unsignedp);
          from_mode = DImode;
        }
      else
        {
#ifdef HAVE_extendpdidi2
          if (HAVE_extendpdidi2)
            {
              emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpdidi2 */
          abort ();
        }
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
                                      - GET_MODE_BITSIZE (from_mode), 0);
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
        {
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
        {
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
        {
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
        {
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
        {
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
        {
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
        {
          emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
        {
          emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
        {
          emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
        {
          emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */
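
  /* For instance, with a 64-bit HOST_WIDE_INT, converting (const_int -1)
     of QImode to an unsigned 128-bit mode should produce the value 0xff
     with a zero high-order word, which is what the code below returns.  */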

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (GET_CODE (x) == REG
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return GEN_INT (val);
        }

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}

/* This macro is used to determine the largest unit size that
   move_by_pieces can use.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES MOVE_MAX
#endif

/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */
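
/* The copy proceeds in two phases: first, if more than two move insns
   are needed, the addresses are copied into registers (possibly set up
   for auto-increment); then the data is moved in the widest available
   integer mode, falling back to narrower modes for the remainder.  */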

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
             ? gen_rtx_MEM (mode, data->to_addr)
             : copy_rtx (change_address (data->to, mode,
                                         plus_constant (data->to_addr,
                                                        data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

      from1
        = (data->autinc_from
           ? gen_rtx_MEM (mode, data->from_addr)
           : copy_rtx (change_address (data->from, mode,
                                       plus_constant (data->from_addr,
                                                      data->offset))));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));

      emit_insn ((*genfun) (to1, from1));
      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}

/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
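
/* Strategy: small constant-size copies go through move_by_pieces;
   otherwise we try the target's movstr patterns from the most limited
   mode up, and as a last resort call memcpy out of line (or bcopy,
   when TARGET_MEM_FUNCTIONS is not defined).  */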

rtx
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
         including more than one in the machine description unless
         the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        {
          enum insn_code code = movstr_optab[(int) mode];

          if (code != CODE_FOR_nothing
              /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
                 here because if SIZE is less than the mode mask, as it is
                 returned by the macro, it will definitely be less than the
                 actual mode mask.  */
              && ((GET_CODE (size) == CONST_INT
                   && ((unsigned HOST_WIDE_INT) INTVAL (size)
                       <= (GET_MODE_MASK (mode) >> 1)))
                  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
              && (insn_operand_predicate[(int) code][0] == 0
                  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
              && (insn_operand_predicate[(int) code][1] == 0
                  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
              && (insn_operand_predicate[(int) code][3] == 0
                  || (*insn_operand_predicate[(int) code][3]) (opalign,
                                                               VOIDmode)))
            {
              rtx op2;
              rtx last = get_last_insn ();
              rtx pat;

              op2 = convert_to_mode (mode, size, 1);
              if (insn_operand_predicate[(int) code][2] != 0
                  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
                op2 = copy_to_mode_reg (mode, op2);

              pat = GEN_FCN ((int) code) (x, y, op2, opalign);
              if (pat)
                {
                  emit_insn (pat);
                  return 0;
                }
              else
                delete_insns_since (last);
            }
        }

#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
         memcpy in this context.

         This could be a user call to memcpy and the user may wish to
         examine the return value from memcpy.

         For targets where libcalls and normal calls have different conventions
         for returning pointers, we could end up generating incorrect code.

         So instead of using a libcall sequence we build up a suitable
         CALL_EXPR and expand the call in the normal fashion.  */
      if (fn == NULL_TREE)
        {
          tree fntype;

          /* This was copied from except.c, I don't know if all this is
             necessary in this context or not.  */
          fn = get_identifier ("memcpy");
          push_obstacks_nochange ();
          end_temporary_allocation ();
          fntype = build_pointer_type (void_type_node);
          fntype = build_function_type (fntype, NULL_TREE);
          fn = build_decl (FUNCTION_DECL, fn, fntype);
          DECL_EXTERNAL (fn) = 1;
          TREE_PUBLIC (fn) = 1;
          DECL_ARTIFICIAL (fn) = 1;
          make_decl_rtl (fn, NULL_PTR, 1);
          assemble_external (fn);
          pop_obstacks ();
        }

      /* We need to make an argument list for the function call.

         memcpy has three arguments, the first two are void * addresses and
         the last is a size_t byte count for the copy.  */
      arg_list
        = build_tree_list (NULL_TREE,
                           make_tree (build_pointer_type (void_type_node),
                                      XEXP (x, 0)));
      TREE_CHAIN (arg_list)
        = build_tree_list (NULL_TREE,
                           make_tree (build_pointer_type (void_type_node),
                                      XEXP (y, 0)));
      TREE_CHAIN (TREE_CHAIN (arg_list))
        = build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                         call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
      emit_library_call (bcopy_libfunc, 0,
                         VOIDmode, 3, XEXP (y, 0), Pmode,
                         XEXP (x, 0), Pmode,
                         convert_to_mode (TYPE_MODE (integer_type_node), size,
                                          TREE_UNSIGNED (integer_type_node)),
                         TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}

/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (change_address (x, mode, NULL),
                      gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
        abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
                            gen_rtx_REG (word_mode, regno),
                            build_int_2 ((UNITS_PER_WORD - size)
                                         * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
        abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}

/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  ALIGN is the known alignment of
   SRC in bits.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */
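/* Each element of the PARALLEL (after the optional leading NULL entry)
   pairs the register to load with a CONST_INT giving the byte position
   of that piece within the block; the code below reads them with
   XEXP (XVECEXP (dst, 0, i), 0) and XEXP (XVECEXP (dst, 0, i), 1).  */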

void
emit_group_load (dst, orig_src, ssize, align)
     rtx dst, orig_src;
     int align, ssize;
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* If we won't be loading directly from memory, protect the real source
     from strange tricks we might play.  */
  src = orig_src;
  if (GET_CODE (src) != MEM)
    {
      src = gen_reg_rtx (GET_MODE (orig_src));
      emit_move_insn (src, orig_src);
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + bytelen > ssize)
        {
          shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          if (bytelen <= 0)
            abort ();
        }

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
          && align * BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i],
                          change_address (src, mode,
                                          plus_constant (XEXP (src, 0),
                                                         bytepos)));
        }
      else
        {
          tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                       bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                       mode, mode, align, ssize);
        }

      if (BYTES_BIG_ENDIAN && shift)
        {
          expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
                        tmps[i], 0, OPTAB_WIDEN);
        }
    }
  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}

/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  ALIGN is the known alignment of DST.  */
1994 void
1995 emit_group_store (orig_dst, src, ssize, align)
1996 rtx orig_dst, src;
1997 int ssize, align;
1999 rtx *tmps, dst;
2000 int start, i;
2002 if (GET_CODE (src) != PARALLEL)
2003 abort ();
2005 /* Check for a NULL entry, used to indicate that the parameter goes
2006 both on the stack and in registers. */
2007 if (XEXP (XVECEXP (src, 0, 0), 0))
2008 start = 0;
2009 else
2010 start = 1;
2012 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2014 /* Copy the (probable) hard regs into pseudos. */
2015 for (i = start; i < XVECLEN (src, 0); i++)
2017 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2018 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2019 emit_move_insn (tmps[i], reg);
2021 emit_queue ();
2023 /* If we won't be storing directly into memory, protect the real destination
2024 from strange tricks we might play. */
2025 dst = orig_dst;
2026 if (GET_CODE (dst) == PARALLEL)
2028 rtx temp;
2030 /* We can get a PARALLEL dst if there is a conditional expression in
2031 a return statement. In that case, the dst and src are the same,
2032 so no action is necessary. */
2033 if (rtx_equal_p (dst, src))
2034 return;
2036 /* It is unclear if we can ever reach here, but we may as well handle
2037 it. Allocate a temporary, and split this into a store/load to/from
2038 the temporary. */
2040 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2041 emit_group_store (temp, src, ssize, align);
2042 emit_group_load (dst, temp, ssize, align);
2043 return;
2045 else if (GET_CODE (dst) != MEM)
2047 dst = gen_reg_rtx (GET_MODE (orig_dst));
2048 /* Make life a bit easier for combine. */
2049 emit_move_insn (dst, const0_rtx);
2051 else if (! MEM_IN_STRUCT_P (dst))
2053 /* store_bit_field requires that memory operations have
2054 mem_in_struct_p set; we might not. */
2056 dst = copy_rtx (orig_dst);
2057 MEM_SET_IN_STRUCT_P (dst, 1);
2060 /* Process the pieces. */
2061 for (i = start; i < XVECLEN (src, 0); i++)
2063 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2064 enum machine_mode mode = GET_MODE (tmps[i]);
2065 int bytelen = GET_MODE_SIZE (mode);
2067 /* Handle trailing fragments that run over the size of the struct. */
2068 if (ssize >= 0 && bytepos + bytelen > ssize)
2070 if (BYTES_BIG_ENDIAN)
2072 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2073 tmps[i] = expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2074 tmps[i], 0, OPTAB_WIDEN);
2076 bytelen = ssize - bytepos;
2079 /* Optimize the access just a bit. */
2080 if (GET_CODE (dst) == MEM
2081 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2082 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2083 && bytelen == GET_MODE_SIZE (mode))
2085 emit_move_insn (change_address (dst, mode,
2086 plus_constant (XEXP (dst, 0),
2087 bytepos)),
2088 tmps[i]);
2090 else
2092 store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT,
2093 mode, tmps[i], align, ssize);
2096 emit_queue ();
2098 /* Copy from the pseudo into the (probable) hard reg. */
2099 if (GET_CODE (dst) == REG)
2100 emit_move_insn (orig_dst, dst);
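/* Illustrative sketch -- not part of expr.c.  The "optimize the access
   just a bit" test that both group routines above apply, written as a
   standalone predicate; the name and parameters are ours, with 8-bit
   units assumed.  A plain move insn is safe only when the MEM is
   aligned enough for the piece's mode, the piece starts on a
   mode-aligned boundary, and no trailing bytes were trimmed off.  */
static int
group_piece_plain_move_ok (int align_bytes, int bytepos,
                           int mode_align_bits, int bytelen, int mode_size)
{
  return (align_bytes * 8 >= mode_align_bits
          && (bytepos * 8) % mode_align_bits == 0
          && bytelen == mode_size);
}
/* E.g. a word-aligned MEM (align_bytes == 4) with a 4-byte piece at
   bytepos 4 and 32-bit mode alignment qualifies; a trimmed 2-byte
   trailing fragment does not, and falls back to extract_bit_field /
   store_bit_field.  */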
2103 /* Generate code to copy a BLKmode object of TYPE out of a
2104 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2105 is null, a stack temporary is created. TGTBLK is returned.
2107 The primary purpose of this routine is to handle functions
2108 that return BLKmode structures in registers. Some machines
2109 (the PA for example) want to return all small structures
2110 in registers regardless of the structure's alignment. */
2113 rtx
2114 copy_blkmode_from_reg (tgtblk, srcreg, type)
2115 rtx tgtblk;
2116 rtx srcreg;
2117 tree type;
2119 int bytes = int_size_in_bytes (type);
2120 rtx src = NULL, dst = NULL;
2121 int bitsize = MIN (TYPE_ALIGN (type), (unsigned int) BITS_PER_WORD);
2122 int bitpos, xbitpos, big_endian_correction = 0;
2124 if (tgtblk == 0)
2126 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2127 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2128 preserve_temp_slots (tgtblk);
2131 /* This code assumes srcreg is at least a full word. If it isn't,
2132 copy it into a new pseudo which is a full word. */
2133 if (GET_MODE (srcreg) != BLKmode
2134 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2135 srcreg = convert_to_mode (word_mode, srcreg,
2136 TREE_UNSIGNED (type));
2138 /* Structures whose size is not a multiple of a word are aligned
2139 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2140 machine, this means we must skip the empty high order bytes when
2141 calculating the bit offset. */
2142 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2143 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2144 * BITS_PER_UNIT));
2146 /* Copy the structure BITSIZE bits at a time.
2148 We could probably emit more efficient code for machines
2149 which do not use strict alignment, but it doesn't seem
2150 worth the effort at the current time. */
2151 for (bitpos = 0, xbitpos = big_endian_correction;
2152 bitpos < bytes * BITS_PER_UNIT;
2153 bitpos += bitsize, xbitpos += bitsize)
2156 /* We need a new source operand each time xbitpos is on a
2157 word boundary and when xbitpos == big_endian_correction
2158 (the first time through). */
2159 if (xbitpos % BITS_PER_WORD == 0
2160 || xbitpos == big_endian_correction)
2161 src = operand_subword_force (srcreg,
2162 xbitpos / BITS_PER_WORD,
2163 BLKmode);
2165 /* We need a new destination operand each time bitpos is on
2166 a word boundary. */
2167 if (bitpos % BITS_PER_WORD == 0)
2168 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2170 /* Use xbitpos for the source extraction (right justified) and
2171 bitpos for the destination store (left justified). */
2172 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2173 extract_bit_field (src, bitsize,
2174 xbitpos % BITS_PER_WORD, 1,
2175 NULL_RTX, word_mode,
2176 word_mode,
2177 bitsize / BITS_PER_UNIT,
2178 BITS_PER_WORD),
2179 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2181 return tgtblk;
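/* Illustrative sketch -- not part of expr.c.  The big-endian
   correction computed above, for a hypothetical target with 8-bit
   units; the function name is ours.  */
#include <assert.h>

static int
big_endian_correction_model (int bytes, int units_per_word)
{
  int bits_per_word = units_per_word * 8;

  if (bytes % units_per_word == 0)
    return 0;
  /* Skip the empty high-order bits of the partially filled word.  */
  return bits_per_word - (bytes % units_per_word) * 8;
}

int
main (void)
{
  /* A 6-byte struct on a 32-bit target: the second word holds only
     2 bytes, so extraction starts 16 bits into that word.  */
  assert (big_endian_correction_model (6, 4) == 16);
  assert (big_endian_correction_model (8, 4) == 0);
  return 0;
}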
2185 /* Add a USE expression for REG to the (possibly empty) list pointed
2186 to by CALL_FUSAGE. REG must denote a hard register. */
2188 void
2189 use_reg (call_fusage, reg)
2190 rtx *call_fusage, reg;
2192 if (GET_CODE (reg) != REG
2193 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2194 abort ();
2196 *call_fusage
2197 = gen_rtx_EXPR_LIST (VOIDmode,
2198 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2201 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2202 starting at REGNO. All of these registers must be hard registers. */
2204 void
2205 use_regs (call_fusage, regno, nregs)
2206 rtx *call_fusage;
2207 int regno;
2208 int nregs;
2210 int i;
2212 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2213 abort ();
2215 for (i = 0; i < nregs; i++)
2216 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2219 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2220 PARALLEL REGS. This is for calls that pass values in multiple
2221 non-contiguous locations. The Irix 6 ABI has examples of this. */
2223 void
2224 use_group_regs (call_fusage, regs)
2225 rtx *call_fusage;
2226 rtx regs;
2228 int i;
2230 for (i = 0; i < XVECLEN (regs, 0); i++)
2232 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2234 /* A NULL entry means the parameter goes both on the stack and in
2235 registers. This can also be a MEM for targets that pass values
2236 partially on the stack and partially in registers. */
2237 if (reg != 0 && GET_CODE (reg) == REG)
2238 use_reg (call_fusage, reg);
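/* Illustrative sketch -- not part of expr.c.  CALL_FUSAGE grows the
   way a Lisp cons list does: each use_reg call above prepends one
   (USE reg) node via gen_rtx_EXPR_LIST.  A minimal model, with ints
   standing in for hard registers and our own type names: */
#include <stdlib.h>

struct fusage_node { int regno; struct fusage_node *next; };

static void
use_reg_model (struct fusage_node **call_fusage, int regno)
{
  struct fusage_node *node = malloc (sizeof *node);

  node->regno = regno;
  node->next = *call_fusage;   /* prepend, as gen_rtx_EXPR_LIST does */
  *call_fusage = node;
}
/* use_regs is then just a loop calling use_reg_model (&fusage,
   regno + i) for each of the NREGS consecutive registers.  */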
2242 /* Generate several move instructions to clear LEN bytes of block TO.
2243 (A MEM rtx with BLKmode). The caller must pass TO through
2244 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
2245 we can assume. */
2247 static void
2248 clear_by_pieces (to, len, align)
2249 rtx to;
2250 int len, align;
2252 struct clear_by_pieces data;
2253 rtx to_addr = XEXP (to, 0);
2254 int max_size = MOVE_MAX_PIECES + 1;
2255 enum machine_mode mode = VOIDmode, tmode;
2256 enum insn_code icode;
2258 data.offset = 0;
2259 data.to_addr = to_addr;
2260 data.to = to;
2261 data.autinc_to
2262 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2263 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2265 data.explicit_inc_to = 0;
2266 data.reverse
2267 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2268 if (data.reverse) data.offset = len;
2269 data.len = len;
2271 data.to_struct = MEM_IN_STRUCT_P (to);
2273 /* If copying requires more than two move insns,
2274 copy addresses to registers (to make displacements shorter)
2275 and use post-increment if available. */
2276 if (!data.autinc_to
2277 && move_by_pieces_ninsns (len, align) > 2)
2279 /* Determine the main mode we'll be using. */
2280 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2281 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2282 if (GET_MODE_SIZE (tmode) < max_size)
2283 mode = tmode;
2285 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2287 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2288 data.autinc_to = 1;
2289 data.explicit_inc_to = -1;
2291 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2293 data.to_addr = copy_addr_to_reg (to_addr);
2294 data.autinc_to = 1;
2295 data.explicit_inc_to = 1;
2297 if (!data.autinc_to && CONSTANT_P (to_addr))
2298 data.to_addr = copy_addr_to_reg (to_addr);
2301 if (! SLOW_UNALIGNED_ACCESS
2302 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2303 align = MOVE_MAX;
2305 /* First move what we can in the largest integer mode, then go to
2306 successively smaller modes. */
2308 while (max_size > 1)
2310 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2311 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2312 if (GET_MODE_SIZE (tmode) < max_size)
2313 mode = tmode;
2315 if (mode == VOIDmode)
2316 break;
2318 icode = mov_optab->handlers[(int) mode].insn_code;
2319 if (icode != CODE_FOR_nothing
2320 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2321 GET_MODE_SIZE (mode)))
2322 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2324 max_size = GET_MODE_SIZE (mode);
2327 /* The code above should have handled everything. */
2328 if (data.len != 0)
2329 abort ();
2332 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2333 with move instructions for mode MODE. GENFUN is the gen_... function
2334 to make a move insn for that mode. DATA has all the other info. */
2336 static void
2337 clear_by_pieces_1 (genfun, mode, data)
2338 rtx (*genfun) PROTO ((rtx, ...));
2339 enum machine_mode mode;
2340 struct clear_by_pieces *data;
2342 register int size = GET_MODE_SIZE (mode);
2343 register rtx to1;
2345 while (data->len >= size)
2347 if (data->reverse) data->offset -= size;
2349 to1 = (data->autinc_to
2350 ? gen_rtx_MEM (mode, data->to_addr)
2351 : copy_rtx (change_address (data->to, mode,
2352 plus_constant (data->to_addr,
2353 data->offset))));
2354 MEM_IN_STRUCT_P (to1) = data->to_struct;
2356 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2357 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2359 emit_insn ((*genfun) (to1, const0_rtx));
2360 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2361 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2363 if (! data->reverse) data->offset += size;
2365 data->len -= size;
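/* Illustrative sketch -- not part of expr.c.  The mode-walking
   strategy of clear_by_pieces / clear_by_pieces_1 above, with memset
   of power-of-two chunks standing in for move insns in successively
   narrower integer modes; the names are ours.  */
#include <string.h>

static int
clear_by_pieces_model (unsigned char *to, int len, int max_size)
{
  int ninsns = 0, offset = 0, size;

  for (size = max_size; size > 0; size >>= 1)   /* next narrower "mode" */
    while (len >= size)
      {
        memset (to + offset, 0, (size_t) size); /* one SIZE-byte "move" */
        offset += size;
        len -= size;
        ninsns++;
      }
  return ninsns;   /* every byte covered, as the abort () above checks */
}
/* Clearing 11 bytes with max_size == 4 issues four "insns":
   4 + 4 + 2 + 1 bytes.  */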
2369 /* Write zeros through the storage of OBJECT.
2370 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2371 the maximum alignment we can assume it has, measured in bytes.
2373 If we call a function that returns the length of the block, return it. */
2375 rtx
2376 clear_storage (object, size, align)
2377 rtx object;
2378 rtx size;
2379 int align;
2381 #ifdef TARGET_MEM_FUNCTIONS
2382 static tree fn;
2383 tree call_expr, arg_list;
2384 #endif
2385 rtx retval = 0;
2387 if (GET_MODE (object) == BLKmode)
2389 object = protect_from_queue (object, 1);
2390 size = protect_from_queue (size, 0);
2392 if (GET_CODE (size) == CONST_INT
2393 && MOVE_BY_PIECES_P (INTVAL (size), align))
2394 clear_by_pieces (object, INTVAL (size), align);
2396 else
2398 /* Try the most limited insn first, because there's no point
2399 including more than one in the machine description unless
2400 the more limited one has some advantage. */
2402 rtx opalign = GEN_INT (align);
2403 enum machine_mode mode;
2405 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2406 mode = GET_MODE_WIDER_MODE (mode))
2408 enum insn_code code = clrstr_optab[(int) mode];
2410 if (code != CODE_FOR_nothing
2411 /* We don't need MODE to be narrower than
2412 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2413 the mode mask, as it is returned by the macro, it will
2414 definitely be less than the actual mode mask. */
2415 && ((GET_CODE (size) == CONST_INT
2416 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2417 <= (GET_MODE_MASK (mode) >> 1)))
2418 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2419 && (insn_operand_predicate[(int) code][0] == 0
2420 || (*insn_operand_predicate[(int) code][0]) (object,
2421 BLKmode))
2422 && (insn_operand_predicate[(int) code][2] == 0
2423 || (*insn_operand_predicate[(int) code][2]) (opalign,
2424 VOIDmode)))
2426 rtx op1;
2427 rtx last = get_last_insn ();
2428 rtx pat;
2430 op1 = convert_to_mode (mode, size, 1);
2431 if (insn_operand_predicate[(int) code][1] != 0
2432 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2433 mode))
2434 op1 = copy_to_mode_reg (mode, op1);
2436 pat = GEN_FCN ((int) code) (object, op1, opalign);
2437 if (pat)
2439 emit_insn (pat);
2440 return 0;
2442 else
2443 delete_insns_since (last);
2448 #ifdef TARGET_MEM_FUNCTIONS
2449 /* It is incorrect to use the libcall calling conventions to call
2450 memset in this context.
2452 This could be a user call to memset and the user may wish to
2453 examine the return value from memset.
2455 For targets where libcalls and normal calls have different conventions
2456 for returning pointers, we could end up generating incorrect code.
2458 So instead of using a libcall sequence we build up a suitable
2459 CALL_EXPR and expand the call in the normal fashion. */
2460 if (fn == NULL_TREE)
2462 tree fntype;
2464 /* This was copied from except.c; I don't know whether all of this is
2465 necessary in this context or not. */
2466 fn = get_identifier ("memset");
2467 push_obstacks_nochange ();
2468 end_temporary_allocation ();
2469 fntype = build_pointer_type (void_type_node);
2470 fntype = build_function_type (fntype, NULL_TREE);
2471 fn = build_decl (FUNCTION_DECL, fn, fntype);
2472 DECL_EXTERNAL (fn) = 1;
2473 TREE_PUBLIC (fn) = 1;
2474 DECL_ARTIFICIAL (fn) = 1;
2475 make_decl_rtl (fn, NULL_PTR, 1);
2476 assemble_external (fn);
2477 pop_obstacks ();
2480 /* We need to make an argument list for the function call.
2482 memset has three arguments: the first is a void * address, the
2483 second an integer with the initialization value, and the last a
2484 size_t byte count for the fill. */
2485 arg_list
2486 = build_tree_list (NULL_TREE,
2487 make_tree (build_pointer_type (void_type_node),
2488 XEXP (object, 0)));
2489 TREE_CHAIN (arg_list)
2490 = build_tree_list (NULL_TREE,
2491 make_tree (integer_type_node, const0_rtx));
2492 TREE_CHAIN (TREE_CHAIN (arg_list))
2493 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2494 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2496 /* Now we have to build up the CALL_EXPR itself. */
2497 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2498 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2499 call_expr, arg_list, NULL_TREE);
2500 TREE_SIDE_EFFECTS (call_expr) = 1;
2502 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2503 #else
2504 emit_library_call (bzero_libfunc, 0,
2505 VOIDmode, 2,
2506 XEXP (object, 0), Pmode,
2507 convert_to_mode
2508 (TYPE_MODE (integer_type_node), size,
2509 TREE_UNSIGNED (integer_type_node)),
2510 TYPE_MODE (integer_type_node));
2511 #endif
2514 else
2515 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2517 return retval;
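/* Illustrative sketch -- not part of expr.c.  The user-visible
   property the comment above protects: a real call to memset returns
   its first argument under the normal pointer-return convention, and
   clear_storage must not lose that value by using a bare libcall.  */
#include <string.h>
#include <assert.h>

int
main (void)
{
  char buf[16];
  void *ret = memset (buf, 0, sizeof buf);

  assert (ret == buf);   /* ISO C: memset returns its first argument */
  return 0;
}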
2520 /* Generate code to copy Y into X.
2521 Both Y and X must have the same mode, except that
2522 Y can be a constant with VOIDmode.
2523 This mode cannot be BLKmode; use emit_block_move for that.
2525 Return the last instruction emitted. */
2527 rtx
2528 emit_move_insn (x, y)
2529 rtx x, y;
2531 enum machine_mode mode = GET_MODE (x);
2533 x = protect_from_queue (x, 1);
2534 y = protect_from_queue (y, 0);
2536 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2537 abort ();
2539 /* Never force constant_p_rtx to memory. */
2540 if (GET_CODE (y) == CONSTANT_P_RTX)
2542 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2543 y = force_const_mem (mode, y);
2545 /* If X or Y are memory references, verify that their addresses are valid
2546 for the machine. */
2547 if (GET_CODE (x) == MEM
2548 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2549 && ! push_operand (x, GET_MODE (x)))
2550 || (flag_force_addr
2551 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2552 x = change_address (x, VOIDmode, XEXP (x, 0));
2554 if (GET_CODE (y) == MEM
2555 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2556 || (flag_force_addr
2557 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2558 y = change_address (y, VOIDmode, XEXP (y, 0));
2560 if (mode == BLKmode)
2561 abort ();
2563 return emit_move_insn_1 (x, y);
2566 /* Low level part of emit_move_insn.
2567 Called just like emit_move_insn, but assumes X and Y
2568 are basically valid. */
2570 rtx
2571 emit_move_insn_1 (x, y)
2572 rtx x, y;
2574 enum machine_mode mode = GET_MODE (x);
2575 enum machine_mode submode;
2576 enum mode_class class = GET_MODE_CLASS (mode);
2577 int i;
2579 if (mode >= MAX_MACHINE_MODE)
2580 abort ();
2582 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2583 return
2584 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2586 /* Expand complex moves by moving real part and imag part, if possible. */
2587 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2588 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2589 * BITS_PER_UNIT),
2590 (class == MODE_COMPLEX_INT
2591 ? MODE_INT : MODE_FLOAT),
2592 0))
2593 && (mov_optab->handlers[(int) submode].insn_code
2594 != CODE_FOR_nothing))
2596 /* Don't split destination if it is a stack push. */
2597 int stack = push_operand (x, GET_MODE (x));
2599 /* If this is a stack, push the highpart first, so it
2600 will be in the argument order.
2602 In that case, change_address is used only to convert
2603 the mode, not to change the address. */
2604 if (stack)
2606 /* Note that the real part always precedes the imag part in memory
2607 regardless of machine's endianness. */
2608 #ifdef STACK_GROWS_DOWNWARD
2609 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2610 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2611 gen_imagpart (submode, y)));
2612 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2613 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2614 gen_realpart (submode, y)));
2615 #else
2616 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2617 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2618 gen_realpart (submode, y)));
2619 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2620 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2621 gen_imagpart (submode, y)));
2622 #endif
2624 else
2626 /* Show the output dies here. This is necessary for pseudos;
2627 hard regs shouldn't appear here except as return values.
2628 We never want to emit such a clobber after reload. */
2629 if (x != y
2630 && ! (reload_in_progress || reload_completed))
2632 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2635 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2636 (gen_realpart (submode, x), gen_realpart (submode, y)));
2637 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2638 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2641 return get_last_insn ();
2644 /* This will handle any multi-word mode that lacks a move_insn pattern.
2645 However, you will get better code if you define such patterns,
2646 even if they must turn into multiple assembler instructions. */
2647 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2649 rtx last_insn = 0;
2651 #ifdef PUSH_ROUNDING
2653 /* If X is a push on the stack, do the push now and replace
2654 X with a reference to the stack pointer. */
2655 if (push_operand (x, GET_MODE (x)))
2657 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2658 x = change_address (x, VOIDmode, stack_pointer_rtx);
2660 #endif
2662 /* Show the output dies here. This is necessary for pseudos;
2663 hard regs shouldn't appear here except as return values.
2664 We never want to emit such a clobber after reload. */
2665 if (x != y
2666 && ! (reload_in_progress || reload_completed))
2668 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2671 for (i = 0;
2672 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2673 i++)
2675 rtx xpart = operand_subword (x, i, 1, mode);
2676 rtx ypart = operand_subword (y, i, 1, mode);
2678 /* If we can't get a part of Y, put Y into memory if it is a
2679 constant. Otherwise, force it into a register. If we still
2680 can't get a part of Y, abort. */
2681 if (ypart == 0 && CONSTANT_P (y))
2683 y = force_const_mem (mode, y);
2684 ypart = operand_subword (y, i, 1, mode);
2686 else if (ypart == 0)
2687 ypart = operand_subword_force (y, i, mode);
2689 if (xpart == 0 || ypart == 0)
2690 abort ();
2692 last_insn = emit_move_insn (xpart, ypart);
2695 return last_insn;
2697 else
2698 abort ();
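/* Illustrative sketch -- not part of expr.c.  The multi-word fallback
   above, modeled on byte buffers: operand_subword hands back whole
   words, so one word-sized move is emitted per word of the mode.  The
   model (names ours) assumes both buffers are padded out to a word
   boundary, as operand_subword effectively guarantees.  */
#include <string.h>

static void
move_multiword_model (unsigned char *x, const unsigned char *y,
                      int mode_size, int units_per_word)
{
  int i;
  int nwords = (mode_size + units_per_word - 1) / units_per_word;

  for (i = 0; i < nwords; i++)        /* one emit_move_insn per word */
    memcpy (x + i * units_per_word, y + i * units_per_word,
            (size_t) units_per_word);
}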
2701 /* Pushing data onto the stack. */
2703 /* Push a block of length SIZE (perhaps variable)
2704 and return an rtx to address the beginning of the block.
2705 Note that it is not possible for the value returned to be a QUEUED.
2706 The value may be virtual_outgoing_args_rtx.
2708 EXTRA is the number of bytes of padding to push in addition to SIZE.
2709 BELOW nonzero means this padding comes at low addresses;
2710 otherwise, the padding comes at high addresses. */
2712 rtx
2713 push_block (size, extra, below)
2714 rtx size;
2715 int extra, below;
2717 register rtx temp;
2719 size = convert_modes (Pmode, ptr_mode, size, 1);
2720 if (CONSTANT_P (size))
2721 anti_adjust_stack (plus_constant (size, extra));
2722 else if (GET_CODE (size) == REG && extra == 0)
2723 anti_adjust_stack (size);
2724 else
2726 rtx temp = copy_to_mode_reg (Pmode, size);
2727 if (extra != 0)
2728 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2729 temp, 0, OPTAB_LIB_WIDEN);
2730 anti_adjust_stack (temp);
2733 #if defined (STACK_GROWS_DOWNWARD) \
2734 || (defined (ARGS_GROW_DOWNWARD) \
2735 && !defined (ACCUMULATE_OUTGOING_ARGS))
2737 /* Return the lowest stack address when STACK or ARGS grow downward and
2738 we are not accumulating outgoing arguments (the c4x port uses such
2739 conventions). */
2740 temp = virtual_outgoing_args_rtx;
2741 if (extra != 0 && below)
2742 temp = plus_constant (temp, extra);
2743 #else
2744 if (GET_CODE (size) == CONST_INT)
2745 temp = plus_constant (virtual_outgoing_args_rtx,
2746 - INTVAL (size) - (below ? 0 : extra));
2747 else if (extra != 0 && !below)
2748 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2749 negate_rtx (Pmode, plus_constant (size, extra)));
2750 else
2751 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2752 negate_rtx (Pmode, size));
2753 #endif
2755 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
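/* Illustrative sketch -- not part of expr.c.  The address arithmetic
   of the #else branch above for a constant SIZE, with addresses
   modeled as byte offsets; the name is ours.  After SIZE + EXTRA
   bytes are allocated, the usable block sits just above the padding
   when BELOW is nonzero, and below it otherwise.  */
static long
push_block_addr_model (long outgoing_args_base, long size, long extra,
                       int below)
{
  return outgoing_args_base - size - (below ? 0 : extra);
}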
2758 rtx
2759 gen_push_operand ()
2761 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2764 /* Return an rtx for the address of the beginning of an as-if-it-were-pushed
2765 block of SIZE bytes. */
2767 static rtx
2768 get_push_address (size)
2769 int size;
2771 register rtx temp;
2773 if (STACK_PUSH_CODE == POST_DEC)
2774 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2775 else if (STACK_PUSH_CODE == POST_INC)
2776 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2777 else
2778 temp = stack_pointer_rtx;
2780 return copy_to_reg (temp);
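/* Illustrative sketch -- not part of expr.c.  Where the block just
   pushed begins, as a function of STACK_PUSH_CODE, with the stack
   pointer modeled as a byte offset (enum and names ours).  Pre-modify
   codes leave sp pointing at the data; post-modify codes have already
   stepped past it.  */
enum push_code_model { PRE_DEC_M, PRE_INC_M, POST_DEC_M, POST_INC_M };

static long
get_push_address_model (long sp, long size, enum push_code_model code)
{
  switch (code)
    {
    case POST_DEC_M: return sp + size;  /* sp moved down past the data */
    case POST_INC_M: return sp - size;  /* sp moved up past the data */
    default:         return sp;         /* PRE_DEC / PRE_INC */
    }
}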
2783 /* Generate code to push X onto the stack, assuming it has mode MODE and
2784 type TYPE.
2785 MODE is redundant except when X is a CONST_INT (since they don't
2786 carry mode info).
2787 SIZE is an rtx for the size of data to be copied (in bytes),
2788 needed only if X is BLKmode.
2790 ALIGN (in bytes) is maximum alignment we can assume.
2792 If PARTIAL and REG are both nonzero, then copy that many of the first
2793 words of X into registers starting with REG, and push the rest of X.
2794 The amount of space pushed is decreased by PARTIAL words,
2795 rounded *down* to a multiple of PARM_BOUNDARY.
2796 REG must be a hard register in this case.
2797 If REG is zero but PARTIAL is not, take all the other actions for an
2798 argument partially in registers, but do not actually load any
2799 registers.
2801 EXTRA is the amount in bytes of extra space to leave next to this arg.
2802 This is ignored if an argument block has already been allocated.
2804 On a machine that lacks real push insns, ARGS_ADDR is the address of
2805 the bottom of the argument block for this call. We use indexing off there
2806 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2807 argument block has not been preallocated.
2809 ARGS_SO_FAR is the size of args previously pushed for this call.
2811 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2812 for arguments passed in registers. If nonzero, it will be the number
2813 of bytes required. */
2815 void
2816 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2817 args_addr, args_so_far, reg_parm_stack_space)
2818 register rtx x;
2819 enum machine_mode mode;
2820 tree type;
2821 rtx size;
2822 int align;
2823 int partial;
2824 rtx reg;
2825 int extra;
2826 rtx args_addr;
2827 rtx args_so_far;
2828 int reg_parm_stack_space;
2830 rtx xinner;
2831 enum direction stack_direction
2832 #ifdef STACK_GROWS_DOWNWARD
2833 = downward;
2834 #else
2835 = upward;
2836 #endif
2838 /* Decide where to pad the argument: `downward' for below,
2839 `upward' for above, or `none' for don't pad it.
2840 Default is below for small data on big-endian machines; else above. */
2841 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2843 /* Invert direction if stack is post-update. */
2844 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2845 if (where_pad != none)
2846 where_pad = (where_pad == downward ? upward : downward);
2848 xinner = x = protect_from_queue (x, 0);
2850 if (mode == BLKmode)
2852 /* Copy a block into the stack, entirely or partially. */
2854 register rtx temp;
2855 int used = partial * UNITS_PER_WORD;
2856 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2857 int skip;
2859 if (size == 0)
2860 abort ();
2862 used -= offset;
2864 /* USED is now the # of bytes we need not copy to the stack
2865 because registers will take care of them. */
2867 if (partial != 0)
2868 xinner = change_address (xinner, BLKmode,
2869 plus_constant (XEXP (xinner, 0), used));
2871 /* If the partial register-part of the arg counts in its stack size,
2872 skip the part of stack space corresponding to the registers.
2873 Otherwise, start copying to the beginning of the stack space,
2874 by setting SKIP to 0. */
2875 skip = (reg_parm_stack_space == 0) ? 0 : used;
2877 #ifdef PUSH_ROUNDING
2878 /* Do it with several push insns if that doesn't take lots of insns
2879 and if there is no difficulty with push insns that skip bytes
2880 on the stack for alignment purposes. */
2881 if (args_addr == 0
2882 && GET_CODE (size) == CONST_INT
2883 && skip == 0
2884 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
2885 /* Here we avoid the case of a structure whose weak alignment
2886 forces many pushes of a small amount of data,
2887 and such small pushes do rounding that causes trouble. */
2888 && ((! SLOW_UNALIGNED_ACCESS)
2889 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2890 || PUSH_ROUNDING (align) == align)
2891 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2893 /* Push padding now if padding above and stack grows down,
2894 or if padding below and stack grows up.
2895 But if space already allocated, this has already been done. */
2896 if (extra && args_addr == 0
2897 && where_pad != none && where_pad != stack_direction)
2898 anti_adjust_stack (GEN_INT (extra));
2900 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2901 INTVAL (size) - used, align);
2903 if (current_function_check_memory_usage && ! in_check_memory_usage)
2905 rtx temp;
2907 in_check_memory_usage = 1;
2908 temp = get_push_address (INTVAL (size) - used);
2909 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2910 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2911 temp, Pmode,
2912 XEXP (xinner, 0), Pmode,
2913 GEN_INT (INTVAL (size) - used),
2914 TYPE_MODE (sizetype));
2915 else
2916 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2917 temp, Pmode,
2918 GEN_INT (INTVAL (size) - used),
2919 TYPE_MODE (sizetype),
2920 GEN_INT (MEMORY_USE_RW),
2921 TYPE_MODE (integer_type_node));
2922 in_check_memory_usage = 0;
2925 else
2926 #endif /* PUSH_ROUNDING */
2928 /* Otherwise make space on the stack and copy the data
2929 to the address of that space. */
2931 /* Deduct words put into registers from the size we must copy. */
2932 if (partial != 0)
2934 if (GET_CODE (size) == CONST_INT)
2935 size = GEN_INT (INTVAL (size) - used);
2936 else
2937 size = expand_binop (GET_MODE (size), sub_optab, size,
2938 GEN_INT (used), NULL_RTX, 0,
2939 OPTAB_LIB_WIDEN);
2942 /* Get the address of the stack space.
2943 In this case, we do not deal with EXTRA separately.
2944 A single stack adjust will do. */
2945 if (! args_addr)
2947 temp = push_block (size, extra, where_pad == downward);
2948 extra = 0;
2950 else if (GET_CODE (args_so_far) == CONST_INT)
2951 temp = memory_address (BLKmode,
2952 plus_constant (args_addr,
2953 skip + INTVAL (args_so_far)));
2954 else
2955 temp = memory_address (BLKmode,
2956 plus_constant (gen_rtx_PLUS (Pmode,
2957 args_addr,
2958 args_so_far),
2959 skip));
2960 if (current_function_check_memory_usage && ! in_check_memory_usage)
2962 rtx target;
2964 in_check_memory_usage = 1;
2965 target = copy_to_reg (temp);
2966 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2967 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2968 target, Pmode,
2969 XEXP (xinner, 0), Pmode,
2970 size, TYPE_MODE (sizetype));
2971 else
2972 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2973 target, Pmode,
2974 size, TYPE_MODE (sizetype),
2975 GEN_INT (MEMORY_USE_RW),
2976 TYPE_MODE (integer_type_node));
2977 in_check_memory_usage = 0;
2980 /* TEMP is the address of the block. Copy the data there. */
2981 if (GET_CODE (size) == CONST_INT
2982 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align)))
2984 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
2985 INTVAL (size), align);
2986 goto ret;
2988 else
2990 rtx opalign = GEN_INT (align);
2991 enum machine_mode mode;
2992 rtx target = gen_rtx_MEM (BLKmode, temp);
2994 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2995 mode != VOIDmode;
2996 mode = GET_MODE_WIDER_MODE (mode))
2998 enum insn_code code = movstr_optab[(int) mode];
3000 if (code != CODE_FOR_nothing
3001 && ((GET_CODE (size) == CONST_INT
3002 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3003 <= (GET_MODE_MASK (mode) >> 1)))
3004 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3005 && (insn_operand_predicate[(int) code][0] == 0
3006 || ((*insn_operand_predicate[(int) code][0])
3007 (target, BLKmode)))
3008 && (insn_operand_predicate[(int) code][1] == 0
3009 || ((*insn_operand_predicate[(int) code][1])
3010 (xinner, BLKmode)))
3011 && (insn_operand_predicate[(int) code][3] == 0
3012 || ((*insn_operand_predicate[(int) code][3])
3013 (opalign, VOIDmode))))
3015 rtx op2 = convert_to_mode (mode, size, 1);
3016 rtx last = get_last_insn ();
3017 rtx pat;
3019 if (insn_operand_predicate[(int) code][2] != 0
3020 && ! ((*insn_operand_predicate[(int) code][2])
3021 (op2, mode)))
3022 op2 = copy_to_mode_reg (mode, op2);
3024 pat = GEN_FCN ((int) code) (target, xinner,
3025 op2, opalign);
3026 if (pat)
3028 emit_insn (pat);
3029 goto ret;
3031 else
3032 delete_insns_since (last);
3037 #ifndef ACCUMULATE_OUTGOING_ARGS
3038 /* If the source is referenced relative to the stack pointer,
3039 copy it to another register to stabilize it. We do not need
3040 to do this if we know that we won't be changing sp. */
3042 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3043 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3044 temp = copy_to_reg (temp);
3045 #endif
3047 /* Make inhibit_defer_pop nonzero around the library call
3048 to force it to pop the bcopy-arguments right away. */
3049 NO_DEFER_POP;
3050 #ifdef TARGET_MEM_FUNCTIONS
3051 emit_library_call (memcpy_libfunc, 0,
3052 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3053 convert_to_mode (TYPE_MODE (sizetype),
3054 size, TREE_UNSIGNED (sizetype)),
3055 TYPE_MODE (sizetype));
3056 #else
3057 emit_library_call (bcopy_libfunc, 0,
3058 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3059 convert_to_mode (TYPE_MODE (integer_type_node),
3060 size,
3061 TREE_UNSIGNED (integer_type_node)),
3062 TYPE_MODE (integer_type_node));
3063 #endif
3064 OK_DEFER_POP;
3067 else if (partial > 0)
3069 /* Scalar partly in registers. */
3071 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3072 int i;
3073 int not_stack;
3074 /* # words of start of argument
3075 that we must make space for but need not store. */
3076 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3077 int args_offset = INTVAL (args_so_far);
3078 int skip;
3080 /* Push padding now if padding above and stack grows down,
3081 or if padding below and stack grows up.
3082 But if space already allocated, this has already been done. */
3083 if (extra && args_addr == 0
3084 && where_pad != none && where_pad != stack_direction)
3085 anti_adjust_stack (GEN_INT (extra));
3087 /* If we make space by pushing it, we might as well push
3088 the real data. Otherwise, we can leave OFFSET nonzero
3089 and leave the space uninitialized. */
3090 if (args_addr == 0)
3091 offset = 0;
3093 /* Now NOT_STACK gets the number of words that we don't need to
3094 allocate on the stack. */
3095 not_stack = partial - offset;
3097 /* If the partial register-part of the arg counts in its stack size,
3098 skip the part of stack space corresponding to the registers.
3099 Otherwise, start copying to the beginning of the stack space,
3100 by setting SKIP to 0. */
3101 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3103 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3104 x = validize_mem (force_const_mem (mode, x));
3106 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3107 SUBREGs of such registers are not allowed. */
3108 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3109 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3110 x = copy_to_reg (x);
3112 /* Loop over all the words allocated on the stack for this arg. */
3113 /* We can do it by words, because any scalar bigger than a word
3114 has a size a multiple of a word. */
3115 #ifndef PUSH_ARGS_REVERSED
3116 for (i = not_stack; i < size; i++)
3117 #else
3118 for (i = size - 1; i >= not_stack; i--)
3119 #endif
3120 if (i >= not_stack + offset)
3121 emit_push_insn (operand_subword_force (x, i, mode),
3122 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3123 0, args_addr,
3124 GEN_INT (args_offset + ((i - not_stack + skip)
3125 * UNITS_PER_WORD)),
3126 reg_parm_stack_space);
3128 else
3130 rtx addr;
3131 rtx target = NULL_RTX;
3133 /* Push padding now if padding above and stack grows down,
3134 or if padding below and stack grows up.
3135 But if space already allocated, this has already been done. */
3136 if (extra && args_addr == 0
3137 && where_pad != none && where_pad != stack_direction)
3138 anti_adjust_stack (GEN_INT (extra));
3140 #ifdef PUSH_ROUNDING
3141 if (args_addr == 0)
3142 addr = gen_push_operand ();
3143 else
3144 #endif
3146 if (GET_CODE (args_so_far) == CONST_INT)
3147 addr
3148 = memory_address (mode,
3149 plus_constant (args_addr,
3150 INTVAL (args_so_far)));
3151 else
3152 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3153 args_so_far));
3154 target = addr;
3157 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3159 if (current_function_check_memory_usage && ! in_check_memory_usage)
3161 in_check_memory_usage = 1;
3162 if (target == 0)
3163 target = get_push_address (GET_MODE_SIZE (mode));
3165 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3166 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3167 target, Pmode,
3168 XEXP (x, 0), Pmode,
3169 GEN_INT (GET_MODE_SIZE (mode)),
3170 TYPE_MODE (sizetype));
3171 else
3172 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3173 target, Pmode,
3174 GEN_INT (GET_MODE_SIZE (mode)),
3175 TYPE_MODE (sizetype),
3176 GEN_INT (MEMORY_USE_RW),
3177 TYPE_MODE (integer_type_node));
3178 in_check_memory_usage = 0;
3182 ret:
3183 /* If part should go in registers, copy that part
3184 into the appropriate registers. Do this now, at the end,
3185 since mem-to-mem copies above may do function calls. */
3186 if (partial > 0 && reg != 0)
3188 /* Handle calls that pass values in multiple non-contiguous locations.
3189 The Irix 6 ABI has examples of this. */
3190 if (GET_CODE (reg) == PARALLEL)
3191 emit_group_load (reg, x, -1, align); /* ??? size? */
3192 else
3193 move_block_to_reg (REGNO (reg), x, partial, mode);
3196 if (extra && args_addr == 0 && where_pad == stack_direction)
3197 anti_adjust_stack (GEN_INT (extra));
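/* Illustrative sketch -- not part of expr.c.  The USED / OFFSET / SKIP
   bookkeeping at the top of the BLKmode branch above, under assumed
   8-bit units and 4-byte words (struct and names ours).  PARTIAL words
   travel in registers; OFFSET is the sub-PARM_BOUNDARY remainder still
   stored to memory, and SKIP is nonzero only when the register part
   also counts against the argument's stack space.  */
struct push_layout { int used, offset, skip; };

static struct push_layout
push_layout_model (int partial, int parm_boundary_bits,
                   int reg_parm_stack_space)
{
  struct push_layout l;
  int units_per_word = 4;

  l.used = partial * units_per_word;
  l.offset = l.used % (parm_boundary_bits / 8);
  l.used -= l.offset;
  l.skip = reg_parm_stack_space == 0 ? 0 : l.used;
  return l;
}
/* E.g. PARTIAL == 3 words with a 64-bit PARM_BOUNDARY gives used == 8
   and offset == 4: eight bytes are wholly covered by registers and
   one word straddles into the stack copy.  */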
3200 /* Expand an assignment that stores the value of FROM into TO.
3201 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3202 (This may contain a QUEUED rtx;
3203 if the value is constant, this rtx is a constant.)
3204 Otherwise, the returned value is NULL_RTX.
3206 SUGGEST_REG is no longer actually used.
3207 It used to mean, copy the value through a register
3208 and return that register, if that is possible.
3209 We now use WANT_VALUE to decide whether to do this. */
3211 rtx
3212 expand_assignment (to, from, want_value, suggest_reg)
3213 tree to, from;
3214 int want_value;
3215 int suggest_reg;
3217 register rtx to_rtx = 0;
3218 rtx result;
3220 /* Don't crash if the lhs of the assignment was erroneous. */
3222 if (TREE_CODE (to) == ERROR_MARK)
3224 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3225 return want_value ? result : NULL_RTX;
3228 /* Assignment of a structure component needs special treatment
3229 if the structure component's rtx is not simply a MEM.
3230 Assignment of an array element at a constant index, and assignment of
3231 an array element in an unaligned packed structure field, has the same
3232 problem. */
3234 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3235 || TREE_CODE (to) == ARRAY_REF)
3237 enum machine_mode mode1;
3238 int bitsize;
3239 int bitpos;
3240 tree offset;
3241 int unsignedp;
3242 int volatilep = 0;
3243 tree tem;
3244 int alignment;
3246 push_temp_slots ();
3247 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3248 &unsignedp, &volatilep, &alignment);
3250 /* If we are going to use store_bit_field and extract_bit_field,
3251 make sure to_rtx will be safe for multiple use. */
3253 if (mode1 == VOIDmode && want_value)
3254 tem = stabilize_reference (tem);
3256 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3257 if (offset != 0)
3259 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3261 if (GET_CODE (to_rtx) != MEM)
3262 abort ();
3264 if (GET_MODE (offset_rtx) != ptr_mode)
3266 #ifdef POINTERS_EXTEND_UNSIGNED
3267 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3268 #else
3269 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3270 #endif
3273 /* A constant address in TO_RTX can have VOIDmode; we must not try
3274 to call force_reg for that case. Avoid that case. */
3275 if (GET_CODE (to_rtx) == MEM
3276 && GET_MODE (to_rtx) == BLKmode
3277 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3278 && bitsize
3279 && (bitpos % bitsize) == 0
3280 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3281 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3283 rtx temp = change_address (to_rtx, mode1,
3284 plus_constant (XEXP (to_rtx, 0),
3285 (bitpos /
3286 BITS_PER_UNIT)));
3287 if (GET_CODE (XEXP (temp, 0)) == REG)
3288 to_rtx = temp;
3289 else
3290 to_rtx = change_address (to_rtx, mode1,
3291 force_reg (GET_MODE (XEXP (temp, 0)),
3292 XEXP (temp, 0)));
3293 bitpos = 0;
3296 to_rtx = change_address (to_rtx, VOIDmode,
3297 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3298 force_reg (ptr_mode, offset_rtx)));
3300 if (volatilep)
3302 if (GET_CODE (to_rtx) == MEM)
3304 /* When the offset is zero, to_rtx is the address of the
3305 structure we are storing into, and hence may be shared.
3306 We must make a new MEM before setting the volatile bit. */
3307 if (offset == 0)
3308 to_rtx = copy_rtx (to_rtx);
3310 MEM_VOLATILE_P (to_rtx) = 1;
3312 #if 0 /* This was turned off because, when a field is volatile
3313 in an object which is not volatile, the object may be in a register,
3314 and then we would abort over here. */
3315 else
3316 abort ();
3317 #endif
3320 if (TREE_CODE (to) == COMPONENT_REF
3321 && TREE_READONLY (TREE_OPERAND (to, 1)))
3323 if (offset == 0)
3324 to_rtx = copy_rtx (to_rtx);
3326 RTX_UNCHANGING_P (to_rtx) = 1;
3329 /* Check the access. */
3330 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3332 rtx to_addr;
3333 int size;
3334 int best_mode_size;
3335 enum machine_mode best_mode;
3337 best_mode = get_best_mode (bitsize, bitpos,
3338 TYPE_ALIGN (TREE_TYPE (tem)),
3339 mode1, volatilep);
3340 if (best_mode == VOIDmode)
3341 best_mode = QImode;
3343 best_mode_size = GET_MODE_BITSIZE (best_mode);
3344 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3345 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3346 size *= GET_MODE_SIZE (best_mode);
3348 /* Check the access right of the pointer. */
3349 if (size)
3350 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3351 to_addr, Pmode,
3352 GEN_INT (size), TYPE_MODE (sizetype),
3353 GEN_INT (MEMORY_USE_WO),
3354 TYPE_MODE (integer_type_node));
3357 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3358 (want_value
3359 /* Spurious cast makes HPUX compiler happy. */
3360 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3361 : VOIDmode),
3362 unsignedp,
3363 /* Required alignment of containing datum. */
3364 alignment,
3365 int_size_in_bytes (TREE_TYPE (tem)),
3366 get_alias_set (to));
3367 preserve_temp_slots (result);
3368 free_temp_slots ();
3369 pop_temp_slots ();
3371 /* If the value is meaningful, convert RESULT to the proper mode.
3372 Otherwise, return nothing. */
3373 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3374 TYPE_MODE (TREE_TYPE (from)),
3375 result,
3376 TREE_UNSIGNED (TREE_TYPE (to)))
3377 : NULL_RTX);
3380 /* If the rhs is a function call and its value is not an aggregate,
3381 call the function before we start to compute the lhs.
3382 This is needed for correct code for cases such as
3383 val = setjmp (buf) on machines where reference to val
3384 requires loading up part of an address in a separate insn.
3386 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3387 a promoted variable where the zero- or sign-extension needs to be done.
3388 Handling this in the normal way is safe because no computation is done
3389 before the call. */
3390 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3391 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3392 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3394 rtx value;
3396 push_temp_slots ();
3397 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3398 if (to_rtx == 0)
3399 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3401 /* Handle calls that return values in multiple non-contiguous locations.
3402 The Irix 6 ABI has examples of this. */
3403 if (GET_CODE (to_rtx) == PARALLEL)
3404 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3405 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3406 else if (GET_MODE (to_rtx) == BLKmode)
3407 emit_block_move (to_rtx, value, expr_size (from),
3408 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3409 else
3411 #ifdef POINTERS_EXTEND_UNSIGNED
3412 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3413 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3414 value = convert_memory_address (GET_MODE (to_rtx), value);
3415 #endif
3416 emit_move_insn (to_rtx, value);
3418 preserve_temp_slots (to_rtx);
3419 free_temp_slots ();
3420 pop_temp_slots ();
3421 return want_value ? to_rtx : NULL_RTX;
3424 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3425 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3427 if (to_rtx == 0)
3429 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3430 if (GET_CODE (to_rtx) == MEM)
3431 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3434 /* Don't move directly into a return register. */
3435 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3437 rtx temp;
3439 push_temp_slots ();
3440 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3441 emit_move_insn (to_rtx, temp);
3442 preserve_temp_slots (to_rtx);
3443 free_temp_slots ();
3444 pop_temp_slots ();
3445 return want_value ? to_rtx : NULL_RTX;
3448 /* In case we are returning the contents of an object which overlaps
3449 the place the value is being stored, use a safe function when copying
3450 a value through a pointer into a structure value return block. */
3451 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3452 && current_function_returns_struct
3453 && !current_function_returns_pcc_struct)
3455 rtx from_rtx, size;
3457 push_temp_slots ();
3458 size = expr_size (from);
3459 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3460 EXPAND_MEMORY_USE_DONT);
3462 /* Copy the rights of the bitmap. */
3463 if (current_function_check_memory_usage)
3464 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3465 XEXP (to_rtx, 0), Pmode,
3466 XEXP (from_rtx, 0), Pmode,
3467 convert_to_mode (TYPE_MODE (sizetype),
3468 size, TREE_UNSIGNED (sizetype)),
3469 TYPE_MODE (sizetype));
3471 #ifdef TARGET_MEM_FUNCTIONS
3472 emit_library_call (memcpy_libfunc, 0,
3473 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3474 XEXP (from_rtx, 0), Pmode,
3475 convert_to_mode (TYPE_MODE (sizetype),
3476 size, TREE_UNSIGNED (sizetype)),
3477 TYPE_MODE (sizetype));
3478 #else
3479 emit_library_call (bcopy_libfunc, 0,
3480 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3481 XEXP (to_rtx, 0), Pmode,
3482 convert_to_mode (TYPE_MODE (integer_type_node),
3483 size, TREE_UNSIGNED (integer_type_node)),
3484 TYPE_MODE (integer_type_node));
3485 #endif
3487 preserve_temp_slots (to_rtx);
3488 free_temp_slots ();
3489 pop_temp_slots ();
3490 return want_value ? to_rtx : NULL_RTX;
3493 /* Compute FROM and store the value in the rtx we got. */
3495 push_temp_slots ();
3496 result = store_expr (from, to_rtx, want_value);
3497 preserve_temp_slots (result);
3498 free_temp_slots ();
3499 pop_temp_slots ();
3500 return want_value ? result : NULL_RTX;
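/* Illustrative sketch -- not part of expr.c.  What store_field
   ultimately does for a bitfield assignment, modeled with shifts and
   masks on a little-endian word (name ours; BITSIZE < 32 assumed).
   get_inner_reference supplies BITPOS and BITSIZE.  */
#include <assert.h>

static unsigned int
store_field_model (unsigned int word, unsigned int value,
                   int bitpos, int bitsize)
{
  unsigned int mask = (((unsigned int) 1 << bitsize) - 1) << bitpos;

  return (word & ~mask) | ((value << bitpos) & mask);
}

int
main (void)
{
  /* For struct { unsigned a : 3; unsigned b : 7; } s; the store
     s.b = 0x55 decomposes to bitpos == 3, bitsize == 7.  */
  assert (store_field_model (0, 0x55, 3, 7) == (0x55u << 3));
  return 0;
}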
3503 /* Generate code for computing expression EXP,
3504 and storing the value into TARGET.
3505 TARGET may contain a QUEUED rtx.
3507 If WANT_VALUE is nonzero, return a copy of the value
3508 not in TARGET, so that we can be sure to use the proper
3509 value in a containing expression even if TARGET has something
3510 else stored in it. If possible, we copy the value through a pseudo
3511 and return that pseudo. Or, if the value is constant, we try to
3512 return the constant. In some cases, we return a pseudo
3513 copied *from* TARGET.
3515 If the mode is BLKmode then we may return TARGET itself.
3516 It turns out that in BLKmode it doesn't cause a problem,
3517 because C has no operators that could combine two different
3518 assignments into the same BLKmode object with different values
3519 with no sequence point. Will other languages need this to
3520 be more thorough?
3522 If WANT_VALUE is 0, we return NULL, to make sure
3523 to catch quickly any cases where the caller uses the value
3524 and fails to set WANT_VALUE. */
3526 rtx
3527 store_expr (exp, target, want_value)
3528 register tree exp;
3529 register rtx target;
3530 int want_value;
3532 register rtx temp;
3533 int dont_return_target = 0;
3535 if (TREE_CODE (exp) == COMPOUND_EXPR)
3537 /* Perform first part of compound expression, then assign from second
3538 part. */
3539 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3540 emit_queue ();
3541 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3543 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3545 /* For conditional expression, get safe form of the target. Then
3546 test the condition, doing the appropriate assignment on either
3547 side. This avoids the creation of unnecessary temporaries.
3548 For non-BLKmode, it is more efficient not to do this. */
3550 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3552 emit_queue ();
3553 target = protect_from_queue (target, 1);
3555 do_pending_stack_adjust ();
3556 NO_DEFER_POP;
3557 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3558 start_cleanup_deferral ();
3559 store_expr (TREE_OPERAND (exp, 1), target, 0);
3560 end_cleanup_deferral ();
3561 emit_queue ();
3562 emit_jump_insn (gen_jump (lab2));
3563 emit_barrier ();
3564 emit_label (lab1);
3565 start_cleanup_deferral ();
3566 store_expr (TREE_OPERAND (exp, 2), target, 0);
3567 end_cleanup_deferral ();
3568 emit_queue ();
3569 emit_label (lab2);
3570 OK_DEFER_POP;
3572 return want_value ? target : NULL_RTX;
3574 else if (queued_subexp_p (target))
3575 /* If target contains a postincrement, let's not risk
3576 using it as the place to generate the rhs. */
3578 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3580 /* Expand EXP into a new pseudo. */
3581 temp = gen_reg_rtx (GET_MODE (target));
3582 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3584 else
3585 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3587 /* If target is volatile, ANSI requires accessing the value
3588 *from* the target, if it is accessed. So make that happen.
3589 In no case return the target itself. */
3590 if (! MEM_VOLATILE_P (target) && want_value)
3591 dont_return_target = 1;
3593 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3594 && GET_MODE (target) != BLKmode)
3595 /* If target is in memory and caller wants value in a register instead,
3596 arrange that. Pass TARGET as target for expand_expr so that,
3597 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3598 We know expand_expr will not use the target in that case.
3599 Don't do this if TARGET is volatile because we are supposed
3600 to write it and then read it. */
3602 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3603 GET_MODE (target), 0);
3604 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3605 temp = copy_to_reg (temp);
3606 dont_return_target = 1;
3608 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3609 /* If this is a scalar in a register that is stored in a wider mode
3610 than the declared mode, compute the result into its declared mode
3611 and then convert to the wider mode. Our value is the computed
3612 expression. */
3614 /* If we don't want a value, we can do the conversion inside EXP,
3615 which will often result in some optimizations. Do the conversion
3616 in two steps: first change the signedness, if needed, then
3617 the extend. But don't do this if the type of EXP is a subtype
3618 of something else since then the conversion might involve
3619 more than just converting modes. */
3620 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3621 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3623 if (TREE_UNSIGNED (TREE_TYPE (exp))
3624 != SUBREG_PROMOTED_UNSIGNED_P (target))
3625 exp
3626 = convert
3627 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3628 TREE_TYPE (exp)),
3629 exp);
3631 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3632 SUBREG_PROMOTED_UNSIGNED_P (target)),
3633 exp);
3636 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3638 /* If TEMP is a volatile MEM and we want a result value, make
3639 the access now so it gets done only once. Likewise if
3640 it contains TARGET. */
3641 if (GET_CODE (temp) == MEM && want_value
3642 && (MEM_VOLATILE_P (temp)
3643 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3644 temp = copy_to_reg (temp);
3646 /* If TEMP is a VOIDmode constant, use convert_modes to make
3647 sure that we properly convert it. */
3648 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3649 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3650 TYPE_MODE (TREE_TYPE (exp)), temp,
3651 SUBREG_PROMOTED_UNSIGNED_P (target));
3653 convert_move (SUBREG_REG (target), temp,
3654 SUBREG_PROMOTED_UNSIGNED_P (target));
3655 return want_value ? temp : NULL_RTX;
3657 else
3659 temp = expand_expr (exp, target, GET_MODE (target), 0);
3660 /* Return TARGET if it's a specified hardware register.
3661 If TARGET is a volatile mem ref, either return TARGET
3662 or return a reg copied *from* TARGET; ANSI requires this.
3664 Otherwise, if TEMP is not TARGET, return TEMP
3665 if it is constant (for efficiency),
3666 or if we really want the correct value. */
3667 if (!(target && GET_CODE (target) == REG
3668 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3669 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3670 && ! rtx_equal_p (temp, target)
3671 && (CONSTANT_P (temp) || want_value))
3672 dont_return_target = 1;
3675 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3676 the same as that of TARGET, adjust the constant. This is needed, for
3677 example, in case it is a CONST_DOUBLE and we want only a word-sized
3678 value. */
3679 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3680 && TREE_CODE (exp) != ERROR_MARK
3681 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3682 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3683 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3685 if (current_function_check_memory_usage
3686 && GET_CODE (target) == MEM
3687 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3689 if (GET_CODE (temp) == MEM)
3690 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3691 XEXP (target, 0), Pmode,
3692 XEXP (temp, 0), Pmode,
3693 expr_size (exp), TYPE_MODE (sizetype));
3694 else
3695 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3696 XEXP (target, 0), Pmode,
3697 expr_size (exp), TYPE_MODE (sizetype),
3698 GEN_INT (MEMORY_USE_WO),
3699 TYPE_MODE (integer_type_node));
3702 /* If value was not generated in the target, store it there.
3703 Convert the value to TARGET's type first if necessary. */
3704 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3705 one or both of them are volatile memory refs, we have to distinguish
3706 two cases:
3707 - expand_expr has used TARGET. In this case, we must not generate
3708 another copy. This can be detected by TARGET being equal according
3709 to == .
3710 - expand_expr has not used TARGET - that means that the source just
3711 happens to have the same RTX form. Since temp will have been created
3712 by expand_expr, it will compare unequal according to == .
3713 We must generate a copy in this case, to reach the correct number
3714 of volatile memory references. */
3716 if ((! rtx_equal_p (temp, target)
3717 || (temp != target && (side_effects_p (temp)
3718 || side_effects_p (target))))
3719 && TREE_CODE (exp) != ERROR_MARK)
3721 target = protect_from_queue (target, 1);
3722 if (GET_MODE (temp) != GET_MODE (target)
3723 && GET_MODE (temp) != VOIDmode)
3725 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3726 if (dont_return_target)
3728 /* In this case, we will return TEMP,
3729 so make sure it has the proper mode.
3730 But don't forget to store the value into TARGET. */
3731 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3732 emit_move_insn (target, temp);
3734 else
3735 convert_move (target, temp, unsignedp);
3738 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3740 /* Handle copying a string constant into an array.
3741 The string constant may be shorter than the array.
3742 So copy just the string's actual length, and clear the rest. */
	  rtx size;
	  rtx addr;

	  /* Get the size of the data type of the string,
	     which is actually the size of the target.  */
	  size = expr_size (exp);
	  if (GET_CODE (size) == CONST_INT
	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
	    emit_block_move (target, temp, size,
			     TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
	  else
	    {
	      /* Compute the size of the data to copy from the string.  */
	      tree copy_size
		= size_binop (MIN_EXPR,
			      make_tree (sizetype, size),
			      convert (sizetype,
				       build_int_2 (TREE_STRING_LENGTH (exp), 0)));
	      rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
					       VOIDmode, 0);
	      rtx label = 0;

	      /* Copy that much.  */
	      emit_block_move (target, temp, copy_size_rtx,
			       TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);

	      /* Figure out how much is left in TARGET that we have to clear.
		 Do all calculations in ptr_mode.  */

	      addr = XEXP (target, 0);
	      addr = convert_modes (ptr_mode, Pmode, addr, 1);

	      if (GET_CODE (copy_size_rtx) == CONST_INT)
		{
		  addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
		  size = plus_constant (size, - TREE_STRING_LENGTH (exp));
		}
	      else
		{
		  addr = force_reg (ptr_mode, addr);
		  addr = expand_binop (ptr_mode, add_optab, addr,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  size = expand_binop (ptr_mode, sub_optab, size,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  label = gen_label_rtx ();
		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
					   GET_MODE (size), 0, 0, label);
		}

	      if (size != const0_rtx)
		{
		  /* Be sure we can write on ADDR.  */
		  if (current_function_check_memory_usage)
		    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
				       addr, Pmode,
				       size, TYPE_MODE (sizetype),
				       GEN_INT (MEMORY_USE_WO),
				       TYPE_MODE (integer_type_node));
#ifdef TARGET_MEM_FUNCTIONS
		  emit_library_call (memset_libfunc, 0, VOIDmode, 3,
				     addr, ptr_mode,
				     const0_rtx, TYPE_MODE (integer_type_node),
				     convert_to_mode (TYPE_MODE (sizetype),
						      size,
						      TREE_UNSIGNED (sizetype)),
				     TYPE_MODE (sizetype));
#else
		  emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
				     addr, ptr_mode,
				     convert_to_mode (TYPE_MODE (integer_type_node),
						      size,
						      TREE_UNSIGNED (integer_type_node)),
				     TYPE_MODE (integer_type_node));
#endif
		}

	      if (label)
		emit_label (label);
	    }
	}
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
	emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
			 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
      else if (GET_MODE (temp) == BLKmode)
	emit_block_move (target, temp, expr_size (exp),
			 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
      else
	emit_move_insn (target, temp);
    }

  /* If we don't want a value, return NULL_RTX.  */
  if (! want_value)
    return NULL_RTX;

  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
     ??? The latter test doesn't seem to make sense.  */
  else if (dont_return_target && GET_CODE (temp) != MEM)
    return temp;

  /* Otherwise, copy TARGET into a pseudo register.  TARGET itself is
     returned as is only when it is a hard register or has BLKmode
     (the final else below).  */
  else if (want_value && GET_MODE (target) != BLKmode
	   && ! (GET_CODE (target) == REG
		 && REGNO (target) < FIRST_PSEUDO_REGISTER))
    return copy_to_reg (target);

  else
    return target;
}
/* Return 1 if EXP just contains zeros.  */

static int
is_zeros_p (exp)
     tree exp;
{
  tree elt;

  switch (TREE_CODE (exp))
    {
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:
      return is_zeros_p (TREE_OPERAND (exp, 0));

    case INTEGER_CST:
      return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;

    case COMPLEX_CST:
      return
	is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));

    case REAL_CST:
      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);

    case CONSTRUCTOR:
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	if (! is_zeros_p (TREE_VALUE (elt)))
	  return 0;

      return 1;

    default:
      return 0;
    }
}
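/* Added note (not in the original source): for REAL_CSTs the
   REAL_VALUES_IDENTICAL test compares representations, so a negative zero
   (-0.0) is presumably not treated as "just zeros" and will be stored
   explicitly rather than folded into a cleared block.  */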
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      int elts = 0, zeros = 0;
      tree elt = CONSTRUCTOR_ELTS (exp);
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	{
	  /* If there are no ranges of true bits, it is all zero.  */
	  return elt == NULL_TREE;
	}
      for (; elt; elt = TREE_CHAIN (elt))
	{
	  /* We do not handle the case where the index is a RANGE_EXPR,
	     so the statistic will be somewhat inaccurate.
	     We do make a more accurate count in store_constructor itself,
	     so since this function is only used for nested array elements,
	     this should be close enough.  */
	  if (mostly_zeros_p (TREE_VALUE (elt)))
	    zeros++;
	  elts++;
	}

      return 4 * zeros >= 3 * elts;
    }
  return is_zeros_p (exp);
}
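/* Added illustrative arithmetic (not in the original source): under the
   3/4 test above, a constructor with 8 elements qualifies as mostly zeros
   when at least 6 of them are (mostly) zero, since 4*6 >= 3*8.  */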
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (target, bitsize, bitpos,
			 mode, exp, type, cleared)
     rtx target;
     int bitsize, bitpos;
     enum machine_mode mode;
     tree exp, type;
     int cleared;
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      && bitpos % BITS_PER_UNIT == 0
      /* If we have a non-zero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || GET_CODE (target) == MEM))
    {
      if (bitpos != 0)
	target = change_address (target, VOIDmode,
				 plus_constant (XEXP (target, 0),
						bitpos / BITS_PER_UNIT));
      store_constructor (exp, target, cleared);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp,
		 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
		 int_size_in_bytes (type), 0);
}
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM.
   CLEARED is true if TARGET is known to have been zero'd.  */

static void
store_constructor (exp, target, cleared)
     tree exp;
     rtx target;
     int cleared;
{
  tree type = TREE_TYPE (exp);
  rtx exp_size = expr_size (exp);

  /* We know our target cannot conflict, since safe_from_p has been called.  */
#if 0
  /* Don't try copying piece by piece into a hard register
     since that is vulnerable to being clobbered by EXP.
     Instead, construct in a pseudo register and then copy it all.  */
  if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
    {
      rtx temp = gen_reg_rtx (GET_MODE (target));
      store_constructor (exp, temp, 0);
      emit_move_insn (target, temp);
      return;
    }
#endif
  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      register tree elt;

      /* Inform later passes that the whole union value is dead.  */
      if (TREE_CODE (type) == UNION_TYPE
	  || TREE_CODE (type) == QUAL_UNION_TYPE)
	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

      /* If we are building a static constructor into a register,
	 set the initial value as zero so we can fold the value into
	 a constant.  But if more than one register is involved,
	 this probably loses.  */
      else if (GET_CODE (target) == REG && TREE_STATIC (exp)
	       && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
	{
	  if (! cleared)
	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));

	  cleared = 1;
	}

      /* If the constructor has fewer fields than the structure
	 or if we are initializing the structure to mostly zeros,
	 clear the whole structure first.  */
      else if ((list_length (CONSTRUCTOR_ELTS (exp))
		!= list_length (TYPE_FIELDS (type)))
	       || mostly_zeros_p (exp))
	{
	  if (! cleared)
	    clear_storage (target, expr_size (exp),
			   TYPE_ALIGN (type) / BITS_PER_UNIT);

	  cleared = 1;
	}
      else
	/* Inform later passes that the old value is dead.  */
	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

      /* Store each element of the constructor into
	 the corresponding field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	{
	  register tree field = TREE_PURPOSE (elt);
	  tree value = TREE_VALUE (elt);
	  register enum machine_mode mode;
	  int bitsize;
	  int bitpos = 0;
	  int unsignedp;
	  tree pos, constant = 0, offset = 0;
	  rtx to_rtx = target;

	  /* Just ignore missing fields.
	     We cleared the whole structure, above,
	     if any fields are missing.  */
	  if (field == 0)
	    continue;

	  if (cleared && is_zeros_p (TREE_VALUE (elt)))
	    continue;

	  bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
	  unsignedp = TREE_UNSIGNED (field);
	  mode = DECL_MODE (field);
	  if (DECL_BIT_FIELD (field))
	    mode = VOIDmode;

	  pos = DECL_FIELD_BITPOS (field);
	  if (TREE_CODE (pos) == INTEGER_CST)
	    constant = pos;
	  else if (TREE_CODE (pos) == PLUS_EXPR
		   && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
	    constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
	  else
	    offset = pos;

	  if (constant)
	    bitpos = TREE_INT_CST_LOW (constant);

	  if (offset)
	    {
	      rtx offset_rtx;

	      if (contains_placeholder_p (offset))
		offset = build (WITH_RECORD_EXPR, sizetype,
				offset, make_tree (TREE_TYPE (exp), target));

	      offset = size_binop (FLOOR_DIV_EXPR, offset,
				   size_int (BITS_PER_UNIT));

	      offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
	      if (GET_CODE (to_rtx) != MEM)
		abort ();

	      if (GET_MODE (offset_rtx) != ptr_mode)
		{
#ifdef POINTERS_EXTEND_UNSIGNED
		  offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
#else
		  offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif
		}

	      to_rtx
		= change_address (to_rtx, VOIDmode,
				  gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
						force_reg (ptr_mode, offset_rtx)));
	    }
	  if (TREE_READONLY (field))
	    {
	      if (GET_CODE (to_rtx) == MEM)
		to_rtx = copy_rtx (to_rtx);

	      RTX_UNCHANGING_P (to_rtx) = 1;
	    }

#ifdef WORD_REGISTER_OPERATIONS
	  /* If this initializes a field that is smaller than a word, at the
	     start of a word, try to widen it to a full word.
	     This special case allows us to output C++ member function
	     initializations in a form that the optimizers can understand.  */
	  if (constant
	      && GET_CODE (target) == REG
	      && bitsize < BITS_PER_WORD
	      && bitpos % BITS_PER_WORD == 0
	      && GET_MODE_CLASS (mode) == MODE_INT
	      && TREE_CODE (value) == INTEGER_CST
	      && GET_CODE (exp_size) == CONST_INT
	      && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
	    {
	      tree type = TREE_TYPE (value);
	      if (TYPE_PRECISION (type) < BITS_PER_WORD)
		{
		  type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
		  value = convert (type, value);
		}
	      if (BYTES_BIG_ENDIAN)
		value
		  = fold (build (LSHIFT_EXPR, type, value,
				 build_int_2 (BITS_PER_WORD - bitsize, 0)));
	      bitsize = BITS_PER_WORD;
	      mode = word_mode;
	    }
#endif
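	  /* Added illustrative example (not in the original source): on a
	     32-bit WORD_REGISTER_OPERATIONS target, an 8-bit field at
	     bitpos 0 initialized with an INTEGER_CST is widened above into
	     a full word_mode store, shifted to the most significant end
	     when BYTES_BIG_ENDIAN.  */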
	  store_constructor_field (to_rtx, bitsize, bitpos,
				   mode, value, type, cleared);
	}
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      register tree elt;
      register int i;
      int need_to_clear;
      tree domain = TYPE_DOMAIN (type);
      HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
      HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
      tree elttype = TREE_TYPE (type);

      /* If the constructor has fewer elements than the array,
	 clear the whole array first.  Similarly if this is a
	 static constructor of a non-BLKmode object.  */
      if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
	need_to_clear = 1;
      else
	{
	  HOST_WIDE_INT count = 0, zero_count = 0;
	  need_to_clear = 0;
	  /* This loop is a more accurate version of the loop in
	     mostly_zeros_p (it handles RANGE_EXPR in an index).
	     It is also needed to check for missing elements.  */
	  for (elt = CONSTRUCTOR_ELTS (exp);
	       elt != NULL_TREE;
	       elt = TREE_CHAIN (elt))
	    {
	      tree index = TREE_PURPOSE (elt);
	      HOST_WIDE_INT this_node_count;
	      if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
		{
		  tree lo_index = TREE_OPERAND (index, 0);
		  tree hi_index = TREE_OPERAND (index, 1);
		  if (TREE_CODE (lo_index) != INTEGER_CST
		      || TREE_CODE (hi_index) != INTEGER_CST)
		    {
		      need_to_clear = 1;
		      break;
		    }
		  this_node_count = TREE_INT_CST_LOW (hi_index)
		    - TREE_INT_CST_LOW (lo_index) + 1;
		}
	      else
		this_node_count = 1;
	      count += this_node_count;
	      if (mostly_zeros_p (TREE_VALUE (elt)))
		zero_count += this_node_count;
	    }
	  /* Clear the entire array first if there are any missing elements,
	     or if the incidence of zero elements is >= 75%.  */
	  if (count < maxelt - minelt + 1
	      || 4 * zero_count >= 3 * count)
	    need_to_clear = 1;
	}
      if (need_to_clear)
	{
	  if (! cleared)
	    clear_storage (target, expr_size (exp),
			   TYPE_ALIGN (type) / BITS_PER_UNIT);
	  cleared = 1;
	}
      else
	/* Inform later passes that the old value is dead.  */
	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

      /* Store each element of the constructor into
	 the corresponding element of TARGET, determined
	 by counting the elements.  */
      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
	   elt;
	   elt = TREE_CHAIN (elt), i++)
	{
	  register enum machine_mode mode;
	  int bitsize;
	  int bitpos;
	  int unsignedp;
	  tree value = TREE_VALUE (elt);
	  tree index = TREE_PURPOSE (elt);
	  rtx xtarget = target;

	  if (cleared && is_zeros_p (value))
	    continue;

	  mode = TYPE_MODE (elttype);
	  bitsize = GET_MODE_BITSIZE (mode);
	  unsignedp = TREE_UNSIGNED (elttype);
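	  /* Added note (not in the original source): a RANGE_EXPR index
	     below typically comes from a GNU C designated-range
	     initializer such as `int a[100] = { [0 ... 99] = 1 };'.  */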
	  if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
	    {
	      tree lo_index = TREE_OPERAND (index, 0);
	      tree hi_index = TREE_OPERAND (index, 1);
	      rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
	      struct nesting *loop;
	      HOST_WIDE_INT lo, hi, count;
	      tree position;

	      /* If the range is constant and "small", unroll the loop.  */
	      if (TREE_CODE (lo_index) == INTEGER_CST
		  && TREE_CODE (hi_index) == INTEGER_CST
		  && (lo = TREE_INT_CST_LOW (lo_index),
		      hi = TREE_INT_CST_LOW (hi_index),
		      count = hi - lo + 1,
		      (GET_CODE (target) != MEM
		       || count <= 2
		       || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
			   && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
			   <= 40 * 8))))
		{
		  lo -= minelt;  hi -= minelt;
		  for (; lo <= hi; lo++)
		    {
		      bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
		      store_constructor_field (target, bitsize, bitpos,
					       mode, value, type, cleared);
		    }
		}
	      else
		{
		  hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
		  loop_top = gen_label_rtx ();
		  loop_end = gen_label_rtx ();

		  unsignedp = TREE_UNSIGNED (domain);

		  index = build_decl (VAR_DECL, NULL_TREE, domain);

		  DECL_RTL (index) = index_r
		    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
						 &unsignedp, 0));

		  if (TREE_CODE (value) == SAVE_EXPR
		      && SAVE_EXPR_RTL (value) == 0)
		    {
		      /* Make sure value gets expanded once before the
			 loop.  */
		      expand_expr (value, const0_rtx, VOIDmode, 0);
		      emit_queue ();
		    }
		  store_expr (lo_index, index_r, 0);
		  loop = expand_start_loop (0);

		  /* Assign value to element index.  */
		  position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
					 size_int (BITS_PER_UNIT));
		  position = size_binop (MULT_EXPR,
					 size_binop (MINUS_EXPR, index,
						     TYPE_MIN_VALUE (domain)),
					 position);
		  pos_rtx = expand_expr (position, 0, VOIDmode, 0);
		  addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
		  xtarget = change_address (target, mode, addr);
		  if (TREE_CODE (value) == CONSTRUCTOR)
		    store_constructor (value, xtarget, cleared);
		  else
		    store_expr (value, xtarget, 0);

		  expand_exit_loop_if_false (loop,
					     build (LT_EXPR, integer_type_node,
						    index, hi_index));

		  expand_increment (build (PREINCREMENT_EXPR,
					   TREE_TYPE (index),
					   index, integer_one_node), 0, 0);
		  expand_end_loop ();
		  emit_label (loop_end);

		  /* Needed by stupid register allocation, to extend the
		     lifetime of pseudo-regs used by target past the end
		     of the loop.  */
		  emit_insn (gen_rtx_USE (GET_MODE (target), target));
		}
	    }
	  else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
		   || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
	    {
	      rtx pos_rtx, addr;
	      tree position;

	      if (index == 0)
		index = size_int (i);

	      if (minelt)
		index = size_binop (MINUS_EXPR, index,
				    TYPE_MIN_VALUE (domain));
	      position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
				     size_int (BITS_PER_UNIT));
	      position = size_binop (MULT_EXPR, index, position);
	      pos_rtx = expand_expr (position, 0, VOIDmode, 0);
	      addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
	      xtarget = change_address (target, mode, addr);
	      store_expr (value, xtarget, 0);
	    }
	  else
	    {
	      if (index != 0)
		bitpos = ((TREE_INT_CST_LOW (index) - minelt)
			  * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
	      else
		bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
	      store_constructor_field (target, bitsize, bitpos,
				       mode, value, type, cleared);
	    }
	}
    }
  /* set constructor assignments */
  else if (TREE_CODE (type) == SET_TYPE)
    {
      tree elt = CONSTRUCTOR_ELTS (exp);
      int nbytes = int_size_in_bytes (type), nbits;
      tree domain = TYPE_DOMAIN (type);
      tree domain_min, domain_max, bitlength;

      /* The default implementation strategy is to extract the constant
	 parts of the constructor, use that to initialize the target,
	 and then "or" in whatever non-constant ranges we need in addition.

	 If a large set is all zero or all ones, it is
	 probably better to set it using memset (if available) or bzero.
	 Also, if a large set has just a single range, it may also be
	 better to first clear the whole set (using bzero/memset) and
	 then set the bits we want.  */
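      /* Added note (not in the original source): SET_TYPE constructors
	 come from front ends with genuine set types (e.g. Pascal sets or
	 CHILL powersets); a set over the domain 0..63 would give
	 NBITS == 64 below.  */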
      /* Check for all zeros.  */
      if (elt == NULL_TREE)
	{
	  if (!cleared)
	    clear_storage (target, expr_size (exp),
			   TYPE_ALIGN (type) / BITS_PER_UNIT);
	  return;
	}

      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
      bitlength = size_binop (PLUS_EXPR,
			      size_binop (MINUS_EXPR, domain_max, domain_min),
			      size_one_node);

      if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
	abort ();
      nbits = TREE_INT_CST_LOW (bitlength);

      /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
	 are "complicated" (more than one range), initialize (the
	 constant parts) by copying from a constant.  */
      if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
	  || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
	{
	  int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
	  enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
	  char *bit_buffer = (char *) alloca (nbits);
	  HOST_WIDE_INT word = 0;
	  int bit_pos = 0;
	  int ibit = 0;
	  int offset = 0;  /* In bytes from beginning of set.  */
	  elt = get_set_constructor_bits (exp, bit_buffer, nbits);
	  for (;;)
	    {
	      if (bit_buffer[ibit])
		{
		  if (BYTES_BIG_ENDIAN)
		    word |= (1 << (set_word_size - 1 - bit_pos));
		  else
		    word |= 1 << bit_pos;
		}
	      bit_pos++;  ibit++;
	      if (bit_pos >= set_word_size || ibit == nbits)
		{
		  if (word != 0 || ! cleared)
		    {
		      rtx datum = GEN_INT (word);
		      rtx to_rtx;
		      /* The assumption here is that it is safe to use
			 XEXP if the set is multi-word, but not if
			 it's single-word.  */
		      if (GET_CODE (target) == MEM)
			{
			  to_rtx = plus_constant (XEXP (target, 0), offset);
			  to_rtx = change_address (target, mode, to_rtx);
			}
		      else if (offset == 0)
			to_rtx = target;
		      else
			abort ();
		      emit_move_insn (to_rtx, datum);
		    }
		  if (ibit == nbits)
		    break;
		  word = 0;
		  bit_pos = 0;
		  offset += set_word_size / BITS_PER_UNIT;
		}
	    }
	}
      else if (!cleared)
	{
	  /* Don't bother clearing storage if the set is all ones.  */
	  if (TREE_CHAIN (elt) != NULL_TREE
	      || (TREE_PURPOSE (elt) == NULL_TREE
		  ? nbits != 1
		  : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
		     || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
		     || (TREE_INT_CST_LOW (TREE_VALUE (elt))
			 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
			 != nbits))))
	    clear_storage (target, expr_size (exp),
			   TYPE_ALIGN (type) / BITS_PER_UNIT);
	}
      for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
	{
	  /* start of range of element or NULL */
	  tree startbit = TREE_PURPOSE (elt);
	  /* end of range of element, or element value */
	  tree endbit = TREE_VALUE (elt);
#ifdef TARGET_MEM_FUNCTIONS
	  HOST_WIDE_INT startb, endb;
#endif
	  rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;

	  bitlength_rtx = expand_expr (bitlength,
				       NULL_RTX, MEM, EXPAND_CONST_ADDRESS);

	  /* handle non-range tuple element like [ expr ]  */
	  if (startbit == NULL_TREE)
	    {
	      startbit = save_expr (endbit);
	      endbit = startbit;
	    }
	  startbit = convert (sizetype, startbit);
	  endbit = convert (sizetype, endbit);
	  if (! integer_zerop (domain_min))
	    {
	      startbit = size_binop (MINUS_EXPR, startbit, domain_min);
	      endbit = size_binop (MINUS_EXPR, endbit, domain_min);
	    }
	  startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
				      EXPAND_CONST_ADDRESS);
	  endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
				    EXPAND_CONST_ADDRESS);

	  if (REG_P (target))
	    {
	      targetx = assign_stack_temp (GET_MODE (target),
					   GET_MODE_SIZE (GET_MODE (target)),
					   0);
	      emit_move_insn (targetx, target);
	    }
	  else if (GET_CODE (target) == MEM)
	    targetx = target;
	  else
	    abort ();

#ifdef TARGET_MEM_FUNCTIONS
	  /* Optimization:  If startbit and endbit are
	     constants divisible by BITS_PER_UNIT,
	     call memset instead.  */
	  if (TREE_CODE (startbit) == INTEGER_CST
	      && TREE_CODE (endbit) == INTEGER_CST
	      && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
	      && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
	    {
	      emit_library_call (memset_libfunc, 0,
				 VOIDmode, 3,
				 plus_constant (XEXP (targetx, 0),
						startb / BITS_PER_UNIT),
				 Pmode,
				 constm1_rtx, TYPE_MODE (integer_type_node),
				 GEN_INT ((endb - startb) / BITS_PER_UNIT),
				 TYPE_MODE (sizetype));
	    }
	  else
#endif
	    {
	      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
				 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
				 bitlength_rtx, TYPE_MODE (sizetype),
				 startbit_rtx, TYPE_MODE (sizetype),
				 endbit_rtx, TYPE_MODE (sizetype));
	    }

	  if (REG_P (target))
	    emit_move_insn (target, targetx);
	}
    }

  else
    abort ();
}
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   ALIGN is the alignment that TARGET is known to have, measured in bytes.
   TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.  */
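/* Added illustrative example (not in the original source): for
   `struct S { unsigned a : 9; unsigned b : 7; } s;' a store to `s.b'
   would typically reach here with BITSIZE 7, BITPOS 9 and MODE VOIDmode,
   i.e. a bit-field store (the exact layout is target-dependent).  */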
static rtx
store_field (target, bitsize, bitpos, mode, exp, value_mode,
	     unsignedp, align, total_size, alias_set)
     rtx target;
     int bitsize, bitpos;
     enum machine_mode mode;
     tree exp;
     enum machine_mode value_mode;
     int unsignedp;
     int align;
     int total_size;
     int alias_set;
{
  HOST_WIDE_INT width_mask = 0;

  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  if (bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;

  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  */

  if (mode == BLKmode
      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
    {
      rtx object = assign_stack_temp (GET_MODE (target),
				      GET_MODE_SIZE (GET_MODE (target)), 0);
      rtx blk_object = copy_rtx (object);

      MEM_SET_IN_STRUCT_P (object, 1);
      MEM_SET_IN_STRUCT_P (blk_object, 1);
      PUT_MODE (blk_object, BLKmode);

      if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
	emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
		   align, total_size, alias_set);

      /* Even though we aren't returning target, we need to
	 give it the updated value.  */
      emit_move_insn (target, object);

      return blk_object;
    }
  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode]
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || GET_CODE (target) == REG
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
	 store it as a bit field.  */
      || (SLOW_UNALIGNED_ACCESS
	  && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
      || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
    {
      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If BITSIZE is narrower than the size of the type of EXP
	 we will be narrowing TEMP.  Normally, what's wanted are the
	 low-order bits.  However, if EXP's type is a record and this is
	 a big-endian machine, we want the upper BITSIZE bits.  */
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	  && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
				       - bitsize),
			     temp, 1);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
	 MODE.  */
      if (mode != VOIDmode && mode != BLKmode
	  && mode != TYPE_MODE (TREE_TYPE (exp)))
	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the modes of TARGET and TEMP are both BLKmode, both
	 must be in memory and BITPOS must be aligned on a byte
	 boundary.  If so, we simply do a block copy.  */
      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
	{
	  if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
	      || bitpos % BITS_PER_UNIT != 0)
	    abort ();

	  target = change_address (target, VOIDmode,
				   plus_constant (XEXP (target, 0),
						  bitpos / BITS_PER_UNIT));

	  emit_block_move (target, temp,
			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
				    / BITS_PER_UNIT),
			   1);

	  return value_mode == VOIDmode ? const0_rtx : target;
	}

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
      if (value_mode != VOIDmode)
	{
	  /* The caller wants an rtx for the value.  */
	  /* If possible, avoid refetching from the bitfield itself.  */
	  if (width_mask != 0
	      && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
	    {
	      tree count;
	      enum machine_mode tmode;

	      if (unsignedp)
		return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
	      tmode = GET_MODE (temp);
	      if (tmode == VOIDmode)
		tmode = value_mode;
	      count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
	      temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
	    }
	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
				    NULL_RTX, value_mode, 0, align,
				    total_size);
	}
      return const0_rtx;
    }
  else
    {
      rtx addr = XEXP (target, 0);
      rtx to_rtx;

      /* If a value is wanted, it must be the lhs;
	 so make the address stable for multiple use.  */

      if (value_mode != VOIDmode && GET_CODE (addr) != REG
	  && ! CONSTANT_ADDRESS_P (addr)
	  /* A frame-pointer reference is already stable.  */
	  && ! (GET_CODE (addr) == PLUS
		&& GET_CODE (XEXP (addr, 1)) == CONST_INT
		&& (XEXP (addr, 0) == virtual_incoming_args_rtx
		    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
	addr = copy_to_reg (addr);

      /* Now build a reference to just the desired component.  */

      to_rtx = copy_rtx (change_address (target, mode,
					 plus_constant (addr,
							(bitpos
							 / BITS_PER_UNIT))));
      MEM_SET_IN_STRUCT_P (to_rtx, 1);
      MEM_ALIAS_SET (to_rtx) = alias_set;

      return store_expr (exp, to_rtx, value_mode != VOIDmode);
    }
}
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
   ARRAY_REFs and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.
   We set *PALIGNMENT to the alignment in bytes of the address that will be
   computed.  This is the alignment of the thing we return if *POFFSET
   is zero, but can be less strictly aligned if *POFFSET is nonzero.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */
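/* Added illustrative example (not in the original source): for a reference
   such as `s.a.b[3]', this walk returns the innermost object `s', with
   *PBITPOS accumulating the constant field and index displacements and
   *POFFSET carrying any variable part of the offset.  */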
tree
get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
		     punsignedp, pvolatilep, palignment)
     tree exp;
     int *pbitsize;
     int *pbitpos;
     tree *poffset;
     enum machine_mode *pmode;
     int *punsignedp;
     int *pvolatilep;
     int *palignment;
{
  tree orig_exp = exp;
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  tree offset = integer_zero_node;
  unsigned int alignment = BIGGEST_ALIGNMENT;

  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
	mode = DECL_MODE (TREE_OPERAND (exp, 1));
      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = TREE_UNSIGNED (exp);
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));

      *pbitsize = GET_MODE_BITSIZE (mode);
      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
    }

  if (size_tree)
    {
      if (TREE_CODE (size_tree) != INTEGER_CST)
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = TREE_INT_CST_LOW (size_tree);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */

  *pbitpos = 0;

  while (1)
    {
      if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
	{
	  tree pos = (TREE_CODE (exp) == COMPONENT_REF
		      ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
		      : TREE_OPERAND (exp, 2));
	  tree constant = integer_zero_node, var = pos;

	  /* If this field hasn't been filled in yet, don't go
	     past it.  This should only happen when folding expressions
	     made during type construction.  */
	  if (pos == 0)
	    break;

	  /* Assume here that the offset is a multiple of a unit.
	     If not, there should be an explicitly added constant.  */
	  if (TREE_CODE (pos) == PLUS_EXPR
	      && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
	    constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
	  else if (TREE_CODE (pos) == INTEGER_CST)
	    constant = pos, var = integer_zero_node;

	  *pbitpos += TREE_INT_CST_LOW (constant);
	  offset = size_binop (PLUS_EXPR, offset,
			       size_binop (EXACT_DIV_EXPR, var,
					   size_int (BITS_PER_UNIT)));
	}

      else if (TREE_CODE (exp) == ARRAY_REF)
	{
	  /* This code is based on the code in case ARRAY_REF in expand_expr
	     below.  We assume here that the size of an array element is
	     always an integral multiple of BITS_PER_UNIT.  */

	  tree index = TREE_OPERAND (exp, 1);
	  tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
	  tree low_bound
	    = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	  tree index_type = TREE_TYPE (index);
	  tree xindex;

	  if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
	    {
	      index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
			       index);
	      index_type = TREE_TYPE (index);
	    }

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion,  (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)

	     But sizetype isn't quite right either (especially if
	     the lowbound is negative).  FIXME */

	  if (! integer_zerop (low_bound))
	    index = fold (build (MINUS_EXPR, index_type, index,
				 convert (sizetype, low_bound)));

	  if (TREE_CODE (index) == INTEGER_CST)
	    {
	      index = convert (sbitsizetype, index);
	      index_type = TREE_TYPE (index);
	    }

	  xindex = fold (build (MULT_EXPR, sbitsizetype, index,
				convert (sbitsizetype,
					 TYPE_SIZE (TREE_TYPE (exp)))));

	  if (TREE_CODE (xindex) == INTEGER_CST
	      && TREE_INT_CST_HIGH (xindex) == 0)
	    *pbitpos += TREE_INT_CST_LOW (xindex);
	  else
	    {
	      /* Either the bit offset calculated above is not constant, or
		 it overflowed.  In either case, redo the multiplication
		 against the size in units.  This is especially important
		 in the non-constant case to avoid a division at runtime.  */
	      xindex = fold (build (MULT_EXPR, ssizetype, index,
				    convert (ssizetype,
					     TYPE_SIZE_UNIT (TREE_TYPE (exp)))));

	      if (contains_placeholder_p (xindex))
		xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);

	      offset = size_binop (PLUS_EXPR, offset, xindex);
	    }
	}

      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
	       && ! ((TREE_CODE (exp) == NOP_EXPR
		      || TREE_CODE (exp) == CONVERT_EXPR)
		     && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
			   && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
			       != UNION_TYPE))
		     && (TYPE_MODE (TREE_TYPE (exp))
			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
	break;

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      /* If the offset is non-constant already, then we can't assume any
	 alignment more than the alignment here.  */
      if (! integer_zerop (offset))
	alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));

      exp = TREE_OPERAND (exp, 0);
    }

  if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
    alignment = MIN (alignment, DECL_ALIGN (exp));
  else if (TREE_TYPE (exp) != 0)
    alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));

  if (integer_zerop (offset))
    offset = 0;

  if (offset != 0 && contains_placeholder_p (offset))
    offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);

  *pmode = mode;
  *poffset = offset;
  *palignment = alignment / BITS_PER_UNIT;
  return exp;
}
/* Subroutine of expand_expr: compute memory_usage from modifier.  */

static enum memory_use_mode
get_memory_usage_from_modifier (modifier)
     enum expand_modifier modifier;
{
  switch (modifier)
    {
    case EXPAND_NORMAL:
    case EXPAND_SUM:
      return MEMORY_USE_RO;
      break;
    case EXPAND_MEMORY_USE_WO:
      return MEMORY_USE_WO;
      break;
    case EXPAND_MEMORY_USE_RW:
      return MEMORY_USE_RW;
      break;
    case EXPAND_MEMORY_USE_DONT:
      /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
	 MEMORY_USE_DONT, because they are modifiers to a call of
	 expand_expr in the ADDR_EXPR case of expand_expr.  */
    case EXPAND_CONST_ADDRESS:
    case EXPAND_INITIALIZER:
      return MEMORY_USE_DONT;
    case EXPAND_MEMORY_USE_BAD:
    default:
      abort ();
    }
}
/* Given an rtx VALUE that may contain additions and multiplications,
   return an equivalent value that just refers to a register or memory.
   This is done by generating instructions to perform the arithmetic
   and returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */
rtx
force_operand (value, target)
     rtx value, target;
{
  register optab binoptab = 0;
  /* Use a temporary to force order of execution of calls to
     `force_operand'.  */
  rtx tmp;
  register rtx op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);

  /* Check for a PIC address load.  */
  if (flag_pic
      && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
	  || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
	subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (GET_CODE (value) == PLUS)
    binoptab = add_optab;
  else if (GET_CODE (value) == MINUS)
    binoptab = sub_optab;
  else if (GET_CODE (value) == MULT)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
	  && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_mult (GET_MODE (value), tmp,
			  force_operand (op2, NULL_RTX),
			  target, 0);
    }

  if (binoptab)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
	  && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
	{
	  binoptab = add_optab;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_binop (GET_MODE (value), binoptab,
				   XEXP (XEXP (value, 0), 0), op2,
				   subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_binop (GET_MODE (value), binoptab, temp,
			       force_operand (XEXP (XEXP (value, 0), 1), 0),
			       target, 0, OPTAB_LIB_WIDEN);
	}

      tmp = force_operand (XEXP (value, 0), subtarget);
      /* We give UNSIGNEDP = 0 to expand_binop
	 because the only operations we are expanding here are signed ones.  */
      return expand_binop (GET_MODE (value), binoptab, tmp,
			   force_operand (op2, NULL_RTX),
			   target, 0, OPTAB_LIB_WIDEN);
    }
  return value;
}
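/* Added usage sketch (not in the original source): given VALUE of the form
   (plus (reg X) (const_int 4)), force_operand emits the addition and
   returns a register (possibly TARGET) holding the sum, so the caller can
   use the result as a general operand.  */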
/* Subroutine of expand_expr:
   save the non-copied parts (LIST) of an expr (LHS), and return a list
   which can restore these values to their previous values,
   should something modify their storage.  */

static tree
save_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
	tree part = TREE_VALUE (tail);
	tree part_type = TREE_TYPE (part);
	tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
	rtx target = assign_temp (part_type, 0, 1, 1);
	if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
	  target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
	parts = tree_cons (to_be_saved,
			   build (RTL_EXPR, part_type, NULL_TREE,
				  (tree) target),
			   parts);
	store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
      }
  return parts;
}
/* Subroutine of expand_expr:
   record the non-copied parts (LIST) of an expr (LHS), and return a list
   which specifies the initial values of these parts.  */

static tree
init_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
    else if (TREE_PURPOSE (tail))
      {
	tree part = TREE_VALUE (tail);
	tree part_type = TREE_TYPE (part);
	tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
	parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
      }
  return parts;
}
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

static int
safe_from_p (x, exp, top_p)
     rtx x;
     tree exp;
     int top_p;
{
  rtx exp_rtl = 0;
  int i, nops;
  static int save_expr_count;
  static int save_expr_size = 0;
  static tree *save_expr_rewritten;
  static tree save_expr_trees[256];

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size
	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
	 So we assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
	      != INTEGER_CST)
	  && GET_MODE (x) == BLKmode))
    return 1;

  if (top_p && save_expr_size == 0)
    {
      int rtn;

      save_expr_count = 0;
      save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
      save_expr_rewritten = &save_expr_trees[0];

      rtn = safe_from_p (x, exp, 1);

      for (i = 0; i < save_expr_count; ++i)
	{
	  if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
	    abort ();
	  TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
	}

      save_expr_size = 0;

      return rtn;
    }
  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* If X is a location in the outgoing argument area, it is always safe.  */
  if (GET_CODE (x) == MEM
      && (XEXP (x, 0) == virtual_outgoing_args_rtx
	  || (GET_CODE (XEXP (x, 0)) == PLUS
	      && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
    return 1;

  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':
      exp_rtl = DECL_RTL (exp);
      break;

    case 'c':
      return 1;

    case 'x':
      if (TREE_CODE (exp) == TREE_LIST)
	return ((TREE_VALUE (exp) == 0
		 || safe_from_p (x, TREE_VALUE (exp), 0))
		&& (TREE_CHAIN (exp) == 0
		    || safe_from_p (x, TREE_CHAIN (exp), 0)));
      else if (TREE_CODE (exp) == ERROR_MARK)
	return 1;	/* An already-visited SAVE_EXPR? */
      else
	return 0;

    case '1':
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case '2':
    case '<':
      return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
	      && safe_from_p (x, TREE_OPERAND (exp, 1), 0));

    case 'e':
    case 'r':
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  return (staticp (TREE_OPERAND (exp, 0))
		  || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
		  || TREE_STATIC (exp));

	case INDIRECT_REF:
	  if (GET_CODE (x) == MEM)
	    return 0;
	  break;

	case CALL_EXPR:
	  exp_rtl = CALL_EXPR_RTL (exp);
	  if (exp_rtl == 0)
	    {
	      /* Assume that the call will clobber all hard registers and
		 all of memory.  */
	      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
		  || GET_CODE (x) == MEM)
		return 0;
	    }
	  break;

	case RTL_EXPR:
	  /* If a sequence exists, we would have to scan every instruction
	     in the sequence to see if it was safe.  This is probably not
	     worthwhile.  */
	  if (RTL_EXPR_SEQUENCE (exp))
	    return 0;

	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

	case WITH_CLEANUP_EXPR:
	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

	case CLEANUP_POINT_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

	case SAVE_EXPR:
	  exp_rtl = SAVE_EXPR_RTL (exp);
	  if (exp_rtl)
	    break;

	  /* This SAVE_EXPR might appear many times in the top-level
	     safe_from_p() expression, and if it has a complex
	     subexpression, examining it multiple times could result
	     in a combinatorial explosion.  E.g. on an Alpha
	     running at least 200MHz, a Fortran test case compiled with
	     optimization took about 28 minutes to compile -- even though
	     it was only a few lines long, and the complicated line causing
	     so much time to be spent in the earlier version of safe_from_p()
	     had only 293 or so unique nodes.

	     So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
	     where it is so we can turn it back into a SAVE_EXPR in the
	     top-level safe_from_p() when we're done.  */

	  /* For now, don't bother re-sizing the array.  */
	  if (save_expr_count >= save_expr_size)
	    return 0;
	  save_expr_rewritten[save_expr_count++] = exp;

	  nops = tree_code_length[(int) SAVE_EXPR];
	  for (i = 0; i < nops; i++)
	    {
	      tree operand = TREE_OPERAND (exp, i);
	      if (operand == NULL_TREE)
		continue;
	      TREE_SET_CODE (exp, ERROR_MARK);
	      if (!safe_from_p (x, operand, 0))
		return 0;
	      TREE_SET_CODE (exp, SAVE_EXPR);
	    }
	  TREE_SET_CODE (exp, ERROR_MARK);
	  return 1;

	case BIND_EXPR:
	  /* The only operand we look at is operand 1.  The rest aren't
	     part of the expression.  */
	  return safe_from_p (x, TREE_OPERAND (exp, 1), 0);

	case METHOD_CALL_EXPR:
	  /* This takes a rtx argument, but shouldn't appear here.  */
	  abort ();

	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = tree_code_length[(int) TREE_CODE (exp)];
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (GET_CODE (exp_rtl) == REG
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and EXP is not readonly.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
		    && ! TREE_READONLY (exp)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
/* Subroutine of expand_expr: return nonzero iff EXP is an
   expression whose type is statically determinable.  */

static int
fixed_type_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == PARM_DECL
      || TREE_CODE (exp) == VAR_DECL
      || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
      || TREE_CODE (exp) == COMPONENT_REF
      || TREE_CODE (exp) == ARRAY_REF)
    return 1;
  return 0;
}
/* Subroutine of expand_expr: return rtx if EXP is a
   variable or parameter; else return 0.  */

static rtx
var_rtx (exp)
     tree exp;
{
  STRIP_NOPS (exp);
  switch (TREE_CODE (exp))
    {
    case PARM_DECL:
    case VAR_DECL:
      return DECL_RTL (exp);
    default:
      return 0;
    }
}
#ifdef MAX_INTEGER_COMPUTATION_MODE
void
check_max_integer_computation_mode (exp)
     tree exp;
{
  enum tree_code code = TREE_CODE (exp);
  enum machine_mode mode;

  /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE.  */
  if (code == NOP_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
    return;

  /* First check the type of the overall operation.  We need only look at
     unary, binary and relational operations.  */
  if (TREE_CODE_CLASS (code) == '1'
      || TREE_CODE_CLASS (code) == '2'
      || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");
    }

  /* Check operand of a unary op.  */
  if (TREE_CODE_CLASS (code) == '1')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");
    }

  /* Check operands of a binary/comparison op.  */
  if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");

      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");
    }
}
#endif
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural
   mode.  TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.  */
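/* Added note (not in the original source): under EXPAND_SUM an address
   computation such as `&a[i]' may come back in the symbolic form
   (plus (reg) (const_int)) for the caller to fold into an address,
   instead of as a pseudo register holding the completed sum.  */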
5468 expand_expr (exp, target, tmode, modifier)
5469 register tree exp;
5470 rtx target;
5471 enum machine_mode tmode;
5472 enum expand_modifier modifier;
5474 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
5475 This is static so it will be accessible to our recursive callees. */
5476 static tree placeholder_list = 0;
5477 register rtx op0, op1, temp;
5478 tree type = TREE_TYPE (exp);
5479 int unsignedp = TREE_UNSIGNED (type);
5480 register enum machine_mode mode;
5481 register enum tree_code code = TREE_CODE (exp);
5482 optab this_optab;
5483 rtx subtarget, original_target;
5484 int ignore;
5485 tree context;
5486 /* Used by check-memory-usage to make modifier read only. */
5487 enum expand_modifier ro_modifier;
5489 /* Handle ERROR_MARK before anybody tries to access its type. */
5490 if (TREE_CODE (exp) == ERROR_MARK)
5492 op0 = CONST0_RTX (tmode);
5493 if (op0 != 0)
5494 return op0;
5495 return const0_rtx;
5498 mode = TYPE_MODE (type);
5499 /* Use subtarget as the target for operand 0 of a binary operation. */
5500 subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5501 original_target = target;
5502 ignore = (target == const0_rtx
5503 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5504 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5505 || code == COND_EXPR)
5506 && TREE_CODE (type) == VOID_TYPE));
5508 /* Make a read-only version of the modifier. */
5509 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5510 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5511 ro_modifier = modifier;
5512 else
5513 ro_modifier = EXPAND_NORMAL;
5515 /* Don't use hard regs as subtargets, because the combiner
5516 can only handle pseudo regs. */
5517 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5518 subtarget = 0;
5519 /* Avoid subtargets inside loops,
5520 since they hide some invariant expressions. */
5521 if (preserve_subexpressions_p ())
5522 subtarget = 0;
5524 /* If we are going to ignore this result, we need only do something
5525 if there is a side-effect somewhere in the expression. If there
5526 is, short-circuit the most common cases here. Note that we must
5527 not call expand_expr with anything but const0_rtx in case this
5528 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5530 if (ignore)
5532 if (! TREE_SIDE_EFFECTS (exp))
5533 return const0_rtx;
5535 /* Ensure we reference a volatile object even if value is ignored. */
5536 if (TREE_THIS_VOLATILE (exp)
5537 && TREE_CODE (exp) != FUNCTION_DECL
5538 && mode != VOIDmode && mode != BLKmode)
5540 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5541 if (GET_CODE (temp) == MEM)
5542 temp = copy_to_reg (temp);
5543 return const0_rtx;
5546 if (TREE_CODE_CLASS (code) == '1')
5547 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5548 VOIDmode, ro_modifier);
5549 else if (TREE_CODE_CLASS (code) == '2'
5550 || TREE_CODE_CLASS (code) == '<')
5552 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5553 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5554 return const0_rtx;
5556 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5557 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5558 /* If the second operand has no side effects, just evaluate
5559 the first. */
5560 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5561 VOIDmode, ro_modifier);
5563 target = 0;
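/* A concrete case of the short-circuiting above: `(void) (x + y);' has
   no side effects, so the whole expression collapses to const0_rtx,
   while `(void) (f () + y);' still expands `f ()' and `y' for their
   effects and discards the values.  Sketch only.  */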
5566 #ifdef MAX_INTEGER_COMPUTATION_MODE
5567 if (target
5568 && TREE_CODE (exp) != INTEGER_CST
5569 && TREE_CODE (exp) != PARM_DECL
5570 && TREE_CODE (exp) != ARRAY_REF
5571 && TREE_CODE (exp) != COMPONENT_REF
5572 && TREE_CODE (exp) != BIT_FIELD_REF
5573 && TREE_CODE (exp) != INDIRECT_REF
5574 && TREE_CODE (exp) != CALL_EXPR
5575 && TREE_CODE (exp) != VAR_DECL)
5577 enum machine_mode mode = GET_MODE (target);
5579 if (GET_MODE_CLASS (mode) == MODE_INT
5580 && mode > MAX_INTEGER_COMPUTATION_MODE)
5581 fatal ("unsupported wide integer operation");
5584 if (TREE_CODE (exp) != INTEGER_CST
5585 && TREE_CODE (exp) != PARM_DECL
5586 && TREE_CODE (exp) != ARRAY_REF
5587 && TREE_CODE (exp) != COMPONENT_REF
5588 && TREE_CODE (exp) != BIT_FIELD_REF
5589 && TREE_CODE (exp) != INDIRECT_REF
5590 && TREE_CODE (exp) != VAR_DECL
5591 && TREE_CODE (exp) != CALL_EXPR
5592 && GET_MODE_CLASS (tmode) == MODE_INT
5593 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5594 fatal ("unsupported wide integer operation");
5596 check_max_integer_computation_mode (exp);
5597 #endif
5599 /* If we will do cse, generate all results into pseudo registers
5600 since 1) that allows cse to find more things
5601 and 2) otherwise cse could produce an insn the machine
5602 cannot support. */
5604 if (! cse_not_expected && mode != BLKmode && target
5605 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5606 target = subtarget;
5608 switch (code)
5610 case LABEL_DECL:
5612 tree function = decl_function_context (exp);
5613 /* Handle using a label in a containing function. */
5614 if (function != current_function_decl
5615 && function != inline_function_decl && function != 0)
5617 struct function *p = find_function_data (function);
5618 /* Allocate in the memory associated with the function
5619 that the label is in. */
5620 push_obstacks (p->function_obstack,
5621 p->function_maybepermanent_obstack);
5623 p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5624 label_rtx (exp),
5625 p->forced_labels);
5626 pop_obstacks ();
5628 else
5630 if (modifier == EXPAND_INITIALIZER)
5631 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5632 label_rtx (exp),
5633 forced_labels);
5635 temp = gen_rtx_MEM (FUNCTION_MODE,
5636 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5637 if (function != current_function_decl
5638 && function != inline_function_decl && function != 0)
5639 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5640 return temp;
5643 case PARM_DECL:
5644 if (DECL_RTL (exp) == 0)
5646 error_with_decl (exp, "prior parameter's size depends on `%s'");
5647 return CONST0_RTX (mode);
5650 /* ... fall through ... */
5652 case VAR_DECL:
5653 /* If a static var's type was incomplete when the decl was written,
5654 but the type is complete now, lay out the decl now. */
5655 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5656 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5658 push_obstacks_nochange ();
5659 end_temporary_allocation ();
5660 layout_decl (exp, 0);
5661 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5662 pop_obstacks ();
5665 /* Although static-storage variables start off initialized, according to
5666 ANSI C, a memcpy could overwrite them with uninitialized values. So
5667 we check them too. This also lets us check for read-only variables
5668 accessed via a non-const declaration, in case it won't be detected
5669 any other way (e.g., in an embedded system or OS kernel without
5670 memory protection).
5672 Aggregates are not checked here; they're handled elsewhere. */
5673 if (current_function_check_memory_usage && code == VAR_DECL
5674 && GET_CODE (DECL_RTL (exp)) == MEM
5675 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5677 enum memory_use_mode memory_usage;
5678 memory_usage = get_memory_usage_from_modifier (modifier);
5680 if (memory_usage != MEMORY_USE_DONT)
5681 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5682 XEXP (DECL_RTL (exp), 0), Pmode,
5683 GEN_INT (int_size_in_bytes (type)),
5684 TYPE_MODE (sizetype),
5685 GEN_INT (memory_usage),
5686 TYPE_MODE (integer_type_node));
5689 /* ... fall through ... */
5691 case FUNCTION_DECL:
5692 case RESULT_DECL:
5693 if (DECL_RTL (exp) == 0)
5694 abort ();
5696 /* Ensure the variable is marked as used even if it doesn't go through
5697 a parser. If it hasn't been used yet, write out an external
5698 definition. */
5699 if (! TREE_USED (exp))
5701 assemble_external (exp);
5702 TREE_USED (exp) = 1;
5705 /* Show we haven't gotten RTL for this yet. */
5706 temp = 0;
5708 /* Handle variables inherited from containing functions. */
5709 context = decl_function_context (exp);
5711 /* We treat inline_function_decl as an alias for the current function
5712 because that is the inline function whose vars, types, etc.
5713 are being merged into the current function.
5714 See expand_inline_function. */
5716 if (context != 0 && context != current_function_decl
5717 && context != inline_function_decl
5718 /* If var is static, we don't need a static chain to access it. */
5719 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5720 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5722 rtx addr;
5724 /* Mark as non-local and addressable. */
5725 DECL_NONLOCAL (exp) = 1;
5726 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5727 abort ();
5728 mark_addressable (exp);
5729 if (GET_CODE (DECL_RTL (exp)) != MEM)
5730 abort ();
5731 addr = XEXP (DECL_RTL (exp), 0);
5732 if (GET_CODE (addr) == MEM)
5733 addr = gen_rtx_MEM (Pmode,
5734 fix_lexical_addr (XEXP (addr, 0), exp));
5735 else
5736 addr = fix_lexical_addr (addr, exp);
5737 temp = change_address (DECL_RTL (exp), mode, addr);
5740 /* This is the case of an array whose size is to be determined
5741 from its initializer, while the initializer is still being parsed.
5742 See expand_decl. */
5744 else if (GET_CODE (DECL_RTL (exp)) == MEM
5745 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5746 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5747 XEXP (DECL_RTL (exp), 0));
5749 /* If DECL_RTL is memory, we are in the normal case and either
5750 the address is not valid or it is not a register and -fforce-addr
5751 is specified, get the address into a register. */
5753 else if (GET_CODE (DECL_RTL (exp)) == MEM
5754 && modifier != EXPAND_CONST_ADDRESS
5755 && modifier != EXPAND_SUM
5756 && modifier != EXPAND_INITIALIZER
5757 && (! memory_address_p (DECL_MODE (exp),
5758 XEXP (DECL_RTL (exp), 0))
5759 || (flag_force_addr
5760 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5761 temp = change_address (DECL_RTL (exp), VOIDmode,
5762 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5764 /* If we got something, return it. But first, set the alignment
5765 if the address is a register. */
5766 if (temp != 0)
5768 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5769 mark_reg_pointer (XEXP (temp, 0),
5770 DECL_ALIGN (exp) / BITS_PER_UNIT);
5772 return temp;
5775 /* If the mode of DECL_RTL does not match that of the decl, it
5776 must be a promoted value. We return a SUBREG of the wanted mode,
5777 but mark it so that we know that it was already extended. */
5779 if (GET_CODE (DECL_RTL (exp)) == REG
5780 && GET_MODE (DECL_RTL (exp)) != mode)
5782 /* Get the signedness used for this variable. Ensure we get the
5783 same mode we got when the variable was declared. */
5784 if (GET_MODE (DECL_RTL (exp))
5785 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5786 abort ();
5788 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5789 SUBREG_PROMOTED_VAR_P (temp) = 1;
5790 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5791 return temp;
5794 return DECL_RTL (exp);
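/* E.g. on a target whose PROMOTE_MODE widens QImode variables to SImode
   registers, a `char' decl living in (reg:SI 42) is returned here as

	(subreg:QI (reg:SI 42) 0)

   with SUBREG_PROMOTED_VAR_P set, so later code knows the upper bits
   already hold a valid extension.  Rough sketch.  */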
5796 case INTEGER_CST:
5797 return immed_double_const (TREE_INT_CST_LOW (exp),
5798 TREE_INT_CST_HIGH (exp),
5799 mode);
5801 case CONST_DECL:
5802 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5803 EXPAND_MEMORY_USE_BAD);
5805 case REAL_CST:
5806 /* If optimized, generate immediate CONST_DOUBLE
5807 which will be turned into memory by reload if necessary.
5809 We used to force a register so that loop.c could see it. But
5810 this does not allow gen_* patterns to perform optimizations with
5811 the constants. It also produces two insns in cases like "x = 1.0;".
5812 On most machines, floating-point constants are not permitted in
5813 many insns, so we'd end up copying it to a register in any case.
5815 Now, we do the copying in expand_binop, if appropriate. */
5816 return immed_real_const (exp);
5818 case COMPLEX_CST:
5819 case STRING_CST:
5820 if (! TREE_CST_RTL (exp))
5821 output_constant_def (exp);
5823 /* TREE_CST_RTL probably contains a constant address.
5824 On RISC machines where a constant address isn't valid,
5825 make some insns to get that address into a register. */
5826 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5827 && modifier != EXPAND_CONST_ADDRESS
5828 && modifier != EXPAND_INITIALIZER
5829 && modifier != EXPAND_SUM
5830 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5831 || (flag_force_addr
5832 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5833 return change_address (TREE_CST_RTL (exp), VOIDmode,
5834 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5835 return TREE_CST_RTL (exp);
5837 case EXPR_WITH_FILE_LOCATION:
5839 rtx to_return;
5840 char *saved_input_filename = input_filename;
5841 int saved_lineno = lineno;
5842 input_filename = EXPR_WFL_FILENAME (exp);
5843 lineno = EXPR_WFL_LINENO (exp);
5844 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
5845 emit_line_note (input_filename, lineno);
5846 /* Possibly avoid switching back and forth here. */
5847 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
5848 input_filename = saved_input_filename;
5849 lineno = saved_lineno;
5850 return to_return;
5853 case SAVE_EXPR:
5854 context = decl_function_context (exp);
5856 /* If this SAVE_EXPR was at global context, assume we are an
5857 initialization function and move it into our context. */
5858 if (context == 0)
5859 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5861 /* We treat inline_function_decl as an alias for the current function
5862 because that is the inline function whose vars, types, etc.
5863 are being merged into the current function.
5864 See expand_inline_function. */
5865 if (context == current_function_decl || context == inline_function_decl)
5866 context = 0;
5868 /* If this is non-local, handle it. */
5869 if (context)
5871 /* The following call just exists to abort if the context is
5872 not of a containing function. */
5873 find_function_data (context);
5875 temp = SAVE_EXPR_RTL (exp);
5876 if (temp && GET_CODE (temp) == REG)
5878 put_var_into_stack (exp);
5879 temp = SAVE_EXPR_RTL (exp);
5881 if (temp == 0 || GET_CODE (temp) != MEM)
5882 abort ();
5883 return change_address (temp, mode,
5884 fix_lexical_addr (XEXP (temp, 0), exp));
5886 if (SAVE_EXPR_RTL (exp) == 0)
5888 if (mode == VOIDmode)
5889 temp = const0_rtx;
5890 else
5891 temp = assign_temp (type, 3, 0, 0);
5893 SAVE_EXPR_RTL (exp) = temp;
5894 if (!optimize && GET_CODE (temp) == REG)
5895 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
5896 save_expr_regs);
5898 /* If the mode of TEMP does not match that of the expression, it
5899 must be a promoted value. We pass store_expr a SUBREG of the
5900 wanted mode but mark it so that we know that it was already
5901 extended. Note that `unsignedp' was modified above in
5902 this case. */
5904 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5906 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5907 SUBREG_PROMOTED_VAR_P (temp) = 1;
5908 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5911 if (temp == const0_rtx)
5912 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5913 EXPAND_MEMORY_USE_BAD);
5914 else
5915 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5917 TREE_USED (exp) = 1;
5920 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5921 must be a promoted value. We return a SUBREG of the wanted mode,
5922 but mark it so that we know that it was already extended. */
5924 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5925 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5927 /* Compute the signedness and make the proper SUBREG. */
5928 promote_mode (type, mode, &unsignedp, 0);
5929 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5930 SUBREG_PROMOTED_VAR_P (temp) = 1;
5931 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5932 return temp;
5935 return SAVE_EXPR_RTL (exp);
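/* Sketch of the intent: for a tree like SAVE_EXPR (f (x)), the first
   expansion computes f (x) into TEMP and records it in SAVE_EXPR_RTL;
   every later expansion of the same node just returns that RTL, so the
   call happens exactly once however often the expression is reused.  */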
5937 case UNSAVE_EXPR:
5939 rtx temp;
5940 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5941 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5942 return temp;
5945 case PLACEHOLDER_EXPR:
5947 tree placeholder_expr;
5949 /* If there is an object on the head of the placeholder list,
5950 see if some object in it is of type TYPE or is a pointer to it. For
5951 further information, see tree.def. */
5952 for (placeholder_expr = placeholder_list;
5953 placeholder_expr != 0;
5954 placeholder_expr = TREE_CHAIN (placeholder_expr))
5956 tree need_type = TYPE_MAIN_VARIANT (type);
5957 tree object = 0;
5958 tree old_list = placeholder_list;
5959 tree elt;
5961 /* Find the outermost reference that is of the type we want.
5962 If none, see if any object has a type that is a pointer to
5963 the type we want. */
5964 for (elt = TREE_PURPOSE (placeholder_expr);
5965 elt != 0 && object == 0;
5966 elt
5967 = ((TREE_CODE (elt) == COMPOUND_EXPR
5968 || TREE_CODE (elt) == COND_EXPR)
5969 ? TREE_OPERAND (elt, 1)
5970 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5971 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5972 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5973 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5974 ? TREE_OPERAND (elt, 0) : 0))
5975 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5976 object = elt;
5978 for (elt = TREE_PURPOSE (placeholder_expr);
5979 elt != 0 && object == 0;
5980 elt
5981 = ((TREE_CODE (elt) == COMPOUND_EXPR
5982 || TREE_CODE (elt) == COND_EXPR)
5983 ? TREE_OPERAND (elt, 1)
5984 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5985 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5986 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5987 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5988 ? TREE_OPERAND (elt, 0) : 0))
5989 if (POINTER_TYPE_P (TREE_TYPE (elt))
5990 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
5991 == need_type))
5992 object = build1 (INDIRECT_REF, need_type, elt);
5994 if (object != 0)
5996 /* Expand this object skipping the list entries before
5997 it was found in case it is also a PLACEHOLDER_EXPR.
5998 In that case, we want to translate it using subsequent
5999 entries. */
6000 placeholder_list = TREE_CHAIN (placeholder_expr);
6001 temp = expand_expr (object, original_target, tmode,
6002 ro_modifier);
6003 placeholder_list = old_list;
6004 return temp;
6009 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6010 abort ();
6012 case WITH_RECORD_EXPR:
6013 /* Put the object on the placeholder list, expand our first operand,
6014 and pop the list. */
6015 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6016 placeholder_list);
6017 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6018 tmode, ro_modifier);
6019 placeholder_list = TREE_CHAIN (placeholder_list);
6020 return target;
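/* Rough example of the mechanism: for an Ada-style record whose field
   bounds refer to the record itself, a size expression of the form
   WITH_RECORD_EXPR (... PLACEHOLDER_EXPR ..., rec) is expanded by
   pushing `rec' on placeholder_list, so the PLACEHOLDER_EXPR inside
   finds an object of the wanted type (or a pointer to it) to stand in
   for the record under consideration.  */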
6022 case GOTO_EXPR:
6023 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6024 expand_goto (TREE_OPERAND (exp, 0));
6025 else
6026 expand_computed_goto (TREE_OPERAND (exp, 0));
6027 return const0_rtx;
6029 case EXIT_EXPR:
6030 expand_exit_loop_if_false (NULL_PTR,
6031 invert_truthvalue (TREE_OPERAND (exp, 0)));
6032 return const0_rtx;
6034 case LABELED_BLOCK_EXPR:
6035 if (LABELED_BLOCK_BODY (exp))
6036 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6037 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6038 return const0_rtx;
6040 case EXIT_BLOCK_EXPR:
6041 if (EXIT_BLOCK_RETURN (exp))
6042 sorry ("returned value in block_exit_expr");
6043 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6044 return const0_rtx;
6046 case LOOP_EXPR:
6047 push_temp_slots ();
6048 expand_start_loop (1);
6049 expand_expr_stmt (TREE_OPERAND (exp, 0));
6050 expand_end_loop ();
6051 pop_temp_slots ();
6053 return const0_rtx;
6055 case BIND_EXPR:
6057 tree vars = TREE_OPERAND (exp, 0);
6058 int vars_need_expansion = 0;
6060 /* Need to open a binding contour here because
6061 if there are any cleanups they must be contained here. */
6062 expand_start_bindings (0);
6064 /* Mark the corresponding BLOCK for output in its proper place. */
6065 if (TREE_OPERAND (exp, 2) != 0
6066 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6067 insert_block (TREE_OPERAND (exp, 2));
6069 /* If VARS have not yet been expanded, expand them now. */
6070 while (vars)
6072 if (DECL_RTL (vars) == 0)
6074 vars_need_expansion = 1;
6075 expand_decl (vars);
6077 expand_decl_init (vars);
6078 vars = TREE_CHAIN (vars);
6081 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6083 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6085 return temp;
6088 case RTL_EXPR:
6089 if (RTL_EXPR_SEQUENCE (exp))
6091 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6092 abort ();
6093 emit_insns (RTL_EXPR_SEQUENCE (exp));
6094 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6096 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6097 free_temps_for_rtl_expr (exp);
6098 return RTL_EXPR_RTL (exp);
6100 case CONSTRUCTOR:
6101 /* If we don't need the result, just ensure we evaluate any
6102 subexpressions. */
6103 if (ignore)
6105 tree elt;
6106 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6107 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6108 EXPAND_MEMORY_USE_BAD);
6109 return const0_rtx;
6112 /* All elts simple constants => refer to a constant in memory. But
6113 if this is a non-BLKmode mode, let it store a field at a time
6114 since that should make a CONST_INT or CONST_DOUBLE when we
6115 fold. Likewise, if we have a target we can use, it is best to
6116 store directly into the target unless the type is large enough
6117 that memcpy will be used. If we are making an initializer and
6118 all operands are constant, put it in memory as well. */
6119 else if ((TREE_STATIC (exp)
6120 && ((mode == BLKmode
6121 && ! (target != 0 && safe_from_p (target, exp, 1)))
6122 || TREE_ADDRESSABLE (exp)
6123 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
6124 && (!MOVE_BY_PIECES_P
6125 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
6126 TYPE_ALIGN (type) / BITS_PER_UNIT))
6127 && ! mostly_zeros_p (exp))))
6128 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6130 rtx constructor = output_constant_def (exp);
6131 if (modifier != EXPAND_CONST_ADDRESS
6132 && modifier != EXPAND_INITIALIZER
6133 && modifier != EXPAND_SUM
6134 && (! memory_address_p (GET_MODE (constructor),
6135 XEXP (constructor, 0))
6136 || (flag_force_addr
6137 && GET_CODE (XEXP (constructor, 0)) != REG)))
6138 constructor = change_address (constructor, VOIDmode,
6139 XEXP (constructor, 0));
6140 return constructor;
6143 else
6145 /* Handle calls that pass values in multiple non-contiguous
6146 locations. The Irix 6 ABI has examples of this. */
6147 if (target == 0 || ! safe_from_p (target, exp, 1)
6148 || GET_CODE (target) == PARALLEL)
6150 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6151 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6152 else
6153 target = assign_temp (type, 0, 1, 1);
6156 if (TREE_READONLY (exp))
6158 if (GET_CODE (target) == MEM)
6159 target = copy_rtx (target);
6161 RTX_UNCHANGING_P (target) = 1;
6164 store_constructor (exp, target, 0);
6165 return target;
6168 case INDIRECT_REF:
6170 tree exp1 = TREE_OPERAND (exp, 0);
6171 tree exp2;
6172 tree index;
6173 tree string = string_constant (exp1, &index);
6174 int i;
6176 /* Try to optimize reads from const strings. */
6177 if (string
6178 && TREE_CODE (string) == STRING_CST
6179 && TREE_CODE (index) == INTEGER_CST
6180 && !TREE_INT_CST_HIGH (index)
6181 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
6182 && GET_MODE_CLASS (mode) == MODE_INT
6183 && GET_MODE_SIZE (mode) == 1
6184 && modifier != EXPAND_MEMORY_USE_WO)
6185 return GEN_INT (TREE_STRING_POINTER (string)[i]);
6187 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6188 op0 = memory_address (mode, op0);
6190 if (current_function_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6192 enum memory_use_mode memory_usage;
6193 memory_usage = get_memory_usage_from_modifier (modifier);
6195 if (memory_usage != MEMORY_USE_DONT)
6197 in_check_memory_usage = 1;
6198 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6199 op0, Pmode,
6200 GEN_INT (int_size_in_bytes (type)),
6201 TYPE_MODE (sizetype),
6202 GEN_INT (memory_usage),
6203 TYPE_MODE (integer_type_node));
6204 in_check_memory_usage = 0;
6208 temp = gen_rtx_MEM (mode, op0);
6209 /* If address was computed by addition,
6210 mark this as an element of an aggregate. */
6211 if (TREE_CODE (exp1) == PLUS_EXPR
6212 || (TREE_CODE (exp1) == SAVE_EXPR
6213 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6214 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6215 || (TREE_CODE (exp1) == ADDR_EXPR
6216 && (exp2 = TREE_OPERAND (exp1, 0))
6217 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6218 MEM_SET_IN_STRUCT_P (temp, 1);
6220 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6221 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6223 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6224 here, because, in C and C++, the fact that a location is accessed
6225 through a pointer to const does not mean that the value there can
6226 never change. Languages where it can never change should
6227 also set TREE_STATIC. */
6228 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6229 return temp;
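/* The distinction drawn above matters: given `const int *p', a read of
   `*p' goes through a pointer-to-const, yet the location may change via
   another alias:

	int x = 0;  const int *p = &x;  x = 5;

   after which `*p' reads 5.  Hence RTX_UNCHANGING_P is set only when
   the object is also TREE_STATIC.  Illustrative sketch.  */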
6232 case ARRAY_REF:
6233 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6234 abort ();
6237 tree array = TREE_OPERAND (exp, 0);
6238 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6239 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6240 tree index = TREE_OPERAND (exp, 1);
6241 tree index_type = TREE_TYPE (index);
6242 HOST_WIDE_INT i;
6244 /* Optimize the special-case of a zero lower bound.
6246 We convert the low_bound to sizetype to avoid some problems
6247 with constant folding. (E.g. suppose the lower bound is 1,
6248 and its mode is QI. Without the conversion, (ARRAY
6249 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6250 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
6252 But sizetype isn't quite right either (especially if
6253 the lowbound is negative). FIXME */
6255 if (! integer_zerop (low_bound))
6256 index = fold (build (MINUS_EXPR, index_type, index,
6257 convert (sizetype, low_bound)));
6259 /* Fold an expression like: "foo"[2].
6260 This is not done in fold so it won't happen inside &.
6261 Don't fold if this is for wide characters since it's too
6262 difficult to do correctly and this is a very rare case. */
6264 if (TREE_CODE (array) == STRING_CST
6265 && TREE_CODE (index) == INTEGER_CST
6266 && !TREE_INT_CST_HIGH (index)
6267 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
6268 && GET_MODE_CLASS (mode) == MODE_INT
6269 && GET_MODE_SIZE (mode) == 1)
6270 return GEN_INT (TREE_STRING_POINTER (array)[i]);
6272 /* If this is a constant index into a constant array,
6273 just get the value from the array. Handle both the cases when
6274 we have an explicit constructor and when our operand is a variable
6275 that was declared const. */
6277 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
6279 if (TREE_CODE (index) == INTEGER_CST
6280 && TREE_INT_CST_HIGH (index) == 0)
6282 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
6284 i = TREE_INT_CST_LOW (index);
6285 while (elem && i--)
6286 elem = TREE_CHAIN (elem);
6287 if (elem)
6288 return expand_expr (fold (TREE_VALUE (elem)), target,
6289 tmode, ro_modifier);
6293 else if (optimize >= 1
6294 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6295 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6296 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6298 if (TREE_CODE (index) == INTEGER_CST)
6300 tree init = DECL_INITIAL (array);
6302 i = TREE_INT_CST_LOW (index);
6303 if (TREE_CODE (init) == CONSTRUCTOR)
6305 tree elem = CONSTRUCTOR_ELTS (init);
6307 while (elem
6308 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
6309 elem = TREE_CHAIN (elem);
6310 if (elem)
6311 return expand_expr (fold (TREE_VALUE (elem)), target,
6312 tmode, ro_modifier);
6314 else if (TREE_CODE (init) == STRING_CST
6315 && TREE_INT_CST_HIGH (index) == 0
6316 && (TREE_INT_CST_LOW (index)
6317 < TREE_STRING_LENGTH (init)))
6318 return (GEN_INT
6319 (TREE_STRING_POINTER
6320 (init)[TREE_INT_CST_LOW (index)]));
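/* E.g. with `static const char msg[] = "hi";' and optimization on, an
   access like `msg[1]' never touches memory: the STRING_CST initializer
   is consulted directly and the whole ARRAY_REF folds to GEN_INT ('i').
   Sketch of the case handled just above.  */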
6325 /* ... fall through ... */
6327 case COMPONENT_REF:
6328 case BIT_FIELD_REF:
6329 /* If the operand is a CONSTRUCTOR, we can just extract the
6330 appropriate field if it is present. Don't do this if we have
6331 already written the data since we want to refer to that copy
6332 and varasm.c assumes that's what we'll do. */
6333 if (code != ARRAY_REF
6334 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6335 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6337 tree elt;
6339 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6340 elt = TREE_CHAIN (elt))
6341 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6342 /* We can normally use the value of the field in the
6343 CONSTRUCTOR. However, if this is a bitfield in
6344 an integral mode that we can fit in a HOST_WIDE_INT,
6345 we must mask only the number of bits in the bitfield,
6346 since this is done implicitly by the constructor. If
6347 the bitfield does not meet either of those conditions,
6348 we can't do this optimization. */
6349 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6350 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6351 == MODE_INT)
6352 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6353 <= HOST_BITS_PER_WIDE_INT))))
6355 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6356 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6358 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
6360 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6362 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6363 op0 = expand_and (op0, op1, target);
6365 else
6367 enum machine_mode imode
6368 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6369 tree count
6370 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6371 0);
6373 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6374 target, 0);
6375 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6376 target, 0);
6380 return op0;
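/* Worked example of the bitfield adjustment above: extracting a signed
   3-bit field whose CONSTRUCTOR value has bit pattern 101 (i.e. 5) on a
   32-bit host shifts left by 29 and then arithmetic-right by 29,
   yielding -3, exactly what an in-memory bitfield would produce; an
   unsigned field instead masks with (1 << 3) - 1 = 7.  */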
6385 enum machine_mode mode1;
6386 int bitsize;
6387 int bitpos;
6388 tree offset;
6389 int volatilep = 0;
6390 int alignment;
6391 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6392 &mode1, &unsignedp, &volatilep,
6393 &alignment);
6395 /* If we got back the original object, something is wrong. Perhaps
6396 we are evaluating an expression too early. In any event, don't
6397 infinitely recurse. */
6398 if (tem == exp)
6399 abort ();
6401 /* If TEM's type is a union of variable size, pass TARGET to the inner
6402 computation, since it will need a temporary and TARGET is known
6403 to suffice. This occurs in unchecked conversion in Ada. */
6405 op0 = expand_expr (tem,
6406 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6407 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6408 != INTEGER_CST)
6409 ? target : NULL_RTX),
6410 VOIDmode,
6411 modifier == EXPAND_INITIALIZER
6412 ? modifier : EXPAND_NORMAL);
6414 /* If this is a constant, put it into a register if it is a
6415 legitimate constant and memory if it isn't. */
6416 if (CONSTANT_P (op0))
6418 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6419 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
6420 op0 = force_reg (mode, op0);
6421 else
6422 op0 = validize_mem (force_const_mem (mode, op0));
6425 if (offset != 0)
6427 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6429 if (GET_CODE (op0) != MEM)
6430 abort ();
6432 if (GET_MODE (offset_rtx) != ptr_mode)
6434 #ifdef POINTERS_EXTEND_UNSIGNED
6435 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6436 #else
6437 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6438 #endif
6441 /* A constant address in OP0 can have VOIDmode; we must not
6442 call force_reg in that case. Avoid that case. */
6443 if (GET_CODE (op0) == MEM
6444 && GET_MODE (op0) == BLKmode
6445 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6446 && bitsize
6447 && (bitpos % bitsize) == 0
6448 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6449 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6451 rtx temp = change_address (op0, mode1,
6452 plus_constant (XEXP (op0, 0),
6453 (bitpos /
6454 BITS_PER_UNIT)));
6455 if (GET_CODE (XEXP (temp, 0)) == REG)
6456 op0 = temp;
6457 else
6458 op0 = change_address (op0, mode1,
6459 force_reg (GET_MODE (XEXP (temp, 0)),
6460 XEXP (temp, 0)));
6461 bitpos = 0;
6465 op0 = change_address (op0, VOIDmode,
6466 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6467 force_reg (ptr_mode, offset_rtx)));
6470 /* Don't forget about volatility even if this is a bitfield. */
6471 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6473 op0 = copy_rtx (op0);
6474 MEM_VOLATILE_P (op0) = 1;
6477 /* Check the access. */
6478 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
6480 enum memory_use_mode memory_usage;
6481 memory_usage = get_memory_usage_from_modifier (modifier);
6483 if (memory_usage != MEMORY_USE_DONT)
6485 rtx to;
6486 int size;
6488 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6489 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6491 /* Check the access right of the pointer. */
6492 if (size > BITS_PER_UNIT)
6493 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6494 to, Pmode,
6495 GEN_INT (size / BITS_PER_UNIT),
6496 TYPE_MODE (sizetype),
6497 GEN_INT (memory_usage),
6498 TYPE_MODE (integer_type_node));
6502 /* In cases where an aligned union has an unaligned object
6503 as a field, we might be extracting a BLKmode value from
6504 an integer-mode (e.g., SImode) object. Handle this case
6505 by doing the extract into an object as wide as the field
6506 (which we know to be the width of a basic mode), then
6507 storing into memory, and changing the mode to BLKmode.
6508 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6509 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6510 if (mode1 == VOIDmode
6511 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6512 || (modifier != EXPAND_CONST_ADDRESS
6513 && modifier != EXPAND_INITIALIZER
6514 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6515 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6516 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6517 /* If the field isn't aligned enough to fetch as a memref,
6518 fetch it as a bit field. */
6519 || (SLOW_UNALIGNED_ACCESS
6520 && ((TYPE_ALIGN (TREE_TYPE (tem)) < (unsigned int) GET_MODE_ALIGNMENT (mode))
6521 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
6523 enum machine_mode ext_mode = mode;
6525 if (ext_mode == BLKmode)
6526 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6528 if (ext_mode == BLKmode)
6530 /* In this case, BITPOS must start at a byte boundary and
6531 TARGET, if specified, must be a MEM. */
6532 if (GET_CODE (op0) != MEM
6533 || (target != 0 && GET_CODE (target) != MEM)
6534 || bitpos % BITS_PER_UNIT != 0)
6535 abort ();
6537 op0 = change_address (op0, VOIDmode,
6538 plus_constant (XEXP (op0, 0),
6539 bitpos / BITS_PER_UNIT));
6540 if (target == 0)
6541 target = assign_temp (type, 0, 1, 1);
6543 emit_block_move (target, op0,
6544 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6545 / BITS_PER_UNIT),
6546 1);
6548 return target;
6551 op0 = validize_mem (op0);
6553 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6554 mark_reg_pointer (XEXP (op0, 0), alignment);
6556 op0 = extract_bit_field (op0, bitsize, bitpos,
6557 unsignedp, target, ext_mode, ext_mode,
6558 alignment,
6559 int_size_in_bytes (TREE_TYPE (tem)));
6561 /* If the result is a record type and BITSIZE is narrower than
6562 the mode of OP0, an integral mode, and this is a big endian
6563 machine, we must put the field into the high-order bits. */
6564 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6565 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6566 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6567 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6568 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6569 - bitsize),
6570 op0, 1);
6572 if (mode == BLKmode)
6574 rtx new = assign_stack_temp (ext_mode,
6575 bitsize / BITS_PER_UNIT, 0);
6577 emit_move_insn (new, op0);
6578 op0 = copy_rtx (new);
6579 PUT_MODE (op0, BLKmode);
6580 MEM_SET_IN_STRUCT_P (op0, 1);
6583 return op0;
6586 /* If the result is BLKmode, use that to access the object
6587 now as well. */
6588 if (mode == BLKmode)
6589 mode1 = BLKmode;
6591 /* Get a reference to just this component. */
6592 if (modifier == EXPAND_CONST_ADDRESS
6593 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6594 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6595 (bitpos / BITS_PER_UNIT)));
6596 else
6597 op0 = change_address (op0, mode1,
6598 plus_constant (XEXP (op0, 0),
6599 (bitpos / BITS_PER_UNIT)));
6601 if (GET_CODE (op0) == MEM)
6602 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6604 if (GET_CODE (XEXP (op0, 0)) == REG)
6605 mark_reg_pointer (XEXP (op0, 0), alignment);
6607 MEM_SET_IN_STRUCT_P (op0, 1);
6608 MEM_VOLATILE_P (op0) |= volatilep;
6609 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6610 || modifier == EXPAND_CONST_ADDRESS
6611 || modifier == EXPAND_INITIALIZER)
6612 return op0;
6613 else if (target == 0)
6614 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6616 convert_move (target, op0, unsignedp);
6617 return target;
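/* To summarize this reference-handling path with an example: for
   `struct s { int a : 7; } *p; ... p->a', get_inner_reference reports a
   bitsize of 7 at some bit position, the MEM cannot be fetched as an
   ordinary memref, so extract_bit_field pulls the field out into a
   register, which may then be shifted for big-endian RECORD_TYPE
   results or converted to TMODE.  Rough sketch of the flow, not
   target-exact.  */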
6620 /* Intended for a reference to a buffer of a file-object in Pascal.
6621 But it's not certain that a special tree code will really be
6622 necessary for these. INDIRECT_REF might work for them. */
6623 case BUFFER_REF:
6624 abort ();
6626 case IN_EXPR:
6628 /* Pascal set IN expression.
6630 Algorithm:
6631 rlo = set_low - (set_low%bits_per_word);
6632 the_word = set [ (index - rlo)/bits_per_word ];
6633 bit_index = index % bits_per_word;
6634 bitmask = 1 << bit_index;
6635 return !!(the_word & bitmask); */
6637 tree set = TREE_OPERAND (exp, 0);
6638 tree index = TREE_OPERAND (exp, 1);
6639 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6640 tree set_type = TREE_TYPE (set);
6641 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6642 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6643 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6644 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6645 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6646 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6647 rtx setaddr = XEXP (setval, 0);
6648 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6649 rtx rlow;
6650 rtx diff, quo, rem, addr, bit, result;
6652 preexpand_calls (exp);
6654 /* If domain is empty, answer is no. Likewise if index is constant
6655 and out of bounds. */
6656 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6657 && TREE_CODE (set_low_bound) == INTEGER_CST
6658 && tree_int_cst_lt (set_high_bound, set_low_bound))
6659 || (TREE_CODE (index) == INTEGER_CST
6660 && TREE_CODE (set_low_bound) == INTEGER_CST
6661 && tree_int_cst_lt (index, set_low_bound))
6662 || (TREE_CODE (set_high_bound) == INTEGER_CST
6663 && TREE_CODE (index) == INTEGER_CST
6664 && tree_int_cst_lt (set_high_bound, index))))
6665 return const0_rtx;
6667 if (target == 0)
6668 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6670 /* If we get here, we have to generate the code for both cases
6671 (in range and out of range). */
6673 op0 = gen_label_rtx ();
6674 op1 = gen_label_rtx ();
6676 if (! (GET_CODE (index_val) == CONST_INT
6677 && GET_CODE (lo_r) == CONST_INT))
6679 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6680 GET_MODE (index_val), iunsignedp, 0, op1);
6683 if (! (GET_CODE (index_val) == CONST_INT
6684 && GET_CODE (hi_r) == CONST_INT))
6686 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6687 GET_MODE (index_val), iunsignedp, 0, op1);
6690 /* Calculate the element number of bit zero in the first word
6691 of the set. */
6692 if (GET_CODE (lo_r) == CONST_INT)
6693 rlow = GEN_INT (INTVAL (lo_r)
6694 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6695 else
6696 rlow = expand_binop (index_mode, and_optab, lo_r,
6697 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6698 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6700 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6701 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6703 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6704 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6705 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6706 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6708 addr = memory_address (byte_mode,
6709 expand_binop (index_mode, add_optab, diff,
6710 setaddr, NULL_RTX, iunsignedp,
6711 OPTAB_LIB_WIDEN));
6713 /* Extract the bit we want to examine. */
6714 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6715 gen_rtx_MEM (byte_mode, addr),
6716 make_tree (TREE_TYPE (index), rem),
6717 NULL_RTX, 1);
6718 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6719 GET_MODE (target) == byte_mode ? target : 0,
6720 1, OPTAB_LIB_WIDEN);
6722 if (result != target)
6723 convert_move (target, result, 1);
6725 /* Output the code to handle the out-of-range case. */
6726 emit_jump (op0);
6727 emit_label (op1);
6728 emit_move_insn (target, const0_rtx);
6729 emit_label (op0);
6730 return target;
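/* Worked instance of the pseudocode above, with 8-bit units: for
   set_low = 0 and index = 11, rlo = 0, the_word = set[11 / 8] = set[1],
   bit_index = 11 % 8 = 3, bitmask = 1 << 3, and the result is
   !!(set[1] & 8), i.e. (set[1] >> 3) & 1.  */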
6733 case WITH_CLEANUP_EXPR:
6734 if (RTL_EXPR_RTL (exp) == 0)
6736 RTL_EXPR_RTL (exp)
6737 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6738 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6740 /* That's it for this cleanup. */
6741 TREE_OPERAND (exp, 2) = 0;
6743 return RTL_EXPR_RTL (exp);
6745 case CLEANUP_POINT_EXPR:
6747 /* Start a new binding layer that will keep track of all cleanup
6748 actions to be performed. */
6749 expand_start_bindings (0);
6751 target_temp_slot_level = temp_slot_level;
6753 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6754 /* If we're going to use this value, load it up now. */
6755 if (! ignore)
6756 op0 = force_not_mem (op0);
6757 preserve_temp_slots (op0);
6758 expand_end_bindings (NULL_TREE, 0, 0);
6760 return op0;
6762 case CALL_EXPR:
6763 /* Check for a built-in function. */
6764 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6765 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6766 == FUNCTION_DECL)
6767 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6768 return expand_builtin (exp, target, subtarget, tmode, ignore);
6770 /* If this call was expanded already by preexpand_calls,
6771 just return the result we got. */
6772 if (CALL_EXPR_RTL (exp) != 0)
6773 return CALL_EXPR_RTL (exp);
6775 return expand_call (exp, target, ignore);
6777 case NON_LVALUE_EXPR:
6778 case NOP_EXPR:
6779 case CONVERT_EXPR:
6780 case REFERENCE_EXPR:
6781 if (TREE_CODE (type) == UNION_TYPE)
6783 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6784 if (target == 0)
6786 if (mode != BLKmode)
6787 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6788 else
6789 target = assign_temp (type, 0, 1, 1);
6792 if (GET_CODE (target) == MEM)
6793 /* Store data into beginning of memory target. */
6794 store_expr (TREE_OPERAND (exp, 0),
6795 change_address (target, TYPE_MODE (valtype), 0), 0);
6797 else if (GET_CODE (target) == REG)
6798 /* Store this field into a union of the proper type. */
6799 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6800 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6801 VOIDmode, 0, 1,
6802 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))),
6803 0);
6804 else
6805 abort ();
6807 /* Return the entire union. */
6808 return target;
6811 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6813 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
6814 ro_modifier);
6816 /* If the signedness of the conversion differs and OP0 is
6817 a promoted SUBREG, clear that indication since we now
6818 have to do the proper extension. */
6819 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6820 && GET_CODE (op0) == SUBREG)
6821 SUBREG_PROMOTED_VAR_P (op0) = 0;
6823 return op0;
6826 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6827 if (GET_MODE (op0) == mode)
6828 return op0;
6830 /* If OP0 is a constant, just convert it into the proper mode. */
6831 if (CONSTANT_P (op0))
6832 return
6833 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6834 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6836 if (modifier == EXPAND_INITIALIZER)
6837 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
6839 if (target == 0)
6840 return
6841 convert_to_mode (mode, op0,
6842 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6843 else
6844 convert_move (target, op0,
6845 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6846 return target;
6848 case PLUS_EXPR:
6849 /* We come here from MINUS_EXPR when the second operand is a
6850 constant. */
6851 plus_expr:
6852 this_optab = add_optab;
6854 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6855 something else, make sure we add the register to the constant and
6856 then to the other thing. This case can occur during strength
6857 reduction and doing it this way will produce better code if the
6858 frame pointer or argument pointer is eliminated.
6860 fold-const.c will ensure that the constant is always in the inner
6861 PLUS_EXPR, so the only case we need to do anything about is if
6862 sp, ap, or fp is our second argument, in which case we must swap
6863 the innermost first argument and our second argument. */
6865 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6866 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6867 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6868 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6869 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6870 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6872 tree t = TREE_OPERAND (exp, 1);
6874 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6875 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6878 /* If the result is to be ptr_mode and we are adding an integer to
6879 something, we might be forming a constant. So try to use
6880 plus_constant. If it produces a sum and we can't accept it,
6881 use force_operand. This allows P = &ARR[const] to generate
6882 efficient code on machines where a SYMBOL_REF is not a valid
6883 address.
6885 If this is an EXPAND_SUM call, always return the sum. */
6886 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
6887 || mode == ptr_mode)
6889 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6890 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6891 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6893 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6894 EXPAND_SUM);
6895 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
6896 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6897 op1 = force_operand (op1, target);
6898 return op1;
6901 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6902 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
6903 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6905 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6906 EXPAND_SUM);
6907 if (! CONSTANT_P (op0))
6909 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6910 VOIDmode, modifier);
6911 /* Don't go to both_summands if modifier
6912 says it's not right to return a PLUS. */
6913 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6914 goto binop2;
6915 goto both_summands;
6917 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
6918 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6919 op0 = force_operand (op0, target);
6920 return op0;
6924 /* No sense saving up arithmetic to be done
6925 if it's all in the wrong mode to form part of an address.
6926 And force_operand won't know whether to sign-extend or
6927 zero-extend. */
6928 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6929 || mode != ptr_mode)
6930 goto binop;
6932 preexpand_calls (exp);
6933 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6934 subtarget = 0;
6936 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
6937 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
6939 both_summands:
6940 /* Make sure any term that's a sum with a constant comes last. */
6941 if (GET_CODE (op0) == PLUS
6942 && CONSTANT_P (XEXP (op0, 1)))
6944 temp = op0;
6945 op0 = op1;
6946 op1 = temp;
6948 /* If adding to a sum including a constant,
6949 associate it to put the constant outside. */
6950 if (GET_CODE (op1) == PLUS
6951 && CONSTANT_P (XEXP (op1, 1)))
6953 rtx constant_term = const0_rtx;
6955 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6956 if (temp != 0)
6957 op0 = temp;
6958 /* Ensure that MULT comes first if there is one. */
6959 else if (GET_CODE (op0) == MULT)
6960 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
6961 else
6962 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
6964 /* Let's also eliminate constants from op0 if possible. */
6965 op0 = eliminate_constant_term (op0, &constant_term);
6967 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6968 their sum should be a constant. Form it into OP1, since the
6969 result we want will then be OP0 + OP1. */
6971 temp = simplify_binary_operation (PLUS, mode, constant_term,
6972 XEXP (op1, 1));
6973 if (temp != 0)
6974 op1 = temp;
6975 else
6976 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
6979 /* Put a constant term last and put a multiplication first. */
6980 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6981 temp = op1, op1 = op0, op0 = temp;
6983 temp = simplify_binary_operation (PLUS, mode, op0, op1);
6984 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
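/* Example of the reassociation just performed: expanding
   `(reg + 4) + (symbol + 8)' first swaps so the constant-bearing sum is
   OP1, then folds both constants out, ending in roughly

	(plus (plus (reg) (symbol_ref)) (const_int 12))

   which is a valid address on many machines.  Sketch only.  */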
6986 case MINUS_EXPR:
6987 /* For initializers, we are allowed to return a MINUS of two
6988 symbolic constants. Here we handle all cases when both operands
6989 are constant. */
6990 /* Handle difference of two symbolic constants,
6991 for the sake of an initializer. */
6992 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6993 && really_constant_p (TREE_OPERAND (exp, 0))
6994 && really_constant_p (TREE_OPERAND (exp, 1)))
6996 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
6997 VOIDmode, ro_modifier);
6998 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6999 VOIDmode, ro_modifier);
7001 /* If the last operand is a CONST_INT, use plus_constant of
7002 the negated constant. Else make the MINUS. */
7003 if (GET_CODE (op1) == CONST_INT)
7004 return plus_constant (op0, - INTVAL (op1));
7005 else
7006 return gen_rtx_MINUS (mode, op0, op1);
7008 /* Convert A - const to A + (-const). */
7009 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7011 tree negated = fold (build1 (NEGATE_EXPR, type,
7012 TREE_OPERAND (exp, 1)));
7014 /* Deal with the case where we can't negate the constant
7015 in TYPE. */
7016 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7018 tree newtype = signed_type (type);
7019 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
7020 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
7021 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
7023 if (! TREE_OVERFLOW (newneg))
7024 return expand_expr (convert (type,
7025 build (PLUS_EXPR, newtype,
7026 newop0, newneg)),
7027 target, tmode, ro_modifier);
7029 else
7031 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7032 goto plus_expr;
7035 this_optab = sub_optab;
7036 goto binop;
7038 case MULT_EXPR:
7039 preexpand_calls (exp);
7040 /* If first operand is constant, swap them.
7041 Thus the following special case checks need only
7042 check the second operand. */
7043 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7045 register tree t1 = TREE_OPERAND (exp, 0);
7046 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7047 TREE_OPERAND (exp, 1) = t1;
7050 /* Attempt to return something suitable for generating an
7051 indexed address, for machines that support that. */
7053 if (modifier == EXPAND_SUM && mode == ptr_mode
7054 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7055 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7057 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7058 EXPAND_SUM);
7060 /* Apply distributive law if OP0 is x+c. */
7061 if (GET_CODE (op0) == PLUS
7062 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7063 return gen_rtx_PLUS (mode,
7064 gen_rtx_MULT (mode, XEXP (op0, 0),
7065 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7066 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7067 * INTVAL (XEXP (op0, 1))));
7069 if (GET_CODE (op0) != REG)
7070 op0 = force_operand (op0, NULL_RTX);
7071 if (GET_CODE (op0) != REG)
7072 op0 = copy_to_mode_reg (mode, op0);
7074 return gen_rtx_MULT (mode, op0,
7075 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
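/* E.g. for an EXPAND_SUM expansion of `(i + 3) * 4' (an array index,
   say), OP0 comes back as (plus (reg) (const_int 3)) and the code above
   distributes to

	(plus (mult (reg) (const_int 4)) (const_int 12))

   ready to be folded into an indexed addressing mode.  Sketch.  */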
7078 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7079 subtarget = 0;
7081 /* Check for multiplying things that have been extended
7082 from a narrower type. If this machine supports multiplying
7083 in that narrower type with a result in the desired type,
7084 do it that way, and avoid the explicit type-conversion. */
7085 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7086 && TREE_CODE (type) == INTEGER_TYPE
7087 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7088 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7089 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7090 && int_fits_type_p (TREE_OPERAND (exp, 1),
7091 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7092 /* Don't use a widening multiply if a shift will do. */
7093 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7094 > HOST_BITS_PER_WIDE_INT)
7095 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7096 ||
7097 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7098 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7099 ==
7100 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7101 /* If both operands are extended, they must either both
7102 be zero-extended or both be sign-extended. */
7103 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7104 ==
7105 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7107 enum machine_mode innermode
7108 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7109 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7110 ? smul_widen_optab : umul_widen_optab);
7111 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7112 ? umul_widen_optab : smul_widen_optab);
7113 if (mode == GET_MODE_WIDER_MODE (innermode))
7115 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7117 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7118 NULL_RTX, VOIDmode, 0);
7119 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7120 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7121 VOIDmode, 0);
7122 else
7123 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7124 NULL_RTX, VOIDmode, 0);
7125 goto binop2;
7127 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7128 && innermode == word_mode)
7130 rtx htem;
7131 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7132 NULL_RTX, VOIDmode, 0);
7133 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7134 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7135 VOIDmode, 0);
7136 else
7137 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7138 NULL_RTX, VOIDmode, 0);
7139 temp = expand_binop (mode, other_optab, op0, op1, target,
7140 unsignedp, OPTAB_LIB_WIDEN);
7141 htem = expand_mult_highpart_adjust (innermode,
7142 gen_highpart (innermode, temp),
7143 op0, op1,
7144 gen_highpart (innermode, temp),
7145 unsignedp);
7146 emit_move_insn (gen_highpart (innermode, temp), htem);
7147 return temp;
7151 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7152 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7153 return expand_mult (mode, op0, op1, target, unsignedp);
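/* The widening-multiply check above catches source like

	short a, b;  int prod = a * b;

   which the C front end writes as (int) a * (int) b: if the target has
   a mulhisi3-style widening multiply, both NOP_EXPRs are stripped and
   the narrow HImode operands are handed to the widening optab directly,
   instead of extending each to SImode first.  Rough sketch; insn names
   vary by target.  */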
7155 case TRUNC_DIV_EXPR:
7156 case FLOOR_DIV_EXPR:
7157 case CEIL_DIV_EXPR:
7158 case ROUND_DIV_EXPR:
7159 case EXACT_DIV_EXPR:
7160 preexpand_calls (exp);
7161 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7162 subtarget = 0;
7163 /* Possible optimization: compute the dividend with EXPAND_SUM;
7164 then, if the divisor is constant, we can optimize the case
7165 where some terms of the dividend have coefficients divisible by it. */
7166 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7167 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7168 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7170 case RDIV_EXPR:
7171 this_optab = flodiv_optab;
7172 goto binop;
7174 case TRUNC_MOD_EXPR:
7175 case FLOOR_MOD_EXPR:
7176 case CEIL_MOD_EXPR:
7177 case ROUND_MOD_EXPR:
7178 preexpand_calls (exp);
7179 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7180 subtarget = 0;
7181 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7182 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7183 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7185 case FIX_ROUND_EXPR:
7186 case FIX_FLOOR_EXPR:
7187 case FIX_CEIL_EXPR:
7188 abort (); /* Not used for C. */
7190 case FIX_TRUNC_EXPR:
7191 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7192 if (target == 0)
7193 target = gen_reg_rtx (mode);
7194 expand_fix (target, op0, unsignedp);
7195 return target;
7197 case FLOAT_EXPR:
7198 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7199 if (target == 0)
7200 target = gen_reg_rtx (mode);
7201 /* expand_float can't figure out what to do if FROM has VOIDmode.
7202 So give it the correct mode. With -O, cse will optimize this. */
7203 if (GET_MODE (op0) == VOIDmode)
7204 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7205 op0);
7206 expand_float (target, op0,
7207 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7208 return target;
7210 case NEGATE_EXPR:
7211 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7212 temp = expand_unop (mode, neg_optab, op0, target, 0);
7213 if (temp == 0)
7214 abort ();
7215 return temp;
7217 case ABS_EXPR:
7218 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7220 /* Handle complex values specially. */
7221 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7222 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7223 return expand_complex_abs (mode, op0, target, unsignedp);
7225 /* Unsigned abs is simply the operand. Testing here means we don't
7226 risk generating incorrect code below. */
7227 if (TREE_UNSIGNED (type))
7228 return op0;
7230 return expand_abs (mode, op0, target,
7231 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7233 case MAX_EXPR:
7234 case MIN_EXPR:
7235 target = original_target;
7236 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7237 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7238 || GET_MODE (target) != mode
7239 || (GET_CODE (target) == REG
7240 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7241 target = gen_reg_rtx (mode);
7242 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7243 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7245 /* First try to do it with a special MIN or MAX instruction.
7246 If that does not win, use a conditional jump to select the proper
7247 value. */
7248 this_optab = (TREE_UNSIGNED (type)
7249 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7250 : (code == MIN_EXPR ? smin_optab : smax_optab));
7252 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7253 OPTAB_WIDEN);
7254 if (temp != 0)
7255 return temp;
7257 /* At this point, a MEM target is no longer useful; we will get better
7258 code without it. */
7260 if (GET_CODE (target) == MEM)
7261 target = gen_reg_rtx (mode);
7263 if (target != op0)
7264 emit_move_insn (target, op0);
7266 op0 = gen_label_rtx ();
7268 /* If this mode is an integer too wide to compare properly,
7269 compare word by word. Rely on cse to optimize constant cases. */
7270 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
7272 if (code == MAX_EXPR)
7273 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7274 target, op1, NULL_RTX, op0);
7275 else
7276 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7277 op1, target, NULL_RTX, op0);
7278 emit_move_insn (target, op1);
7280 else
7282 if (code == MAX_EXPR)
7283 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
7284 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
7285 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
7286 else
7287 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
7288 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
7289 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
7290 if (temp == const0_rtx)
7291 emit_move_insn (target, op1);
7292 else if (temp != const_true_rtx)
7294 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
7295 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
7296 else
7297 abort ();
7298 emit_move_insn (target, op1);
7301 emit_label (op0);
7302 return target;
7304 case BIT_NOT_EXPR:
7305 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7306 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7307 if (temp == 0)
7308 abort ();
7309 return temp;
7311 case FFS_EXPR:
7312 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7313 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7314 if (temp == 0)
7315 abort ();
7316 return temp;
7318 /* ??? Can optimize bitwise operations with one arg constant.
7319 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7320 and (a bitwise1 b) bitwise2 b (etc)
7321 but that is probably not worthwhile. */
7323 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7324 boolean values when we want in all cases to compute both of them. In
7325 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7326 as actual zero-or-1 values and then bitwise anding. In cases where
7327 there cannot be any side effects, better code would be made by
7328 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7329 how to recognize those cases. */
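/* Illustration: `(a > 0) && (b > 0)' expressed as a TRUTH_AND_EXPR
computes both comparisons as zero-or-one values and ANDs them,
evaluating B even when A is false; as a TRUTH_ANDIF_EXPR it would
branch around B instead. */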
7331 case TRUTH_AND_EXPR:
7332 case BIT_AND_EXPR:
7333 this_optab = and_optab;
7334 goto binop;
7336 case TRUTH_OR_EXPR:
7337 case BIT_IOR_EXPR:
7338 this_optab = ior_optab;
7339 goto binop;
7341 case TRUTH_XOR_EXPR:
7342 case BIT_XOR_EXPR:
7343 this_optab = xor_optab;
7344 goto binop;
7346 case LSHIFT_EXPR:
7347 case RSHIFT_EXPR:
7348 case LROTATE_EXPR:
7349 case RROTATE_EXPR:
7350 preexpand_calls (exp);
7351 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7352 subtarget = 0;
7353 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7354 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7355 unsignedp);
7357 /* Could determine the answer when only additive constants differ. Also,
7358 the addition of one can be handled by changing the condition. */
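/* (For instance, `x + 1 > y' is equivalent to `x >= y' for signed
operands when `x + 1' cannot overflow, so the addition could be
folded into the condition; neither rewrite is attempted here.) */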
7359 case LT_EXPR:
7360 case LE_EXPR:
7361 case GT_EXPR:
7362 case GE_EXPR:
7363 case EQ_EXPR:
7364 case NE_EXPR:
7365 preexpand_calls (exp);
7366 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7367 if (temp != 0)
7368 return temp;
7370 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7371 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7372 && original_target
7373 && GET_CODE (original_target) == REG
7374 && (GET_MODE (original_target)
7375 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7377 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7378 VOIDmode, 0);
7380 if (temp != original_target)
7381 temp = copy_to_reg (temp);
7383 op1 = gen_label_rtx ();
7384 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7385 GET_MODE (temp), unsignedp, 0, op1);
7386 emit_move_insn (temp, const1_rtx);
7387 emit_label (op1);
7388 return temp;
7391 /* If no set-flag instruction, must generate a conditional
7392 store into a temporary variable. Drop through
7393 and handle this like && and ||. */
7395 case TRUTH_ANDIF_EXPR:
7396 case TRUTH_ORIF_EXPR:
7397 if (! ignore
7398 && (target == 0 || ! safe_from_p (target, exp, 1)
7399 /* Make sure we don't have a hard reg (such as function's return
7400 value) live across basic blocks, if not optimizing. */
7401 || (!optimize && GET_CODE (target) == REG
7402 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7403 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7405 if (target)
7406 emit_clr_insn (target);
7408 op1 = gen_label_rtx ();
7409 jumpifnot (exp, op1);
7411 if (target)
7412 emit_0_to_1_insn (target);
7414 emit_label (op1);
7415 return ignore ? const0_rtx : target;
7417 case TRUTH_NOT_EXPR:
7418 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7419 /* The parser is careful to generate TRUTH_NOT_EXPR
7420 only with operands that are always zero or one. */
7421 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7422 target, 1, OPTAB_LIB_WIDEN);
7423 if (temp == 0)
7424 abort ();
7425 return temp;
7427 case COMPOUND_EXPR:
7428 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7429 emit_queue ();
7430 return expand_expr (TREE_OPERAND (exp, 1),
7431 (ignore ? const0_rtx : target),
7432 VOIDmode, 0);
7434 case COND_EXPR:
7435 /* If we would have a "singleton" (see below) were it not for a
7436 conversion in each arm, bring that conversion back out. */
7437 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7438 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7439 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7440 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7442 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7443 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7445 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7446 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7447 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7448 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7449 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7450 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7451 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7452 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7453 return expand_expr (build1 (NOP_EXPR, type,
7454 build (COND_EXPR, TREE_TYPE (true),
7455 TREE_OPERAND (exp, 0),
7456 true, false)),
7457 target, tmode, modifier);
7461 /* Note that COND_EXPRs whose type is a structure or union
7462 are required to be constructed to contain assignments of
7463 a temporary variable, so that we can evaluate them here
7464 for side effect only. If type is void, we must do likewise. */
7466 /* If an arm of the branch requires a cleanup,
7467 only that cleanup is performed. */
7469 tree singleton = 0;
7470 tree binary_op = 0, unary_op = 0;
7472 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7473 convert it to our mode, if necessary. */
7474 if (integer_onep (TREE_OPERAND (exp, 1))
7475 && integer_zerop (TREE_OPERAND (exp, 2))
7476 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7478 if (ignore)
7480 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7481 ro_modifier);
7482 return const0_rtx;
7485 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7486 if (GET_MODE (op0) == mode)
7487 return op0;
7489 if (target == 0)
7490 target = gen_reg_rtx (mode);
7491 convert_move (target, op0, unsignedp);
7492 return target;
7495 /* Check for X ? A + B : A. If we have this, we can copy A to the
7496 output and conditionally add B. Similarly for unary operations.
7497 Don't do this if X has side-effects because those side effects
7498 might affect A or B and the "?" operation is a sequence point in
7499 ANSI. (operand_equal_p tests for side effects.) */
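/* E.g. in `x ? a + b : a' the else-arm A is the "singleton": A can
be stored into the target unconditionally, and B added into it
only when X is true. */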
7501 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7502 && operand_equal_p (TREE_OPERAND (exp, 2),
7503 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7504 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7505 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7506 && operand_equal_p (TREE_OPERAND (exp, 1),
7507 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7508 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7509 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7510 && operand_equal_p (TREE_OPERAND (exp, 2),
7511 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7512 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7513 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7514 && operand_equal_p (TREE_OPERAND (exp, 1),
7515 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7516 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7518 /* If we are not to produce a result, we have no target. Otherwise,
7519 if a target was specified use it; it will not be used as an
7520 intermediate target unless it is safe. If no target, use a
7521 temporary. */
7523 if (ignore)
7524 temp = 0;
7525 else if (original_target
7526 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7527 || (singleton && GET_CODE (original_target) == REG
7528 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7529 && original_target == var_rtx (singleton)))
7530 && GET_MODE (original_target) == mode
7531 #ifdef HAVE_conditional_move
7532 && (! can_conditionally_move_p (mode)
7533 || GET_CODE (original_target) == REG
7534 || TREE_ADDRESSABLE (type))
7535 #endif
7536 && ! (GET_CODE (original_target) == MEM
7537 && MEM_VOLATILE_P (original_target)))
7538 temp = original_target;
7539 else if (TREE_ADDRESSABLE (type))
7540 abort ();
7541 else
7542 temp = assign_temp (type, 0, 0, 1);
7544 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7545 do the test of X as a store-flag operation, do this as
7546 A + ((X != 0) << log C). Similarly for other simple binary
7547 operators. Only do for C == 1 if BRANCH_COST is low. */
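/* E.g. with BRANCH_COST >= 3, `p < q ? a + 4 : a' can become
`a + ((p < q) << 2)', trading the branch for a store-flag and
a shift. */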
7548 if (temp && singleton && binary_op
7549 && (TREE_CODE (binary_op) == PLUS_EXPR
7550 || TREE_CODE (binary_op) == MINUS_EXPR
7551 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7552 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7553 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7554 : integer_onep (TREE_OPERAND (binary_op, 1)))
7555 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7557 rtx result;
7558 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7559 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7560 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7561 : xor_optab);
7563 /* If we had X ? A : A + 1, do this as A + (X == 0).
7565 We have to invert the truth value here and then put it
7566 back later if do_store_flag fails. We cannot simply copy
7567 TREE_OPERAND (exp, 0) to another variable and modify that
7568 because invert_truthvalue can modify the tree pointed to
7569 by its argument. */
7570 if (singleton == TREE_OPERAND (exp, 1))
7571 TREE_OPERAND (exp, 0)
7572 = invert_truthvalue (TREE_OPERAND (exp, 0));
7574 result = do_store_flag (TREE_OPERAND (exp, 0),
7575 (safe_from_p (temp, singleton, 1)
7576 ? temp : NULL_RTX),
7577 mode, BRANCH_COST <= 1);
7579 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7580 result = expand_shift (LSHIFT_EXPR, mode, result,
7581 build_int_2 (tree_log2
7582 (TREE_OPERAND
7583 (binary_op, 1)),
7585 (safe_from_p (temp, singleton, 1)
7586 ? temp : NULL_RTX), 0);
7588 if (result)
7590 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7591 return expand_binop (mode, boptab, op1, result, temp,
7592 unsignedp, OPTAB_LIB_WIDEN);
7594 else if (singleton == TREE_OPERAND (exp, 1))
7595 TREE_OPERAND (exp, 0)
7596 = invert_truthvalue (TREE_OPERAND (exp, 0));
7599 do_pending_stack_adjust ();
7600 NO_DEFER_POP;
7601 op0 = gen_label_rtx ();
7603 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7605 if (temp != 0)
7607 /* If the target conflicts with the other operand of the
7608 binary op, we can't use it. Also, we can't use the target
7609 if it is a hard register, because evaluating the condition
7610 might clobber it. */
7611 if ((binary_op
7612 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7613 || (GET_CODE (temp) == REG
7614 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7615 temp = gen_reg_rtx (mode);
7616 store_expr (singleton, temp, 0);
7618 else
7619 expand_expr (singleton,
7620 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7621 if (singleton == TREE_OPERAND (exp, 1))
7622 jumpif (TREE_OPERAND (exp, 0), op0);
7623 else
7624 jumpifnot (TREE_OPERAND (exp, 0), op0);
7626 start_cleanup_deferral ();
7627 if (binary_op && temp == 0)
7628 /* Just touch the other operand. */
7629 expand_expr (TREE_OPERAND (binary_op, 1),
7630 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7631 else if (binary_op)
7632 store_expr (build (TREE_CODE (binary_op), type,
7633 make_tree (type, temp),
7634 TREE_OPERAND (binary_op, 1)),
7635 temp, 0);
7636 else
7637 store_expr (build1 (TREE_CODE (unary_op), type,
7638 make_tree (type, temp)),
7639 temp, 0);
7640 op1 = op0;
7642 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7643 comparison operator. If we have one of these cases, set the
7644 output to A, branch on A (cse will merge these two references),
7645 then set the output to FOO. */
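/* E.g. for `x != 0 ? x : y' we store X into the target, branch on
the same comparison (cse will merge the two references to X), and
store Y only on the other arm. */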
7646 else if (temp
7647 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7648 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7649 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7650 TREE_OPERAND (exp, 1), 0)
7651 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7652 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7653 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7655 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7656 temp = gen_reg_rtx (mode);
7657 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7658 jumpif (TREE_OPERAND (exp, 0), op0);
7660 start_cleanup_deferral ();
7661 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7662 op1 = op0;
7664 else if (temp
7665 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7666 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7667 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7668 TREE_OPERAND (exp, 2), 0)
7669 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7670 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7671 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7673 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7674 temp = gen_reg_rtx (mode);
7675 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7676 jumpifnot (TREE_OPERAND (exp, 0), op0);
7678 start_cleanup_deferral ();
7679 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7680 op1 = op0;
7682 else
7684 op1 = gen_label_rtx ();
7685 jumpifnot (TREE_OPERAND (exp, 0), op0);
7687 start_cleanup_deferral ();
7688 if (temp != 0)
7689 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7690 else
7691 expand_expr (TREE_OPERAND (exp, 1),
7692 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7693 end_cleanup_deferral ();
7694 emit_queue ();
7695 emit_jump_insn (gen_jump (op1));
7696 emit_barrier ();
7697 emit_label (op0);
7698 start_cleanup_deferral ();
7699 if (temp != 0)
7700 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7701 else
7702 expand_expr (TREE_OPERAND (exp, 2),
7703 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7706 end_cleanup_deferral ();
7708 emit_queue ();
7709 emit_label (op1);
7710 OK_DEFER_POP;
7712 return temp;
7715 case TARGET_EXPR:
7717 /* Something needs to be initialized, but we didn't know
7718 where that thing was when building the tree. For example,
7719 it could be the return value of a function, or a parameter
7720 to a function which is laid down on the stack, or a temporary
7721 variable which must be passed by reference.
7723 We guarantee that the expression will either be constructed
7724 or copied into our original target. */
7726 tree slot = TREE_OPERAND (exp, 0);
7727 tree cleanups = NULL_TREE;
7728 tree exp1;
7730 if (TREE_CODE (slot) != VAR_DECL)
7731 abort ();
7733 if (! ignore)
7734 target = original_target;
7736 if (target == 0)
7738 if (DECL_RTL (slot) != 0)
7740 target = DECL_RTL (slot);
7741 /* If we have already expanded the slot, don't do
7742 it again. (mrs) */
7743 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7744 return target;
7746 else
7748 target = assign_temp (type, 2, 0, 1);
7749 /* All temp slots at this level must not conflict. */
7750 preserve_temp_slots (target);
7751 DECL_RTL (slot) = target;
7752 if (TREE_ADDRESSABLE (slot))
7754 TREE_ADDRESSABLE (slot) = 0;
7755 mark_addressable (slot);
7758 /* Since SLOT is not known to the called function
7759 to belong to its stack frame, we must build an explicit
7760 cleanup. This case occurs when we must build up a reference
7761 to pass as an argument. In this case,
7762 it is very likely that such a reference need not be
7763 built here. */
7765 if (TREE_OPERAND (exp, 2) == 0)
7766 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
7767 cleanups = TREE_OPERAND (exp, 2);
7770 else
7772 /* This case does occur, when expanding a parameter which
7773 needs to be constructed on the stack. The target
7774 is the actual stack address that we want to initialize.
7775 The function we call will perform the cleanup in this case. */
7777 /* If we have already assigned it space, use that space,
7778 not the target that we were passed in, as our target
7779 parameter is only a hint. */
7780 if (DECL_RTL (slot) != 0)
7782 target = DECL_RTL (slot);
7783 /* If we have already expanded the slot, don't do
7784 it again. (mrs) */
7785 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7786 return target;
7788 else
7790 DECL_RTL (slot) = target;
7791 /* If we must have an addressable slot, then make sure that
7792 the RTL that we just stored in slot is OK. */
7793 if (TREE_ADDRESSABLE (slot))
7795 TREE_ADDRESSABLE (slot) = 0;
7796 mark_addressable (slot);
7801 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
7802 /* Mark it as expanded. */
7803 TREE_OPERAND (exp, 1) = NULL_TREE;
7805 TREE_USED (slot) = 1;
7806 store_expr (exp1, target, 0);
7808 expand_decl_cleanup (NULL_TREE, cleanups);
7810 return target;
7813 case INIT_EXPR:
7815 tree lhs = TREE_OPERAND (exp, 0);
7816 tree rhs = TREE_OPERAND (exp, 1);
7817 tree noncopied_parts = 0;
7818 tree lhs_type = TREE_TYPE (lhs);
7820 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7821 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7822 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7823 TYPE_NONCOPIED_PARTS (lhs_type));
7824 while (noncopied_parts != 0)
7826 expand_assignment (TREE_VALUE (noncopied_parts),
7827 TREE_PURPOSE (noncopied_parts), 0, 0);
7828 noncopied_parts = TREE_CHAIN (noncopied_parts);
7830 return temp;
7833 case MODIFY_EXPR:
7835 /* If lhs is complex, expand calls in rhs before computing it.
7836 That's so we don't compute a pointer and save it over a call.
7837 If lhs is simple, compute it first so we can give it as a
7838 target if the rhs is just a call. This avoids an extra temp and copy
7839 and that prevents a partial-subsumption which makes bad code.
7840 Actually we could treat component_ref's of vars like vars. */
7842 tree lhs = TREE_OPERAND (exp, 0);
7843 tree rhs = TREE_OPERAND (exp, 1);
7844 tree noncopied_parts = 0;
7845 tree lhs_type = TREE_TYPE (lhs);
7847 temp = 0;
7849 if (TREE_CODE (lhs) != VAR_DECL
7850 && TREE_CODE (lhs) != RESULT_DECL
7851 && TREE_CODE (lhs) != PARM_DECL
7852 && ! (TREE_CODE (lhs) == INDIRECT_REF
7853 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7854 preexpand_calls (exp);
7856 /* Check for |= or &= of a bitfield of size one into another bitfield
7857 of size 1. In this case, (unless we need the result of the
7858 assignment) we can do this more efficiently with a
7859 test followed by an assignment, if necessary.
7861 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7862 things change so we do, this code should be enhanced to
7863 support it. */
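/* E.g. `s.a |= s.b' with one-bit fields becomes
`if (! s.b) goto L; s.a = 1; L:' -- a test plus a conditional
store instead of a read-modify-write; `&=' dually stores 0 when
the other bit is clear. */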
7864 if (ignore
7865 && TREE_CODE (lhs) == COMPONENT_REF
7866 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7867 || TREE_CODE (rhs) == BIT_AND_EXPR)
7868 && TREE_OPERAND (rhs, 0) == lhs
7869 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7870 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7871 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7873 rtx label = gen_label_rtx ();
7875 do_jump (TREE_OPERAND (rhs, 1),
7876 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7877 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7878 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7879 (TREE_CODE (rhs) == BIT_IOR_EXPR
7880 ? integer_one_node
7881 : integer_zero_node)),
7882 0, 0);
7883 do_pending_stack_adjust ();
7884 emit_label (label);
7885 return const0_rtx;
7888 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7889 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7890 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7891 TYPE_NONCOPIED_PARTS (lhs_type));
7893 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7894 while (noncopied_parts != 0)
7896 expand_assignment (TREE_PURPOSE (noncopied_parts),
7897 TREE_VALUE (noncopied_parts), 0, 0);
7898 noncopied_parts = TREE_CHAIN (noncopied_parts);
7900 return temp;
7903 case RETURN_EXPR:
7904 if (!TREE_OPERAND (exp, 0))
7905 expand_null_return ();
7906 else
7907 expand_return (TREE_OPERAND (exp, 0));
7908 return const0_rtx;
7910 case PREINCREMENT_EXPR:
7911 case PREDECREMENT_EXPR:
7912 return expand_increment (exp, 0, ignore);
7914 case POSTINCREMENT_EXPR:
7915 case POSTDECREMENT_EXPR:
7916 /* Faster to treat as pre-increment if result is not used. */
7917 return expand_increment (exp, ! ignore, ignore);
7919 case ADDR_EXPR:
7920 /* If nonzero, TEMP will be set to the address of something that might
7921 be a MEM corresponding to a stack slot. */
7922 temp = 0;
7924 /* Are we taking the address of a nested function? */
7925 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7926 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7927 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
7928 && ! TREE_STATIC (exp))
7930 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7931 op0 = force_operand (op0, target);
7933 /* If we are taking the address of something erroneous, just
7934 return a zero. */
7935 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7936 return const0_rtx;
7937 else
7939 /* We make sure to pass const0_rtx down if we came in with
7940 ignore set, to avoid running the cleanups twice. */
7941 op0 = expand_expr (TREE_OPERAND (exp, 0),
7942 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7943 (modifier == EXPAND_INITIALIZER
7944 ? modifier : EXPAND_CONST_ADDRESS));
7946 /* If we are going to ignore the result, OP0 will have been set
7947 to const0_rtx, so just return it. Don't get confused and
7948 think we are taking the address of the constant. */
7949 if (ignore)
7950 return op0;
7952 op0 = protect_from_queue (op0, 0);
7954 /* We would like the object in memory. If it is a constant,
7955 we can have it be statically allocated into memory. For
7956 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7957 memory and store the value into it. */
7959 if (CONSTANT_P (op0))
7960 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7961 op0);
7962 else if (GET_CODE (op0) == MEM)
7964 mark_temp_addr_taken (op0);
7965 temp = XEXP (op0, 0);
7968 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7969 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7971 /* If this object is in a register, it must not
7972 be BLKmode. */
7973 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7974 rtx memloc = assign_temp (inner_type, 1, 1, 1);
7976 mark_temp_addr_taken (memloc);
7977 emit_move_insn (memloc, op0);
7978 op0 = memloc;
7981 if (GET_CODE (op0) != MEM)
7982 abort ();
7984 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7986 temp = XEXP (op0, 0);
7987 #ifdef POINTERS_EXTEND_UNSIGNED
7988 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7989 && mode == ptr_mode)
7990 temp = convert_memory_address (ptr_mode, temp);
7991 #endif
7992 return temp;
7995 op0 = force_operand (XEXP (op0, 0), target);
7998 if (flag_force_addr && GET_CODE (op0) != REG)
7999 op0 = force_reg (Pmode, op0);
8001 if (GET_CODE (op0) == REG
8002 && ! REG_USERVAR_P (op0))
8003 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
8005 /* If we might have had a temp slot, add an equivalent address
8006 for it. */
8007 if (temp != 0)
8008 update_temp_slot_address (temp, op0);
8010 #ifdef POINTERS_EXTEND_UNSIGNED
8011 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8012 && mode == ptr_mode)
8013 op0 = convert_memory_address (ptr_mode, op0);
8014 #endif
8016 return op0;
8018 case ENTRY_VALUE_EXPR:
8019 abort ();
8021 /* COMPLEX type for Extended Pascal & Fortran */
8022 case COMPLEX_EXPR:
8024 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8025 rtx insns;
8027 /* Get the rtx code of the operands. */
8028 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8029 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8031 if (! target)
8032 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8034 start_sequence ();
8036 /* Move the real (op0) and imaginary (op1) parts to their location. */
8037 emit_move_insn (gen_realpart (mode, target), op0);
8038 emit_move_insn (gen_imagpart (mode, target), op1);
8040 insns = get_insns ();
8041 end_sequence ();
8043 /* Complex construction should appear as a single unit. */
8044 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8045 each with a separate pseudo as destination.
8046 It's not correct for flow to treat them as a unit. */
8047 if (GET_CODE (target) != CONCAT)
8048 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8049 else
8050 emit_insns (insns);
8052 return target;
8055 case REALPART_EXPR:
8056 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8057 return gen_realpart (mode, op0);
8059 case IMAGPART_EXPR:
8060 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8061 return gen_imagpart (mode, op0);
8063 case CONJ_EXPR:
8065 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8066 rtx imag_t;
8067 rtx insns;
8069 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8071 if (! target)
8072 target = gen_reg_rtx (mode);
8074 start_sequence ();
8076 /* Store the realpart and the negated imagpart to target. */
8077 emit_move_insn (gen_realpart (partmode, target),
8078 gen_realpart (partmode, op0));
8080 imag_t = gen_imagpart (partmode, target);
8081 temp = expand_unop (partmode, neg_optab,
8082 gen_imagpart (partmode, op0), imag_t, 0);
8083 if (temp != imag_t)
8084 emit_move_insn (imag_t, temp);
8086 insns = get_insns ();
8087 end_sequence ();
8089 /* Conjugate should appear as a single unit.
8090 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8091 each with a separate pseudo as destination.
8092 It's not correct for flow to treat them as a unit. */
8093 if (GET_CODE (target) != CONCAT)
8094 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8095 else
8096 emit_insns (insns);
8098 return target;
8101 case TRY_CATCH_EXPR:
8103 tree handler = TREE_OPERAND (exp, 1);
8105 expand_eh_region_start ();
8107 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8109 expand_eh_region_end (handler);
8111 return op0;
8114 case TRY_FINALLY_EXPR:
8116 tree try_block = TREE_OPERAND (exp, 0);
8117 tree finally_block = TREE_OPERAND (exp, 1);
8118 rtx finally_label = gen_label_rtx ();
8119 rtx done_label = gen_label_rtx ();
8120 rtx return_link = gen_reg_rtx (Pmode);
8121 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8122 (tree) finally_label, (tree) return_link);
8123 TREE_SIDE_EFFECTS (cleanup) = 1;
8125 /* Start a new binding layer that will keep track of all cleanup
8126 actions to be performed. */
8127 expand_start_bindings (0);
8129 target_temp_slot_level = temp_slot_level;
8131 expand_decl_cleanup (NULL_TREE, cleanup);
8132 op0 = expand_expr (try_block, target, tmode, modifier);
8134 preserve_temp_slots (op0);
8135 expand_end_bindings (NULL_TREE, 0, 0);
8136 emit_jump (done_label);
8137 emit_label (finally_label);
8138 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8139 emit_indirect_jump (return_link);
8140 emit_label (done_label);
8141 return op0;
8144 case GOTO_SUBROUTINE_EXPR:
8146 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8147 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8148 rtx return_address = gen_label_rtx ();
8149 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8150 emit_jump (subr);
8151 emit_label (return_address);
8152 return const0_rtx;
8155 case POPDCC_EXPR:
8157 rtx dcc = get_dynamic_cleanup_chain ();
8158 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8159 return const0_rtx;
8162 case POPDHC_EXPR:
8164 rtx dhc = get_dynamic_handler_chain ();
8165 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8166 return const0_rtx;
8169 default:
8170 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8173 /* Here to do an ordinary binary operator, generating an instruction
8174 from the optab already placed in `this_optab'. */
8175 binop:
8176 preexpand_calls (exp);
8177 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8178 subtarget = 0;
8179 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8180 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8181 binop2:
8182 temp = expand_binop (mode, this_optab, op0, op1, target,
8183 unsignedp, OPTAB_LIB_WIDEN);
8184 if (temp == 0)
8185 abort ();
8186 return temp;
8191 /* Return the alignment in bits of EXP, a pointer valued expression.
8192 But don't return more than MAX_ALIGN no matter what.
8193 The alignment returned is, by default, the alignment of the thing that
8194 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
8196 Otherwise, look at the expression to see if we can do better, i.e., if the
8197 expression is actually pointing at an object whose alignment is tighter. */
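/* For instance, given `double d', the type of `(char *) &d' promises
only byte alignment, but the ADDR_EXPR case below recovers the
DECL_ALIGN of D. */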
8199 static int
8200 get_pointer_alignment (exp, max_align)
8201 tree exp;
8202 unsigned max_align;
8204 unsigned align, inner;
8206 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
8207 return 0;
8209 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8210 align = MIN (align, max_align);
8212 while (1)
8214 switch (TREE_CODE (exp))
8216 case NOP_EXPR:
8217 case CONVERT_EXPR:
8218 case NON_LVALUE_EXPR:
8219 exp = TREE_OPERAND (exp, 0);
8220 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
8221 return align;
8222 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8223 align = MIN (inner, max_align);
8224 break;
8226 case PLUS_EXPR:
8227 /* If sum of pointer + int, restrict our maximum alignment to that
8228 imposed by the integer. If not, we can't do any better than
8229 ALIGN. */
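/* E.g. `p + 6' preserves at best 2-byte alignment, so MAX_ALIGN is
shifted down until it divides 6 * BITS_PER_UNIT, i.e. to 16 bits. */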
8230 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
8231 return align;
8233 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
8234 & (max_align - 1))
8235 != 0)
8236 max_align >>= 1;
8238 exp = TREE_OPERAND (exp, 0);
8239 break;
8241 case ADDR_EXPR:
8242 /* See what we are pointing at and look at its alignment. */
8243 exp = TREE_OPERAND (exp, 0);
8244 if (TREE_CODE (exp) == FUNCTION_DECL)
8245 align = FUNCTION_BOUNDARY;
8246 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
8247 align = DECL_ALIGN (exp);
8248 #ifdef CONSTANT_ALIGNMENT
8249 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
8250 align = CONSTANT_ALIGNMENT (exp, align);
8251 #endif
8252 return MIN (align, max_align);
8254 default:
8255 return align;
8260 /* Return the tree node and offset if a given argument corresponds to
8261 a string constant. */
8263 static tree
8264 string_constant (arg, ptr_offset)
8265 tree arg;
8266 tree *ptr_offset;
8268 STRIP_NOPS (arg);
8270 if (TREE_CODE (arg) == ADDR_EXPR
8271 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8273 *ptr_offset = integer_zero_node;
8274 return TREE_OPERAND (arg, 0);
8276 else if (TREE_CODE (arg) == PLUS_EXPR)
8278 tree arg0 = TREE_OPERAND (arg, 0);
8279 tree arg1 = TREE_OPERAND (arg, 1);
8281 STRIP_NOPS (arg0);
8282 STRIP_NOPS (arg1);
8284 if (TREE_CODE (arg0) == ADDR_EXPR
8285 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8287 *ptr_offset = arg1;
8288 return TREE_OPERAND (arg0, 0);
8290 else if (TREE_CODE (arg1) == ADDR_EXPR
8291 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8293 *ptr_offset = arg0;
8294 return TREE_OPERAND (arg1, 0);
8298 return 0;
8301 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
8302 way, because it could contain a zero byte in the middle.
8303 TREE_STRING_LENGTH is the size of the character array, not the string.
8305 Unfortunately, string_constant can't access the values of const char
8306 arrays with initializers, so neither can we do so here. */
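/* E.g. for (char[8]) "foo\0bar", TREE_STRING_LENGTH is 8 but the C
string length is 3. */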
8308 static tree
8309 c_strlen (src)
8310 tree src;
8312 tree offset_node;
8313 int offset, max;
8314 char *ptr;
8316 src = string_constant (src, &offset_node);
8317 if (src == 0)
8318 return 0;
8319 max = TREE_STRING_LENGTH (src);
8320 ptr = TREE_STRING_POINTER (src);
8321 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
8323 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
8324 compute the offset to the following null if we don't know where to
8325 start searching for it. */
8326 int i;
8327 for (i = 0; i < max; i++)
8328 if (ptr[i] == 0)
8329 return 0;
8330 /* We don't know the starting offset, but we do know that the string
8331 has no internal zero bytes. We can assume that the offset falls
8332 within the bounds of the string; otherwise, the programmer deserves
8333 what he gets. Subtract the offset from the length of the string,
8334 and return that. */
8335 /* This would perhaps not be valid if we were dealing with named
8336 arrays in addition to literal string constants. */
8337 return size_binop (MINUS_EXPR, size_int (max), offset_node);
8340 /* We have a known offset into the string. Start searching there for
8341 a null character. */
8342 if (offset_node == 0)
8343 offset = 0;
8344 else
8346 /* Did we get a long long offset? If so, punt. */
8347 if (TREE_INT_CST_HIGH (offset_node) != 0)
8348 return 0;
8349 offset = TREE_INT_CST_LOW (offset_node);
8351 /* If the offset is known to be out of bounds, warn, and call strlen at
8352 runtime. */
8353 if (offset < 0 || offset > max)
8355 warning ("offset outside bounds of constant string");
8356 return 0;
8358 /* Use strlen to search for the first zero byte. Since any strings
8359 constructed with build_string will have nulls appended, we win even
8360 if we get handed something like (char[4])"abcd".
8362 Since OFFSET is our starting index into the string, no further
8363 calculation is needed. */
8364 return size_int (strlen (ptr + offset));
rtx
8368 expand_builtin_return_addr (fndecl_code, count, tem)
8369 enum built_in_function fndecl_code;
8370 int count;
8371 rtx tem;
8373 int i;
8375 /* Some machines need special handling before we can access
8376 arbitrary frames. For example, on the sparc, we must first flush
8377 all register windows to the stack. */
8378 #ifdef SETUP_FRAME_ADDRESSES
8379 if (count > 0)
8380 SETUP_FRAME_ADDRESSES ();
8381 #endif
8383 /* On the sparc, the return address is not in the frame, it is in a
8384 register. There is no way to access it off of the current frame
8385 pointer, but it can be accessed off the previous frame pointer by
8386 reading the value from the register window save area. */
8387 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
8388 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
8389 count--;
8390 #endif
8392 /* Scan back COUNT frames to the specified frame. */
8393 for (i = 0; i < count; i++)
8395 /* Assume the dynamic chain pointer is in the word that the
8396 frame address points to, unless otherwise specified. */
8397 #ifdef DYNAMIC_CHAIN_ADDRESS
8398 tem = DYNAMIC_CHAIN_ADDRESS (tem);
8399 #endif
8400 tem = memory_address (Pmode, tem);
8401 tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
8404 /* For __builtin_frame_address, return what we've got. */
8405 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
8406 return tem;
8408 /* For __builtin_return_address, get the return address from that
8409 frame. */
8410 #ifdef RETURN_ADDR_RTX
8411 tem = RETURN_ADDR_RTX (count, tem);
8412 #else
8413 tem = memory_address (Pmode,
8414 plus_constant (tem, GET_MODE_SIZE (Pmode)));
8415 tem = gen_rtx_MEM (Pmode, tem);
8416 #endif
8417 return tem;
8420 /* __builtin_setjmp is passed a pointer to an array of five words (not
8421 all will be used on all machines). It operates similarly to the C
8422 library function of the same name, but is more efficient. Much of
8423 the code below (and for longjmp) is copied from the handling of
8424 non-local gotos.
8426 NOTE: This is intended for use by GNAT and the exception handling
8427 scheme in the compiler and will only work in the method used by
8428 them. */
rtx
8431 expand_builtin_setjmp (buf_addr, target, first_label, next_label)
8432 rtx buf_addr;
8433 rtx target;
8434 rtx first_label, next_label;
8436 rtx lab1 = gen_label_rtx ();
8437 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
8438 enum machine_mode value_mode;
8439 rtx stack_save;
8441 value_mode = TYPE_MODE (integer_type_node);
8443 #ifdef POINTERS_EXTEND_UNSIGNED
8444 buf_addr = convert_memory_address (Pmode, buf_addr);
8445 #endif
8447 buf_addr = force_reg (Pmode, buf_addr);
8449 if (target == 0 || GET_CODE (target) != REG
8450 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8451 target = gen_reg_rtx (value_mode);
8453 emit_queue ();
8455 /* We store the frame pointer and the address of lab1 in the buffer
8456 and use the rest of it for the stack save area, which is
8457 machine-dependent. */
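/* Concretely: word 0 of the buffer holds the frame pointer (or
BUILTIN_SETJMP_FRAME_VALUE), word 1 the address of LAB1, and the
words from offset 2 * GET_MODE_SIZE (Pmode) onward the stack save
area. */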
8459 #ifndef BUILTIN_SETJMP_FRAME_VALUE
8460 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
8461 #endif
8463 emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
8464 BUILTIN_SETJMP_FRAME_VALUE);
8465 emit_move_insn (validize_mem
8466 (gen_rtx_MEM (Pmode,
8467 plus_constant (buf_addr,
8468 GET_MODE_SIZE (Pmode)))),
8469 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, lab1)));
8471 stack_save = gen_rtx_MEM (sa_mode,
8472 plus_constant (buf_addr,
8473 2 * GET_MODE_SIZE (Pmode)));
8474 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8476 /* If there is further processing to do, do it. */
8477 #ifdef HAVE_builtin_setjmp_setup
8478 if (HAVE_builtin_setjmp_setup)
8479 emit_insn (gen_builtin_setjmp_setup (buf_addr));
8480 #endif
8482 /* Set TARGET to zero and branch to the first-time-through label. */
8483 emit_move_insn (target, const0_rtx);
8484 emit_jump_insn (gen_jump (first_label));
8485 emit_barrier ();
8486 emit_label (lab1);
8488 /* Tell flow about the strange goings on. Putting `lab1' on
8489 `nonlocal_goto_handler_labels' indicates that function
8490 calls may traverse the arc back to this label. */
8492 current_function_has_nonlocal_label = 1;
8493 nonlocal_goto_handler_labels =
8494 gen_rtx_EXPR_LIST (VOIDmode, lab1, nonlocal_goto_handler_labels);
8496 /* Clobber the FP when we get here, so we have to make sure it's
8497 marked as used by this function. */
8498 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8500 /* Mark the static chain as clobbered here so life information
8501 doesn't get messed up for it. */
8502 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
8504 /* Now put in the code to restore the frame pointer, and argument
8505 pointer, if needed. The code below is from expand_end_bindings
8506 in stmt.c; see detailed documentation there. */
8507 #ifdef HAVE_nonlocal_goto
8508 if (! HAVE_nonlocal_goto)
8509 #endif
8510 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8512 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8513 if (fixed_regs[ARG_POINTER_REGNUM])
8515 #ifdef ELIMINABLE_REGS
8516 size_t i;
8517 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8519 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8520 if (elim_regs[i].from == ARG_POINTER_REGNUM
8521 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8522 break;
8524 if (i == sizeof elim_regs / sizeof elim_regs [0])
8525 #endif
8527 /* Now restore our arg pointer from the address at which it
8528 was saved in our stack frame.
8529 If space hasn't been allocated for it yet, make
8530 some now. */
8531 if (arg_pointer_save_area == 0)
8532 arg_pointer_save_area
8533 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8534 emit_move_insn (virtual_incoming_args_rtx,
8535 copy_to_reg (arg_pointer_save_area));
8538 #endif
8540 #ifdef HAVE_builtin_setjmp_receiver
8541 if (HAVE_builtin_setjmp_receiver)
8542 emit_insn (gen_builtin_setjmp_receiver (lab1));
8543 else
8544 #endif
8545 #ifdef HAVE_nonlocal_goto_receiver
8546 if (HAVE_nonlocal_goto_receiver)
8547 emit_insn (gen_nonlocal_goto_receiver ());
8548 else
8549 #endif
8551 ; /* Nothing */
8554 /* Set TARGET, and branch to the next-time-through label. */
8555 emit_move_insn (target, const1_rtx);
8556 emit_jump_insn (gen_jump (next_label));
8557 emit_barrier ();
8559 return target;
8562 void
8563 expand_builtin_longjmp (buf_addr, value)
8564 rtx buf_addr, value;
8566 rtx fp, lab, stack;
8567 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
8569 #ifdef POINTERS_EXTEND_UNSIGNED
8570 buf_addr = convert_memory_address (Pmode, buf_addr);
8571 #endif
8572 buf_addr = force_reg (Pmode, buf_addr);
8574 /* We used to store value in static_chain_rtx, but that fails if pointers
8575 are smaller than integers. We instead require that the user pass
8576 a second argument of 1, because that is what builtin_setjmp will
8577 return. This also makes EH slightly more efficient, since we are no
8578 longer copying around a value that we don't care about. */
8579 if (value != const1_rtx)
8580 abort ();
8582 #ifdef HAVE_builtin_longjmp
8583 if (HAVE_builtin_longjmp)
8584 emit_insn (gen_builtin_longjmp (buf_addr));
8585 else
8586 #endif
8588 fp = gen_rtx_MEM (Pmode, buf_addr);
8589 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
8590 GET_MODE_SIZE (Pmode)));
8592 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
8593 2 * GET_MODE_SIZE (Pmode)));
8595 /* Pick up FP, label, and SP from the block and jump. This code is
8596 from expand_goto in stmt.c; see there for detailed comments. */
8597 #if HAVE_nonlocal_goto
8598 if (HAVE_nonlocal_goto)
8599 /* We have to pass a value to the nonlocal_goto pattern that will
8600 get copied into the static_chain pointer, but it does not matter
8601 what that value is, because builtin_setjmp does not use it. */
8602 emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
8603 else
8604 #endif
8606 lab = copy_to_reg (lab);
8608 emit_move_insn (hard_frame_pointer_rtx, fp);
8609 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
8611 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8612 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
8613 emit_indirect_jump (lab);
8618 static rtx
8619 get_memory_rtx (exp)
8620 tree exp;
8622 rtx mem;
8623 int is_aggregate;
8625 mem = gen_rtx_MEM (BLKmode,
8626 memory_address (BLKmode,
8627 expand_expr (exp, NULL_RTX,
8628 ptr_mode, EXPAND_SUM)));
8630 RTX_UNCHANGING_P (mem) = TREE_READONLY (exp);
8632 /* Figure out the type of the object pointed to. Set MEM_IN_STRUCT_P
8633 if the value is the address of a structure or if the expression is
8634 cast to a pointer to structure type. */
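/* E.g. in `memcpy ((struct s *) dst, src, n)' the cast to
`struct s *' alone is enough to set MEM_IN_STRUCT_P on the
destination. */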
8635 is_aggregate = 0;
8637 while (TREE_CODE (exp) == NOP_EXPR)
8639 tree cast_type = TREE_TYPE (exp);
8640 if (TREE_CODE (cast_type) == POINTER_TYPE
8641 && AGGREGATE_TYPE_P (TREE_TYPE (cast_type)))
8643 is_aggregate = 1;
8644 break;
8646 exp = TREE_OPERAND (exp, 0);
8649 if (is_aggregate == 0)
8651 tree type;
8653 if (TREE_CODE (exp) == ADDR_EXPR)
8654 /* If this is the address of an object, check whether the
8655 object is an array. */
8656 type = TREE_TYPE (TREE_OPERAND (exp, 0));
8657 else
8658 type = TREE_TYPE (TREE_TYPE (exp));
8659 is_aggregate = AGGREGATE_TYPE_P (type);
8662 MEM_SET_IN_STRUCT_P (mem, is_aggregate);
8663 return mem;
8667 /* Expand an expression EXP that calls a built-in function,
8668 with result going to TARGET if that's convenient
8669 (and in mode MODE if that's convenient).
8670 SUBTARGET may be used as the target for computing one of EXP's operands.
8671 IGNORE is nonzero if the value is to be ignored. */
8673 #define CALLED_AS_BUILT_IN(NODE) \
8674 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
8676 static rtx
8677 expand_builtin (exp, target, subtarget, mode, ignore)
8678 tree exp;
8679 rtx target;
8680 rtx subtarget;
8681 enum machine_mode mode;
8682 int ignore;
8684 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8685 tree arglist = TREE_OPERAND (exp, 1);
8686 rtx op0;
8687 rtx lab1, insns;
8688 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
8689 optab builtin_optab;
8691 switch (DECL_FUNCTION_CODE (fndecl))
8693 case BUILT_IN_ABS:
8694 case BUILT_IN_LABS:
8695 case BUILT_IN_FABS:
8696 /* build_function_call changes these into ABS_EXPR. */
8697 abort ();
8699 case BUILT_IN_SIN:
8700 case BUILT_IN_COS:
8701 /* Treat these like sqrt, but only if the user asks for them. */
8702 if (! flag_fast_math)
8703 break;
8704 case BUILT_IN_FSQRT:
8705 /* If not optimizing, call the library function. */
8706 if (! optimize)
8707 break;
8709 if (arglist == 0
8710 /* Arg could be wrong type if user redeclared this fcn wrong. */
8711 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
8712 break;
8714 /* Stabilize and compute the argument. */
8715 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8716 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8718 exp = copy_node (exp);
8719 arglist = copy_node (arglist);
8720 TREE_OPERAND (exp, 1) = arglist;
8721 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8723 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8725 /* Make a suitable register to place result in. */
8726 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8728 emit_queue ();
8729 start_sequence ();
8731 switch (DECL_FUNCTION_CODE (fndecl))
8733 case BUILT_IN_SIN:
8734 builtin_optab = sin_optab; break;
8735 case BUILT_IN_COS:
8736 builtin_optab = cos_optab; break;
8737 case BUILT_IN_FSQRT:
8738 builtin_optab = sqrt_optab; break;
8739 default:
8740 abort ();
8743 /* Compute into TARGET.
8744 Set TARGET to wherever the result comes back. */
8745 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8746 builtin_optab, op0, target, 0);
8748 /* If we were unable to expand via the builtin, stop the
8749 sequence (without outputting the insns) and break, causing
8750 a call to the library function. */
8751 if (target == 0)
8753 end_sequence ();
8754 break;
8757 /* Check the results by default. But if flag_fast_math is turned on,
8758 then assume sqrt will always be called with valid arguments. */
8760 if (flag_errno_math && ! flag_fast_math)
8762 /* Don't define the builtin FP instructions
8763 if your machine is not IEEE. */
8764 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8765 abort ();
8767 lab1 = gen_label_rtx ();
8769 /* Test the result; if it is NaN, set errno=EDOM because
8770 the argument was not in the domain. */
8771 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
8772 0, 0, lab1);
8774 #ifdef TARGET_EDOM
8776 #ifdef GEN_ERRNO_RTX
8777 rtx errno_rtx = GEN_ERRNO_RTX;
8778 #else
8779 rtx errno_rtx
8780 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
8781 #endif
8783 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8785 #else
8786 /* We can't set errno=EDOM directly; let the library call do it.
8787 Pop the arguments right away in case the call gets deleted. */
8788 NO_DEFER_POP;
8789 expand_call (exp, target, 0);
8790 OK_DEFER_POP;
8791 #endif
8793 emit_label (lab1);
8796 /* Output the entire sequence. */
8797 insns = get_insns ();
8798 end_sequence ();
8799 emit_insns (insns);
8801 return target;
8803 case BUILT_IN_FMOD:
8804 break;
8806 /* __builtin_apply_args returns block of memory allocated on
8807 the stack into which is stored the arg pointer, structure
8808 value address, static chain, and all the registers that might
8809 possibly be used in performing a function call. The code is
8810 moved to the start of the function so the incoming values are
8811 saved. */
8812 case BUILT_IN_APPLY_ARGS:
8813 /* Don't do __builtin_apply_args more than once in a function.
8814 Save the result of the first call and reuse it. */
8815 if (apply_args_value != 0)
8816 return apply_args_value;
8818 /* When this function is called, it means that registers must be
8819 saved on entry to this function. So we migrate the
8820 call to the first insn of this function. */
8821 rtx temp;
8822 rtx seq;
8824 start_sequence ();
8825 temp = expand_builtin_apply_args ();
8826 seq = get_insns ();
8827 end_sequence ();
8829 apply_args_value = temp;
8831 /* Put the sequence after the NOTE that starts the function.
8832 If this is inside a SEQUENCE, make the outer-level insn
8833 chain current, so the code is placed at the start of the
8834 function. */
8835 push_topmost_sequence ();
8836 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8837 pop_topmost_sequence ();
8838 return temp;
8841 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8842 FUNCTION with a copy of the parameters described by
8843 ARGUMENTS, and ARGSIZE. It returns a block of memory
8844 allocated on the stack into which is stored all the registers
8845 that might possibly be used for returning the result of a
8846 function. ARGUMENTS is the value returned by
8847 __builtin_apply_args. ARGSIZE is the number of bytes of
8848 arguments that must be copied. ??? How should this value be
8849 computed? We'll also need a safe worst case value for varargs
8850 functions. */
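/* A typical (hypothetical) forwarding use:
     void *args = __builtin_apply_args ();
     void *res = __builtin_apply ((void (*) ()) f, args, 64);
     __builtin_return (res);
   where F and the worst-case argument size 64 are the caller's. */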
8851 case BUILT_IN_APPLY:
8852 if (arglist == 0
8853 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8854 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
8855 || TREE_CHAIN (arglist) == 0
8856 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8857 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8858 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8859 return const0_rtx;
8860 else
8862 int i;
8863 tree t;
8864 rtx ops[3];
8866 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8867 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8869 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8872 /* __builtin_return (RESULT) causes the function to return the
8873 value described by RESULT. RESULT is address of the block of
8874 memory returned by __builtin_apply. */
8875 case BUILT_IN_RETURN:
8876 if (arglist
8877 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8878 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8879 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8880 NULL_RTX, VOIDmode, 0));
8881 return const0_rtx;
8883 case BUILT_IN_SAVEREGS:
8884 /* Don't do __builtin_saveregs more than once in a function.
8885 Save the result of the first call and reuse it. */
8886 if (saveregs_value != 0)
8887 return saveregs_value;
8889 /* When this function is called, it means that registers must be
8890 saved on entry to this function. So we migrate the
8891 call to the first insn of this function. */
8892 rtx temp;
8893 rtx seq;
8895 /* Now really call the function. `expand_call' does not call
8896 expand_builtin, so there is no danger of infinite recursion here. */
8897 start_sequence ();
8899 #ifdef EXPAND_BUILTIN_SAVEREGS
8900 /* Do whatever the machine needs done in this case. */
8901 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8902 #else
8903 /* The register where the function returns its value
8904 is likely to have something else in it, such as an argument.
8905 So preserve that register around the call. */
8907 if (value_mode != VOIDmode)
8909 rtx valreg = hard_libcall_value (value_mode);
8910 rtx saved_valreg = gen_reg_rtx (value_mode);
8912 emit_move_insn (saved_valreg, valreg);
8913 temp = expand_call (exp, target, ignore);
8914 emit_move_insn (valreg, saved_valreg);
8916 else
8917 /* Generate the call, putting the value in a pseudo. */
8918 temp = expand_call (exp, target, ignore);
8919 #endif
8921 seq = get_insns ();
8922 end_sequence ();
8924 saveregs_value = temp;
8926 /* Put the sequence after the NOTE that starts the function.
8927 If this is inside a SEQUENCE, make the outer-level insn
8928 chain current, so the code is placed at the start of the
8929 function. */
8930 push_topmost_sequence ();
8931 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8932 pop_topmost_sequence ();
8933 return temp;
8936 /* __builtin_args_info (N) returns word N of the arg space info
8937 for the current function. The number and meanings of words
8938 is controlled by the definition of CUMULATIVE_ARGS. */
8939 case BUILT_IN_ARGS_INFO:
8941 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8942 int *word_ptr = (int *) &current_function_args_info;
8943 #if 0
8944 /* These are used by the #if 0'ed code below. */
8945 int i;
8946 tree type, elts, result;
8947 #endif
8949 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8950 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8951 __FILE__, __LINE__);
8953 if (arglist != 0)
8955 tree arg = TREE_VALUE (arglist);
8956 if (TREE_CODE (arg) != INTEGER_CST)
8957 error ("argument of `__builtin_args_info' must be constant");
8958 else
8960 int wordnum = TREE_INT_CST_LOW (arg);
8962 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8963 error ("argument of `__builtin_args_info' out of range");
8964 else
8965 return GEN_INT (word_ptr[wordnum]);
8968 else
8969 error ("missing argument in `__builtin_args_info'");
8971 return const0_rtx;
8973 #if 0
8974 for (i = 0; i < nwords; i++)
8975 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
8977 type = build_array_type (integer_type_node,
8978 build_index_type (build_int_2 (nwords, 0)));
8979 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8980 TREE_CONSTANT (result) = 1;
8981 TREE_STATIC (result) = 1;
8982 result = build (INDIRECT_REF, build_pointer_type (type), result);
8983 TREE_CONSTANT (result) = 1;
8984 return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
8985 #endif
8988 /* Return the address of the first anonymous stack arg. */
8989 case BUILT_IN_NEXT_ARG:
8991 tree fntype = TREE_TYPE (current_function_decl);
8993 if ((TYPE_ARG_TYPES (fntype) == 0
8994 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8995 == void_type_node))
8996 && ! current_function_varargs)
8998 error ("`va_start' used in function with fixed args");
8999 return const0_rtx;
9002 if (arglist)
9004 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9005 tree arg = TREE_VALUE (arglist);
9007 /* Strip off all nops for the sake of the comparison. This
9008 is not quite the same as STRIP_NOPS. It does more.
9009 We must also strip off INDIRECT_REF for C++ reference
9010 parameters. */
9011 while (TREE_CODE (arg) == NOP_EXPR
9012 || TREE_CODE (arg) == CONVERT_EXPR
9013 || TREE_CODE (arg) == NON_LVALUE_EXPR
9014 || TREE_CODE (arg) == INDIRECT_REF)
9015 arg = TREE_OPERAND (arg, 0);
9016 if (arg != last_parm)
9017 warning ("second parameter of `va_start' not last named argument");
9019 else if (! current_function_varargs)
9020 /* Evidently an out of date version of <stdarg.h>; can't validate
9021 va_start's second argument, but can still work as intended. */
9022 warning ("`__builtin_next_arg' called without an argument");
9025 return expand_binop (Pmode, add_optab,
9026 current_function_internal_arg_pointer,
9027 current_function_arg_offset_rtx,
9028 NULL_RTX, 0, OPTAB_LIB_WIDEN);
9030 case BUILT_IN_CLASSIFY_TYPE:
9031 if (arglist != 0)
9033 tree type = TREE_TYPE (TREE_VALUE (arglist));
9034 enum tree_code code = TREE_CODE (type);
9035 if (code == VOID_TYPE)
9036 return GEN_INT (void_type_class);
9037 if (code == INTEGER_TYPE)
9038 return GEN_INT (integer_type_class);
9039 if (code == CHAR_TYPE)
9040 return GEN_INT (char_type_class);
9041 if (code == ENUMERAL_TYPE)
9042 return GEN_INT (enumeral_type_class);
9043 if (code == BOOLEAN_TYPE)
9044 return GEN_INT (boolean_type_class);
9045 if (code == POINTER_TYPE)
9046 return GEN_INT (pointer_type_class);
9047 if (code == REFERENCE_TYPE)
9048 return GEN_INT (reference_type_class);
9049 if (code == OFFSET_TYPE)
9050 return GEN_INT (offset_type_class);
9051 if (code == REAL_TYPE)
9052 return GEN_INT (real_type_class);
9053 if (code == COMPLEX_TYPE)
9054 return GEN_INT (complex_type_class);
9055 if (code == FUNCTION_TYPE)
9056 return GEN_INT (function_type_class);
9057 if (code == METHOD_TYPE)
9058 return GEN_INT (method_type_class);
9059 if (code == RECORD_TYPE)
9060 return GEN_INT (record_type_class);
9061 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
9062 return GEN_INT (union_type_class);
9063 if (code == ARRAY_TYPE)
9065 if (TYPE_STRING_FLAG (type))
9066 return GEN_INT (string_type_class);
9067 else
9068 return GEN_INT (array_type_class);
9070 if (code == SET_TYPE)
9071 return GEN_INT (set_type_class);
9072 if (code == FILE_TYPE)
9073 return GEN_INT (file_type_class);
9074 if (code == LANG_TYPE)
9075 return GEN_INT (lang_type_class);
9077 return GEN_INT (no_type_class);
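/* A minimal usage sketch of the classification, using the enumerators
   from typeclass.h:

       int c1 = __builtin_classify_type (42);     -- integer_type_class
       int c2 = __builtin_classify_type (3.14);   -- real_type_class

   Headers in the style of glibc's <tgmath.h> have historically
   dispatched on these class codes.  */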
9079 case BUILT_IN_CONSTANT_P:
9080 if (arglist == 0)
9081 return const0_rtx;
9082 else
9084 tree arg = TREE_VALUE (arglist);
9085 rtx tmp;
9087 /* We return 1 for a numeric type that's known to be a constant
9088 value at compile-time or for an aggregate type that's a
9089 literal constant. */
9090 STRIP_NOPS (arg);
9092 /* If we know this is a constant, return the constant 1. */
9093 if (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
9094 || (TREE_CODE (arg) == CONSTRUCTOR
9095 && TREE_CONSTANT (arg))
9096 || (TREE_CODE (arg) == ADDR_EXPR
9097 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST))
9098 return const1_rtx;
9100 /* If we aren't going to be running CSE or this expression
9101 has side effects, show we don't know it to be a constant.
9102 Likewise if it's a pointer or aggregate type, since in those
9103 cases we only want literals; those are only optimized
9104 when generating RTL, not later. */
9105 if (TREE_SIDE_EFFECTS (arg) || cse_not_expected
9106 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
9107 || POINTER_TYPE_P (TREE_TYPE (arg)))
9108 return const0_rtx;
9110 /* Otherwise, emit (constant_p_rtx (ARG)) and let CSE get a
9111 chance to see if it can deduce whether ARG is constant. */
9113 tmp = expand_expr (arg, NULL_RTX, VOIDmode, 0);
9114 tmp = gen_rtx_CONSTANT_P_RTX (value_mode, tmp);
9115 return tmp;
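/* A sketch of the three outcomes above, at the source level:

       __builtin_constant_p (7 + 1)    -- folds to 1 right here
       __builtin_constant_p (f ())     -- side effects, so folds to 0
       __builtin_constant_p (x * 2)    -- becomes (constant_p_rtx ...) and
                                          is resolved later, by CSE

   Only the last form depends on the RTL optimizers running.  */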
9118 case BUILT_IN_FRAME_ADDRESS:
9119 /* The argument must be a nonnegative integer constant.
9120 It counts the number of frames to scan up the stack.
9121 The value is the address of that frame. */
9122 case BUILT_IN_RETURN_ADDRESS:
9123 /* The argument must be a nonnegative integer constant.
9124 It counts the number of frames to scan up the stack.
9125 The value is the return address saved in that frame. */
9126 if (arglist == 0)
9127 /* Warning about missing arg was already issued. */
9128 return const0_rtx;
9129 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
9130 || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
9132 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9133 error ("invalid arg to `__builtin_frame_address'");
9134 else
9135 error ("invalid arg to `__builtin_return_address'");
9136 return const0_rtx;
9138 else
9140 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
9141 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
9142 hard_frame_pointer_rtx);
9144 /* Some ports cannot access arbitrary stack frames. */
9145 if (tem == NULL)
9147 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9148 warning ("unsupported arg to `__builtin_frame_address'");
9149 else
9150 warning ("unsupported arg to `__builtin_return_address'");
9151 return const0_rtx;
9154 /* For __builtin_frame_address, return what we've got. */
9155 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9156 return tem;
9158 if (GET_CODE (tem) != REG
9159 && ! CONSTANT_P (tem))
9160 tem = copy_to_mode_reg (Pmode, tem);
9161 return tem;
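/* A minimal usage sketch of the two builtins handled above:

       void *ra = __builtin_return_address (0);  -- this function's
                                                    return address
       void *fp = __builtin_frame_address (1);   -- the caller's frame

   Nonzero counts require the port to walk stack frames, which is why
   a NULL result above only draws a warning and yields 0.  */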
9164 /* Returns the address of the area where the structure is returned.
9165 0 otherwise. */
9166 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
9167 if (arglist != 0
9168 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
9169 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
9170 return const0_rtx;
9171 else
9172 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
9174 case BUILT_IN_ALLOCA:
9175 if (arglist == 0
9176 /* Arg could be non-integer if user redeclared this fcn wrong. */
9177 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
9178 break;
9180 /* Compute the argument. */
9181 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
9183 /* Allocate the desired space. */
9184 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
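/* A minimal usage sketch: this inline expansion corresponds to

       char *buf = __builtin_alloca (n);

   a dynamic stack allocation released on function return; on most
   targets allocate_dynamic_stack_space emits no library call at all.  */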
9186 case BUILT_IN_FFS:
9187 /* If not optimizing, call the library function. */
9188 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9189 break;
9191 if (arglist == 0
9192 /* Arg could be non-integer if user redeclared this fcn wrong. */
9193 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
9194 break;
9196 /* Compute the argument. */
9197 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
9198 /* Compute ffs, into TARGET if possible.
9199 Set TARGET to wherever the result comes back. */
9200 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
9201 ffs_optab, op0, target, 1);
9202 if (target == 0)
9203 abort ();
9204 return target;
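/* A worked example of the semantics expanded here: ffs returns one
   plus the index of the least significant set bit, or 0 for 0.

       ffs (0) == 0
       ffs (8) == 4   -- 8 is binary 1000; lowest set bit is bit 3

   With an ffs pattern in the machine description, expand_unop emits
   it directly rather than calling the library.  */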
9206 case BUILT_IN_STRLEN:
9207 /* If not optimizing, call the library function. */
9208 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9209 break;
9211 if (arglist == 0
9212 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9213 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9214 break;
9215 else
9217 tree src = TREE_VALUE (arglist);
9218 tree len = c_strlen (src);
9220 int align
9221 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9223 rtx result, src_rtx, char_rtx;
9224 enum machine_mode insn_mode = value_mode, char_mode;
9225 enum insn_code icode;
9227 /* If the length is known, just return it. */
9228 if (len != 0)
9229 return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
9231 /* If SRC is not a pointer type, don't do this operation inline. */
9232 if (align == 0)
9233 break;
9235 /* Call a function if we can't compute strlen in the right mode. */
9237 while (insn_mode != VOIDmode)
9239 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
9240 if (icode != CODE_FOR_nothing)
9241 break;
9243 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
9245 if (insn_mode == VOIDmode)
9246 break;
9248 /* Make a place to write the result of the instruction. */
9249 result = target;
9250 if (! (result != 0
9251 && GET_CODE (result) == REG
9252 && GET_MODE (result) == insn_mode
9253 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9254 result = gen_reg_rtx (insn_mode);
9256 /* Make sure the operands are acceptable to the predicates. */
9258 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
9259 result = gen_reg_rtx (insn_mode);
9260 src_rtx = memory_address (BLKmode,
9261 expand_expr (src, NULL_RTX, ptr_mode,
9262 EXPAND_NORMAL));
9264 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
9265 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
9267 /* Check the string is readable and has an end. */
9268 if (current_function_check_memory_usage)
9269 emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
9270 src_rtx, Pmode,
9271 GEN_INT (MEMORY_USE_RO),
9272 TYPE_MODE (integer_type_node));
9274 char_rtx = const0_rtx;
9275 char_mode = insn_operand_mode[(int)icode][2];
9276 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
9277 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
9279 emit_insn (GEN_FCN (icode) (result,
9280 gen_rtx_MEM (BLKmode, src_rtx),
9281 char_rtx, GEN_INT (align)));
9283 /* Return the value in the proper mode for this function. */
9284 if (GET_MODE (result) == value_mode)
9285 return result;
9286 else if (target != 0)
9288 convert_move (target, result, 0);
9289 return target;
9291 else
9292 return convert_to_mode (value_mode, result, 0);
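/* A sketch of the two inline paths above:

       size_t n = strlen ("abcd");   -- c_strlen folds this to 4

   For a non-constant string, a strlen_optab insn is emitted only when
   the target provides one and the pointer's alignment is known.  */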
9295 case BUILT_IN_STRCPY:
9296 /* If not optimizing, call the library function. */
9297 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9298 break;
9300 if (arglist == 0
9301 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9302 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9303 || TREE_CHAIN (arglist) == 0
9304 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9305 break;
9306 else
9308 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
9310 if (len == 0)
9311 break;
9313 len = size_binop (PLUS_EXPR, len, integer_one_node);
9315 chainon (arglist, build_tree_list (NULL_TREE, len));
9318 /* Falls through into BUILT_IN_MEMCPY. */
9319 case BUILT_IN_MEMCPY:
9320 /* If not optimizing, call the library function. */
9321 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9322 break;
9324 if (arglist == 0
9325 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9326 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9327 || TREE_CHAIN (arglist) == 0
9328 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9329 != POINTER_TYPE)
9330 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9331 || (TREE_CODE (TREE_TYPE (TREE_VALUE
9332 (TREE_CHAIN (TREE_CHAIN (arglist)))))
9333 != INTEGER_TYPE))
9334 break;
9335 else
9337 tree dest = TREE_VALUE (arglist);
9338 tree src = TREE_VALUE (TREE_CHAIN (arglist));
9339 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9341 int src_align
9342 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9343 int dest_align
9344 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9345 rtx dest_mem, src_mem, dest_addr, len_rtx;
9347 /* If either SRC or DEST is not a pointer type, don't do
9348 this operation in-line. */
9349 if (src_align == 0 || dest_align == 0)
9351 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
9352 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9353 break;
9356 dest_mem = get_memory_rtx (dest);
9357 src_mem = get_memory_rtx (src);
9358 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9360 /* Just copy the rights of SRC to the rights of DEST. */
9361 if (current_function_check_memory_usage)
9362 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
9363 XEXP (dest_mem, 0), Pmode,
9364 XEXP (src_mem, 0), Pmode,
9365 len_rtx, TYPE_MODE (sizetype));
9367 /* Copy word part most expediently. */
9368 dest_addr
9369 = emit_block_move (dest_mem, src_mem, len_rtx,
9370 MIN (src_align, dest_align));
9372 if (dest_addr == 0)
9373 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
9375 return dest_addr;
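/* A sketch of the strcpy-to-memcpy rewrite above: assuming the source
   is a string literal,

       strcpy (dst, "hi");

   gains an explicit length argument of strlen ("hi") + 1 == 3 and is
   then expanded exactly like memcpy (dst, "hi", 3) via emit_block_move,
   using the smaller of the two known pointer alignments.  */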
9378 case BUILT_IN_MEMSET:
9379 /* If not optimizing, call the library function. */
9380 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9381 break;
9383 if (arglist == 0
9384 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9385 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9386 || TREE_CHAIN (arglist) == 0
9387 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9388 != INTEGER_TYPE)
9389 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9390 || (INTEGER_TYPE
9391 != (TREE_CODE (TREE_TYPE
9392 (TREE_VALUE
9393 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
9394 break;
9395 else
9397 tree dest = TREE_VALUE (arglist);
9398 tree val = TREE_VALUE (TREE_CHAIN (arglist));
9399 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9401 int dest_align
9402 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9403 rtx dest_mem, dest_addr, len_rtx;
9405 /* If DEST is not a pointer type, don't do this
9406 operation in-line. */
9407 if (dest_align == 0)
9408 break;
9410 /* If the arguments have side-effects, then we can only evaluate
9411 them at most once. The following code evaluates them twice if
9412 they are not constants because we break out to expand_call
9413 in that case. They can't be constants if they have side-effects
9414 so we can check for that first. Alternatively, we could call
9415 save_expr to make multiple evaluation safe. */
9416 if (TREE_SIDE_EFFECTS (val) || TREE_SIDE_EFFECTS (len))
9417 break;
9419 /* If VAL is not 0, don't do this operation in-line. */
9420 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
9421 break;
9423 /* If LEN does not expand to a constant, don't do this
9424 operation in-line. */
9425 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9426 if (GET_CODE (len_rtx) != CONST_INT)
9427 break;
9429 dest_mem = get_memory_rtx (dest);
9431 /* Just check DST is writable and mark it as readable. */
9432 if (current_function_check_memory_usage)
9433 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
9434 XEXP (dest_mem, 0), Pmode,
9435 len_rtx, TYPE_MODE (sizetype),
9436 GEN_INT (MEMORY_USE_WO),
9437 TYPE_MODE (integer_type_node));
9440 dest_addr = clear_storage (dest_mem, len_rtx, dest_align);
9442 if (dest_addr == 0)
9443 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
9445 return dest_addr;
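/* A sketch of the restricted form this case inlines:

       memset (p, 0, 64);   -- zero fill value, constant length

   Any nonzero fill value, variable length, or side-effecting argument
   breaks out of the switch and becomes an ordinary library call.  */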
9448 /* These comparison functions need an instruction that returns an actual
9449 index. An ordinary compare that just sets the condition codes
9450 is not enough. */
9451 #ifdef HAVE_cmpstrsi
9452 case BUILT_IN_STRCMP:
9453 /* If not optimizing, call the library function. */
9454 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9455 break;
9457 /* If we need to check memory accesses, call the library function. */
9458 if (current_function_check_memory_usage)
9459 break;
9461 if (arglist == 0
9462 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9463 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9464 || TREE_CHAIN (arglist) == 0
9465 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9466 break;
9467 else if (!HAVE_cmpstrsi)
9468 break;
9470 tree arg1 = TREE_VALUE (arglist);
9471 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9472 tree len, len2;
9474 len = c_strlen (arg1);
9475 if (len)
9476 len = size_binop (PLUS_EXPR, integer_one_node, len);
9477 len2 = c_strlen (arg2);
9478 if (len2)
9479 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
9481 /* If we don't have a constant length for the first, use the length
9482 of the second, if we know it. We don't require a constant for
9483 this case; some cost analysis could be done if both are available
9484 but neither is constant. For now, assume they're equally cheap.
9486 If both strings have constant lengths, use the smaller. This
9487 could arise if optimization results in strcpy being called with
9488 two fixed strings, or if the code was machine-generated. We should
9489 add some code to the `memcmp' handler below to deal with such
9490 situations, someday. */
9491 if (!len || TREE_CODE (len) != INTEGER_CST)
9493 if (len2)
9494 len = len2;
9495 else if (len == 0)
9496 break;
9498 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
9500 if (tree_int_cst_lt (len2, len))
9501 len = len2;
9504 chainon (arglist, build_tree_list (NULL_TREE, len));
9507 /* Falls through into BUILT_IN_MEMCMP. */
9508 case BUILT_IN_MEMCMP:
9509 /* If not optimizing, call the library function. */
9510 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9511 break;
9513 /* If we need to check memory accesses, call the library function. */
9514 if (current_function_check_memory_usage)
9515 break;
9517 if (arglist == 0
9518 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9519 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9520 || TREE_CHAIN (arglist) == 0
9521 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
9522 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9523 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
9524 break;
9525 else if (!HAVE_cmpstrsi)
9526 break;
9528 tree arg1 = TREE_VALUE (arglist);
9529 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9530 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9531 rtx result;
9533 int arg1_align
9534 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9535 int arg2_align
9536 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9537 enum machine_mode insn_mode
9538 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
9540 /* If we don't have POINTER_TYPE, call the function. */
9541 if (arg1_align == 0 || arg2_align == 0)
9543 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
9544 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9545 break;
9548 /* Make a place to write the result of the instruction. */
9549 result = target;
9550 if (! (result != 0
9551 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
9552 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9553 result = gen_reg_rtx (insn_mode);
9555 emit_insn (gen_cmpstrsi (result, get_memory_rtx (arg1),
9556 get_memory_rtx (arg2),
9557 expand_expr (len, NULL_RTX, VOIDmode, 0),
9558 GEN_INT (MIN (arg1_align, arg2_align))));
9560 /* Return the value in the proper mode for this function. */
9561 mode = TYPE_MODE (TREE_TYPE (exp));
9562 if (GET_MODE (result) == mode)
9563 return result;
9564 else if (target != 0)
9566 convert_move (target, result, 0);
9567 return target;
9569 else
9570 return convert_to_mode (mode, result, 0);
9572 #else
9573 case BUILT_IN_STRCMP:
9574 case BUILT_IN_MEMCMP:
9575 break;
9576 #endif
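/* A sketch of the cmpstrsi path above: given a cmpstrsi pattern,

       strcmp (s, "abc")

   gets a length argument derived from the known strlen bounds (each
   plus 1 for the terminator, taking the smaller when both are known)
   and then shares the memcmp expansion; without HAVE_cmpstrsi both
   builtins simply fall back to their library functions.  */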
9578 case BUILT_IN_SETJMP:
9579 if (arglist == 0
9580 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9581 break;
9582 else
9584 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9585 VOIDmode, 0);
9586 rtx lab = gen_label_rtx ();
9587 rtx ret = expand_builtin_setjmp (buf_addr, target, lab, lab);
9588 emit_label (lab);
9589 return ret;
9592 /* __builtin_longjmp is passed a pointer to an array of five words.
9593 It's similar to the C library longjmp function but works with
9594 __builtin_setjmp above. */
9595 case BUILT_IN_LONGJMP:
9596 if (arglist == 0 || TREE_CHAIN (arglist) == 0
9597 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9598 break;
9599 else
9601 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9602 VOIDmode, 0);
9603 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
9604 NULL_RTX, VOIDmode, 0);
9606 if (value != const1_rtx)
9608 error ("__builtin_longjmp second argument must be 1");
9609 return const0_rtx;
9612 expand_builtin_longjmp (buf_addr, value);
9613 return const0_rtx;
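/* A minimal usage sketch of the pairing: these builtins implement a
   lightweight nonlocal transfer over a five-word buffer:

       void *buf[5];
       if (__builtin_setjmp (buf) == 0)
         ...                          -- normal path
       else
         ...                          -- reached via __builtin_longjmp (buf, 1)

   The second argument of __builtin_longjmp must be the literal 1, as
   the check above enforces.  */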
9616 case BUILT_IN_TRAP:
9617 #ifdef HAVE_trap
9618 if (HAVE_trap)
9619 emit_insn (gen_trap ());
9620 else
9621 #endif
9622 error ("__builtin_trap not supported by this target");
9623 emit_barrier ();
9624 return const0_rtx;
9626 /* Various hooks for the DWARF 2 __throw routine. */
9627 case BUILT_IN_UNWIND_INIT:
9628 expand_builtin_unwind_init ();
9629 return const0_rtx;
9630 case BUILT_IN_DWARF_CFA:
9631 return virtual_cfa_rtx;
9632 #ifdef DWARF2_UNWIND_INFO
9633 case BUILT_IN_DWARF_FP_REGNUM:
9634 return expand_builtin_dwarf_fp_regnum ();
9635 case BUILT_IN_DWARF_REG_SIZE:
9636 return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
9637 #endif
9638 case BUILT_IN_FROB_RETURN_ADDR:
9639 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
9640 case BUILT_IN_EXTRACT_RETURN_ADDR:
9641 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
9642 case BUILT_IN_EH_RETURN:
9643 expand_builtin_eh_return (TREE_VALUE (arglist),
9644 TREE_VALUE (TREE_CHAIN (arglist)),
9645 TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))));
9646 return const0_rtx;
9648 default: /* just do library call, if unknown builtin */
9649 error ("built-in function `%s' not currently supported",
9650 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9653 /* The switch statement above can drop through to cause the function
9654 to be called normally. */
9656 return expand_call (exp, target, ignore);
9659 /* Built-in functions to perform an untyped call and return. */
9661 /* For each register that may be used for calling a function, this
9662 gives a mode used to copy the register's value. VOIDmode indicates
9663 the register is not used for calling a function. If the machine
9664 has register windows, this gives only the outbound registers.
9665 INCOMING_REGNO gives the corresponding inbound register. */
9666 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
9668 /* For each register that may be used for returning values, this gives
9669 a mode used to copy the register's value. VOIDmode indicates the
9670 register is not used for returning values. If the machine has
9671 register windows, this gives only the outbound registers.
9672 INCOMING_REGNO gives the corresponding inbound register. */
9673 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
9675 /* For each register that may be used for calling a function, this
9676 gives the offset of that register into the block returned by
9677 __builtin_apply_args. 0 indicates that the register is not
9678 used for calling a function. */
9679 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9681 /* Return the offset of register REGNO into the block returned by
9682 __builtin_apply_args. This is not declared static, since it is
9683 needed in objc-act.c. */
9685 int
9686 apply_args_register_offset (regno)
9687 int regno;
9689 apply_args_size ();
9691 /* Arguments are always put in outgoing registers (in the argument
9692 block) when that makes sense. */
9693 #ifdef OUTGOING_REGNO
9694 regno = OUTGOING_REGNO(regno);
9695 #endif
9696 return apply_args_reg_offset[regno];
9699 /* Return the size required for the block returned by __builtin_apply_args,
9700 and initialize apply_args_mode. */
9702 static int
9703 apply_args_size ()
9705 static int size = -1;
9706 int align, regno;
9707 enum machine_mode mode;
9709 /* The values computed by this function never change. */
9710 if (size < 0)
9712 /* The first value is the incoming arg-pointer. */
9713 size = GET_MODE_SIZE (Pmode);
9715 /* The second value is the structure value address unless this is
9716 passed as an "invisible" first argument. */
9717 if (struct_value_rtx)
9718 size += GET_MODE_SIZE (Pmode);
9720 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9721 if (FUNCTION_ARG_REGNO_P (regno))
9723 /* Search for the proper mode for copying this register's
9724 value. I'm not sure this is right, but it works so far. */
9725 enum machine_mode best_mode = VOIDmode;
9727 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9728 mode != VOIDmode;
9729 mode = GET_MODE_WIDER_MODE (mode))
9730 if (HARD_REGNO_MODE_OK (regno, mode)
9731 && HARD_REGNO_NREGS (regno, mode) == 1)
9732 best_mode = mode;
9734 if (best_mode == VOIDmode)
9735 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9736 mode != VOIDmode;
9737 mode = GET_MODE_WIDER_MODE (mode))
9738 if (HARD_REGNO_MODE_OK (regno, mode)
9739 && (mov_optab->handlers[(int) mode].insn_code
9740 != CODE_FOR_nothing))
9741 best_mode = mode;
9743 mode = best_mode;
9744 if (mode == VOIDmode)
9745 abort ();
9747 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9748 if (size % align != 0)
9749 size = CEIL (size, align) * align;
9750 apply_args_reg_offset[regno] = size;
9751 size += GET_MODE_SIZE (mode);
9752 apply_args_mode[regno] = mode;
9754 else
9756 apply_args_mode[regno] = VOIDmode;
9757 apply_args_reg_offset[regno] = 0;
9760 return size;
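/* A worked example of the alignment rounding above: with size == 6 and
   align == 4,

       CEIL (6, 4) * 4  ==  ((6 + 4 - 1) / 4) * 4  ==  8

   so each register's slot in the block starts at an offset aligned for
   its mode before GET_MODE_SIZE is added.  */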
9763 /* Return the size required for the block returned by __builtin_apply,
9764 and initialize apply_result_mode. */
9766 static int
9767 apply_result_size ()
9769 static int size = -1;
9770 int align, regno;
9771 enum machine_mode mode;
9773 /* The values computed by this function never change. */
9774 if (size < 0)
9776 size = 0;
9778 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9779 if (FUNCTION_VALUE_REGNO_P (regno))
9781 /* Search for the proper mode for copying this register's
9782 value. I'm not sure this is right, but it works so far. */
9783 enum machine_mode best_mode = VOIDmode;
9785 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9786 mode != TImode;
9787 mode = GET_MODE_WIDER_MODE (mode))
9788 if (HARD_REGNO_MODE_OK (regno, mode))
9789 best_mode = mode;
9791 if (best_mode == VOIDmode)
9792 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9793 mode != VOIDmode;
9794 mode = GET_MODE_WIDER_MODE (mode))
9795 if (HARD_REGNO_MODE_OK (regno, mode)
9796 && (mov_optab->handlers[(int) mode].insn_code
9797 != CODE_FOR_nothing))
9798 best_mode = mode;
9800 mode = best_mode;
9801 if (mode == VOIDmode)
9802 abort ();
9804 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9805 if (size % align != 0)
9806 size = CEIL (size, align) * align;
9807 size += GET_MODE_SIZE (mode);
9808 apply_result_mode[regno] = mode;
9810 else
9811 apply_result_mode[regno] = VOIDmode;
9813 /* Allow targets that use untyped_call and untyped_return to override
9814 the size so that machine-specific information can be stored here. */
9815 #ifdef APPLY_RESULT_SIZE
9816 size = APPLY_RESULT_SIZE;
9817 #endif
9819 return size;
9822 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9823 /* Create a vector describing the result block RESULT. If SAVEP is true,
9824 the result block is used to save the values; otherwise it is used to
9825 restore the values. */
9827 static rtx
9828 result_vector (savep, result)
9829 int savep;
9830 rtx result;
9832 int regno, size, align, nelts;
9833 enum machine_mode mode;
9834 rtx reg, mem;
9835 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9837 size = nelts = 0;
9838 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9839 if ((mode = apply_result_mode[regno]) != VOIDmode)
9841 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9842 if (size % align != 0)
9843 size = CEIL (size, align) * align;
9844 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
9845 mem = change_address (result, mode,
9846 plus_constant (XEXP (result, 0), size));
9847 savevec[nelts++] = (savep
9848 ? gen_rtx_SET (VOIDmode, mem, reg)
9849 : gen_rtx_SET (VOIDmode, reg, mem));
9850 size += GET_MODE_SIZE (mode);
9852 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
9854 #endif /* HAVE_untyped_call or HAVE_untyped_return */
9856 /* Save the state required to perform an untyped call with the same
9857 arguments as were passed to the current function. */
9859 static rtx
9860 expand_builtin_apply_args ()
9862 rtx registers;
9863 int size, align, regno;
9864 enum machine_mode mode;
9866 /* Create a block where the arg-pointer, structure value address,
9867 and argument registers can be saved. */
9868 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9870 /* Walk past the arg-pointer and structure value address. */
9871 size = GET_MODE_SIZE (Pmode);
9872 if (struct_value_rtx)
9873 size += GET_MODE_SIZE (Pmode);
9875 /* Save each register used in calling a function to the block. */
9876 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9877 if ((mode = apply_args_mode[regno]) != VOIDmode)
9879 rtx tem;
9881 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9882 if (size % align != 0)
9883 size = CEIL (size, align) * align;
9885 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
9887 #ifdef STACK_REGS
9888 /* For reg-stack.c's stack register housekeeping.
9889 Compare with a similar piece of code in function.c. */
9891 emit_insn (gen_rtx_USE (mode, tem));
9892 #endif
9894 emit_move_insn (change_address (registers, mode,
9895 plus_constant (XEXP (registers, 0),
9896 size)),
9897 tem);
9898 size += GET_MODE_SIZE (mode);
9901 /* Save the arg pointer to the block. */
9902 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9903 copy_to_reg (virtual_incoming_args_rtx));
9904 size = GET_MODE_SIZE (Pmode);
9906 /* Save the structure value address unless this is passed as an
9907 "invisible" first argument. */
9908 if (struct_value_incoming_rtx)
9910 emit_move_insn (change_address (registers, Pmode,
9911 plus_constant (XEXP (registers, 0),
9912 size)),
9913 copy_to_reg (struct_value_incoming_rtx));
9914 size += GET_MODE_SIZE (Pmode);
9917 /* Return the address of the block. */
9918 return copy_addr_to_reg (XEXP (registers, 0));
9921 /* Perform an untyped call and save the state required to perform an
9922 untyped return of whatever value was returned by the given function. */
9924 static rtx
9925 expand_builtin_apply (function, arguments, argsize)
9926 rtx function, arguments, argsize;
9928 int size, align, regno;
9929 enum machine_mode mode;
9930 rtx incoming_args, result, reg, dest, call_insn;
9931 rtx old_stack_level = 0;
9932 rtx call_fusage = 0;
9934 /* Create a block where the return registers can be saved. */
9935 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9937 /* ??? The argsize value should be adjusted here. */
9939 /* Fetch the arg pointer from the ARGUMENTS block. */
9940 incoming_args = gen_reg_rtx (Pmode);
9941 emit_move_insn (incoming_args,
9942 gen_rtx_MEM (Pmode, arguments));
9943 #ifndef STACK_GROWS_DOWNWARD
9944 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9945 incoming_args, 0, OPTAB_LIB_WIDEN);
9946 #endif
9948 /* Perform postincrements before actually calling the function. */
9949 emit_queue ();
9951 /* Push a new argument block and copy the arguments. */
9952 do_pending_stack_adjust ();
9954 /* Save the stack using save_stack_nonlocal if available. */
9955 #ifdef HAVE_save_stack_nonlocal
9956 if (HAVE_save_stack_nonlocal)
9957 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
9958 else
9959 #endif
9960 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9962 /* Push a block of memory onto the stack to store the memory arguments.
9963 Save the address in a register, and copy the memory arguments. ??? I
9964 haven't figured out how the calling convention macros affect this,
9965 but it's likely that the source and/or destination addresses in
9966 the block copy will need updating in machine specific ways. */
9967 dest = allocate_dynamic_stack_space (argsize, 0, 0);
9968 emit_block_move (gen_rtx_MEM (BLKmode, dest),
9969 gen_rtx_MEM (BLKmode, incoming_args),
9970 argsize,
9971 PARM_BOUNDARY / BITS_PER_UNIT);
9973 /* Refer to the argument block. */
9974 apply_args_size ();
9975 arguments = gen_rtx_MEM (BLKmode, arguments);
9977 /* Walk past the arg-pointer and structure value address. */
9978 size = GET_MODE_SIZE (Pmode);
9979 if (struct_value_rtx)
9980 size += GET_MODE_SIZE (Pmode);
9982 /* Restore each of the registers previously saved. Make USE insns
9983 for each of these registers for use in making the call. */
9984 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9985 if ((mode = apply_args_mode[regno]) != VOIDmode)
9987 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9988 if (size % align != 0)
9989 size = CEIL (size, align) * align;
9990 reg = gen_rtx_REG (mode, regno);
9991 emit_move_insn (reg,
9992 change_address (arguments, mode,
9993 plus_constant (XEXP (arguments, 0),
9994 size)));
9996 use_reg (&call_fusage, reg);
9997 size += GET_MODE_SIZE (mode);
10000 /* Restore the structure value address unless this is passed as an
10001 "invisible" first argument. */
10002 size = GET_MODE_SIZE (Pmode);
10003 if (struct_value_rtx)
10005 rtx value = gen_reg_rtx (Pmode);
10006 emit_move_insn (value,
10007 change_address (arguments, Pmode,
10008 plus_constant (XEXP (arguments, 0),
10009 size)));
10010 emit_move_insn (struct_value_rtx, value);
10011 if (GET_CODE (struct_value_rtx) == REG)
10012 use_reg (&call_fusage, struct_value_rtx);
10013 size += GET_MODE_SIZE (Pmode);
10016 /* All arguments and registers used for the call are set up by now! */
10017 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
10019 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
10020 and we don't want to load it into a register as an optimization,
10021 because prepare_call_address already did it if it should be done. */
10022 if (GET_CODE (function) != SYMBOL_REF)
10023 function = memory_address (FUNCTION_MODE, function);
10025 /* Generate the actual call instruction and save the return value. */
10026 #ifdef HAVE_untyped_call
10027 if (HAVE_untyped_call)
10028 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
10029 result, result_vector (1, result)));
10030 else
10031 #endif
10032 #ifdef HAVE_call_value
10033 if (HAVE_call_value)
10035 rtx valreg = 0;
10037 /* Locate the unique return register. It is not possible to
10038 express a call that sets more than one return register using
10039 call_value; use untyped_call for that. In fact, untyped_call
10040 only needs to save the return registers in the given block. */
10041 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10042 if ((mode = apply_result_mode[regno]) != VOIDmode)
10044 if (valreg)
10045 abort (); /* HAVE_untyped_call required. */
10046 valreg = gen_rtx_REG (mode, regno);
10049 emit_call_insn (gen_call_value (valreg,
10050 gen_rtx_MEM (FUNCTION_MODE, function),
10051 const0_rtx, NULL_RTX, const0_rtx));
10053 emit_move_insn (change_address (result, GET_MODE (valreg),
10054 XEXP (result, 0)),
10055 valreg);
10057 else
10058 #endif
10059 abort ();
10061 /* Find the CALL insn we just emitted. */
10062 for (call_insn = get_last_insn ();
10063 call_insn && GET_CODE (call_insn) != CALL_INSN;
10064 call_insn = PREV_INSN (call_insn))
10067 if (! call_insn)
10068 abort ();
10070 /* Put the register usage information on the CALL. If there is already
10071 some usage information, put ours at the end. */
10072 if (CALL_INSN_FUNCTION_USAGE (call_insn))
10074 rtx link;
10076 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
10077 link = XEXP (link, 1))
10080 XEXP (link, 1) = call_fusage;
10082 else
10083 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
10085 /* Restore the stack. */
10086 #ifdef HAVE_save_stack_nonlocal
10087 if (HAVE_save_stack_nonlocal)
10088 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
10089 else
10090 #endif
10091 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
10093 /* Return the address of the result block. */
10094 return copy_addr_to_reg (XEXP (result, 0));
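/* A minimal usage sketch of the three untyped-call builtins together;
   the argument-block size (64 here) is a caller-supplied bound and
   target_fn is a placeholder name:

       void *args = __builtin_apply_args ();
       void *res = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (res);

   This forwards the current function's arguments to target_fn and
   returns whatever it returned, without knowing either signature.  */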
10097 /* Perform an untyped return. */
10099 static void
10100 expand_builtin_return (result)
10101 rtx result;
10103 int size, align, regno;
10104 enum machine_mode mode;
10105 rtx reg;
10106 rtx call_fusage = 0;
10108 apply_result_size ();
10109 result = gen_rtx_MEM (BLKmode, result);
10111 #ifdef HAVE_untyped_return
10112 if (HAVE_untyped_return)
10114 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
10115 emit_barrier ();
10116 return;
10118 #endif
10120 /* Restore the return value and note that each value is used. */
10121 size = 0;
10122 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10123 if ((mode = apply_result_mode[regno]) != VOIDmode)
10125 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
10126 if (size % align != 0)
10127 size = CEIL (size, align) * align;
10128 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
10129 emit_move_insn (reg,
10130 change_address (result, mode,
10131 plus_constant (XEXP (result, 0),
10132 size)));
10134 push_to_sequence (call_fusage);
10135 emit_insn (gen_rtx_USE (VOIDmode, reg));
10136 call_fusage = get_insns ();
10137 end_sequence ();
10138 size += GET_MODE_SIZE (mode);
10141 /* Put the USE insns before the return. */
10142 emit_insns (call_fusage);
10144 /* Return whatever value was restored by jumping directly to the end
10145 of the function. */
10146 expand_null_return ();
10149 /* Expand code for a post- or pre- increment or decrement
10150 and return the RTX for the result.
10151 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
10153 static rtx
10154 expand_increment (exp, post, ignore)
10155 register tree exp;
10156 int post, ignore;
10158 register rtx op0, op1;
10159 register rtx temp, value;
10160 register tree incremented = TREE_OPERAND (exp, 0);
10161 optab this_optab = add_optab;
10162 int icode;
10163 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
10164 int op0_is_copy = 0;
10165 int single_insn = 0;
10166 /* 1 means we can't store into OP0 directly,
10167 because it is a subreg narrower than a word,
10168 and we don't dare clobber the rest of the word. */
10169 int bad_subreg = 0;
10171 /* Stabilize any component ref that might need to be
10172 evaluated more than once below. */
10173 if (!post
10174 || TREE_CODE (incremented) == BIT_FIELD_REF
10175 || (TREE_CODE (incremented) == COMPONENT_REF
10176 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
10177 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
10178 incremented = stabilize_reference (incremented);
10179 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
10180 ones into save exprs so that they don't accidentally get evaluated
10181 more than once by the code below. */
10182 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
10183 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
10184 incremented = save_expr (incremented);
10186 /* Compute the operands as RTX.
10187 Note whether OP0 is the actual lvalue or a copy of it:
10188 I believe it is a copy iff it is a register or subreg
10189 and insns were generated in computing it. */
10191 temp = get_last_insn ();
10192 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
10194 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
10195 in place but instead must do sign- or zero-extension during assignment,
10196 so we copy it into a new register and let the code below use it as
10197 a copy.
10199 Note that we can safely modify this SUBREG since it is known not to be
10200 shared (it was made by the expand_expr call above). */
10202 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
10204 if (post)
10205 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
10206 else
10207 bad_subreg = 1;
10209 else if (GET_CODE (op0) == SUBREG
10210 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
10212 /* We cannot increment this SUBREG in place. If we are
10213 post-incrementing, get a copy of the old value. Otherwise,
10214 just mark that we cannot increment in place. */
10215 if (post)
10216 op0 = copy_to_reg (op0);
10217 else
10218 bad_subreg = 1;
10221 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
10222 && temp != get_last_insn ());
10223 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
10224 EXPAND_MEMORY_USE_BAD);
10226 /* Decide whether incrementing or decrementing. */
10227 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
10228 || TREE_CODE (exp) == PREDECREMENT_EXPR)
10229 this_optab = sub_optab;
10231 /* Convert decrement by a constant into a negative increment. */
10232 if (this_optab == sub_optab
10233 && GET_CODE (op1) == CONST_INT)
10235 op1 = GEN_INT (- INTVAL (op1));
10236 this_optab = add_optab;
10239 /* For a preincrement, see if we can do this with a single instruction. */
10240 if (!post)
10242 icode = (int) this_optab->handlers[(int) mode].insn_code;
10243 if (icode != (int) CODE_FOR_nothing
10244 /* Make sure that OP0 is valid for operands 0 and 1
10245 of the insn we want to queue. */
10246 && (*insn_operand_predicate[icode][0]) (op0, mode)
10247 && (*insn_operand_predicate[icode][1]) (op0, mode)
10248 && (*insn_operand_predicate[icode][2]) (op1, mode))
10249 single_insn = 1;
10252 /* If OP0 is not the actual lvalue, but rather a copy in a register,
10253 then we cannot just increment OP0. We must therefore contrive to
10254 increment the original value. Then, for postincrement, we can return
10255 OP0 since it is a copy of the old value. For preincrement, expand here
10256 unless we can do it with a single insn.
10258 Likewise if storing directly into OP0 would clobber high bits
10259 we need to preserve (bad_subreg). */
10260 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
10262 /* This is the easiest way to increment the value wherever it is.
10263 Problems with multiple evaluation of INCREMENTED are prevented
10264 because either (1) it is a component_ref or preincrement,
10265 in which case it was stabilized above, or (2) it is an array_ref
10266 with constant index in an array in a register, which is
10267 safe to reevaluate. */
10268 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
10269 || TREE_CODE (exp) == PREDECREMENT_EXPR)
10270 ? MINUS_EXPR : PLUS_EXPR),
10271 TREE_TYPE (exp),
10272 incremented,
10273 TREE_OPERAND (exp, 1));
10275 while (TREE_CODE (incremented) == NOP_EXPR
10276 || TREE_CODE (incremented) == CONVERT_EXPR)
10278 newexp = convert (TREE_TYPE (incremented), newexp);
10279 incremented = TREE_OPERAND (incremented, 0);
10282 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
10283 return post ? op0 : temp;
10286 if (post)
10288 /* We have a true reference to the value in OP0.
10289 If there is an insn to add or subtract in this mode, queue it.
10290 Queueing the increment insn avoids the register shuffling
10291 that often results if we must increment now and first save
10292 the old value for subsequent use. */
10294 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
10295 op0 = stabilize (op0);
10296 #endif
10298 icode = (int) this_optab->handlers[(int) mode].insn_code;
10299 if (icode != (int) CODE_FOR_nothing
10300 /* Make sure that OP0 is valid for operands 0 and 1
10301 of the insn we want to queue. */
10302 && (*insn_operand_predicate[icode][0]) (op0, mode)
10303 && (*insn_operand_predicate[icode][1]) (op0, mode))
10305 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10306 op1 = force_reg (mode, op1);
10308 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
10310 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
10312 rtx addr = (general_operand (XEXP (op0, 0), mode)
10313 ? force_reg (Pmode, XEXP (op0, 0))
10314 : copy_to_reg (XEXP (op0, 0)));
10315 rtx temp, result;
10317 op0 = change_address (op0, VOIDmode, addr);
10318 temp = force_reg (GET_MODE (op0), op0);
10319 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10320 op1 = force_reg (mode, op1);
10322 /* The increment queue is LIFO, thus we have to `queue'
10323 the instructions in reverse order. */
10324 enqueue_insn (op0, gen_move_insn (op0, temp));
10325 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
10326 return result;
10330 /* Preincrement, or we can't increment with one simple insn. */
10331 if (post)
10332 /* Save a copy of the value before inc or dec, to return it later. */
10333 temp = value = copy_to_reg (op0);
10334 else
10335 /* Arrange to return the incremented value. */
10336 /* Copy the rtx because expand_binop will protect from the queue,
10337 and the results of that would be invalid for us to return
10338 if our caller does emit_queue before using our result. */
10339 temp = copy_rtx (value = op0);
10341 /* Increment however we can. */
10342 op1 = expand_binop (mode, this_optab, value, op1,
10343 current_function_check_memory_usage ? NULL_RTX : op0,
10344 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
10345 /* Make sure the value is stored into OP0. */
10346 if (op1 != op0)
10347 emit_move_insn (op0, op1);
10349 return temp;
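/* A sketch of the source-level semantics being expanded:

       y = x++;   -- post: y gets the copy saved before the add
       y = ++x;   -- pre:  y gets the value after the add

   The queueing path exists so that a postincrement of memory can be
   emitted after the use of the old value, avoiding an extra save.  */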
10352 /* Expand all function calls contained within EXP, innermost ones first.
10353 But don't look within expressions that have sequence points.
10354 For each CALL_EXPR, record the rtx for its value
10355 in the CALL_EXPR_RTL field. */
10357 static void
10358 preexpand_calls (exp)
10359 tree exp;
10361 register int nops, i;
10362 int type = TREE_CODE_CLASS (TREE_CODE (exp));
10364 if (! do_preexpand_calls)
10365 return;
10367 /* Only expressions and references can contain calls. */
10369 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
10370 return;
10372 switch (TREE_CODE (exp))
10374 case CALL_EXPR:
10375 /* Do nothing if already expanded. */
10376 if (CALL_EXPR_RTL (exp) != 0
10377 /* Do nothing if the call returns a variable-sized object. */
10378 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
10379 /* Do nothing to built-in functions. */
10380 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
10381 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
10382 == FUNCTION_DECL)
10383 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
10384 return;
10386 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
10387 return;
10389 case COMPOUND_EXPR:
10390 case COND_EXPR:
10391 case TRUTH_ANDIF_EXPR:
10392 case TRUTH_ORIF_EXPR:
10393 /* If we find one of these, then we can be sure
10394 the adjust will be done for it (since it makes jumps).
10395 Do it now, so that if this is inside an argument
10396 of a function, we don't get the stack adjustment
10397 after some other args have already been pushed. */
10398 do_pending_stack_adjust ();
10399 return;
10401 case BLOCK:
10402 case RTL_EXPR:
10403 case WITH_CLEANUP_EXPR:
10404 case CLEANUP_POINT_EXPR:
10405 case TRY_CATCH_EXPR:
10406 return;
10408 case SAVE_EXPR:
10409 if (SAVE_EXPR_RTL (exp) != 0)
10410 return;
10412 default:
10413 break;
10416 nops = tree_code_length[(int) TREE_CODE (exp)];
10417 for (i = 0; i < nops; i++)
10418 if (TREE_OPERAND (exp, i) != 0)
10420 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
10421 if (type == 'e' || type == '<' || type == '1' || type == '2'
10422 || type == 'r')
10423 preexpand_calls (TREE_OPERAND (exp, i));
10427 /* At the start of a function, record that we have no previously-pushed
10428 arguments waiting to be popped. */
10430 void
10431 init_pending_stack_adjust ()
10433 pending_stack_adjust = 0;
10436 /* When exiting from function, if safe, clear out any pending stack adjust
10437 so the adjustment won't get done.
10439 Note, if the current function calls alloca, then it must have a
10440 frame pointer regardless of the value of flag_omit_frame_pointer. */
10442 void
10443 clear_pending_stack_adjust ()
10445 #ifdef EXIT_IGNORE_STACK
10446 if (optimize > 0
10447 && (! flag_omit_frame_pointer || current_function_calls_alloca)
10448 && EXIT_IGNORE_STACK
10449 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
10450 && ! flag_inline_functions)
10451 pending_stack_adjust = 0;
10452 #endif
10455 /* Pop any previously-pushed arguments that have not been popped yet. */
10457 void
10458 do_pending_stack_adjust ()
10460 if (inhibit_defer_pop == 0)
10462 if (pending_stack_adjust != 0)
10463 adjust_stack (GEN_INT (pending_stack_adjust));
10464 pending_stack_adjust = 0;
10468 /* Expand conditional expressions. */
10470 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
10471 LABEL is an rtx of code CODE_LABEL, in this function and all the
10472 functions here. */
10474 void
10475 jumpifnot (exp, label)
10476 tree exp;
10477 rtx label;
10479 do_jump (exp, label, NULL_RTX);
10482 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
10484 void
10485 jumpif (exp, label)
10486 tree exp;
10487 rtx label;
10489 do_jump (exp, NULL_RTX, label);
10492 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
10493 the result is zero, or IF_TRUE_LABEL if the result is one.
10494 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
10495 meaning fall through in that case.
10497 do_jump always does any pending stack adjust except when it does not
10498 actually perform a jump. An example where there is no jump
10499 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
10501 This function is responsible for optimizing cases such as
10502 &&, || and comparison operators in EXP. */
10504 void
10505 do_jump (exp, if_false_label, if_true_label)
10506 tree exp;
10507 rtx if_false_label, if_true_label;
10509 register enum tree_code code = TREE_CODE (exp);
10510 /* Some cases need to create a label to jump to
10511 in order to properly fall through.
10512 These cases set DROP_THROUGH_LABEL nonzero. */
10513 rtx drop_through_label = 0;
10514 rtx temp;
10515 rtx comparison = 0;
10516 int i;
10517 tree type;
10518 enum machine_mode mode;
10520 #ifdef MAX_INTEGER_COMPUTATION_MODE
10521 check_max_integer_computation_mode (exp);
10522 #endif
10524 emit_queue ();
10526 switch (code)
10528 case ERROR_MARK:
10529 break;
10531 case INTEGER_CST:
10532 temp = integer_zerop (exp) ? if_false_label : if_true_label;
10533 if (temp)
10534 emit_jump (temp);
10535 break;
10537 #if 0
10538 /* This is not true with #pragma weak */
10539 case ADDR_EXPR:
10540 /* The address of something can never be zero. */
10541 if (if_true_label)
10542 emit_jump (if_true_label);
10543 break;
10544 #endif
10546 case NOP_EXPR:
10547 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
10548 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
10549 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
10550 goto normal;
10551 case CONVERT_EXPR:
10552 /* If we are narrowing the operand, we have to do the compare in the
10553 narrower mode. */
10554 if ((TYPE_PRECISION (TREE_TYPE (exp))
10555 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10556 goto normal;
10557 case NON_LVALUE_EXPR:
10558 case REFERENCE_EXPR:
10559 case ABS_EXPR:
10560 case NEGATE_EXPR:
10561 case LROTATE_EXPR:
10562 case RROTATE_EXPR:
10563 /* These cannot change zero->non-zero or vice versa. */
10564 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10565 break;
10567 #if 0
10568 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
10569 a test and can be longer if the test is eliminated. */
10570 case PLUS_EXPR:
10571 /* Reduce to minus. */
10572 exp = build (MINUS_EXPR, TREE_TYPE (exp),
10573 TREE_OPERAND (exp, 0),
10574 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
10575 TREE_OPERAND (exp, 1))));
10576 /* Process as MINUS. */
10577 #endif
10579 case MINUS_EXPR:
10580 /* Non-zero iff operands of minus differ. */
10581 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
10582 TREE_OPERAND (exp, 0),
10583 TREE_OPERAND (exp, 1)),
10584 NE, NE);
10585 break;
10587 case BIT_AND_EXPR:
10588 /* If we are AND'ing with a small constant, do this comparison in the
10589 smallest type that fits. If the machine doesn't have comparisons
10590 that small, it will be converted back to the wider comparison.
10591 This helps if we are testing the sign bit of a narrower object.
10592 combine can't do this for us because it can't know whether a
10593 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
10595 if (! SLOW_BYTE_ACCESS
10596 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10597 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10598 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
10599 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10600 && (type = type_for_mode (mode, 1)) != 0
10601 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10602 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10603 != CODE_FOR_nothing))
10605 do_jump (convert (type, exp), if_false_label, if_true_label);
10606 break;
10608 goto normal;
10610 case TRUTH_NOT_EXPR:
10611 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10612 break;
10614 case TRUTH_ANDIF_EXPR:
10615 if (if_false_label == 0)
10616 if_false_label = drop_through_label = gen_label_rtx ();
10617 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10618 start_cleanup_deferral ();
10619 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10620 end_cleanup_deferral ();
10621 break;
10623 case TRUTH_ORIF_EXPR:
10624 if (if_true_label == 0)
10625 if_true_label = drop_through_label = gen_label_rtx ();
10626 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10627 start_cleanup_deferral ();
10628 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10629 end_cleanup_deferral ();
10630 break;
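/* A sketch of the short-circuit jump threading done by the two cases
   above, for `if (a && b)' and `if (a || b)':

       a && b:  jump to FALSE as soon as a == 0, then test b
       a || b:  jump to TRUE as soon as a != 0, then test b

   so neither subexpression is ever materialized as a 0/1 value.  */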
10632 case COMPOUND_EXPR:
10633 push_temp_slots ();
10634 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10635 preserve_temp_slots (NULL_RTX);
10636 free_temp_slots ();
10637 pop_temp_slots ();
10638 emit_queue ();
10639 do_pending_stack_adjust ();
10640 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10641 break;
10643 case COMPONENT_REF:
10644 case BIT_FIELD_REF:
10645 case ARRAY_REF:
10647 int bitsize, bitpos, unsignedp;
10648 enum machine_mode mode;
10649 tree type;
10650 tree offset;
10651 int volatilep = 0;
10652 int alignment;
10654 /* Get description of this reference. We don't actually care
10655 about the underlying object here. */
10656 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10657 &mode, &unsignedp, &volatilep,
10658 &alignment);
10660 type = type_for_size (bitsize, unsignedp);
10661 if (! SLOW_BYTE_ACCESS
10662 && type != 0 && bitsize >= 0
10663 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10664 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10665 != CODE_FOR_nothing))
10667 do_jump (convert (type, exp), if_false_label, if_true_label);
10668 break;
10670 goto normal;
10673 case COND_EXPR:
10674 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10675 if (integer_onep (TREE_OPERAND (exp, 1))
10676 && integer_zerop (TREE_OPERAND (exp, 2)))
10677 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10679 else if (integer_zerop (TREE_OPERAND (exp, 1))
10680 && integer_onep (TREE_OPERAND (exp, 2)))
10681 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10683 else
10685 register rtx label1 = gen_label_rtx ();
10686 drop_through_label = gen_label_rtx ();
10688 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10690 start_cleanup_deferral ();
10691 /* Now the THEN-expression. */
10692 do_jump (TREE_OPERAND (exp, 1),
10693 if_false_label ? if_false_label : drop_through_label,
10694 if_true_label ? if_true_label : drop_through_label);
10695 /* In case the do_jump just above never jumps. */
10696 do_pending_stack_adjust ();
10697 emit_label (label1);
10699 /* Now the ELSE-expression. */
10700 do_jump (TREE_OPERAND (exp, 2),
10701 if_false_label ? if_false_label : drop_through_label,
10702 if_true_label ? if_true_label : drop_through_label);
10703 end_cleanup_deferral ();
10705 break;
10707 case EQ_EXPR:
10709 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10711 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10712 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10714 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10715 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10716 do_jump
10717 (fold
10718 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10719 fold (build (EQ_EXPR, TREE_TYPE (exp),
10720 fold (build1 (REALPART_EXPR,
10721 TREE_TYPE (inner_type),
10722 exp0)),
10723 fold (build1 (REALPART_EXPR,
10724 TREE_TYPE (inner_type),
10725 exp1)))),
10726 fold (build (EQ_EXPR, TREE_TYPE (exp),
10727 fold (build1 (IMAGPART_EXPR,
10728 TREE_TYPE (inner_type),
10729 exp0)),
10730 fold (build1 (IMAGPART_EXPR,
10731 TREE_TYPE (inner_type),
10732 exp1)))))),
10733 if_false_label, if_true_label);
10736 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10737 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10739 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10740 && !can_compare_p (TYPE_MODE (inner_type)))
10741 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10742 else
10743 comparison = compare (exp, EQ, EQ);
10744 break;
10747 case NE_EXPR:
10749 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10751 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10752 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10754 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10755 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10756 do_jump
10757 (fold
10758 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10759 fold (build (NE_EXPR, TREE_TYPE (exp),
10760 fold (build1 (REALPART_EXPR,
10761 TREE_TYPE (inner_type),
10762 exp0)),
10763 fold (build1 (REALPART_EXPR,
10764 TREE_TYPE (inner_type),
10765 exp1)))),
10766 fold (build (NE_EXPR, TREE_TYPE (exp),
10767 fold (build1 (IMAGPART_EXPR,
10768 TREE_TYPE (inner_type),
10769 exp0)),
10770 fold (build1 (IMAGPART_EXPR,
10771 TREE_TYPE (inner_type),
10772 exp1)))))),
10773 if_false_label, if_true_label);
10776 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10777 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10779 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10780 && !can_compare_p (TYPE_MODE (inner_type)))
10781 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10782 else
10783 comparison = compare (exp, NE, NE);
10784 break;
    case LT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        comparison = compare (exp, LT, LTU);
      break;

    case LE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        comparison = compare (exp, LE, LEU);
      break;

    case GT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        comparison = compare (exp, GT, GTU);
      break;

    case GE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        comparison = compare (exp, GE, GEU);
      break;

    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
         comparisons and tests from non-SI objects to have different code
         sequences.  */
      /* Copy to register to avoid generating bad insns by cse
         from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
        temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      if (GET_CODE (temp) == CONST_INT)
        comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
      else if (GET_CODE (temp) == LABEL_REF)
        comparison = const_true_rtx;
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
               && !can_compare_p (GET_MODE (temp)))
        /* Note swapping the labels gives us not-equal.  */
        do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
        comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
                                       NE, TREE_UNSIGNED (TREE_TYPE (exp)),
                                       GET_MODE (temp), NULL_RTX, 0);
      else
        abort ();
    }

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  /* If COMPARISON is nonzero here, it is an rtx that can be substituted
     straight into a conditional jump instruction as the jump condition.
     Otherwise, all the work has been done already.  */

  if (comparison == const_true_rtx)
    {
      if (if_true_label)
        emit_jump (if_true_label);
    }
  else if (comparison == const0_rtx)
    {
      if (if_false_label)
        emit_jump (if_false_label);
    }
  else if (comparison)
    do_jump_for_compare (comparison, if_false_label, if_true_label);

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
         do any stack adjusts from that code, before the place
         where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}

/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
                               (unsignedp || i > 0) ? GTU : GT,
                               unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_true_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
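
/* Illustrative sketch only, not compiler code: the word-at-a-time
   "greater than" test open-coded above, over hypothetical arrays of
   words ordered from least to most significant.  Scanning from the
   high-order end, the first unequal word decides the result.  */
#if 0
static int
example_gtu_by_parts (const unsigned long *a, const unsigned long *b,
                      int nwords)
{
  int i;

  for (i = nwords - 1; i >= 0; i--)
    {
      if (a[i] > b[i])
        return 1;               /* jump to if_true_label */
      if (a[i] != b[i])
        return 0;               /* jump to if_false_label */
      /* Words equal: consider the lower words.  */
    }
  return 0;                     /* all words equal: not greater */
}
#endif
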
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
                               (unsignedp || i > 0) ? GTU : GT,
                               unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_true_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
                                   operand_subword_force (op1, i, mode),
                                   EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
                                   word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
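
/* Illustrative sketch only, not compiler code: the word-wise equality
   test emitted above, over hypothetical word arrays -- any unequal
   word sends control to the false label at once.  */
#if 0
static int
example_eq_by_parts (const unsigned long *a, const unsigned long *b,
                     int nwords)
{
  int i;

  for (i = 0; i < nwords; i++)
    if (a[i] != b[i])
      return 0;                 /* jump to if_false_label */
  return 1;                     /* fall through: all words equal */
}
#endif
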
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, GET_MODE (op0)),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
                                   NULL_RTX, 0);

      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp == const0_rtx)
        emit_jump (if_true_label);
      else
        do_jump_for_compare (comp, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
                                                          GET_MODE (op0)),
                                   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
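
/* Illustrative sketch only, not compiler code: the "or all the words"
   zero test preferred above.  A multiword value is zero exactly when
   the inclusive-or of its words is zero, so one compare suffices.  */
#if 0
static int
example_is_zero_by_ior (const unsigned long *words, int nwords)
{
  unsigned long part = words[0];
  int i;

  for (i = 1; i < nwords; i++)
    part |= words[i];
  return part == 0;
}
#endif
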
/* Given a comparison expression in rtl form, output conditional branches to
   IF_TRUE_LABEL, IF_FALSE_LABEL, or both.  */

static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])
                        (if_true_label));
      else
        abort ();

      if (if_false_label)
        emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx first = get_last_insn (), insn, branch;
      int br_count;

      /* Output the branch with the opposite condition.  Then try to invert
         what is generated.  If more than one insn is a branch, or if the
         branch is not the last insn written, abort.  If we can't invert
         the branch, make a true label, redirect this jump to it, emit
         a jump to the false label, and define the true label.  */
      /* ??? Note that we wouldn't have to do any of this nonsense if
         we passed both labels into a combined compare-and-branch.
         Ah well, jump threading does a good job of repairing the damage.  */

      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])
                        (if_false_label));
      else
        abort ();

      /* Here we get the first insn that was just emitted.  It used to be the
         case that, on some machines, emitting the branch would discard
         the previous compare insn and emit a replacement.  This isn't
         done anymore, but abort if we see that FIRST is deleted.  */

      if (first == 0)
        first = get_insns ();
      else if (INSN_DELETED_P (first))
        abort ();
      else
        first = NEXT_INSN (first);

      /* Look for multiple branches in this sequence, as might be generated
         for a multi-word integer comparison.  */

      br_count = 0;
      branch = NULL_RTX;
      for (insn = first; insn; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == JUMP_INSN)
          {
            branch = insn;
            br_count += 1;
          }

      /* If we've got one branch at the end of the sequence,
         we can try to reverse it.  */

      if (br_count == 1 && NEXT_INSN (branch) == NULL_RTX)
        {
          rtx insn_label;
          insn_label = XEXP (condjump_label (branch), 0);
          JUMP_LABEL (branch) = insn_label;

          if (insn_label != if_false_label)
            abort ();

          if (invert_jump (branch, if_false_label))
            return;
        }

      /* Multiple branches, or inversion failed.  Convert to branches
         around an unconditional jump.  */

      if_true_label = gen_label_rtx ();
      for (insn = first; insn; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == JUMP_INSN)
          {
            rtx insn_label;
            insn_label = XEXP (condjump_label (insn), 0);
            JUMP_LABEL (insn) = insn_label;

            if (insn_label == if_false_label)
              redirect_jump (insn, if_true_label);
          }
      emit_jump (if_false_label);
      emit_label (if_true_label);
    }
}

/* Generate code for a comparison expression EXP
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static rtx
compare (exp, signed_code, unsigned_code)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
{
  register rtx op0, op1;
  register tree type;
  register enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return op0;

  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  return compare_from_rtx (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}

/* Like compare but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an rtx giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
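
/* Illustrative sketch only (hypothetical helper, not the real
   swap_condition): when the operands of a comparison are exchanged to
   put a constant second, the condition code must be mirrored too, so
   that e.g. "4 < x" becomes "x > 4".  */
#if 0
static enum rtx_code
example_swap_condition (enum rtx_code code)
{
  switch (code)
    {
    case LT:  return GT;
    case GT:  return LT;
    case LE:  return GE;
    case GE:  return LE;
    case LTU: return GTU;
    case GTU: return LTU;
    case LEU: return GEU;
    case GEU: return LEU;
    default:  return code;      /* EQ and NE are symmetric */
    }
}
#endif
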
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
              < TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (subtarget == 0 || GET_CODE (subtarget) != REG
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
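
/* Illustrative sketch only, not compiler code: the single-bit special
   case handled early in do_store_flag.  For (x & (1 << n)) != 0, shift
   the tested bit to bit 0 and mask with 1; XOR with 1 yields the == 0
   form.  Hypothetical helpers.  */
#if 0
static int
example_bit_ne (unsigned int x, int n)
{
  return (x >> n) & 1;          /* (x & (1u << n)) != 0 */
}

static int
example_bit_eq (unsigned int x, int n)
{
  return ((x >> n) & 1) ^ 1;    /* (x & (1u << n)) == 0 */
}
#endif
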
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           0, default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
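
/* Illustrative sketch only, not compiler code: the single unsigned
   comparison above checks both ends of the case range at once.  With
   the lower bound already subtracted from the index, in source terms:  */
#if 0
static int
example_in_range (unsigned long x, unsigned long lo, unsigned long hi)
{
  /* Equivalent to (lo <= x && x <= hi) when lo <= hi, using one
     compare: x - lo wraps to a huge value when x < lo.  */
  return x - lo <= hi - lo;
}
#endif
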
#endif /* HAVE_tablejump */