--with-gnu-ld uses different x- file under aix 4.1
[official-gcc.git] / gcc / expr.c
blob: 42e305d54f76219fc36d9d78b2a543d36f36a6f8
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92-98, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-flags.h"
34 #include "insn-codes.h"
35 #include "insn-config.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "recog.h"
39 #include "output.h"
40 #include "typeclass.h"
41 #include "defaults.h"
42 #include "toplev.h"
44 #define CEIL(x,y) (((x) + (y) - 1) / (y))
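/* [Illustrative note, not part of expr.c] CEIL is round-up integer
   division; it is used below, e.g. to count the words needed for a
   multi-word value.  A minimal standalone check of the macro above:  */

#include <assert.h>

int
main (void)
{
  assert (CEIL (10, 4) == 3);   /* 10 bytes fill three 4-byte words */
  assert (CEIL (8, 4) == 2);    /* exact multiples are unchanged */
  return 0;
}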
46 /* Decide whether a function's arguments should be processed
47 from first to last or from last to first.
49 They should if the stack and args grow in opposite directions, but
50 only if we have push insns. */
52 #ifdef PUSH_ROUNDING
54 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
55 #define PUSH_ARGS_REVERSED /* If it's last to first */
56 #endif
58 #endif
60 #ifndef STACK_PUSH_CODE
61 #ifdef STACK_GROWS_DOWNWARD
62 #define STACK_PUSH_CODE PRE_DEC
63 #else
64 #define STACK_PUSH_CODE PRE_INC
65 #endif
66 #endif
68 /* Assume that case vectors are not pc-relative. */
69 #ifndef CASE_VECTOR_PC_RELATIVE
70 #define CASE_VECTOR_PC_RELATIVE 0
71 #endif
73 /* If this is nonzero, we do not bother generating VOLATILE
74 around volatile memory references, and we are willing to
75 output indirect addresses. If cse is to follow, we reject
76 indirect addresses so a useful potential cse is generated;
77 if it is used only once, instruction combination will produce
78 the same indirect address eventually. */
79 int cse_not_expected;
81 /* Nonzero to generate code for all the subroutines within an
82 expression before generating the upper levels of the expression.
83 Nowadays this is never zero. */
84 int do_preexpand_calls = 1;
86 /* Number of units that we should eventually pop off the stack.
87 These are the arguments to function calls that have already returned. */
88 int pending_stack_adjust;
90 /* Nonzero means stack pops must not be deferred, and deferred stack
91 pops must not be output. It is nonzero inside a function call,
92 inside a conditional expression, inside a statement expression,
93 and in other cases as well. */
94 int inhibit_defer_pop;
96 /* Nonzero means __builtin_saveregs has already been done in this function.
97 The value is the pseudoreg containing the value __builtin_saveregs
98 returned. */
99 static rtx saveregs_value;
101 /* Similarly for __builtin_apply_args. */
102 static rtx apply_args_value;
104 /* Don't check memory usage, since code is being emitted to check a memory
105 usage. Used when current_function_check_memory_usage is true, to avoid
106 infinite recursion. */
107 static int in_check_memory_usage;
109 /* Postincrements that still need to be expanded. */
110 static rtx pending_chain;
112 /* This structure is used by move_by_pieces to describe the move to
113 be performed. */
114 struct move_by_pieces
115 {
116 rtx to;
117 rtx to_addr;
118 int autinc_to;
119 int explicit_inc_to;
120 int to_struct;
121 rtx from;
122 rtx from_addr;
123 int autinc_from;
124 int explicit_inc_from;
125 int from_struct;
126 int len;
127 int offset;
128 int reverse;
129 };
131 /* This structure is used by clear_by_pieces to describe the clear to
132 be performed. */
134 struct clear_by_pieces
135 {
136 rtx to;
137 rtx to_addr;
138 int autinc_to;
139 int explicit_inc_to;
140 int to_struct;
141 int len;
142 int offset;
143 int reverse;
144 };
146 extern struct obstack permanent_obstack;
147 extern rtx arg_pointer_save_area;
149 static rtx get_push_address PROTO ((int));
151 static rtx enqueue_insn PROTO((rtx, rtx));
152 static void init_queue PROTO((void));
153 static int move_by_pieces_ninsns PROTO((unsigned int, int));
154 static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
155 struct move_by_pieces *));
156 static void clear_by_pieces PROTO((rtx, int, int));
157 static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
158 struct clear_by_pieces *));
159 static int is_zeros_p PROTO((tree));
160 static int mostly_zeros_p PROTO((tree));
161 static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
162 tree, tree, int));
163 static void store_constructor PROTO((tree, rtx, int));
164 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
165 enum machine_mode, int, int,
166 int, int));
167 static enum memory_use_mode
168 get_memory_usage_from_modifier PROTO((enum expand_modifier));
169 static tree save_noncopied_parts PROTO((tree, tree));
170 static tree init_noncopied_parts PROTO((tree, tree));
171 static int safe_from_p PROTO((rtx, tree, int));
172 static int fixed_type_p PROTO((tree));
173 static rtx var_rtx PROTO((tree));
174 static int get_pointer_alignment PROTO((tree, unsigned));
175 static tree string_constant PROTO((tree, tree *));
176 static tree c_strlen PROTO((tree));
177 static rtx get_memory_rtx PROTO((tree));
178 static rtx expand_builtin PROTO((tree, rtx, rtx,
179 enum machine_mode, int));
180 static int apply_args_size PROTO((void));
181 static int apply_result_size PROTO((void));
182 static rtx result_vector PROTO((int, rtx));
183 static rtx expand_builtin_apply_args PROTO((void));
184 static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
185 static void expand_builtin_return PROTO((rtx));
186 static rtx expand_increment PROTO((tree, int, int));
187 static void preexpand_calls PROTO((tree));
188 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
189 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
190 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
191 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
192 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
194 /* Record for each mode whether we can move a register directly to or
195 from an object of that mode in memory. If we can't, we won't try
196 to use that mode directly when accessing a field of that mode. */
198 static char direct_load[NUM_MACHINE_MODES];
199 static char direct_store[NUM_MACHINE_MODES];
201 /* If a memory-to-memory move would take MOVE_RATIO or more simple
202 move-instruction sequences, we will do a movstr or libcall instead. */
204 #ifndef MOVE_RATIO
205 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
206 #define MOVE_RATIO 2
207 #else
208 /* If we are optimizing for space (-Os), cut down the default move ratio */
209 #define MOVE_RATIO (optimize_size ? 3 : 15)
210 #endif
211 #endif
213 /* This macro is used to determine whether move_by_pieces should be called
214 to perform a structure copy. */
215 #ifndef MOVE_BY_PIECES_P
216 #define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns \
217 (SIZE, ALIGN) < MOVE_RATIO)
218 #endif
220 /* This array records the insn_code of insns to perform block moves. */
221 enum insn_code movstr_optab[NUM_MACHINE_MODES];
223 /* This array records the insn_code of insns to perform block clears. */
224 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
226 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
228 #ifndef SLOW_UNALIGNED_ACCESS
229 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
230 #endif
232 /* Register mappings for target machines without register windows. */
233 #ifndef INCOMING_REGNO
234 #define INCOMING_REGNO(OUT) (OUT)
235 #endif
236 #ifndef OUTGOING_REGNO
237 #define OUTGOING_REGNO(IN) (IN)
238 #endif
240 /* This is run once per compilation to set up which modes can be used
241 directly in memory and to initialize the block move optab. */
243 void
244 init_expr_once ()
246 rtx insn, pat;
247 enum machine_mode mode;
248 int num_clobbers;
249 rtx mem, mem1;
250 char *free_point;
252 start_sequence ();
254 /* Since we are on the permanent obstack, we must be sure we save this
255 spot AFTER we call start_sequence, since it will reuse the rtl it
256 makes. */
257 free_point = (char *) oballoc (0);
259 /* Try indexing by frame ptr and try by stack ptr.
260 It is known that on the Convex the stack ptr isn't a valid index.
261 With luck, one or the other is valid on any machine. */
262 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
263 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
265 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
266 pat = PATTERN (insn);
268 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
269 mode = (enum machine_mode) ((int) mode + 1))
271 int regno;
272 rtx reg;
274 direct_load[(int) mode] = direct_store[(int) mode] = 0;
275 PUT_MODE (mem, mode);
276 PUT_MODE (mem1, mode);
278 /* See if there is some register that can be used in this mode and
279 directly loaded or stored from memory. */
281 if (mode != VOIDmode && mode != BLKmode)
282 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
283 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
284 regno++)
286 if (! HARD_REGNO_MODE_OK (regno, mode))
287 continue;
289 reg = gen_rtx_REG (mode, regno);
291 SET_SRC (pat) = mem;
292 SET_DEST (pat) = reg;
293 if (recog (pat, insn, &num_clobbers) >= 0)
294 direct_load[(int) mode] = 1;
296 SET_SRC (pat) = mem1;
297 SET_DEST (pat) = reg;
298 if (recog (pat, insn, &num_clobbers) >= 0)
299 direct_load[(int) mode] = 1;
301 SET_SRC (pat) = reg;
302 SET_DEST (pat) = mem;
303 if (recog (pat, insn, &num_clobbers) >= 0)
304 direct_store[(int) mode] = 1;
306 SET_SRC (pat) = reg;
307 SET_DEST (pat) = mem1;
308 if (recog (pat, insn, &num_clobbers) >= 0)
309 direct_store[(int) mode] = 1;
313 end_sequence ();
314 obfree (free_point);
317 /* This is run at the start of compiling a function. */
319 void
320 init_expr ()
322 init_queue ();
324 pending_stack_adjust = 0;
325 inhibit_defer_pop = 0;
326 saveregs_value = 0;
327 apply_args_value = 0;
328 forced_labels = 0;
331 /* Save all variables describing the current status into the structure *P.
332 This is used before starting a nested function. */
334 void
335 save_expr_status (p)
336 struct function *p;
338 p->pending_chain = pending_chain;
339 p->pending_stack_adjust = pending_stack_adjust;
340 p->inhibit_defer_pop = inhibit_defer_pop;
341 p->saveregs_value = saveregs_value;
342 p->apply_args_value = apply_args_value;
343 p->forced_labels = forced_labels;
345 pending_chain = NULL_RTX;
346 pending_stack_adjust = 0;
347 inhibit_defer_pop = 0;
348 saveregs_value = 0;
349 apply_args_value = 0;
350 forced_labels = 0;
353 /* Restore all variables describing the current status from the structure *P.
354 This is used after a nested function. */
356 void
357 restore_expr_status (p)
358 struct function *p;
360 pending_chain = p->pending_chain;
361 pending_stack_adjust = p->pending_stack_adjust;
362 inhibit_defer_pop = p->inhibit_defer_pop;
363 saveregs_value = p->saveregs_value;
364 apply_args_value = p->apply_args_value;
365 forced_labels = p->forced_labels;
368 /* Manage the queue of increment instructions to be output
369 for POSTINCREMENT_EXPR expressions, etc. */
371 /* Queue up to increment (or change) VAR later. BODY says how:
372 BODY should be the same thing you would pass to emit_insn
373 to increment right away. It will go to emit_insn later on.
375 The value is a QUEUED expression to be used in place of VAR
376 where you want to guarantee the pre-incrementation value of VAR. */
378 static rtx
379 enqueue_insn (var, body)
380 rtx var, body;
382 pending_chain = gen_rtx_QUEUED (GET_MODE (var),
383 var, NULL_RTX, NULL_RTX, body,
384 pending_chain);
385 return pending_chain;
388 /* Use protect_from_queue to convert a QUEUED expression
389 into something that you can put immediately into an instruction.
390 If the queued incrementation has not happened yet,
391 protect_from_queue returns the variable itself.
392 If the incrementation has happened, protect_from_queue returns a temp
393 that contains a copy of the old value of the variable.
395 Any time an rtx which might possibly be a QUEUED is to be put
396 into an instruction, it must be passed through protect_from_queue first.
397 QUEUED expressions are not meaningful in instructions.
399 Do not pass a value through protect_from_queue and then hold
400 on to it for a while before putting it in an instruction!
401 If the queue is flushed in between, incorrect code will result. */
403 rtx
404 protect_from_queue (x, modify)
405 register rtx x;
406 int modify;
408 register RTX_CODE code = GET_CODE (x);
410 #if 0 /* A QUEUED can hang around after the queue is forced out. */
411 /* Shortcut for most common case. */
412 if (pending_chain == 0)
413 return x;
414 #endif
416 if (code != QUEUED)
418 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
419 use of autoincrement. Make a copy of the contents of the memory
420 location rather than a copy of the address, but not if the value is
421 of mode BLKmode. Don't modify X in place since it might be
422 shared. */
423 if (code == MEM && GET_MODE (x) != BLKmode
424 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
426 register rtx y = XEXP (x, 0);
427 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
429 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
430 MEM_COPY_ATTRIBUTES (new, x);
431 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
433 if (QUEUED_INSN (y))
435 register rtx temp = gen_reg_rtx (GET_MODE (new));
436 emit_insn_before (gen_move_insn (temp, new),
437 QUEUED_INSN (y));
438 return temp;
440 return new;
442 /* Otherwise, recursively protect the subexpressions of all
443 the kinds of rtx's that can contain a QUEUED. */
444 if (code == MEM)
446 rtx tem = protect_from_queue (XEXP (x, 0), 0);
447 if (tem != XEXP (x, 0))
449 x = copy_rtx (x);
450 XEXP (x, 0) = tem;
453 else if (code == PLUS || code == MULT)
455 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
456 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
457 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
459 x = copy_rtx (x);
460 XEXP (x, 0) = new0;
461 XEXP (x, 1) = new1;
464 return x;
466 /* If the increment has not happened, use the variable itself. */
467 if (QUEUED_INSN (x) == 0)
468 return QUEUED_VAR (x);
469 /* If the increment has happened and a pre-increment copy exists,
470 use that copy. */
471 if (QUEUED_COPY (x) != 0)
472 return QUEUED_COPY (x);
473 /* The increment has happened but we haven't set up a pre-increment copy.
474 Set one up now, and use it. */
475 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
476 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
477 QUEUED_INSN (x));
478 return QUEUED_COPY (x);
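/* [Illustrative model, not GCC code] The queueing protocol above can
   be pictured with ordinary C data: a deferred increment is recorded,
   consumers must "protect" a value before use, and they receive either
   the variable itself (increment still pending) or a saved copy of the
   old value (increment already performed).  All names below are
   invented for the sketch; the real code sets up the pre-increment
   copy lazily, which this model simplifies away.  */

#include <stdio.h>

struct queued { int *var; int emitted; int copy; };

static int
protect (struct queued *q)
{
  if (! q->emitted)
    return *q->var;   /* like QUEUED_INSN == 0: use the variable */
  return q->copy;     /* like QUEUED_COPY: use the pre-increment value */
}

static void
flush (struct queued *q)
{
  q->copy = *q->var;  /* save the pre-increment value */
  (*q->var)++;        /* perform the queued increment */
  q->emitted = 1;
}

int
main (void)
{
  int x = 5;
  struct queued q = { &x, 0, 0 };
  printf ("%d\n", protect (&q));        /* 5: increment not yet emitted */
  flush (&q);
  printf ("%d %d\n", x, protect (&q));  /* 6 5: old value still available */
  return 0;
}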
481 /* Return nonzero if X contains a QUEUED expression:
482 if it contains anything that will be altered by a queued increment.
483 We handle only combinations of MEM, PLUS, MINUS and MULT operators
484 since memory addresses generally contain only those. */
486 int
487 queued_subexp_p (x)
488 rtx x;
490 register enum rtx_code code = GET_CODE (x);
491 switch (code)
493 case QUEUED:
494 return 1;
495 case MEM:
496 return queued_subexp_p (XEXP (x, 0));
497 case MULT:
498 case PLUS:
499 case MINUS:
500 return (queued_subexp_p (XEXP (x, 0))
501 || queued_subexp_p (XEXP (x, 1)));
502 default:
503 return 0;
507 /* Perform all the pending incrementations. */
509 void
510 emit_queue ()
512 register rtx p;
513 while ((p = pending_chain))
515 rtx body = QUEUED_BODY (p);
517 if (GET_CODE (body) == SEQUENCE)
519 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
520 emit_insn (QUEUED_BODY (p));
522 else
523 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
524 pending_chain = QUEUED_NEXT (p);
528 static void
529 init_queue ()
531 if (pending_chain)
532 abort ();
535 /* Copy data from FROM to TO, where the machine modes are not the same.
536 Both modes may be integer, or both may be floating.
537 UNSIGNEDP should be nonzero if FROM is an unsigned type.
538 This causes zero-extension instead of sign-extension. */
540 void
541 convert_move (to, from, unsignedp)
542 register rtx to, from;
543 int unsignedp;
545 enum machine_mode to_mode = GET_MODE (to);
546 enum machine_mode from_mode = GET_MODE (from);
547 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
548 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
549 enum insn_code code;
550 rtx libcall;
552 /* rtx code for making an equivalent value. */
553 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
555 to = protect_from_queue (to, 1);
556 from = protect_from_queue (from, 0);
558 if (to_real != from_real)
559 abort ();
561 /* If FROM is a SUBREG that indicates that we have already done at least
562 the required extension, strip it. We don't handle such SUBREGs as
563 TO here. */
565 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
566 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
567 >= GET_MODE_SIZE (to_mode))
568 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
569 from = gen_lowpart (to_mode, from), from_mode = to_mode;
571 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
572 abort ();
574 if (to_mode == from_mode
575 || (from_mode == VOIDmode && CONSTANT_P (from)))
577 emit_move_insn (to, from);
578 return;
581 if (to_real)
583 rtx value;
585 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
587 /* Try converting directly if the insn is supported. */
588 if ((code = can_extend_p (to_mode, from_mode, 0))
589 != CODE_FOR_nothing)
591 emit_unop_insn (code, to, from, UNKNOWN);
592 return;
596 #ifdef HAVE_trunchfqf2
597 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
599 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
600 return;
602 #endif
603 #ifdef HAVE_trunctqfqf2
604 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
606 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
607 return;
609 #endif
610 #ifdef HAVE_truncsfqf2
611 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
613 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
614 return;
616 #endif
617 #ifdef HAVE_truncdfqf2
618 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
620 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
621 return;
623 #endif
624 #ifdef HAVE_truncxfqf2
625 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
627 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
628 return;
630 #endif
631 #ifdef HAVE_trunctfqf2
632 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
634 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
635 return;
637 #endif
639 #ifdef HAVE_trunctqfhf2
640 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
642 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
643 return;
645 #endif
646 #ifdef HAVE_truncsfhf2
647 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
649 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
650 return;
652 #endif
653 #ifdef HAVE_truncdfhf2
654 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
656 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
657 return;
659 #endif
660 #ifdef HAVE_truncxfhf2
661 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
663 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
664 return;
666 #endif
667 #ifdef HAVE_trunctfhf2
668 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
670 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
671 return;
673 #endif
675 #ifdef HAVE_truncsftqf2
676 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
678 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
679 return;
681 #endif
682 #ifdef HAVE_truncdftqf2
683 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
685 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
686 return;
688 #endif
689 #ifdef HAVE_truncxftqf2
690 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
692 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
693 return;
695 #endif
696 #ifdef HAVE_trunctftqf2
697 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
699 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
700 return;
702 #endif
704 #ifdef HAVE_truncdfsf2
705 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
707 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
708 return;
710 #endif
711 #ifdef HAVE_truncxfsf2
712 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
714 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
715 return;
717 #endif
718 #ifdef HAVE_trunctfsf2
719 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
721 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
722 return;
724 #endif
725 #ifdef HAVE_truncxfdf2
726 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
728 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
729 return;
731 #endif
732 #ifdef HAVE_trunctfdf2
733 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
735 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
736 return;
738 #endif
740 libcall = (rtx) 0;
741 switch (from_mode)
743 case SFmode:
744 switch (to_mode)
746 case DFmode:
747 libcall = extendsfdf2_libfunc;
748 break;
750 case XFmode:
751 libcall = extendsfxf2_libfunc;
752 break;
754 case TFmode:
755 libcall = extendsftf2_libfunc;
756 break;
758 default:
759 break;
761 break;
763 case DFmode:
764 switch (to_mode)
766 case SFmode:
767 libcall = truncdfsf2_libfunc;
768 break;
770 case XFmode:
771 libcall = extenddfxf2_libfunc;
772 break;
774 case TFmode:
775 libcall = extenddftf2_libfunc;
776 break;
778 default:
779 break;
781 break;
783 case XFmode:
784 switch (to_mode)
786 case SFmode:
787 libcall = truncxfsf2_libfunc;
788 break;
790 case DFmode:
791 libcall = truncxfdf2_libfunc;
792 break;
794 default:
795 break;
797 break;
799 case TFmode:
800 switch (to_mode)
802 case SFmode:
803 libcall = trunctfsf2_libfunc;
804 break;
806 case DFmode:
807 libcall = trunctfdf2_libfunc;
808 break;
810 default:
811 break;
813 break;
815 default:
816 break;
819 if (libcall == (rtx) 0)
820 /* This conversion is not implemented yet. */
821 abort ();
823 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
824 1, from, from_mode);
825 emit_move_insn (to, value);
826 return;
829 /* Now both modes are integers. */
831 /* Handle expanding beyond a word. */
832 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
833 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
835 rtx insns;
836 rtx lowpart;
837 rtx fill_value;
838 rtx lowfrom;
839 int i;
840 enum machine_mode lowpart_mode;
841 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
843 /* Try converting directly if the insn is supported. */
844 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
845 != CODE_FOR_nothing)
847 /* If FROM is a SUBREG, put it into a register. Do this
848 so that we always generate the same set of insns for
849 better cse'ing; if an intermediate assignment occurred,
850 we won't be doing the operation directly on the SUBREG. */
851 if (optimize > 0 && GET_CODE (from) == SUBREG)
852 from = force_reg (from_mode, from);
853 emit_unop_insn (code, to, from, equiv_code);
854 return;
856 /* Next, try converting via full word. */
857 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
858 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
859 != CODE_FOR_nothing))
861 if (GET_CODE (to) == REG)
862 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
863 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
864 emit_unop_insn (code, to,
865 gen_lowpart (word_mode, to), equiv_code);
866 return;
869 /* No special multiword conversion insn; do it by hand. */
870 start_sequence ();
872 /* Since we will turn this into a no conflict block, we must ensure
873 that the source does not overlap the target. */
875 if (reg_overlap_mentioned_p (to, from))
876 from = force_reg (from_mode, from);
878 /* Get a copy of FROM widened to a word, if necessary. */
879 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
880 lowpart_mode = word_mode;
881 else
882 lowpart_mode = from_mode;
884 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
886 lowpart = gen_lowpart (lowpart_mode, to);
887 emit_move_insn (lowpart, lowfrom);
889 /* Compute the value to put in each remaining word. */
890 if (unsignedp)
891 fill_value = const0_rtx;
892 else
894 #ifdef HAVE_slt
895 if (HAVE_slt
896 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
897 && STORE_FLAG_VALUE == -1)
899 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
900 lowpart_mode, 0, 0);
901 fill_value = gen_reg_rtx (word_mode);
902 emit_insn (gen_slt (fill_value));
904 else
905 #endif
907 fill_value
908 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
909 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
910 NULL_RTX, 0);
911 fill_value = convert_to_mode (word_mode, fill_value, 1);
915 /* Fill the remaining words. */
916 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
918 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
919 rtx subword = operand_subword (to, index, 1, to_mode);
921 if (subword == 0)
922 abort ();
924 if (fill_value != subword)
925 emit_move_insn (subword, fill_value);
928 insns = get_insns ();
929 end_sequence ();
931 emit_no_conflict_block (insns, to, from, NULL_RTX,
932 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
933 return;
936 /* Truncating multi-word to a word or less. */
937 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
938 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
940 if (!((GET_CODE (from) == MEM
941 && ! MEM_VOLATILE_P (from)
942 && direct_load[(int) to_mode]
943 && ! mode_dependent_address_p (XEXP (from, 0)))
944 || GET_CODE (from) == REG
945 || GET_CODE (from) == SUBREG))
946 from = force_reg (from_mode, from);
947 convert_move (to, gen_lowpart (word_mode, from), 0);
948 return;
951 /* Handle pointer conversion */ /* SPEE 900220 */
952 if (to_mode == PQImode)
954 if (from_mode != QImode)
955 from = convert_to_mode (QImode, from, unsignedp);
957 #ifdef HAVE_truncqipqi2
958 if (HAVE_truncqipqi2)
960 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
961 return;
963 #endif /* HAVE_truncqipqi2 */
964 abort ();
967 if (from_mode == PQImode)
969 if (to_mode != QImode)
971 from = convert_to_mode (QImode, from, unsignedp);
972 from_mode = QImode;
974 else
976 #ifdef HAVE_extendpqiqi2
977 if (HAVE_extendpqiqi2)
979 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
980 return;
982 #endif /* HAVE_extendpqiqi2 */
983 abort ();
987 if (to_mode == PSImode)
989 if (from_mode != SImode)
990 from = convert_to_mode (SImode, from, unsignedp);
992 #ifdef HAVE_truncsipsi2
993 if (HAVE_truncsipsi2)
995 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
996 return;
998 #endif /* HAVE_truncsipsi2 */
999 abort ();
1002 if (from_mode == PSImode)
1004 if (to_mode != SImode)
1006 from = convert_to_mode (SImode, from, unsignedp);
1007 from_mode = SImode;
1009 else
1011 #ifdef HAVE_extendpsisi2
1012 if (HAVE_extendpsisi2)
1014 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1015 return;
1017 #endif /* HAVE_extendpsisi2 */
1018 abort ();
1022 if (to_mode == PDImode)
1024 if (from_mode != DImode)
1025 from = convert_to_mode (DImode, from, unsignedp);
1027 #ifdef HAVE_truncdipdi2
1028 if (HAVE_truncdipdi2)
1030 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1031 return;
1033 #endif /* HAVE_truncdipdi2 */
1034 abort ();
1037 if (from_mode == PDImode)
1039 if (to_mode != DImode)
1041 from = convert_to_mode (DImode, from, unsignedp);
1042 from_mode = DImode;
1044 else
1046 #ifdef HAVE_extendpdidi2
1047 if (HAVE_extendpdidi2)
1049 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1050 return;
1052 #endif /* HAVE_extendpdidi2 */
1053 abort ();
1057 /* Now follow all the conversions between integers
1058 no more than a word long. */
1060 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1061 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1062 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1063 GET_MODE_BITSIZE (from_mode)))
1065 if (!((GET_CODE (from) == MEM
1066 && ! MEM_VOLATILE_P (from)
1067 && direct_load[(int) to_mode]
1068 && ! mode_dependent_address_p (XEXP (from, 0)))
1069 || GET_CODE (from) == REG
1070 || GET_CODE (from) == SUBREG))
1071 from = force_reg (from_mode, from);
1072 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1073 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1074 from = copy_to_reg (from);
1075 emit_move_insn (to, gen_lowpart (to_mode, from));
1076 return;
1079 /* Handle extension. */
1080 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1082 /* Convert directly if that works. */
1083 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1084 != CODE_FOR_nothing)
1086 emit_unop_insn (code, to, from, equiv_code);
1087 return;
1089 else
1091 enum machine_mode intermediate;
1092 rtx tmp;
1093 tree shift_amount;
1095 /* Search for a mode to convert via. */
1096 for (intermediate = from_mode; intermediate != VOIDmode;
1097 intermediate = GET_MODE_WIDER_MODE (intermediate))
1098 if (((can_extend_p (to_mode, intermediate, unsignedp)
1099 != CODE_FOR_nothing)
1100 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1101 && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
1102 && (can_extend_p (intermediate, from_mode, unsignedp)
1103 != CODE_FOR_nothing))
1105 convert_move (to, convert_to_mode (intermediate, from,
1106 unsignedp), unsignedp);
1107 return;
1110 /* No suitable intermediate mode.
1111 Generate what we need with shifts. */
1112 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1113 - GET_MODE_BITSIZE (from_mode), 0);
1114 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1115 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1116 to, unsignedp);
1117 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1118 to, unsignedp);
1119 if (tmp != to)
1120 emit_move_insn (to, tmp);
1121 return;
1125 /* Support special truncate insns for certain modes. */
1127 if (from_mode == DImode && to_mode == SImode)
1129 #ifdef HAVE_truncdisi2
1130 if (HAVE_truncdisi2)
1132 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1133 return;
1135 #endif
1136 convert_move (to, force_reg (from_mode, from), unsignedp);
1137 return;
1140 if (from_mode == DImode && to_mode == HImode)
1142 #ifdef HAVE_truncdihi2
1143 if (HAVE_truncdihi2)
1145 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1146 return;
1148 #endif
1149 convert_move (to, force_reg (from_mode, from), unsignedp);
1150 return;
1153 if (from_mode == DImode && to_mode == QImode)
1155 #ifdef HAVE_truncdiqi2
1156 if (HAVE_truncdiqi2)
1158 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1159 return;
1161 #endif
1162 convert_move (to, force_reg (from_mode, from), unsignedp);
1163 return;
1166 if (from_mode == SImode && to_mode == HImode)
1168 #ifdef HAVE_truncsihi2
1169 if (HAVE_truncsihi2)
1171 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1172 return;
1174 #endif
1175 convert_move (to, force_reg (from_mode, from), unsignedp);
1176 return;
1179 if (from_mode == SImode && to_mode == QImode)
1181 #ifdef HAVE_truncsiqi2
1182 if (HAVE_truncsiqi2)
1184 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1185 return;
1187 #endif
1188 convert_move (to, force_reg (from_mode, from), unsignedp);
1189 return;
1192 if (from_mode == HImode && to_mode == QImode)
1194 #ifdef HAVE_trunchiqi2
1195 if (HAVE_trunchiqi2)
1197 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1198 return;
1200 #endif
1201 convert_move (to, force_reg (from_mode, from), unsignedp);
1202 return;
1205 if (from_mode == TImode && to_mode == DImode)
1207 #ifdef HAVE_trunctidi2
1208 if (HAVE_trunctidi2)
1210 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1211 return;
1213 #endif
1214 convert_move (to, force_reg (from_mode, from), unsignedp);
1215 return;
1218 if (from_mode == TImode && to_mode == SImode)
1220 #ifdef HAVE_trunctisi2
1221 if (HAVE_trunctisi2)
1223 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1224 return;
1226 #endif
1227 convert_move (to, force_reg (from_mode, from), unsignedp);
1228 return;
1231 if (from_mode == TImode && to_mode == HImode)
1233 #ifdef HAVE_trunctihi2
1234 if (HAVE_trunctihi2)
1236 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1237 return;
1239 #endif
1240 convert_move (to, force_reg (from_mode, from), unsignedp);
1241 return;
1244 if (from_mode == TImode && to_mode == QImode)
1246 #ifdef HAVE_trunctiqi2
1247 if (HAVE_trunctiqi2)
1249 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1250 return;
1252 #endif
1253 convert_move (to, force_reg (from_mode, from), unsignedp);
1254 return;
1257 /* Handle truncation of volatile memrefs, and so on;
1258 the things that couldn't be truncated directly,
1259 and for which there was no special instruction. */
1260 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1262 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1263 emit_move_insn (to, temp);
1264 return;
1267 /* Mode combination is not recognized. */
1268 abort ();
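/* [Illustrative sketch, not from expr.c] The shift-pair fallback in
   the extension case above widens an N-bit value by moving it to the
   top of the wide mode and shifting it back down: an arithmetic right
   shift yields sign extension, a logical one zero extension.  A
   standalone C rendering, assuming the usual arithmetic right shift
   of signed values (which GCC itself provides):  */

#include <assert.h>
#include <stdint.h>

static int32_t
extend_nbit (uint32_t value, int from_bits, int unsignedp)
{
  int shift = 32 - from_bits;

  if (unsignedp)
    return (int32_t) ((value << shift) >> shift);  /* zero extend */
  return ((int32_t) (value << shift)) >> shift;    /* sign extend */
}

int
main (void)
{
  assert (extend_nbit (0xFF, 8, 0) == -1);   /* 8-bit 0xff, signed */
  assert (extend_nbit (0xFF, 8, 1) == 255);  /* same bits, unsigned */
  return 0;
}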
1271 /* Return an rtx for a value that would result
1272 from converting X to mode MODE.
1273 Both X and MODE may be floating, or both integer.
1274 UNSIGNEDP is nonzero if X is an unsigned value.
1275 This can be done by referring to a part of X in place
1276 or by copying to a new temporary with conversion.
1278 This function *must not* call protect_from_queue
1279 except when putting X into an insn (in which case convert_move does it). */
1281 rtx
1282 convert_to_mode (mode, x, unsignedp)
1283 enum machine_mode mode;
1284 rtx x;
1285 int unsignedp;
1287 return convert_modes (mode, VOIDmode, x, unsignedp);
1290 /* Return an rtx for a value that would result
1291 from converting X from mode OLDMODE to mode MODE.
1292 Both modes may be floating, or both integer.
1293 UNSIGNEDP is nonzero if X is an unsigned value.
1295 This can be done by referring to a part of X in place
1296 or by copying to a new temporary with conversion.
1298 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1300 This function *must not* call protect_from_queue
1301 except when putting X into an insn (in which case convert_move does it). */
1303 rtx
1304 convert_modes (mode, oldmode, x, unsignedp)
1305 enum machine_mode mode, oldmode;
1306 rtx x;
1307 int unsignedp;
1309 register rtx temp;
1311 /* If FROM is a SUBREG that indicates that we have already done at least
1312 the required extension, strip it. */
1314 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1315 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1316 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1317 x = gen_lowpart (mode, x);
1319 if (GET_MODE (x) != VOIDmode)
1320 oldmode = GET_MODE (x);
1322 if (mode == oldmode)
1323 return x;
1325 /* There is one case that we must handle specially: If we are converting
1326 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1327 we are to interpret the constant as unsigned, gen_lowpart will do
1328 the wrong thing if the constant appears negative. What we want to do is
1329 make the high-order word of the constant zero, not all ones. */
1331 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1332 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1333 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1335 HOST_WIDE_INT val = INTVAL (x);
1337 if (oldmode != VOIDmode
1338 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1340 int width = GET_MODE_BITSIZE (oldmode);
1342 /* We need to zero extend VAL. */
1343 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1346 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1349 /* We can do this with a gen_lowpart if both desired and current modes
1350 are integer, and this is either a constant integer, a register, or a
1351 non-volatile MEM. Except for the constant case where MODE is no
1352 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1354 if ((GET_CODE (x) == CONST_INT
1355 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1356 || (GET_MODE_CLASS (mode) == MODE_INT
1357 && GET_MODE_CLASS (oldmode) == MODE_INT
1358 && (GET_CODE (x) == CONST_DOUBLE
1359 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1360 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1361 && direct_load[(int) mode])
1362 || (GET_CODE (x) == REG
1363 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1364 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1366 /* ?? If we don't know OLDMODE, we have to assume here that
1367 X does not need sign- or zero-extension. This may not be
1368 the case, but it's the best we can do. */
1369 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1370 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1372 HOST_WIDE_INT val = INTVAL (x);
1373 int width = GET_MODE_BITSIZE (oldmode);
1375 /* We must sign or zero-extend in this case. Start by
1376 zero-extending, then sign extend if we need to. */
1377 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1378 if (! unsignedp
1379 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1380 val |= (HOST_WIDE_INT) (-1) << width;
1382 return GEN_INT (val);
1385 return gen_lowpart (mode, x);
1388 temp = gen_reg_rtx (mode);
1389 convert_move (temp, x, unsignedp);
1390 return temp;
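/* [Illustrative sketch, not from expr.c] The CONST_INT paths above
   fold the conversion on the host: the value is first masked down to
   the old mode's width, and for a signed conversion with the sign bit
   set, the high bits are then filled with ones.  A standalone
   rendering, with long standing in for HOST_WIDE_INT:  */

#include <assert.h>

static long
convert_const (long val, int width, int unsignedp)
{
  val &= ((long) 1 << width) - 1;     /* zero-extend to WIDTH bits */
  if (! unsignedp && (val & ((long) 1 << (width - 1))))
    val |= (long) -1 << width;        /* then sign-extend if needed */
  return val;
}

int
main (void)
{
  assert (convert_const (-1, 8, 1) == 255);     /* (unsigned char) -1 */
  assert (convert_const (0x80, 8, 0) == -128);  /* (signed char) 0x80 */
  return 0;
}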
1394 /* This macro is used to determine what the largest unit size that
1395 move_by_pieces can use is. */
1397 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1398 move efficiently, as opposed to MOVE_MAX which is the maximum
1399 number of bytes we can move with a single instruction. */
1401 #ifndef MOVE_MAX_PIECES
1402 #define MOVE_MAX_PIECES MOVE_MAX
1403 #endif
1405 /* Some architectures do not have complete pre/post increment/decrement
1406 instruction sets, or only move some modes efficiently. These macros
1407 allow us to fine-tune move_by_pieces for these targets. */
1409 #ifndef USE_LOAD_POST_INCREMENT
1410 #define USE_LOAD_POST_INCREMENT(MODE) HAVE_POST_INCREMENT
1411 #endif
1413 #ifndef USE_LOAD_PRE_DECREMENT
1414 #define USE_LOAD_PRE_DECREMENT(MODE) HAVE_PRE_DECREMENT
1415 #endif
1417 #ifndef USE_STORE_POST_INCREMENT
1418 #define USE_STORE_POST_INCREMENT(MODE) HAVE_POST_INCREMENT
1419 #endif
1421 #ifndef USE_STORE_PRE_DECREMENT
1422 #define USE_STORE_PRE_DECREMENT(MODE) HAVE_PRE_DECREMENT
1423 #endif
1425 /* Generate several move instructions to copy LEN bytes
1426 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1427 The caller must pass FROM and TO
1428 through protect_from_queue before calling.
1429 ALIGN (in bytes) is maximum alignment we can assume. */
1431 void
1432 move_by_pieces (to, from, len, align)
1433 rtx to, from;
1434 int len, align;
1436 struct move_by_pieces data;
1437 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1438 int max_size = MOVE_MAX_PIECES + 1;
1439 enum machine_mode mode = VOIDmode, tmode;
1440 enum insn_code icode;
1442 data.offset = 0;
1443 data.to_addr = to_addr;
1444 data.from_addr = from_addr;
1445 data.to = to;
1446 data.from = from;
1447 data.autinc_to
1448 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1449 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1450 data.autinc_from
1451 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1452 || GET_CODE (from_addr) == POST_INC
1453 || GET_CODE (from_addr) == POST_DEC);
1455 data.explicit_inc_from = 0;
1456 data.explicit_inc_to = 0;
1457 data.reverse
1458 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1459 if (data.reverse) data.offset = len;
1460 data.len = len;
1462 data.to_struct = MEM_IN_STRUCT_P (to);
1463 data.from_struct = MEM_IN_STRUCT_P (from);
1465 /* If copying requires more than two move insns,
1466 copy addresses to registers (to make displacements shorter)
1467 and use post-increment if available. */
1468 if (!(data.autinc_from && data.autinc_to)
1469 && move_by_pieces_ninsns (len, align) > 2)
1471 /* Find the mode of the largest move... */
1472 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1473 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1474 if (GET_MODE_SIZE (tmode) < max_size)
1475 mode = tmode;
1477 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1479 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1480 data.autinc_from = 1;
1481 data.explicit_inc_from = -1;
1483 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1485 data.from_addr = copy_addr_to_reg (from_addr);
1486 data.autinc_from = 1;
1487 data.explicit_inc_from = 1;
1489 if (!data.autinc_from && CONSTANT_P (from_addr))
1490 data.from_addr = copy_addr_to_reg (from_addr);
1491 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1493 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1494 data.autinc_to = 1;
1495 data.explicit_inc_to = -1;
1497 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1499 data.to_addr = copy_addr_to_reg (to_addr);
1500 data.autinc_to = 1;
1501 data.explicit_inc_to = 1;
1503 if (!data.autinc_to && CONSTANT_P (to_addr))
1504 data.to_addr = copy_addr_to_reg (to_addr);
1507 if (! SLOW_UNALIGNED_ACCESS
1508 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1509 align = MOVE_MAX;
1511 /* First move what we can in the largest integer mode, then go to
1512 successively smaller modes. */
1514 while (max_size > 1)
1516 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1517 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1518 if (GET_MODE_SIZE (tmode) < max_size)
1519 mode = tmode;
1521 if (mode == VOIDmode)
1522 break;
1524 icode = mov_optab->handlers[(int) mode].insn_code;
1525 if (icode != CODE_FOR_nothing
1526 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1527 GET_MODE_SIZE (mode)))
1528 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1530 max_size = GET_MODE_SIZE (mode);
1533 /* The code above should have handled everything. */
1534 if (data.len > 0)
1535 abort ();
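/* [Illustrative sketch, not from expr.c] Stripped of modes, queues
   and auto-increment, move_by_pieces is "copy with the widest chunk
   the alignment allows, then mop up with smaller ones".  A standalone
   C analogue; the fixed chunk sizes and the memcpy stand-in for a
   move insn are assumptions of the sketch:  */

#include <string.h>

static void
copy_by_pieces (char *to, const char *from, size_t len, size_t align)
{
  size_t size;

  for (size = 8; size >= 1; size /= 2)  /* widest "mode" first */
    if (size <= align)                  /* usable at this alignment */
      while (len >= size)
        {
          memcpy (to, from, size);      /* stands in for one move insn */
          to += size, from += size, len -= size;
        }
}

int
main (void)
{
  char src[10] = "abcdefghi", dst[10];

  copy_by_pieces (dst, src, 10, 4);     /* two 4-byte moves, one 2-byte */
  return memcmp (dst, src, 10) != 0;
}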
1538 /* Return number of insns required to move L bytes by pieces.
1539 ALIGN (in bytes) is maximum alignment we can assume. */
1541 static int
1542 move_by_pieces_ninsns (l, align)
1543 unsigned int l;
1544 int align;
1546 register int n_insns = 0;
1547 int max_size = MOVE_MAX + 1;
1549 if (! SLOW_UNALIGNED_ACCESS
1550 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1551 align = MOVE_MAX;
1553 while (max_size > 1)
1555 enum machine_mode mode = VOIDmode, tmode;
1556 enum insn_code icode;
1558 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1559 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1560 if (GET_MODE_SIZE (tmode) < max_size)
1561 mode = tmode;
1563 if (mode == VOIDmode)
1564 break;
1566 icode = mov_optab->handlers[(int) mode].insn_code;
1567 if (icode != CODE_FOR_nothing
1568 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1569 GET_MODE_SIZE (mode)))
1570 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1572 max_size = GET_MODE_SIZE (mode);
1575 return n_insns;
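/* [Illustrative sketch, not from expr.c] The cost model above: each
   pass consumes l / size chunks of the current size and leaves the
   remainder for narrower modes.  MOVE_BY_PIECES_P compares the total
   against MOVE_RATIO to choose between inline moves and a movstr or
   libcall.  A standalone analogue with fixed power-of-two sizes:  */

#include <assert.h>

static unsigned int
ninsns_by_pieces (unsigned int l, unsigned int align)
{
  unsigned int n_insns = 0, size;

  for (size = 8; size >= 1; size /= 2)
    if (size <= align)
      n_insns += l / size, l %= size;

  return n_insns;
}

int
main (void)
{
  assert (ninsns_by_pieces (10, 4) == 3);  /* two SImode + one HImode */
  assert (ninsns_by_pieces (16, 8) == 2);  /* two DImode moves */
  return 0;
}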
1578 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1579 with move instructions for mode MODE. GENFUN is the gen_... function
1580 to make a move insn for that mode. DATA has all the other info. */
1582 static void
1583 move_by_pieces_1 (genfun, mode, data)
1584 rtx (*genfun) PROTO ((rtx, ...));
1585 enum machine_mode mode;
1586 struct move_by_pieces *data;
1588 register int size = GET_MODE_SIZE (mode);
1589 register rtx to1, from1;
1591 while (data->len >= size)
1593 if (data->reverse) data->offset -= size;
1595 to1 = (data->autinc_to
1596 ? gen_rtx_MEM (mode, data->to_addr)
1597 : copy_rtx (change_address (data->to, mode,
1598 plus_constant (data->to_addr,
1599 data->offset))));
1600 MEM_IN_STRUCT_P (to1) = data->to_struct;
1602 from1
1603 = (data->autinc_from
1604 ? gen_rtx_MEM (mode, data->from_addr)
1605 : copy_rtx (change_address (data->from, mode,
1606 plus_constant (data->from_addr,
1607 data->offset))));
1608 MEM_IN_STRUCT_P (from1) = data->from_struct;
1610 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1611 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1612 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1613 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1615 emit_insn ((*genfun) (to1, from1));
1616 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1617 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1618 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1619 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1621 if (! data->reverse) data->offset += size;
1623 data->len -= size;
1627 /* Emit code to move a block Y to a block X.
1628 This may be done with string-move instructions,
1629 with multiple scalar move instructions, or with a library call.
1631 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1632 with mode BLKmode.
1633 SIZE is an rtx that says how long they are.
1634 ALIGN is the maximum alignment we can assume they have,
1635 measured in bytes.
1637 Return the address of the new block, if memcpy is called and returns it,
1638 0 otherwise. */
1640 rtx
1641 emit_block_move (x, y, size, align)
1642 rtx x, y;
1643 rtx size;
1644 int align;
1646 rtx retval = 0;
1647 #ifdef TARGET_MEM_FUNCTIONS
1648 static tree fn;
1649 tree call_expr, arg_list;
1650 #endif
1652 if (GET_MODE (x) != BLKmode)
1653 abort ();
1655 if (GET_MODE (y) != BLKmode)
1656 abort ();
1658 x = protect_from_queue (x, 1);
1659 y = protect_from_queue (y, 0);
1660 size = protect_from_queue (size, 0);
1662 if (GET_CODE (x) != MEM)
1663 abort ();
1664 if (GET_CODE (y) != MEM)
1665 abort ();
1666 if (size == 0)
1667 abort ();
1669 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1670 move_by_pieces (x, y, INTVAL (size), align);
1671 else
1673 /* Try the most limited insn first, because there's no point
1674 including more than one in the machine description unless
1675 the more limited one has some advantage. */
1677 rtx opalign = GEN_INT (align);
1678 enum machine_mode mode;
1680 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1681 mode = GET_MODE_WIDER_MODE (mode))
1683 enum insn_code code = movstr_optab[(int) mode];
1685 if (code != CODE_FOR_nothing
1686 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1687 here because if SIZE is less than the mode mask, as it is
1688 returned by the macro, it will definitely be less than the
1689 actual mode mask. */
1690 && ((GET_CODE (size) == CONST_INT
1691 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1692 <= (GET_MODE_MASK (mode) >> 1)))
1693 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1694 && (insn_operand_predicate[(int) code][0] == 0
1695 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1696 && (insn_operand_predicate[(int) code][1] == 0
1697 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1698 && (insn_operand_predicate[(int) code][3] == 0
1699 || (*insn_operand_predicate[(int) code][3]) (opalign,
1700 VOIDmode)))
1702 rtx op2;
1703 rtx last = get_last_insn ();
1704 rtx pat;
1706 op2 = convert_to_mode (mode, size, 1);
1707 if (insn_operand_predicate[(int) code][2] != 0
1708 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1709 op2 = copy_to_mode_reg (mode, op2);
1711 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1712 if (pat)
1714 emit_insn (pat);
1715 return 0;
1717 else
1718 delete_insns_since (last);
1722 #ifdef TARGET_MEM_FUNCTIONS
1723 /* It is incorrect to use the libcall calling conventions to call
1724 memcpy in this context.
1726 This could be a user call to memcpy and the user may wish to
1727 examine the return value from memcpy.
1729 For targets where libcalls and normal calls have different conventions
1730 for returning pointers, we could end up generating incorrect code.
1732 So instead of using a libcall sequence we build up a suitable
1733 CALL_EXPR and expand the call in the normal fashion. */
1734 if (fn == NULL_TREE)
1736 tree fntype;
1738 /* This was copied from except.c, I don't know if all this is
1739 necessary in this context or not. */
1740 fn = get_identifier ("memcpy");
1741 push_obstacks_nochange ();
1742 end_temporary_allocation ();
1743 fntype = build_pointer_type (void_type_node);
1744 fntype = build_function_type (fntype, NULL_TREE);
1745 fn = build_decl (FUNCTION_DECL, fn, fntype);
1746 DECL_EXTERNAL (fn) = 1;
1747 TREE_PUBLIC (fn) = 1;
1748 DECL_ARTIFICIAL (fn) = 1;
1749 make_decl_rtl (fn, NULL_PTR, 1);
1750 assemble_external (fn);
1751 pop_obstacks ();
1754 /* We need to make an argument list for the function call.
1756 memcpy has three arguments, the first two are void * addresses and
1757 the last is a size_t byte count for the copy. */
1758 arg_list
1759 = build_tree_list (NULL_TREE,
1760 make_tree (build_pointer_type (void_type_node),
1761 XEXP (x, 0)));
1762 TREE_CHAIN (arg_list)
1763 = build_tree_list (NULL_TREE,
1764 make_tree (build_pointer_type (void_type_node),
1765 XEXP (y, 0)));
1766 TREE_CHAIN (TREE_CHAIN (arg_list))
1767 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1768 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1770 /* Now we have to build up the CALL_EXPR itself. */
1771 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1772 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1773 call_expr, arg_list, NULL_TREE);
1774 TREE_SIDE_EFFECTS (call_expr) = 1;
1776 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1777 #else
1778 emit_library_call (bcopy_libfunc, 0,
1779 VOIDmode, 3, XEXP (y, 0), Pmode,
1780 XEXP (x, 0), Pmode,
1781 convert_to_mode (TYPE_MODE (integer_type_node), size,
1782 TREE_UNSIGNED (integer_type_node)),
1783 TYPE_MODE (integer_type_node));
1784 #endif
1787 return retval;
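/* [Illustrative note, not from expr.c] Why emit_block_move builds a
   real CALL_EXPR for memcpy instead of using a libcall: the call may
   correspond to user code that inspects memcpy's return value, which
   is the destination pointer, and libcall conventions need not return
   pointers the same way.  A standalone demonstration of that return
   value:  */

#include <assert.h>
#include <string.h>

int
main (void)
{
  char a[4] = "abc", b[4];

  assert (memcpy (b, a, 4) == b);  /* memcpy returns DST */
  assert (b[2] == 'c');
  return 0;
}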
1790 /* Copy all or part of a value X into registers starting at REGNO.
1791 The number of registers to be filled is NREGS. */
1793 void
1794 move_block_to_reg (regno, x, nregs, mode)
1795 int regno;
1796 rtx x;
1797 int nregs;
1798 enum machine_mode mode;
1800 int i;
1801 #ifdef HAVE_load_multiple
1802 rtx pat;
1803 rtx last;
1804 #endif
1806 if (nregs == 0)
1807 return;
1809 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1810 x = validize_mem (force_const_mem (mode, x));
1812 /* See if the machine can do this with a load multiple insn. */
1813 #ifdef HAVE_load_multiple
1814 if (HAVE_load_multiple)
1816 last = get_last_insn ();
1817 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1818 GEN_INT (nregs));
1819 if (pat)
1821 emit_insn (pat);
1822 return;
1824 else
1825 delete_insns_since (last);
1827 #endif
1829 for (i = 0; i < nregs; i++)
1830 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1831 operand_subword_force (x, i, mode));
1834 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1835 The number of registers to be filled is NREGS. SIZE indicates the number
1836 of bytes in the object X. */
1839 void
1840 move_block_from_reg (regno, x, nregs, size)
1841 int regno;
1842 rtx x;
1843 int nregs;
1844 int size;
1846 int i;
1847 #ifdef HAVE_store_multiple
1848 rtx pat;
1849 rtx last;
1850 #endif
1851 enum machine_mode mode;
1853 /* If SIZE is that of a mode no bigger than a word, just use that
1854 mode's store operation. */
1855 if (size <= UNITS_PER_WORD
1856 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1858 emit_move_insn (change_address (x, mode, NULL),
1859 gen_rtx_REG (mode, regno));
1860 return;
1863 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1864 to the left before storing to memory. Note that the previous test
1865 doesn't handle all cases (e.g. SIZE == 3). */
1866 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1868 rtx tem = operand_subword (x, 0, 1, BLKmode);
1869 rtx shift;
1871 if (tem == 0)
1872 abort ();
1874 shift = expand_shift (LSHIFT_EXPR, word_mode,
1875 gen_rtx_REG (word_mode, regno),
1876 build_int_2 ((UNITS_PER_WORD - size)
1877 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1878 emit_move_insn (tem, shift);
1879 return;
1882 /* See if the machine can do this with a store multiple insn. */
1883 #ifdef HAVE_store_multiple
1884 if (HAVE_store_multiple)
1886 last = get_last_insn ();
1887 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1888 GEN_INT (nregs));
1889 if (pat)
1891 emit_insn (pat);
1892 return;
1894 else
1895 delete_insns_since (last);
1897 #endif
1899 for (i = 0; i < nregs; i++)
1901 rtx tem = operand_subword (x, i, 1, BLKmode);
1903 if (tem == 0)
1904 abort ();
1906 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
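/* [Illustrative sketch, not from expr.c] The BYTES_BIG_ENDIAN case
   above: when a SIZE-byte value is stored through a full word, a
   big-endian machine keeps the word's most significant bytes at the
   lowest addresses, so the value must first be shifted left by
   (UNITS_PER_WORD - SIZE) * BITS_PER_UNIT.  A standalone rendering
   with an assumed 4-byte word:  */

#include <assert.h>
#include <stdint.h>

#define WORD_BYTES 4
#define BITS_PER_UNIT 8

static uint32_t
left_justify (uint32_t value, int size)
{
  return value << (WORD_BYTES - size) * BITS_PER_UNIT;
}

int
main (void)
{
  /* A 3-byte value: 0x123456 becomes 0x12345600, so the bytes
     12 34 56 land first in memory on a big-endian target.  */
  assert (left_justify (0x123456, 3) == 0x12345600);
  return 0;
}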
1910 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1911 registers represented by a PARALLEL. SSIZE represents the total size of
1912 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1913 SRC in bits. */
1914 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1915 the balance will be in what would be the low-order memory addresses, i.e.
1916 left justified for big endian, right justified for little endian. This
1917 happens to be true for the targets currently using this support. If this
1918 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1919 would be needed. */
1921 void
1922 emit_group_load (dst, orig_src, ssize, align)
1923 rtx dst, orig_src;
1924 int align, ssize;
1926 rtx *tmps, src;
1927 int start, i;
1929 if (GET_CODE (dst) != PARALLEL)
1930 abort ();
1932 /* Check for a NULL entry, used to indicate that the parameter goes
1933 both on the stack and in registers. */
1934 if (XEXP (XVECEXP (dst, 0, 0), 0))
1935 start = 0;
1936 else
1937 start = 1;
1939 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1941 /* If we won't be loading directly from memory, protect the real source
1942 from strange tricks we might play. */
1943 src = orig_src;
1944 if (GET_CODE (src) != MEM)
1946 src = gen_reg_rtx (GET_MODE (orig_src));
1947 emit_move_insn (src, orig_src);
1950 /* Process the pieces. */
1951 for (i = start; i < XVECLEN (dst, 0); i++)
1953 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1954 int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1955 int bytelen = GET_MODE_SIZE (mode);
1956 int shift = 0;
1958 /* Handle trailing fragments that run over the size of the struct. */
1959 if (ssize >= 0 && bytepos + bytelen > ssize)
1961 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1962 bytelen = ssize - bytepos;
1963 if (bytelen <= 0)
1964 abort();
1967 /* Optimize the access just a bit. */
1968 if (GET_CODE (src) == MEM
1969 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1970 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1971 && bytelen == GET_MODE_SIZE (mode))
1973 tmps[i] = gen_reg_rtx (mode);
1974 emit_move_insn (tmps[i],
1975 change_address (src, mode,
1976 plus_constant (XEXP (src, 0),
1977 bytepos)));
1979 else
1981 tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
1982 bytepos*BITS_PER_UNIT, 1, NULL_RTX,
1983 mode, mode, align, ssize);
1986 if (BYTES_BIG_ENDIAN && shift)
1988 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1989 tmps[i], 0, OPTAB_WIDEN);
1992 emit_queue();
1994 /* Copy the extracted pieces into the proper (probable) hard regs. */
1995 for (i = start; i < XVECLEN (dst, 0); i++)
1996 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
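/* [Illustrative sketch, not from expr.c] The trailing-fragment test
   above: a piece whose mode runs past the end of the struct only
   carries ssize - bytepos real bytes, and on a big-endian target the
   loaded bits must be shifted up by the missing span.  A standalone
   rendering of the shift computation:  */

#include <assert.h>

static int
fragment_shift (int bytepos, int bytelen, int ssize)
{
  if (ssize >= 0 && bytepos + bytelen > ssize)
    return (bytelen - (ssize - bytepos)) * 8;  /* BITS_PER_UNIT == 8 */
  return 0;
}

int
main (void)
{
  /* A 6-byte struct read as two 4-byte pieces: the second piece has
     only two real bytes, so it is shifted up by 16 bits.  */
  assert (fragment_shift (4, 4, 6) == 16);
  assert (fragment_shift (0, 4, 6) == 0);  /* fully inside: no shift */
  return 0;
}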
1999 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2000 registers represented by a PARALLEL. SSIZE represents the total size of
2001 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
2003 void
2004 emit_group_store (orig_dst, src, ssize, align)
2005 rtx orig_dst, src;
2006 int ssize, align;
2008 rtx *tmps, dst;
2009 int start, i;
2011 if (GET_CODE (src) != PARALLEL)
2012 abort ();
2014 /* Check for a NULL entry, used to indicate that the parameter goes
2015 both on the stack and in registers. */
2016 if (XEXP (XVECEXP (src, 0, 0), 0))
2017 start = 0;
2018 else
2019 start = 1;
2021 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
2023 /* Copy the (probable) hard regs into pseudos. */
2024 for (i = start; i < XVECLEN (src, 0); i++)
2026 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2027 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2028 emit_move_insn (tmps[i], reg);
2030 emit_queue();
2032 /* If we won't be storing directly into memory, protect the real destination
2033 from strange tricks we might play. */
2034 dst = orig_dst;
2035 if (GET_CODE (dst) == PARALLEL)
2037 rtx temp;
2039 /* We can get a PARALLEL dst if there is a conditional expression in
2040 a return statement. In that case, the dst and src are the same,
2041 so no action is necessary. */
2042 if (rtx_equal_p (dst, src))
2043 return;
2045 /* It is unclear if we can ever reach here, but we may as well handle
2046 it. Allocate a temporary, and split this into a store/load to/from
2047 the temporary. */
2049 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2050 emit_group_store (temp, src, ssize, align);
2051 emit_group_load (dst, temp, ssize, align);
2052 return;
2054 else if (GET_CODE (dst) != MEM)
2056 dst = gen_reg_rtx (GET_MODE (orig_dst));
2057 /* Make life a bit easier for combine. */
2058 emit_move_insn (dst, const0_rtx);
2060 else if (! MEM_IN_STRUCT_P (dst))
2062 /* store_bit_field requires that memory operations have
2063 mem_in_struct_p set; we might not. */
2065 dst = copy_rtx (orig_dst);
2066 MEM_SET_IN_STRUCT_P (dst, 1);
2069 /* Process the pieces. */
2070 for (i = start; i < XVECLEN (src, 0); i++)
2072 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2073 enum machine_mode mode = GET_MODE (tmps[i]);
2074 int bytelen = GET_MODE_SIZE (mode);
2076 /* Handle trailing fragments that run over the size of the struct. */
2077 if (ssize >= 0 && bytepos + bytelen > ssize)
2079 if (BYTES_BIG_ENDIAN)
2081 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2082 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2083 tmps[i], 0, OPTAB_WIDEN);
2085 bytelen = ssize - bytepos;
2088 /* Optimize the access just a bit. */
2089 if (GET_CODE (dst) == MEM
2090 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2091 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2092 && bytelen == GET_MODE_SIZE (mode))
2094 emit_move_insn (change_address (dst, mode,
2095 plus_constant (XEXP (dst, 0),
2096 bytepos)),
2097 tmps[i]);
2099 else
2101 store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT,
2102 mode, tmps[i], align, ssize);
2105 emit_queue();
2107 /* Copy from the pseudo into the (probable) hard reg. */
2108 if (GET_CODE (dst) == REG)
2109 emit_move_insn (orig_dst, dst);
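/* Worked example (editorial, not in the original): with SSIZE == 12 and a
   DImode piece at BYTEPOS == 8, BYTELEN starts at 8 and overruns the block
   by 4 bytes, so BYTELEN becomes 12 - 8 == 4 and the shift is
   (8 - 4) * BITS_PER_UNIT == 32 bits.  emit_group_load shifts the piece
   left by 32 on big-endian targets after extraction; emit_group_store
   shifts it right by 32 before storing.  Either way the 4 meaningful bytes
   end up in the low-order memory addresses, matching the assumption spelled
   out above emit_group_load.  */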
2112 /* Generate code to copy a BLKmode object of TYPE out of a
2113 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2114 is null, a stack temporary is created. TGTBLK is returned.
2116 The primary purpose of this routine is to handle functions
2117 that return BLKmode structures in registers. Some machines
2118 (the PA for example) want to return all small structures
2119 in registers regardless of the structure's alignment.
2122 rtx
2123 copy_blkmode_from_reg (tgtblk, srcreg, type)
2124 rtx tgtblk;
2125 rtx srcreg;
2126 tree type;
2128 int bytes = int_size_in_bytes (type);
2129 rtx src = NULL, dst = NULL;
2130 int bitsize = MIN (TYPE_ALIGN (type), (unsigned int) BITS_PER_WORD);
2131 int bitpos, xbitpos, big_endian_correction = 0;
2133 if (tgtblk == 0)
2135 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2136 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2137 preserve_temp_slots (tgtblk);
2140 /* This code assumes srcreg is at least a full word. If it isn't,
2141 copy it into a new pseudo which is a full word. */
2142 if (GET_MODE (srcreg) != BLKmode
2143 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2144 srcreg = convert_to_mode (word_mode, srcreg,
2145 TREE_UNSIGNED (type));
2147 /* Structures whose size is not a multiple of a word are aligned
2148 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2149 machine, this means we must skip the empty high order bytes when
2150 calculating the bit offset. */
2151 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2152 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2153 * BITS_PER_UNIT));
2155 /* Copy the structure BITSIZE bits at a time.
2157 We could probably emit more efficient code for machines
2158 which do not use strict alignment, but it doesn't seem
2159 worth the effort at the current time. */
2160 for (bitpos = 0, xbitpos = big_endian_correction;
2161 bitpos < bytes * BITS_PER_UNIT;
2162 bitpos += bitsize, xbitpos += bitsize)
2165 /* We need a new source operand each time xbitpos is on a
2166 word boundary and when xbitpos == big_endian_correction
2167 (the first time through). */
2168 if (xbitpos % BITS_PER_WORD == 0
2169 || xbitpos == big_endian_correction)
2170 src = operand_subword_force (srcreg,
2171 xbitpos / BITS_PER_WORD,
2172 BLKmode);
2174 /* We need a new destination operand each time bitpos is on
2175 a word boundary. */
2176 if (bitpos % BITS_PER_WORD == 0)
2177 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2179 /* Use xbitpos for the source extraction (right justified) and
2180 bitpos for the destination store (left justified). */
2181 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2182 extract_bit_field (src, bitsize,
2183 xbitpos % BITS_PER_WORD, 1,
2184 NULL_RTX, word_mode,
2185 word_mode,
2186 bitsize / BITS_PER_UNIT,
2187 BITS_PER_WORD),
2188 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2190 return tgtblk;
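/* Worked example (editorial): on a 32-bit big-endian target, a 6-byte
   structure gives bytes % UNITS_PER_WORD == 2, so big_endian_correction
   == BITS_PER_WORD - 2 * BITS_PER_UNIT == 16.  The first source
   extraction therefore begins 16 bits into the register word, skipping
   the two empty high-order bytes, so the six meaningful bytes land at
   offsets 0..5 of the destination while bitpos still begins at 0.  */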
2194 /* Add a USE expression for REG to the (possibly empty) list pointed
2195 to by CALL_FUSAGE. REG must denote a hard register. */
2197 void
2198 use_reg (call_fusage, reg)
2199 rtx *call_fusage, reg;
2201 if (GET_CODE (reg) != REG
2202 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2203 abort();
2205 *call_fusage
2206 = gen_rtx_EXPR_LIST (VOIDmode,
2207 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2210 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2211 starting at REGNO. All of these registers must be hard registers. */
2213 void
2214 use_regs (call_fusage, regno, nregs)
2215 rtx *call_fusage;
2216 int regno;
2217 int nregs;
2219 int i;
2221 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2222 abort ();
2224 for (i = 0; i < nregs; i++)
2225 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2228 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2229 PARALLEL REGS. This is for calls that pass values in multiple
2230 non-contiguous locations. The Irix 6 ABI has examples of this. */
2232 void
2233 use_group_regs (call_fusage, regs)
2234 rtx *call_fusage;
2235 rtx regs;
2237 int i;
2239 for (i = 0; i < XVECLEN (regs, 0); i++)
2241 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2243 /* A NULL entry means the parameter goes both on the stack and in
2244 registers. This can also be a MEM for targets that pass values
2245 partially on the stack and partially in registers. */
2246 if (reg != 0 && GET_CODE (reg) == REG)
2247 use_reg (call_fusage, reg);
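/* Usage sketch (assumed, not from the original): how a call emitter
   typically accumulates these USE expressions.  Register numbers 4 and 5
   stand in for a target's first two argument registers.  */

static void
example_record_arg_regs (call_insn)
     rtx call_insn;
{
  rtx fusage = NULL_RTX;

  use_regs (&fusage, 4, 2);

  /* The accumulated EXPR_LIST of USEs is hung off the CALL_INSN so
     flow analysis knows the call reads those registers.  */
  CALL_INSN_FUNCTION_USAGE (call_insn) = fusage;
}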
2251 /* Generate several move instructions to clear LEN bytes of block TO.
2252 (A MEM rtx with BLKmode). The caller must pass TO through
2253 protect_from_queue before calling. ALIGN (in bytes) is the maximum alignment
2254 we can assume. */
2256 static void
2257 clear_by_pieces (to, len, align)
2258 rtx to;
2259 int len, align;
2261 struct clear_by_pieces data;
2262 rtx to_addr = XEXP (to, 0);
2263 int max_size = MOVE_MAX_PIECES + 1;
2264 enum machine_mode mode = VOIDmode, tmode;
2265 enum insn_code icode;
2267 data.offset = 0;
2268 data.to_addr = to_addr;
2269 data.to = to;
2270 data.autinc_to
2271 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2272 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2274 data.explicit_inc_to = 0;
2275 data.reverse
2276 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2277 if (data.reverse) data.offset = len;
2278 data.len = len;
2280 data.to_struct = MEM_IN_STRUCT_P (to);
2282 /* If copying requires more than two move insns,
2283 copy addresses to registers (to make displacements shorter)
2284 and use post-increment if available. */
2285 if (!data.autinc_to
2286 && move_by_pieces_ninsns (len, align) > 2)
2288 /* Determine the main mode we'll be using. */
2289 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2290 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2291 if (GET_MODE_SIZE (tmode) < max_size)
2292 mode = tmode;
2294 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2296 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2297 data.autinc_to = 1;
2298 data.explicit_inc_to = -1;
2300 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2302 data.to_addr = copy_addr_to_reg (to_addr);
2303 data.autinc_to = 1;
2304 data.explicit_inc_to = 1;
2306 if (!data.autinc_to && CONSTANT_P (to_addr))
2307 data.to_addr = copy_addr_to_reg (to_addr);
2310 if (! SLOW_UNALIGNED_ACCESS
2311 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2312 align = MOVE_MAX;
2314 /* First move what we can in the largest integer mode, then go to
2315 successively smaller modes. */
2317 while (max_size > 1)
2319 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2320 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2321 if (GET_MODE_SIZE (tmode) < max_size)
2322 mode = tmode;
2324 if (mode == VOIDmode)
2325 break;
2327 icode = mov_optab->handlers[(int) mode].insn_code;
2328 if (icode != CODE_FOR_nothing
2329 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2330 GET_MODE_SIZE (mode)))
2331 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2333 max_size = GET_MODE_SIZE (mode);
2336 /* The code above should have handled everything. */
2337 if (data.len != 0)
2338 abort ();
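/* Worked example (editorial): on a 64-bit target with MOVE_MAX_PIECES
   == 8, max_size starts at 9, so the scan of the MODE_INT class settles
   on DImode (size 8 < 9).  Once every DImode store is emitted, max_size
   drops to GET_MODE_SIZE (DImode) == 8 and the next pass settles on
   SImode, then HImode, then QImode, clearing any 4-, 2- and 1-byte
   remainder until data.len reaches zero.  Assuming sufficient alignment,
   clearing 15 bytes thus takes one DImode, one SImode, one HImode and
   one QImode store.  */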
2341 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2342 with move instructions for mode MODE. GENFUN is the gen_... function
2343 to make a move insn for that mode. DATA has all the other info. */
2345 static void
2346 clear_by_pieces_1 (genfun, mode, data)
2347 rtx (*genfun) PROTO ((rtx, ...));
2348 enum machine_mode mode;
2349 struct clear_by_pieces *data;
2351 register int size = GET_MODE_SIZE (mode);
2352 register rtx to1;
2354 while (data->len >= size)
2356 if (data->reverse) data->offset -= size;
2358 to1 = (data->autinc_to
2359 ? gen_rtx_MEM (mode, data->to_addr)
2360 : copy_rtx (change_address (data->to, mode,
2361 plus_constant (data->to_addr,
2362 data->offset))));
2363 MEM_IN_STRUCT_P (to1) = data->to_struct;
2365 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2366 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2368 emit_insn ((*genfun) (to1, const0_rtx));
2369 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2370 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2372 if (! data->reverse) data->offset += size;
2374 data->len -= size;
2378 /* Write zeros through the storage of OBJECT.
2379 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2380 the maximum alignment we can assume it has, measured in bytes.
2382 If we call a function that returns the length of the block, return it. */
2384 rtx
2385 clear_storage (object, size, align)
2386 rtx object;
2387 rtx size;
2388 int align;
2390 #ifdef TARGET_MEM_FUNCTIONS
2391 static tree fn;
2392 tree call_expr, arg_list;
2393 #endif
2394 rtx retval = 0;
2396 if (GET_MODE (object) == BLKmode)
2398 object = protect_from_queue (object, 1);
2399 size = protect_from_queue (size, 0);
2401 if (GET_CODE (size) == CONST_INT
2402 && MOVE_BY_PIECES_P (INTVAL (size), align))
2403 clear_by_pieces (object, INTVAL (size), align);
2405 else
2407 /* Try the most limited insn first, because there's no point
2408 including more than one in the machine description unless
2409 the more limited one has some advantage. */
2411 rtx opalign = GEN_INT (align);
2412 enum machine_mode mode;
2414 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2415 mode = GET_MODE_WIDER_MODE (mode))
2417 enum insn_code code = clrstr_optab[(int) mode];
2419 if (code != CODE_FOR_nothing
2420 /* We don't need MODE to be narrower than
2421 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2422 the mode mask, as it is returned by the macro, it will
2423 definitely be less than the actual mode mask. */
2424 && ((GET_CODE (size) == CONST_INT
2425 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2426 <= (GET_MODE_MASK (mode) >> 1)))
2427 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2428 && (insn_operand_predicate[(int) code][0] == 0
2429 || (*insn_operand_predicate[(int) code][0]) (object,
2430 BLKmode))
2431 && (insn_operand_predicate[(int) code][2] == 0
2432 || (*insn_operand_predicate[(int) code][2]) (opalign,
2433 VOIDmode)))
2435 rtx op1;
2436 rtx last = get_last_insn ();
2437 rtx pat;
2439 op1 = convert_to_mode (mode, size, 1);
2440 if (insn_operand_predicate[(int) code][1] != 0
2441 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2442 mode))
2443 op1 = copy_to_mode_reg (mode, op1);
2445 pat = GEN_FCN ((int) code) (object, op1, opalign);
2446 if (pat)
2448 emit_insn (pat);
2449 return 0;
2451 else
2452 delete_insns_since (last);
2457 #ifdef TARGET_MEM_FUNCTIONS
2458 /* It is incorrect to use the libcall calling conventions to call
2459 memset in this context.
2461 This could be a user call to memset and the user may wish to
2462 examine the return value from memset.
2464 For targets where libcalls and normal calls have different conventions
2465 for returning pointers, we could end up generating incorrect code.
2467 So instead of using a libcall sequence we build up a suitable
2468 CALL_EXPR and expand the call in the normal fashion. */
2469 if (fn == NULL_TREE)
2471 tree fntype;
2473 /* This was copied from except.c; I don't know if all this is
2474 necessary in this context or not. */
2475 fn = get_identifier ("memset");
2476 push_obstacks_nochange ();
2477 end_temporary_allocation ();
2478 fntype = build_pointer_type (void_type_node);
2479 fntype = build_function_type (fntype, NULL_TREE);
2480 fn = build_decl (FUNCTION_DECL, fn, fntype);
2481 DECL_EXTERNAL (fn) = 1;
2482 TREE_PUBLIC (fn) = 1;
2483 DECL_ARTIFICIAL (fn) = 1;
2484 make_decl_rtl (fn, NULL_PTR, 1);
2485 assemble_external (fn);
2486 pop_obstacks ();
2489 /* We need to make an argument list for the function call.
2491 memset has three arguments: the first is a void * address, the
2492 second an integer with the initialization value, the last is a size_t
2493 byte count for the copy. */
2494 arg_list
2495 = build_tree_list (NULL_TREE,
2496 make_tree (build_pointer_type (void_type_node),
2497 XEXP (object, 0)));
2498 TREE_CHAIN (arg_list)
2499 = build_tree_list (NULL_TREE,
2500 make_tree (integer_type_node, const0_rtx));
2501 TREE_CHAIN (TREE_CHAIN (arg_list))
2502 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2503 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2505 /* Now we have to build up the CALL_EXPR itself. */
2506 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2507 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2508 call_expr, arg_list, NULL_TREE);
2509 TREE_SIDE_EFFECTS (call_expr) = 1;
2511 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2512 #else
2513 emit_library_call (bzero_libfunc, 0,
2514 VOIDmode, 2,
2515 XEXP (object, 0), Pmode,
2516 convert_to_mode
2517 (TYPE_MODE (integer_type_node), size,
2518 TREE_UNSIGNED (integer_type_node)),
2519 TYPE_MODE (integer_type_node));
2520 #endif
2523 else
2524 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2526 return retval;
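/* Usage sketch (assumed, not from the original): zeroing a 64-byte
   BLKmode block at ADDR, promising word alignment.  For small constant
   sizes this ends up in clear_by_pieces; otherwise a clrstr pattern or
   a memset/bzero call is emitted, as above.  */

static void
example_clear_block (addr)
     rtx addr;
{
  rtx mem = gen_rtx_MEM (BLKmode, addr);

  MEM_SET_IN_STRUCT_P (mem, 1);
  clear_storage (mem, GEN_INT (64), UNITS_PER_WORD);
}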
2529 /* Generate code to copy Y into X.
2530 Both Y and X must have the same mode, except that
2531 Y can be a constant with VOIDmode.
2532 This mode cannot be BLKmode; use emit_block_move for that.
2534 Return the last instruction emitted. */
2536 rtx
2537 emit_move_insn (x, y)
2538 rtx x, y;
2540 enum machine_mode mode = GET_MODE (x);
2542 x = protect_from_queue (x, 1);
2543 y = protect_from_queue (y, 0);
2545 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2546 abort ();
2548 /* Never force constant_p_rtx to memory. */
2549 if (GET_CODE (y) == CONSTANT_P_RTX)
2551 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2552 y = force_const_mem (mode, y);
2554 /* If X or Y are memory references, verify that their addresses are valid
2555 for the machine. */
2556 if (GET_CODE (x) == MEM
2557 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2558 && ! push_operand (x, GET_MODE (x)))
2559 || (flag_force_addr
2560 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2561 x = change_address (x, VOIDmode, XEXP (x, 0));
2563 if (GET_CODE (y) == MEM
2564 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2565 || (flag_force_addr
2566 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2567 y = change_address (y, VOIDmode, XEXP (y, 0));
2569 if (mode == BLKmode)
2570 abort ();
2572 return emit_move_insn_1 (x, y);
2575 /* Low level part of emit_move_insn.
2576 Called just like emit_move_insn, but assumes X and Y
2577 are basically valid. */
2579 rtx
2580 emit_move_insn_1 (x, y)
2581 rtx x, y;
2583 enum machine_mode mode = GET_MODE (x);
2584 enum machine_mode submode;
2585 enum mode_class class = GET_MODE_CLASS (mode);
2586 int i;
2588 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2589 return
2590 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2592 /* Expand complex moves by moving real part and imag part, if possible. */
2593 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2594 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2595 * BITS_PER_UNIT),
2596 (class == MODE_COMPLEX_INT
2597 ? MODE_INT : MODE_FLOAT),
2599 && (mov_optab->handlers[(int) submode].insn_code
2600 != CODE_FOR_nothing))
2602 /* Don't split destination if it is a stack push. */
2603 int stack = push_operand (x, GET_MODE (x));
2605 /* If this is a stack, push the highpart first, so it
2606 will be in the argument order.
2608 In that case, change_address is used only to convert
2609 the mode, not to change the address. */
2610 if (stack)
2612 /* Note that the real part always precedes the imag part in memory
2613 regardless of machine's endianness. */
2614 #ifdef STACK_GROWS_DOWNWARD
2615 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2616 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2617 gen_imagpart (submode, y)));
2618 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2619 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2620 gen_realpart (submode, y)));
2621 #else
2622 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2623 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2624 gen_realpart (submode, y)));
2625 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2626 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2627 gen_imagpart (submode, y)));
2628 #endif
2630 else
2632 /* Show the output dies here. This is necessary for pseudos;
2633 hard regs shouldn't appear here except as return values.
2634 We never want to emit such a clobber after reload. */
2635 if (x != y
2636 && ! (reload_in_progress || reload_completed))
2638 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2641 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2642 (gen_realpart (submode, x), gen_realpart (submode, y)));
2643 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2644 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2647 return get_last_insn ();
2650 /* This will handle any multi-word mode that lacks a move_insn pattern.
2651 However, you will get better code if you define such patterns,
2652 even if they must turn into multiple assembler instructions. */
2653 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2655 rtx last_insn = 0;
2657 #ifdef PUSH_ROUNDING
2659 /* If X is a push on the stack, do the push now and replace
2660 X with a reference to the stack pointer. */
2661 if (push_operand (x, GET_MODE (x)))
2663 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2664 x = change_address (x, VOIDmode, stack_pointer_rtx);
2666 #endif
2668 /* Show the output dies here. This is necessary for pseudos;
2669 hard regs shouldn't appear here except as return values.
2670 We never want to emit such a clobber after reload. */
2671 if (x != y
2672 && ! (reload_in_progress || reload_completed))
2674 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2677 for (i = 0;
2678 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2679 i++)
2681 rtx xpart = operand_subword (x, i, 1, mode);
2682 rtx ypart = operand_subword (y, i, 1, mode);
2684 /* If we can't get a part of Y, put Y into memory if it is a
2685 constant. Otherwise, force it into a register. If we still
2686 can't get a part of Y, abort. */
2687 if (ypart == 0 && CONSTANT_P (y))
2689 y = force_const_mem (mode, y);
2690 ypart = operand_subword (y, i, 1, mode);
2692 else if (ypart == 0)
2693 ypart = operand_subword_force (y, i, mode);
2695 if (xpart == 0 || ypart == 0)
2696 abort ();
2698 last_insn = emit_move_insn (xpart, ypart);
2701 return last_insn;
2703 else
2704 abort ();
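/* Worked example (editorial): moving a 16-byte TImode value on a 32-bit
   target with no movti pattern reaches the word loop above: a CLOBBER of
   X is emitted first (before reload) so the old contents of X are seen
   to die, and then four word_mode moves are emitted via operand_subword
   for i == 0..3.  */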
2707 /* Pushing data onto the stack. */
2709 /* Push a block of length SIZE (perhaps variable)
2710 and return an rtx to address the beginning of the block.
2711 Note that it is not possible for the value returned to be a QUEUED.
2712 The value may be virtual_outgoing_args_rtx.
2714 EXTRA is the number of bytes of padding to push in addition to SIZE.
2715 BELOW nonzero means this padding comes at low addresses;
2716 otherwise, the padding comes at high addresses. */
2718 rtx
2719 push_block (size, extra, below)
2720 rtx size;
2721 int extra, below;
2723 register rtx temp;
2725 size = convert_modes (Pmode, ptr_mode, size, 1);
2726 if (CONSTANT_P (size))
2727 anti_adjust_stack (plus_constant (size, extra));
2728 else if (GET_CODE (size) == REG && extra == 0)
2729 anti_adjust_stack (size);
2730 else
2732 rtx temp = copy_to_mode_reg (Pmode, size);
2733 if (extra != 0)
2734 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2735 temp, 0, OPTAB_LIB_WIDEN);
2736 anti_adjust_stack (temp);
2739 #if defined (STACK_GROWS_DOWNWARD) \
2740 || (defined (ARGS_GROW_DOWNWARD) \
2741 && !defined (ACCUMULATE_OUTGOING_ARGS))
2743 /* Return the lowest stack address when STACK or ARGS grow downward and
2744 we are not accumulating outgoing arguments (the c4x port uses such
2745 conventions). */
2746 temp = virtual_outgoing_args_rtx;
2747 if (extra != 0 && below)
2748 temp = plus_constant (temp, extra);
2749 #else
2750 if (GET_CODE (size) == CONST_INT)
2751 temp = plus_constant (virtual_outgoing_args_rtx,
2752 - INTVAL (size) - (below ? 0 : extra));
2753 else if (extra != 0 && !below)
2754 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2755 negate_rtx (Pmode, plus_constant (size, extra)));
2756 else
2757 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2758 negate_rtx (Pmode, size));
2759 #endif
2761 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
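/* Worked example (editorial): with STACK_GROWS_DOWNWARD, a constant SIZE
   of 32, EXTRA == 8 and BELOW nonzero, the code anti-adjusts the stack
   by 40 bytes and returns virtual_outgoing_args_rtx + 8: the 8 bytes of
   padding sit at the low addresses and the 32-byte block begins just
   above them.  */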
2764 rtx
2765 gen_push_operand ()
2767 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2770 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2771 block of SIZE bytes. */
2773 static rtx
2774 get_push_address (size)
2775 int size;
2777 register rtx temp;
2779 if (STACK_PUSH_CODE == POST_DEC)
2780 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2781 else if (STACK_PUSH_CODE == POST_INC)
2782 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2783 else
2784 temp = stack_pointer_rtx;
2786 return copy_to_reg (temp);
2789 /* Generate code to push X onto the stack, assuming it has mode MODE and
2790 type TYPE.
2791 MODE is redundant except when X is a CONST_INT (since they don't
2792 carry mode info).
2793 SIZE is an rtx for the size of data to be copied (in bytes),
2794 needed only if X is BLKmode.
2796 ALIGN (in bytes) is maximum alignment we can assume.
2798 If PARTIAL and REG are both nonzero, then copy that many of the first
2799 words of X into registers starting with REG, and push the rest of X.
2800 The amount of space pushed is decreased by PARTIAL words,
2801 rounded *down* to a multiple of PARM_BOUNDARY.
2802 REG must be a hard register in this case.
2803 If REG is zero but PARTIAL is not, take all other actions for an
2804 argument partially in registers, but do not actually load any
2805 registers.
2807 EXTRA is the amount in bytes of extra space to leave next to this arg.
2808 This is ignored if an argument block has already been allocated.
2810 On a machine that lacks real push insns, ARGS_ADDR is the address of
2811 the bottom of the argument block for this call. We use indexing off there
2812 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2813 argument block has not been preallocated.
2815 ARGS_SO_FAR is the size of args previously pushed for this call.
2817 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2818 for arguments passed in registers. If nonzero, it will be the number
2819 of bytes required. */
2821 void
2822 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2823 args_addr, args_so_far, reg_parm_stack_space)
2824 register rtx x;
2825 enum machine_mode mode;
2826 tree type;
2827 rtx size;
2828 int align;
2829 int partial;
2830 rtx reg;
2831 int extra;
2832 rtx args_addr;
2833 rtx args_so_far;
2834 int reg_parm_stack_space;
2836 rtx xinner;
2837 enum direction stack_direction
2838 #ifdef STACK_GROWS_DOWNWARD
2839 = downward;
2840 #else
2841 = upward;
2842 #endif
2844 /* Decide where to pad the argument: `downward' for below,
2845 `upward' for above, or `none' for don't pad it.
2846 Default is below for small data on big-endian machines; else above. */
2847 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2849 /* Invert direction if stack is post-update. */
2850 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2851 if (where_pad != none)
2852 where_pad = (where_pad == downward ? upward : downward);
2854 xinner = x = protect_from_queue (x, 0);
2856 if (mode == BLKmode)
2858 /* Copy a block into the stack, entirely or partially. */
2860 register rtx temp;
2861 int used = partial * UNITS_PER_WORD;
2862 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2863 int skip;
2865 if (size == 0)
2866 abort ();
2868 used -= offset;
2870 /* USED is now the # of bytes we need not copy to the stack
2871 because registers will take care of them. */
2873 if (partial != 0)
2874 xinner = change_address (xinner, BLKmode,
2875 plus_constant (XEXP (xinner, 0), used));
2877 /* If the partial register-part of the arg counts in its stack size,
2878 skip the part of stack space corresponding to the registers.
2879 Otherwise, start copying to the beginning of the stack space,
2880 by setting SKIP to 0. */
2881 skip = (reg_parm_stack_space == 0) ? 0 : used;
2883 #ifdef PUSH_ROUNDING
2884 /* Do it with several push insns if that doesn't take lots of insns
2885 and if there is no difficulty with push insns that skip bytes
2886 on the stack for alignment purposes. */
2887 if (args_addr == 0
2888 && GET_CODE (size) == CONST_INT
2889 && skip == 0
2890 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align)))
2891 /* Here we avoid the case of a structure whose weak alignment
2892 forces many pushes of a small amount of data,
2893 and such small pushes do rounding that causes trouble. */
2894 && ((! SLOW_UNALIGNED_ACCESS)
2895 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2896 || PUSH_ROUNDING (align) == align)
2897 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2899 /* Push padding now if padding above and stack grows down,
2900 or if padding below and stack grows up.
2901 But if space already allocated, this has already been done. */
2902 if (extra && args_addr == 0
2903 && where_pad != none && where_pad != stack_direction)
2904 anti_adjust_stack (GEN_INT (extra));
2906 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2907 INTVAL (size) - used, align);
2909 if (current_function_check_memory_usage && ! in_check_memory_usage)
2911 rtx temp;
2913 in_check_memory_usage = 1;
2914 temp = get_push_address (INTVAL(size) - used);
2915 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2916 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2917 temp, ptr_mode,
2918 XEXP (xinner, 0), ptr_mode,
2919 GEN_INT (INTVAL(size) - used),
2920 TYPE_MODE (sizetype));
2921 else
2922 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2923 temp, ptr_mode,
2924 GEN_INT (INTVAL(size) - used),
2925 TYPE_MODE (sizetype),
2926 GEN_INT (MEMORY_USE_RW),
2927 TYPE_MODE (integer_type_node));
2928 in_check_memory_usage = 0;
2931 else
2932 #endif /* PUSH_ROUNDING */
2934 /* Otherwise make space on the stack and copy the data
2935 to the address of that space. */
2937 /* Deduct words put into registers from the size we must copy. */
2938 if (partial != 0)
2940 if (GET_CODE (size) == CONST_INT)
2941 size = GEN_INT (INTVAL (size) - used);
2942 else
2943 size = expand_binop (GET_MODE (size), sub_optab, size,
2944 GEN_INT (used), NULL_RTX, 0,
2945 OPTAB_LIB_WIDEN);
2948 /* Get the address of the stack space.
2949 In this case, we do not deal with EXTRA separately.
2950 A single stack adjust will do. */
2951 if (! args_addr)
2953 temp = push_block (size, extra, where_pad == downward);
2954 extra = 0;
2956 else if (GET_CODE (args_so_far) == CONST_INT)
2957 temp = memory_address (BLKmode,
2958 plus_constant (args_addr,
2959 skip + INTVAL (args_so_far)));
2960 else
2961 temp = memory_address (BLKmode,
2962 plus_constant (gen_rtx_PLUS (Pmode,
2963 args_addr,
2964 args_so_far),
2965 skip));
2966 if (current_function_check_memory_usage && ! in_check_memory_usage)
2968 rtx target;
2970 in_check_memory_usage = 1;
2971 target = copy_to_reg (temp);
2972 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2973 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2974 target, ptr_mode,
2975 XEXP (xinner, 0), ptr_mode,
2976 size, TYPE_MODE (sizetype));
2977 else
2978 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2979 target, ptr_mode,
2980 size, TYPE_MODE (sizetype),
2981 GEN_INT (MEMORY_USE_RW),
2982 TYPE_MODE (integer_type_node));
2983 in_check_memory_usage = 0;
2986 /* TEMP is the address of the block. Copy the data there. */
2987 if (GET_CODE (size) == CONST_INT
2988 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align)))
2990 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
2991 INTVAL (size), align);
2992 goto ret;
2994 else
2996 rtx opalign = GEN_INT (align);
2997 enum machine_mode mode;
2998 rtx target = gen_rtx_MEM (BLKmode, temp);
3000 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3001 mode != VOIDmode;
3002 mode = GET_MODE_WIDER_MODE (mode))
3004 enum insn_code code = movstr_optab[(int) mode];
3006 if (code != CODE_FOR_nothing
3007 && ((GET_CODE (size) == CONST_INT
3008 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3009 <= (GET_MODE_MASK (mode) >> 1)))
3010 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3011 && (insn_operand_predicate[(int) code][0] == 0
3012 || ((*insn_operand_predicate[(int) code][0])
3013 (target, BLKmode)))
3014 && (insn_operand_predicate[(int) code][1] == 0
3015 || ((*insn_operand_predicate[(int) code][1])
3016 (xinner, BLKmode)))
3017 && (insn_operand_predicate[(int) code][3] == 0
3018 || ((*insn_operand_predicate[(int) code][3])
3019 (opalign, VOIDmode))))
3021 rtx op2 = convert_to_mode (mode, size, 1);
3022 rtx last = get_last_insn ();
3023 rtx pat;
3025 if (insn_operand_predicate[(int) code][2] != 0
3026 && ! ((*insn_operand_predicate[(int) code][2])
3027 (op2, mode)))
3028 op2 = copy_to_mode_reg (mode, op2);
3030 pat = GEN_FCN ((int) code) (target, xinner,
3031 op2, opalign);
3032 if (pat)
3034 emit_insn (pat);
3035 goto ret;
3037 else
3038 delete_insns_since (last);
3043 #ifndef ACCUMULATE_OUTGOING_ARGS
3044 /* If the source is referenced relative to the stack pointer,
3045 copy it to another register to stabilize it. We do not need
3046 to do this if we know that we won't be changing sp. */
3048 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3049 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3050 temp = copy_to_reg (temp);
3051 #endif
3053 /* Make inhibit_defer_pop nonzero around the library call
3054 to force it to pop the bcopy-arguments right away. */
3055 NO_DEFER_POP;
3056 #ifdef TARGET_MEM_FUNCTIONS
3057 emit_library_call (memcpy_libfunc, 0,
3058 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3059 convert_to_mode (TYPE_MODE (sizetype),
3060 size, TREE_UNSIGNED (sizetype)),
3061 TYPE_MODE (sizetype));
3062 #else
3063 emit_library_call (bcopy_libfunc, 0,
3064 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3065 convert_to_mode (TYPE_MODE (integer_type_node),
3066 size,
3067 TREE_UNSIGNED (integer_type_node)),
3068 TYPE_MODE (integer_type_node));
3069 #endif
3070 OK_DEFER_POP;
3073 else if (partial > 0)
3075 /* Scalar partly in registers. */
3077 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3078 int i;
3079 int not_stack;
3080 /* # words of start of argument
3081 that we must make space for but need not store. */
3082 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3083 int args_offset = INTVAL (args_so_far);
3084 int skip;
3086 /* Push padding now if padding above and stack grows down,
3087 or if padding below and stack grows up.
3088 But if space already allocated, this has already been done. */
3089 if (extra && args_addr == 0
3090 && where_pad != none && where_pad != stack_direction)
3091 anti_adjust_stack (GEN_INT (extra));
3093 /* If we make space by pushing it, we might as well push
3094 the real data. Otherwise, we can leave OFFSET nonzero
3095 and leave the space uninitialized. */
3096 if (args_addr == 0)
3097 offset = 0;
3099 /* Now NOT_STACK gets the number of words that we don't need to
3100 allocate on the stack. */
3101 not_stack = partial - offset;
3103 /* If the partial register-part of the arg counts in its stack size,
3104 skip the part of stack space corresponding to the registers.
3105 Otherwise, start copying to the beginning of the stack space,
3106 by setting SKIP to 0. */
3107 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3109 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3110 x = validize_mem (force_const_mem (mode, x));
3112 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3113 SUBREGs of such registers are not allowed. */
3114 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3115 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3116 x = copy_to_reg (x);
3118 /* Loop over all the words allocated on the stack for this arg. */
3119 /* We can do it by words, because any scalar bigger than a word
3120 has a size that is a multiple of a word. */
3121 #ifndef PUSH_ARGS_REVERSED
3122 for (i = not_stack; i < size; i++)
3123 #else
3124 for (i = size - 1; i >= not_stack; i--)
3125 #endif
3126 if (i >= not_stack + offset)
3127 emit_push_insn (operand_subword_force (x, i, mode),
3128 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3129 0, args_addr,
3130 GEN_INT (args_offset + ((i - not_stack + skip)
3131 * UNITS_PER_WORD)),
3132 reg_parm_stack_space);
3134 else
3136 rtx addr;
3137 rtx target = NULL_RTX;
3139 /* Push padding now if padding above and stack grows down,
3140 or if padding below and stack grows up.
3141 But if space already allocated, this has already been done. */
3142 if (extra && args_addr == 0
3143 && where_pad != none && where_pad != stack_direction)
3144 anti_adjust_stack (GEN_INT (extra));
3146 #ifdef PUSH_ROUNDING
3147 if (args_addr == 0)
3148 addr = gen_push_operand ();
3149 else
3150 #endif
3152 if (GET_CODE (args_so_far) == CONST_INT)
3153 addr
3154 = memory_address (mode,
3155 plus_constant (args_addr,
3156 INTVAL (args_so_far)));
3157 else
3158 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3159 args_so_far));
3160 target = addr;
3163 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3165 if (current_function_check_memory_usage && ! in_check_memory_usage)
3167 in_check_memory_usage = 1;
3168 if (target == 0)
3169 target = get_push_address (GET_MODE_SIZE (mode));
3171 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3172 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3173 target, ptr_mode,
3174 XEXP (x, 0), ptr_mode,
3175 GEN_INT (GET_MODE_SIZE (mode)),
3176 TYPE_MODE (sizetype));
3177 else
3178 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3179 target, ptr_mode,
3180 GEN_INT (GET_MODE_SIZE (mode)),
3181 TYPE_MODE (sizetype),
3182 GEN_INT (MEMORY_USE_RW),
3183 TYPE_MODE (integer_type_node));
3184 in_check_memory_usage = 0;
3188 ret:
3189 /* If part should go in registers, copy that part
3190 into the appropriate registers. Do this now, at the end,
3191 since mem-to-mem copies above may do function calls. */
3192 if (partial > 0 && reg != 0)
3194 /* Handle calls that pass values in multiple non-contiguous locations.
3195 The Irix 6 ABI has examples of this. */
3196 if (GET_CODE (reg) == PARALLEL)
3197 emit_group_load (reg, x, -1, align); /* ??? size? */
3198 else
3199 move_block_to_reg (REGNO (reg), x, partial, mode);
3202 if (extra && args_addr == 0 && where_pad == stack_direction)
3203 anti_adjust_stack (GEN_INT (extra));
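/* Usage sketch (assumed, not from the original): pushing one SImode
   scalar on a target with real push insns (PUSH_ROUNDING defined), no
   partial registers, no preallocated argument block and no required
   register-parm stack space.  ALIGN is in bytes, as documented above.  */

static void
example_push_word (x)
     rtx x;
{
  emit_push_insn (x, SImode, NULL_TREE, NULL_RTX,
                  GET_MODE_SIZE (SImode), 0, NULL_RTX, 0,
                  NULL_RTX, const0_rtx, 0);
}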
3206 /* Expand an assignment that stores the value of FROM into TO.
3207 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3208 (This may contain a QUEUED rtx;
3209 if the value is constant, this rtx is a constant.)
3210 Otherwise, the returned value is NULL_RTX.
3212 SUGGEST_REG is no longer actually used.
3213 It used to mean, copy the value through a register
3214 and return that register, if that is possible.
3215 We now use WANT_VALUE to decide whether to do this. */
3217 rtx
3218 expand_assignment (to, from, want_value, suggest_reg)
3219 tree to, from;
3220 int want_value;
3221 int suggest_reg;
3223 register rtx to_rtx = 0;
3224 rtx result;
3226 /* Don't crash if the lhs of the assignment was erroneous. */
3228 if (TREE_CODE (to) == ERROR_MARK)
3230 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3231 return want_value ? result : NULL_RTX;
3234 /* Assignment of a structure component needs special treatment
3235 if the structure component's rtx is not simply a MEM.
3236 Assignment of an array element at a constant index, and assignment of
3237 an array element in an unaligned packed structure field, has the same
3238 problem. */
3240 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3241 || TREE_CODE (to) == ARRAY_REF)
3243 enum machine_mode mode1;
3244 int bitsize;
3245 int bitpos;
3246 tree offset;
3247 int unsignedp;
3248 int volatilep = 0;
3249 tree tem;
3250 int alignment;
3252 push_temp_slots ();
3253 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3254 &unsignedp, &volatilep, &alignment);
3256 /* If we are going to use store_bit_field and extract_bit_field,
3257 make sure to_rtx will be safe for multiple use. */
3259 if (mode1 == VOIDmode && want_value)
3260 tem = stabilize_reference (tem);
3262 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3263 if (offset != 0)
3265 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3267 if (GET_CODE (to_rtx) != MEM)
3268 abort ();
3270 if (GET_MODE (offset_rtx) != ptr_mode)
3272 #ifdef POINTERS_EXTEND_UNSIGNED
3273 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3274 #else
3275 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3276 #endif
3279 if (GET_CODE (to_rtx) == MEM
3280 && GET_MODE (to_rtx) == BLKmode
3281 && bitsize
3282 && (bitpos % bitsize) == 0
3283 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3284 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3286 rtx temp = change_address (to_rtx, mode1,
3287 plus_constant (XEXP (to_rtx, 0),
3288 (bitpos /
3289 BITS_PER_UNIT)));
3290 if (GET_CODE (XEXP (temp, 0)) == REG)
3291 to_rtx = temp;
3292 else
3293 to_rtx = change_address (to_rtx, mode1,
3294 force_reg (GET_MODE (XEXP (temp, 0)),
3295 XEXP (temp, 0)));
3296 bitpos = 0;
3299 to_rtx = change_address (to_rtx, VOIDmode,
3300 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3301 force_reg (ptr_mode, offset_rtx)));
3303 if (volatilep)
3305 if (GET_CODE (to_rtx) == MEM)
3307 /* When the offset is zero, to_rtx is the address of the
3308 structure we are storing into, and hence may be shared.
3309 We must make a new MEM before setting the volatile bit. */
3310 if (offset == 0)
3311 to_rtx = copy_rtx (to_rtx);
3313 MEM_VOLATILE_P (to_rtx) = 1;
3315 #if 0 /* This was turned off because, when a field is volatile
3316 in an object which is not volatile, the object may be in a register,
3317 and then we would abort here. */
3318 else
3319 abort ();
3320 #endif
3323 if (TREE_CODE (to) == COMPONENT_REF
3324 && TREE_READONLY (TREE_OPERAND (to, 1)))
3326 if (offset == 0)
3327 to_rtx = copy_rtx (to_rtx);
3329 RTX_UNCHANGING_P (to_rtx) = 1;
3332 /* Check the access. */
3333 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3335 rtx to_addr;
3336 int size;
3337 int best_mode_size;
3338 enum machine_mode best_mode;
3340 best_mode = get_best_mode (bitsize, bitpos,
3341 TYPE_ALIGN (TREE_TYPE (tem)),
3342 mode1, volatilep);
3343 if (best_mode == VOIDmode)
3344 best_mode = QImode;
3346 best_mode_size = GET_MODE_BITSIZE (best_mode);
3347 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3348 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3349 size *= GET_MODE_SIZE (best_mode);
3351 /* Check the access right of the pointer. */
3352 if (size)
3353 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3354 to_addr, ptr_mode,
3355 GEN_INT (size), TYPE_MODE (sizetype),
3356 GEN_INT (MEMORY_USE_WO),
3357 TYPE_MODE (integer_type_node));
3360 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3361 (want_value
3362 /* Spurious cast makes HPUX compiler happy. */
3363 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3364 : VOIDmode),
3365 unsignedp,
3366 /* Required alignment of containing datum. */
3367 alignment,
3368 int_size_in_bytes (TREE_TYPE (tem)),
3369 get_alias_set (to));
3370 preserve_temp_slots (result);
3371 free_temp_slots ();
3372 pop_temp_slots ();
3374 /* If the value is meaningful, convert RESULT to the proper mode.
3375 Otherwise, return nothing. */
3376 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3377 TYPE_MODE (TREE_TYPE (from)),
3378 result,
3379 TREE_UNSIGNED (TREE_TYPE (to)))
3380 : NULL_RTX);
3383 /* If the rhs is a function call and its value is not an aggregate,
3384 call the function before we start to compute the lhs.
3385 This is needed for correct code for cases such as
3386 val = setjmp (buf) on machines where reference to val
3387 requires loading up part of an address in a separate insn.
3389 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3390 a promoted variable where the zero- or sign- extension needs to be done.
3391 Handling this in the normal way is safe because no computation is done
3392 before the call. */
3393 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3394 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3395 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3397 rtx value;
3399 push_temp_slots ();
3400 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3401 if (to_rtx == 0)
3402 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3404 /* Handle calls that return values in multiple non-contiguous locations.
3405 The Irix 6 ABI has examples of this. */
3406 if (GET_CODE (to_rtx) == PARALLEL)
3407 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3408 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3409 else if (GET_MODE (to_rtx) == BLKmode)
3410 emit_block_move (to_rtx, value, expr_size (from),
3411 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3412 else
3413 emit_move_insn (to_rtx, value);
3414 preserve_temp_slots (to_rtx);
3415 free_temp_slots ();
3416 pop_temp_slots ();
3417 return want_value ? to_rtx : NULL_RTX;
3420 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3421 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3423 if (to_rtx == 0)
3425 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3426 if (GET_CODE (to_rtx) == MEM)
3427 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3430 /* Don't move directly into a return register. */
3431 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3433 rtx temp;
3435 push_temp_slots ();
3436 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3437 emit_move_insn (to_rtx, temp);
3438 preserve_temp_slots (to_rtx);
3439 free_temp_slots ();
3440 pop_temp_slots ();
3441 return want_value ? to_rtx : NULL_RTX;
3444 /* In case we are returning the contents of an object which overlaps
3445 the place the value is being stored, use a safe function when copying
3446 a value through a pointer into a structure value return block. */
3447 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3448 && current_function_returns_struct
3449 && !current_function_returns_pcc_struct)
3451 rtx from_rtx, size;
3453 push_temp_slots ();
3454 size = expr_size (from);
3455 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3456 EXPAND_MEMORY_USE_DONT);
3458 /* Copy the rights of the bitmap. */
3459 if (current_function_check_memory_usage)
3460 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3461 XEXP (to_rtx, 0), ptr_mode,
3462 XEXP (from_rtx, 0), ptr_mode,
3463 convert_to_mode (TYPE_MODE (sizetype),
3464 size, TREE_UNSIGNED (sizetype)),
3465 TYPE_MODE (sizetype));
3467 #ifdef TARGET_MEM_FUNCTIONS
3468 emit_library_call (memcpy_libfunc, 0,
3469 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3470 XEXP (from_rtx, 0), Pmode,
3471 convert_to_mode (TYPE_MODE (sizetype),
3472 size, TREE_UNSIGNED (sizetype)),
3473 TYPE_MODE (sizetype));
3474 #else
3475 emit_library_call (bcopy_libfunc, 0,
3476 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3477 XEXP (to_rtx, 0), Pmode,
3478 convert_to_mode (TYPE_MODE (integer_type_node),
3479 size, TREE_UNSIGNED (integer_type_node)),
3480 TYPE_MODE (integer_type_node));
3481 #endif
3483 preserve_temp_slots (to_rtx);
3484 free_temp_slots ();
3485 pop_temp_slots ();
3486 return want_value ? to_rtx : NULL_RTX;
3489 /* Compute FROM and store the value in the rtx we got. */
3491 push_temp_slots ();
3492 result = store_expr (from, to_rtx, want_value);
3493 preserve_temp_slots (result);
3494 free_temp_slots ();
3495 pop_temp_slots ();
3496 return want_value ? result : NULL_RTX;
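/* Usage sketch (assumed): expanding a plain `dst = src' between two
   trees when the result rtx is not needed.  Passing WANT_VALUE == 0
   returns NULL_RTX; SUGGEST_REG is ignored, per the comment above.  */

static void
example_expand_assignment (dst, src)
     tree dst, src;
{
  expand_assignment (dst, src, 0, 0);
}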
3499 /* Generate code for computing expression EXP,
3500 and storing the value into TARGET.
3501 TARGET may contain a QUEUED rtx.
3503 If WANT_VALUE is nonzero, return a copy of the value
3504 not in TARGET, so that we can be sure to use the proper
3505 value in a containing expression even if TARGET has something
3506 else stored in it. If possible, we copy the value through a pseudo
3507 and return that pseudo. Or, if the value is constant, we try to
3508 return the constant. In some cases, we return a pseudo
3509 copied *from* TARGET.
3511 If the mode is BLKmode then we may return TARGET itself.
3512 It turns out that in BLKmode it doesn't cause a problem,
3513 because C has no operators that could combine two different
3514 assignments into the same BLKmode object with different values
3515 with no sequence point. Will other languages need this to
3516 be more thorough?
3518 If WANT_VALUE is 0, we return NULL, to make sure
3519 to catch quickly any cases where the caller uses the value
3520 and fails to set WANT_VALUE. */
3522 rtx
3523 store_expr (exp, target, want_value)
3524 register tree exp;
3525 register rtx target;
3526 int want_value;
3528 register rtx temp;
3529 int dont_return_target = 0;
3531 if (TREE_CODE (exp) == COMPOUND_EXPR)
3533 /* Perform first part of compound expression, then assign from second
3534 part. */
3535 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3536 emit_queue ();
3537 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3539 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3541 /* For conditional expression, get safe form of the target. Then
3542 test the condition, doing the appropriate assignment on either
3543 side. This avoids the creation of unnecessary temporaries.
3544 For non-BLKmode, it is more efficient not to do this. */
3546 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3548 emit_queue ();
3549 target = protect_from_queue (target, 1);
3551 do_pending_stack_adjust ();
3552 NO_DEFER_POP;
3553 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3554 start_cleanup_deferral ();
3555 store_expr (TREE_OPERAND (exp, 1), target, 0);
3556 end_cleanup_deferral ();
3557 emit_queue ();
3558 emit_jump_insn (gen_jump (lab2));
3559 emit_barrier ();
3560 emit_label (lab1);
3561 start_cleanup_deferral ();
3562 store_expr (TREE_OPERAND (exp, 2), target, 0);
3563 end_cleanup_deferral ();
3564 emit_queue ();
3565 emit_label (lab2);
3566 OK_DEFER_POP;
3568 return want_value ? target : NULL_RTX;
3570 else if (queued_subexp_p (target))
3571 /* If target contains a postincrement, let's not risk
3572 using it as the place to generate the rhs. */
3574 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3576 /* Expand EXP into a new pseudo. */
3577 temp = gen_reg_rtx (GET_MODE (target));
3578 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3580 else
3581 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3583 /* If target is volatile, ANSI requires accessing the value
3584 *from* the target, if it is accessed. So make that happen.
3585 In no case return the target itself. */
3586 if (! MEM_VOLATILE_P (target) && want_value)
3587 dont_return_target = 1;
3589 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3590 && GET_MODE (target) != BLKmode)
3591 /* If target is in memory and caller wants value in a register instead,
3592 arrange that. Pass TARGET as target for expand_expr so that,
3593 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3594 We know expand_expr will not use the target in that case.
3595 Don't do this if TARGET is volatile because we are supposed
3596 to write it and then read it. */
3598 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3599 GET_MODE (target), 0);
3600 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3601 temp = copy_to_reg (temp);
3602 dont_return_target = 1;
3604 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3605 /* If this is a scalar in a register that is stored in a wider mode
3606 than the declared mode, compute the result into its declared mode
3607 and then convert to the wider mode. Our value is the computed
3608 expression. */
3610 /* If we don't want a value, we can do the conversion inside EXP,
3611 which will often result in some optimizations. Do the conversion
3612 in two steps: first change the signedness, if needed, then
3613 the extend. But don't do this if the type of EXP is a subtype
3614 of something else since then the conversion might involve
3615 more than just converting modes. */
3616 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3617 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3619 if (TREE_UNSIGNED (TREE_TYPE (exp))
3620 != SUBREG_PROMOTED_UNSIGNED_P (target))
3621 exp
3622 = convert
3623 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3624 TREE_TYPE (exp)),
3625 exp);
3627 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3628 SUBREG_PROMOTED_UNSIGNED_P (target)),
3629 exp);
3632 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3634 /* If TEMP is a volatile MEM and we want a result value, make
3635 the access now so it gets done only once. Likewise if
3636 it contains TARGET. */
3637 if (GET_CODE (temp) == MEM && want_value
3638 && (MEM_VOLATILE_P (temp)
3639 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3640 temp = copy_to_reg (temp);
3642 /* If TEMP is a VOIDmode constant, use convert_modes to make
3643 sure that we properly convert it. */
3644 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3645 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3646 TYPE_MODE (TREE_TYPE (exp)), temp,
3647 SUBREG_PROMOTED_UNSIGNED_P (target));
3649 convert_move (SUBREG_REG (target), temp,
3650 SUBREG_PROMOTED_UNSIGNED_P (target));
3651 return want_value ? temp : NULL_RTX;
3653 else
3655 temp = expand_expr (exp, target, GET_MODE (target), 0);
3656 /* Return TARGET if it's a specified hardware register.
3657 If TARGET is a volatile mem ref, either return TARGET
3658 or return a reg copied *from* TARGET; ANSI requires this.
3660 Otherwise, if TEMP is not TARGET, return TEMP
3661 if it is constant (for efficiency),
3662 or if we really want the correct value. */
3663 if (!(target && GET_CODE (target) == REG
3664 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3665 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3666 && ! rtx_equal_p (temp, target)
3667 && (CONSTANT_P (temp) || want_value))
3668 dont_return_target = 1;
3671 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3672 the same as that of TARGET, adjust the constant. This is needed, for
3673 example, in case it is a CONST_DOUBLE and we want only a word-sized
3674 value. */
3675 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3676 && TREE_CODE (exp) != ERROR_MARK
3677 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3678 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3679 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3681 if (current_function_check_memory_usage
3682 && GET_CODE (target) == MEM
3683 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3685 if (GET_CODE (temp) == MEM)
3686 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3687 XEXP (target, 0), ptr_mode,
3688 XEXP (temp, 0), ptr_mode,
3689 expr_size (exp), TYPE_MODE (sizetype));
3690 else
3691 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3692 XEXP (target, 0), ptr_mode,
3693 expr_size (exp), TYPE_MODE (sizetype),
3694 GEN_INT (MEMORY_USE_WO),
3695 TYPE_MODE (integer_type_node));
3698 /* If value was not generated in the target, store it there.
3699 Convert the value to TARGET's type first if necessary. */
3700 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3701 one or both of them are volatile memory refs, we have to distinguish
3702 two cases:
3703 - expand_expr has used TARGET. In this case, we must not generate
3704 another copy. This can be detected by TARGET being equal according
3705 to == .
3706 - expand_expr has not used TARGET - that means that the source just
3707 happens to have the same RTX form. Since temp will have been created
3708 by expand_expr, it will compare unequal according to == .
3709 We must generate a copy in this case, to reach the correct number
3710 of volatile memory references. */
3712 if ((! rtx_equal_p (temp, target)
3713 || (temp != target && (side_effects_p (temp)
3714 || side_effects_p (target))))
3715 && TREE_CODE (exp) != ERROR_MARK)
3717 target = protect_from_queue (target, 1);
3718 if (GET_MODE (temp) != GET_MODE (target)
3719 && GET_MODE (temp) != VOIDmode)
3721 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3722 if (dont_return_target)
3724 /* In this case, we will return TEMP,
3725 so make sure it has the proper mode.
3726 But don't forget to store the value into TARGET. */
3727 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3728 emit_move_insn (target, temp);
3730 else
3731 convert_move (target, temp, unsignedp);
3734 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3736 /* Handle copying a string constant into an array.
3737 The string constant may be shorter than the array.
3738 So copy just the string's actual length, and clear the rest. */
3739 rtx size;
3740 rtx addr;
3742 /* Get the size of the data type of the string,
3743 which is actually the size of the target. */
3744 size = expr_size (exp);
3745 if (GET_CODE (size) == CONST_INT
3746 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3747 emit_block_move (target, temp, size,
3748 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3749 else
3751 /* Compute the size of the data to copy from the string. */
3752 tree copy_size
3753 = size_binop (MIN_EXPR,
3754 make_tree (sizetype, size),
3755 convert (sizetype,
3756 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3757 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3758 VOIDmode, 0);
3759 rtx label = 0;
3761 /* Copy that much. */
3762 emit_block_move (target, temp, copy_size_rtx,
3763 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3765 /* Figure out how much is left in TARGET that we have to clear.
3766 Do all calculations in ptr_mode. */
3768 addr = XEXP (target, 0);
3769 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3771 if (GET_CODE (copy_size_rtx) == CONST_INT)
3773 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3774 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3776 else
3778 addr = force_reg (ptr_mode, addr);
3779 addr = expand_binop (ptr_mode, add_optab, addr,
3780 copy_size_rtx, NULL_RTX, 0,
3781 OPTAB_LIB_WIDEN);
3783 size = expand_binop (ptr_mode, sub_optab, size,
3784 copy_size_rtx, NULL_RTX, 0,
3785 OPTAB_LIB_WIDEN);
3787 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3788 GET_MODE (size), 0, 0);
3789 label = gen_label_rtx ();
3790 emit_jump_insn (gen_blt (label));
3793 if (size != const0_rtx)
3795 /* Be sure we can write to ADDR. */
3796 if (current_function_check_memory_usage)
3797 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3798 addr, ptr_mode,
3799 size, TYPE_MODE (sizetype),
3800 GEN_INT (MEMORY_USE_WO),
3801 TYPE_MODE (integer_type_node));
3802 #ifdef TARGET_MEM_FUNCTIONS
3803 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3804 addr, ptr_mode,
3805 const0_rtx, TYPE_MODE (integer_type_node),
3806 convert_to_mode (TYPE_MODE (sizetype),
3807 size,
3808 TREE_UNSIGNED (sizetype)),
3809 TYPE_MODE (sizetype));
3810 #else
3811 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3812 addr, ptr_mode,
3813 convert_to_mode (TYPE_MODE (integer_type_node),
3814 size,
3815 TREE_UNSIGNED (integer_type_node)),
3816 TYPE_MODE (integer_type_node));
3817 #endif
3820 if (label)
3821 emit_label (label);
3824 /* Handle calls that return values in multiple non-contiguous locations.
3825 The Irix 6 ABI has examples of this. */
3826 else if (GET_CODE (target) == PARALLEL)
3827 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3828 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3829 else if (GET_MODE (temp) == BLKmode)
3830 emit_block_move (target, temp, expr_size (exp),
3831 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3832 else
3833 emit_move_insn (target, temp);
3836 /* If we don't want a value, return NULL_RTX. */
3837 if (! want_value)
3838 return NULL_RTX;
3840 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3841 ??? The latter test doesn't seem to make sense. */
3842 else if (dont_return_target && GET_CODE (temp) != MEM)
3843 return temp;
3845 /* Return TARGET itself if it is a hard register. */
3846 else if (want_value && GET_MODE (target) != BLKmode
3847 && ! (GET_CODE (target) == REG
3848 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3849 return copy_to_reg (target);
3851 else
3852 return target;
3855 /* Return 1 if EXP just contains zeros. */
3857 static int
3858 is_zeros_p (exp)
3859 tree exp;
3861 tree elt;
3863 switch (TREE_CODE (exp))
3865 case CONVERT_EXPR:
3866 case NOP_EXPR:
3867 case NON_LVALUE_EXPR:
3868 return is_zeros_p (TREE_OPERAND (exp, 0));
3870 case INTEGER_CST:
3871 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3873 case COMPLEX_CST:
3874 return
3875 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3877 case REAL_CST:
3878 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
3880 case CONSTRUCTOR:
3881 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3882 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3883 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3884 if (! is_zeros_p (TREE_VALUE (elt)))
3885 return 0;
3887 return 1;
3889 default:
3890 return 0;
3894 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
3896 static int
3897 mostly_zeros_p (exp)
3898 tree exp;
3900 if (TREE_CODE (exp) == CONSTRUCTOR)
3902 int elts = 0, zeros = 0;
3903 tree elt = CONSTRUCTOR_ELTS (exp);
3904 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3906 /* If there are no ranges of true bits, it is all zero. */
3907 return elt == NULL_TREE;
3909 for (; elt; elt = TREE_CHAIN (elt))
3911 /* We do not handle the case where the index is a RANGE_EXPR,
3912 so the statistic will be somewhat inaccurate.
3913 We make a more accurate count in store_constructor itself,
3914 and since this function is only used for nested array elements,
3915 this should be close enough. */
3916 if (mostly_zeros_p (TREE_VALUE (elt)))
3917 zeros++;
3918 elts++;
3921 return 4 * zeros >= 3 * elts;
3924 return is_zeros_p (exp);
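/* Worked example (not from the original source): for the constructor
   { 0, 0, 0, 5 } we get zeros == 3 and elts == 4, and 4 * 3 >= 3 * 4
   holds, so it counts as mostly zero; { 0, 5, 5, 5 } gives
   4 * 1 < 3 * 4 and does not.  */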
3927 /* Helper function for store_constructor.
3928 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3929 TYPE is the type of the CONSTRUCTOR, not the element type.
3930 CLEARED is as for store_constructor.
3932 This provides a recursive shortcut back to store_constructor when it isn't
3933 necessary to go through store_field. This is so that we can pass through
3934 the cleared field to let store_constructor know that we may not have to
3935 clear a substructure if the outer structure has already been cleared. */
3937 static void
3938 store_constructor_field (target, bitsize, bitpos,
3939 mode, exp, type, cleared)
3940 rtx target;
3941 int bitsize, bitpos;
3942 enum machine_mode mode;
3943 tree exp, type;
3944 int cleared;
3946 if (TREE_CODE (exp) == CONSTRUCTOR
3947 && bitpos % BITS_PER_UNIT == 0
3948 /* If we have a non-zero bitpos for a register target, then we just
3949 let store_field do the bitfield handling. This is unlikely to
3950 generate unnecessary clear instructions anyway. */
3951 && (bitpos == 0 || GET_CODE (target) == MEM))
3953 if (bitpos != 0)
3954 target = change_address (target, VOIDmode,
3955 plus_constant (XEXP (target, 0),
3956 bitpos / BITS_PER_UNIT));
3957 store_constructor (exp, target, cleared);
3959 else
3960 store_field (target, bitsize, bitpos, mode, exp,
3961 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3962 int_size_in_bytes (type), 0);
3965 /* Store the value of constructor EXP into the rtx TARGET.
3966 TARGET is either a REG or a MEM.
3967 CLEARED is true if TARGET is known to have been zeroed. */
3969 static void
3970 store_constructor (exp, target, cleared)
3971 tree exp;
3972 rtx target;
3973 int cleared;
3975 tree type = TREE_TYPE (exp);
3976 rtx exp_size = expr_size (exp);
3978 /* We know our target cannot conflict, since safe_from_p has been called. */
3979 #if 0
3980 /* Don't try copying piece by piece into a hard register
3981 since that is vulnerable to being clobbered by EXP.
3982 Instead, construct in a pseudo register and then copy it all. */
3983 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3985 rtx temp = gen_reg_rtx (GET_MODE (target));
3986 store_constructor (exp, temp, 0);
3987 emit_move_insn (target, temp);
3988 return;
3990 #endif
3992 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3993 || TREE_CODE (type) == QUAL_UNION_TYPE)
3995 register tree elt;
3997 /* Inform later passes that the whole union value is dead. */
3998 if (TREE_CODE (type) == UNION_TYPE
3999 || TREE_CODE (type) == QUAL_UNION_TYPE)
4000 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4002 /* If we are building a static constructor into a register,
4003 set the initial value as zero so we can fold the value into
4004 a constant. But if more than one register is involved,
4005 this probably loses. */
4006 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4007 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4009 if (! cleared)
4010 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4012 cleared = 1;
4015 /* If the constructor has fewer fields than the structure
4016 or if we are initializing the structure to mostly zeros,
4017 clear the whole structure first. */
4018 else if ((list_length (CONSTRUCTOR_ELTS (exp))
4019 != list_length (TYPE_FIELDS (type)))
4020 || mostly_zeros_p (exp))
4022 if (! cleared)
4023 clear_storage (target, expr_size (exp),
4024 TYPE_ALIGN (type) / BITS_PER_UNIT);
4026 cleared = 1;
4028 else
4029 /* Inform later passes that the old value is dead. */
4030 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
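/* Hypothetical example of the clearing strategy above: for

       struct s { int a, b, c, d; };
       struct s x = { 1 };

   the constructor lists one field out of four, so the whole object is
   cleared first and only `a' is stored explicitly; the loop below can
   then skip any remaining zero initializers.  */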
4032 /* Store each element of the constructor into
4033 the corresponding field of TARGET. */
4035 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4037 register tree field = TREE_PURPOSE (elt);
4038 tree value = TREE_VALUE (elt);
4039 register enum machine_mode mode;
4040 int bitsize;
4041 int bitpos = 0;
4042 int unsignedp;
4043 tree pos, constant = 0, offset = 0;
4044 rtx to_rtx = target;
4046 /* Just ignore missing fields.
4047 We cleared the whole structure, above,
4048 if any fields are missing. */
4049 if (field == 0)
4050 continue;
4052 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4053 continue;
4055 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
4056 unsignedp = TREE_UNSIGNED (field);
4057 mode = DECL_MODE (field);
4058 if (DECL_BIT_FIELD (field))
4059 mode = VOIDmode;
4061 pos = DECL_FIELD_BITPOS (field);
4062 if (TREE_CODE (pos) == INTEGER_CST)
4063 constant = pos;
4064 else if (TREE_CODE (pos) == PLUS_EXPR
4065 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4066 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
4067 else
4068 offset = pos;
4070 if (constant)
4071 bitpos = TREE_INT_CST_LOW (constant);
4073 if (offset)
4075 rtx offset_rtx;
4077 if (contains_placeholder_p (offset))
4078 offset = build (WITH_RECORD_EXPR, sizetype,
4079 offset, make_tree (TREE_TYPE (exp), target));
4081 offset = size_binop (FLOOR_DIV_EXPR, offset,
4082 size_int (BITS_PER_UNIT));
4084 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4085 if (GET_CODE (to_rtx) != MEM)
4086 abort ();
4088 if (GET_MODE (offset_rtx) != ptr_mode)
4090 #ifdef POINTERS_EXTEND_UNSIGNED
4091 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4092 #else
4093 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4094 #endif
4097 to_rtx
4098 = change_address (to_rtx, VOIDmode,
4099 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4100 force_reg (ptr_mode, offset_rtx)));
4102 if (TREE_READONLY (field))
4104 if (GET_CODE (to_rtx) == MEM)
4105 to_rtx = copy_rtx (to_rtx);
4107 RTX_UNCHANGING_P (to_rtx) = 1;
4110 #ifdef WORD_REGISTER_OPERATIONS
4111 /* If this initializes a field that is smaller than a word, at the
4112 start of a word, try to widen it to a full word.
4113 This special case allows us to output C++ member function
4114 initializations in a form that the optimizers can understand. */
4115 if (constant
4116 && GET_CODE (target) == REG
4117 && bitsize < BITS_PER_WORD
4118 && bitpos % BITS_PER_WORD == 0
4119 && GET_MODE_CLASS (mode) == MODE_INT
4120 && TREE_CODE (value) == INTEGER_CST
4121 && GET_CODE (exp_size) == CONST_INT
4122 && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4124 tree type = TREE_TYPE (value);
4125 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4127 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4128 value = convert (type, value);
4130 if (BYTES_BIG_ENDIAN)
4131 value
4132 = fold (build (LSHIFT_EXPR, type, value,
4133 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4134 bitsize = BITS_PER_WORD;
4135 mode = word_mode;
4137 #endif
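/* Worked example for the widening above (assuming 32-bit words and a
   big-endian target): initializing an 8-bit field at bit 0 with the
   constant 0x12 becomes a full-word store of 0x12 shifted left by
   BITS_PER_WORD - bitsize = 24, i.e. 0x12000000, which the optimizers
   can treat as an ordinary word move rather than a bit-field insert.  */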
4138 store_constructor_field (to_rtx, bitsize, bitpos,
4139 mode, value, type, cleared);
4142 else if (TREE_CODE (type) == ARRAY_TYPE)
4144 register tree elt;
4145 register int i;
4146 int need_to_clear;
4147 tree domain = TYPE_DOMAIN (type);
4148 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4149 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4150 tree elttype = TREE_TYPE (type);
4152 /* If the constructor has fewer elements than the array,
4153 clear the whole array first. Similarly if this is a
4154 static constructor of a non-BLKmode object. */
4155 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4156 need_to_clear = 1;
4157 else
4159 HOST_WIDE_INT count = 0, zero_count = 0;
4160 need_to_clear = 0;
4161 /* This loop is a more accurate version of the loop in
4162 mostly_zeros_p (it handles RANGE_EXPR in an index).
4163 It is also needed to check for missing elements. */
4164 for (elt = CONSTRUCTOR_ELTS (exp);
4165 elt != NULL_TREE;
4166 elt = TREE_CHAIN (elt))
4168 tree index = TREE_PURPOSE (elt);
4169 HOST_WIDE_INT this_node_count;
4170 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4172 tree lo_index = TREE_OPERAND (index, 0);
4173 tree hi_index = TREE_OPERAND (index, 1);
4174 if (TREE_CODE (lo_index) != INTEGER_CST
4175 || TREE_CODE (hi_index) != INTEGER_CST)
4177 need_to_clear = 1;
4178 break;
4180 this_node_count = TREE_INT_CST_LOW (hi_index)
4181 - TREE_INT_CST_LOW (lo_index) + 1;
4183 else
4184 this_node_count = 1;
4185 count += this_node_count;
4186 if (mostly_zeros_p (TREE_VALUE (elt)))
4187 zero_count += this_node_count;
4189 /* Clear the entire array first if there are any missing elements,
4190 or if the incidence of zero elements is >= 75%. */
4191 if (count < maxelt - minelt + 1
4192 || 4 * zero_count >= 3 * count)
4193 need_to_clear = 1;
4195 if (need_to_clear)
4197 if (! cleared)
4198 clear_storage (target, expr_size (exp),
4199 TYPE_ALIGN (type) / BITS_PER_UNIT);
4200 cleared = 1;
4202 else
4203 /* Inform later passes that the old value is dead. */
4204 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4206 /* Store each element of the constructor into
4207 the corresponding element of TARGET, determined
4208 by counting the elements. */
4209 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4210 elt;
4211 elt = TREE_CHAIN (elt), i++)
4213 register enum machine_mode mode;
4214 int bitsize;
4215 int bitpos;
4216 int unsignedp;
4217 tree value = TREE_VALUE (elt);
4218 tree index = TREE_PURPOSE (elt);
4219 rtx xtarget = target;
4221 if (cleared && is_zeros_p (value))
4222 continue;
4224 mode = TYPE_MODE (elttype);
4225 bitsize = GET_MODE_BITSIZE (mode);
4226 unsignedp = TREE_UNSIGNED (elttype);
4228 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4230 tree lo_index = TREE_OPERAND (index, 0);
4231 tree hi_index = TREE_OPERAND (index, 1);
4232 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4233 struct nesting *loop;
4234 HOST_WIDE_INT lo, hi, count;
4235 tree position;
4237 /* If the range is constant and "small", unroll the loop. */
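/* E.g. (GNU extension, illustrative): for

       int a[8] = { [2 ... 5] = 7 };

   lo_index is 2, hi_index is 5, so count is 4; with 4-byte elements the
   copies span 32 * 4 = 128 bits, well under the 40 * 8 threshold tested
   below, so four stores are emitted instead of a run-time loop.  */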
4238 if (TREE_CODE (lo_index) == INTEGER_CST
4239 && TREE_CODE (hi_index) == INTEGER_CST
4240 && (lo = TREE_INT_CST_LOW (lo_index),
4241 hi = TREE_INT_CST_LOW (hi_index),
4242 count = hi - lo + 1,
4243 (GET_CODE (target) != MEM
4244 || count <= 2
4245 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4246 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4247 <= 40 * 8))))
4249 lo -= minelt; hi -= minelt;
4250 for (; lo <= hi; lo++)
4252 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4253 store_constructor_field (target, bitsize, bitpos,
4254 mode, value, type, cleared);
4257 else
4259 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4260 loop_top = gen_label_rtx ();
4261 loop_end = gen_label_rtx ();
4263 unsignedp = TREE_UNSIGNED (domain);
4265 index = build_decl (VAR_DECL, NULL_TREE, domain);
4267 DECL_RTL (index) = index_r
4268 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4269 &unsignedp, 0));
4271 if (TREE_CODE (value) == SAVE_EXPR
4272 && SAVE_EXPR_RTL (value) == 0)
4274 /* Make sure value gets expanded once before the
4275 loop. */
4276 expand_expr (value, const0_rtx, VOIDmode, 0);
4277 emit_queue ();
4279 store_expr (lo_index, index_r, 0);
4280 loop = expand_start_loop (0);
4282 /* Assign value to element index. */
4283 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4284 size_int (BITS_PER_UNIT));
4285 position = size_binop (MULT_EXPR,
4286 size_binop (MINUS_EXPR, index,
4287 TYPE_MIN_VALUE (domain)),
4288 position);
4289 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4290 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4291 xtarget = change_address (target, mode, addr);
4292 if (TREE_CODE (value) == CONSTRUCTOR)
4293 store_constructor (value, xtarget, cleared);
4294 else
4295 store_expr (value, xtarget, 0);
4297 expand_exit_loop_if_false (loop,
4298 build (LT_EXPR, integer_type_node,
4299 index, hi_index));
4301 expand_increment (build (PREINCREMENT_EXPR,
4302 TREE_TYPE (index),
4303 index, integer_one_node), 0, 0);
4304 expand_end_loop ();
4305 emit_label (loop_end);
4307 /* Needed by stupid register allocation, to extend the
4308 lifetime of pseudo-regs used by target past the end
4309 of the loop. */
4310 emit_insn (gen_rtx_USE (GET_MODE (target), target));
4313 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
4314 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
4316 rtx pos_rtx, addr;
4317 tree position;
4319 if (index == 0)
4320 index = size_int (i);
4322 if (minelt)
4323 index = size_binop (MINUS_EXPR, index,
4324 TYPE_MIN_VALUE (domain));
4325 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4326 size_int (BITS_PER_UNIT));
4327 position = size_binop (MULT_EXPR, index, position);
4328 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4329 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4330 xtarget = change_address (target, mode, addr);
4331 store_expr (value, xtarget, 0);
4333 else
4335 if (index != 0)
4336 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4337 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4338 else
4339 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4340 store_constructor_field (target, bitsize, bitpos,
4341 mode, value, type, cleared);
4345 /* Set constructor assignments. */
4346 else if (TREE_CODE (type) == SET_TYPE)
4348 tree elt = CONSTRUCTOR_ELTS (exp);
4349 int nbytes = int_size_in_bytes (type), nbits;
4350 tree domain = TYPE_DOMAIN (type);
4351 tree domain_min, domain_max, bitlength;
4353 /* The default implementation strategy is to extract the constant
4354 parts of the constructor, use that to initialize the target,
4355 and then "or" in whatever non-constant ranges we need in addition.
4357 If a large set is all zero or all ones, it is
4358 probably better to set it using memset (if available) or bzero.
4359 Also, if a large set has just a single range, it may also be
4360 better to first clear the whole set (using
4361 bzero/memset), and then set the bits we want. */
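/* Concrete case (hypothetical, for a Pascal/CHILL-style set): a set of
   0..31 initialized to [1, 3..5] has constant bits 1, 3, 4 and 5,
   giving the word 0x3a (with little-endian bit numbering), which is
   stored directly; a non-constant range such as [i..j] would then be
   or'ed in afterwards by the run-time calls made further below.  */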
4363 /* Check for all zeros. */
4364 if (elt == NULL_TREE)
4366 if (!cleared)
4367 clear_storage (target, expr_size (exp),
4368 TYPE_ALIGN (type) / BITS_PER_UNIT);
4369 return;
4372 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4373 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4374 bitlength = size_binop (PLUS_EXPR,
4375 size_binop (MINUS_EXPR, domain_max, domain_min),
4376 size_one_node);
4378 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4379 abort ();
4380 nbits = TREE_INT_CST_LOW (bitlength);
4382 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4383 are "complicated" (more than one range), initialize (the
4384 constant parts) by copying from a constant. */
4385 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4386 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4388 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4389 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4390 char *bit_buffer = (char *) alloca (nbits);
4391 HOST_WIDE_INT word = 0;
4392 int bit_pos = 0;
4393 int ibit = 0;
4394 int offset = 0; /* In bytes from beginning of set. */
4395 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4396 for (;;)
4398 if (bit_buffer[ibit])
4400 if (BYTES_BIG_ENDIAN)
4401 word |= (1 << (set_word_size - 1 - bit_pos));
4402 else
4403 word |= 1 << bit_pos;
4405 bit_pos++; ibit++;
4406 if (bit_pos >= set_word_size || ibit == nbits)
4408 if (word != 0 || ! cleared)
4410 rtx datum = GEN_INT (word);
4411 rtx to_rtx;
4412 /* The assumption here is that it is safe to use
4413 XEXP if the set is multi-word, but not if
4414 it's single-word. */
4415 if (GET_CODE (target) == MEM)
4417 to_rtx = plus_constant (XEXP (target, 0), offset);
4418 to_rtx = change_address (target, mode, to_rtx);
4420 else if (offset == 0)
4421 to_rtx = target;
4422 else
4423 abort ();
4424 emit_move_insn (to_rtx, datum);
4426 if (ibit == nbits)
4427 break;
4428 word = 0;
4429 bit_pos = 0;
4430 offset += set_word_size / BITS_PER_UNIT;
4434 else if (!cleared)
4436 /* Don't bother clearing storage if the set is all ones. */
4437 if (TREE_CHAIN (elt) != NULL_TREE
4438 || (TREE_PURPOSE (elt) == NULL_TREE
4439 ? nbits != 1
4440 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4441 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4442 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4443 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4444 != nbits))))
4445 clear_storage (target, expr_size (exp),
4446 TYPE_ALIGN (type) / BITS_PER_UNIT);
4449 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4451 /* Start of range of element, or NULL. */
4452 tree startbit = TREE_PURPOSE (elt);
4453 /* End of range of element, or element value. */
4454 tree endbit = TREE_VALUE (elt);
4455 #ifdef TARGET_MEM_FUNCTIONS
4456 HOST_WIDE_INT startb, endb;
4457 #endif
4458 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4460 bitlength_rtx = expand_expr (bitlength,
4461 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4463 /* Handle a non-range tuple element like [ expr ]. */
4464 if (startbit == NULL_TREE)
4466 startbit = save_expr (endbit);
4467 endbit = startbit;
4469 startbit = convert (sizetype, startbit);
4470 endbit = convert (sizetype, endbit);
4471 if (! integer_zerop (domain_min))
4473 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4474 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4476 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4477 EXPAND_CONST_ADDRESS);
4478 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4479 EXPAND_CONST_ADDRESS);
4481 if (REG_P (target))
4483 targetx = assign_stack_temp (GET_MODE (target),
4484 GET_MODE_SIZE (GET_MODE (target)),
4485 0);
4486 emit_move_insn (targetx, target);
4488 else if (GET_CODE (target) == MEM)
4489 targetx = target;
4490 else
4491 abort ();
4493 #ifdef TARGET_MEM_FUNCTIONS
4494 /* Optimization: If startbit and endbit are
4495 constants divisible by BITS_PER_UNIT,
4496 call memset instead. */
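/* For instance (sketch): a constant range [8 .. 23] gives startb == 8
   and endb == 24, both multiples of BITS_PER_UNIT, so the two bytes at
   offset 1 from the start of the set are filled with -1 by memset
   instead of going through the generic __setbits routine.  */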
4497 if (TREE_CODE (startbit) == INTEGER_CST
4498 && TREE_CODE (endbit) == INTEGER_CST
4499 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4500 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4502 emit_library_call (memset_libfunc, 0,
4503 VOIDmode, 3,
4504 plus_constant (XEXP (targetx, 0),
4505 startb / BITS_PER_UNIT),
4506 Pmode,
4507 constm1_rtx, TYPE_MODE (integer_type_node),
4508 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4509 TYPE_MODE (sizetype));
4511 else
4512 #endif
4514 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4515 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4516 bitlength_rtx, TYPE_MODE (sizetype),
4517 startbit_rtx, TYPE_MODE (sizetype),
4518 endbit_rtx, TYPE_MODE (sizetype));
4520 if (REG_P (target))
4521 emit_move_insn (target, targetx);
4525 else
4526 abort ();
4529 /* Store the value of EXP (an expression tree)
4530 into a subfield of TARGET which has mode MODE and occupies
4531 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4532 If MODE is VOIDmode, it means that we are storing into a bit-field.
4534 If VALUE_MODE is VOIDmode, return nothing in particular.
4535 UNSIGNEDP is not used in this case.
4537 Otherwise, return an rtx for the value stored. This rtx
4538 has mode VALUE_MODE if that is convenient to do.
4539 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4541 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4542 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4544 ALIAS_SET is the alias set for the destination. This value will
4545 (in general) be different from that for TARGET, since TARGET is a
4546 reference to the containing structure. */
4548 static rtx
4549 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4550 unsignedp, align, total_size, alias_set)
4551 rtx target;
4552 int bitsize, bitpos;
4553 enum machine_mode mode;
4554 tree exp;
4555 enum machine_mode value_mode;
4556 int unsignedp;
4557 int align;
4558 int total_size;
4559 int alias_set;
4561 HOST_WIDE_INT width_mask = 0;
4563 if (TREE_CODE (exp) == ERROR_MARK)
4564 return const0_rtx;
4566 if (bitsize < HOST_BITS_PER_WIDE_INT)
4567 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4569 /* If we are storing into an unaligned field of an aligned union that is
4570 in a register, we may have the mode of TARGET being an integer mode but
4571 MODE == BLKmode. In that case, get an aligned object whose size and
4572 alignment are the same as TARGET and store TARGET into it (we can avoid
4573 the store if the field being stored is the entire width of TARGET). Then
4574 call ourselves recursively to store the field into a BLKmode version of
4575 that object. Finally, load from the object into TARGET. This is not
4576 very efficient in general, but should only be slightly more expensive
4577 than the otherwise-required unaligned accesses. Perhaps this can be
4578 cleaned up later. */
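/* Outline of the round trip just described (illustrative):

       object     <-- copy of TARGET in a stack temp of the same mode
       blk_object <-- the same slot viewed in BLKmode
       store the field into blk_object by a recursive call
       TARGET     <-- object

   i.e. the unaligned store happens in memory and the result is moved
   back into the register afterwards.  */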
4580 if (mode == BLKmode
4581 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4583 rtx object = assign_stack_temp (GET_MODE (target),
4584 GET_MODE_SIZE (GET_MODE (target)), 0);
4585 rtx blk_object = copy_rtx (object);
4587 MEM_SET_IN_STRUCT_P (object, 1);
4588 MEM_SET_IN_STRUCT_P (blk_object, 1);
4589 PUT_MODE (blk_object, BLKmode);
4591 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4592 emit_move_insn (object, target);
4594 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4595 align, total_size, alias_set);
4597 /* Even though we aren't returning target, we need to
4598 give it the updated value. */
4599 emit_move_insn (target, object);
4601 return blk_object;
4604 /* If the structure is in a register or if the component
4605 is a bit field, we cannot use addressing to access it.
4606 Use bit-field techniques or SUBREG to store in it. */
4608 if (mode == VOIDmode
4609 || (mode != BLKmode && ! direct_store[(int) mode])
4610 || GET_CODE (target) == REG
4611 || GET_CODE (target) == SUBREG
4612 /* If the field isn't aligned enough to store as an ordinary memref,
4613 store it as a bit field. */
4614 || (SLOW_UNALIGNED_ACCESS
4615 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4616 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4618 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4620 /* If BITSIZE is narrower than the size of the type of EXP
4621 we will be narrowing TEMP. Normally, what's wanted are the
4622 low-order bits. However, if EXP's type is a record and this is a
4623 big-endian machine, we want the upper BITSIZE bits. */
4624 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4625 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4626 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4627 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4628 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4629 - bitsize),
4630 temp, 1);
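/* Numeric example for the shift above (assuming a 32-bit TEMP): when
   storing a 10-bit record field on a big-endian machine, the upper 10
   bits are wanted, so TEMP is shifted right by 32 - 10 = 22 before the
   narrowing store.  */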
4632 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4633 MODE. */
4634 if (mode != VOIDmode && mode != BLKmode
4635 && mode != TYPE_MODE (TREE_TYPE (exp)))
4636 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4638 /* If the modes of TARGET and TEMP are both BLKmode, both
4639 must be in memory and BITPOS must be aligned on a byte
4640 boundary. If so, we simply do a block copy. */
4641 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4643 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4644 || bitpos % BITS_PER_UNIT != 0)
4645 abort ();
4647 target = change_address (target, VOIDmode,
4648 plus_constant (XEXP (target, 0),
4649 bitpos / BITS_PER_UNIT));
4651 emit_block_move (target, temp,
4652 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4653 / BITS_PER_UNIT),
4654 1);
4656 return value_mode == VOIDmode ? const0_rtx : target;
4659 /* Store the value in the bitfield. */
4660 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4661 if (value_mode != VOIDmode)
4663 /* The caller wants an rtx for the value. */
4664 /* If possible, avoid refetching from the bitfield itself. */
4665 if (width_mask != 0
4666 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4668 tree count;
4669 enum machine_mode tmode;
4671 if (unsignedp)
4672 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4673 tmode = GET_MODE (temp);
4674 if (tmode == VOIDmode)
4675 tmode = value_mode;
4676 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4677 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4678 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4680 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4681 NULL_RTX, value_mode, 0, align,
4682 total_size);
4684 return const0_rtx;
4686 else
4688 rtx addr = XEXP (target, 0);
4689 rtx to_rtx;
4691 /* If a value is wanted, it must be the lhs;
4692 so make the address stable for multiple use. */
4694 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4695 && ! CONSTANT_ADDRESS_P (addr)
4696 /* A frame-pointer reference is already stable. */
4697 && ! (GET_CODE (addr) == PLUS
4698 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4699 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4700 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4701 addr = copy_to_reg (addr);
4703 /* Now build a reference to just the desired component. */
4705 to_rtx = copy_rtx (change_address (target, mode,
4706 plus_constant (addr,
4707 (bitpos
4708 / BITS_PER_UNIT))));
4709 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4710 MEM_ALIAS_SET (to_rtx) = alias_set;
4712 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4716 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4717 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4718 ARRAY_REFs and find the ultimate containing object, which we return.
4720 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4721 bit position, and *PUNSIGNEDP to the signedness of the field.
4722 If the position of the field is variable, we store a tree
4723 giving the variable offset (in units) in *POFFSET.
4724 This offset is in addition to the bit position.
4725 If the position is not variable, we store 0 in *POFFSET.
4726 We set *PALIGNMENT to the alignment in bytes of the address that will be
4727 computed. This is the alignment of the thing we return if *POFFSET
4728 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4730 If any of the extraction expressions is volatile,
4731 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4733 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4734 is a mode that can be used to access the field. In that case, *PBITSIZE
4735 is redundant.
4737 If the field describes a variable-sized object, *PMODE is set to
4738 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4739 this case, but the address of the object can be found. */
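/* Usage sketch (hypothetical caller): for a reference like s.b, where
   `b' is a bit-field starting at bit 37 of `s', this returns the decl
   for `s' with *PBITSIZE the width of `b', *PBITPOS == 37,
   *POFFSET == 0 and *PMODE == VOIDmode.  For a[i].b the variable part
   of the position comes back in *POFFSET (in units) instead, and
   *PBITPOS holds only the constant part.  */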
4741 tree
4742 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4743 punsignedp, pvolatilep, palignment)
4744 tree exp;
4745 int *pbitsize;
4746 int *pbitpos;
4747 tree *poffset;
4748 enum machine_mode *pmode;
4749 int *punsignedp;
4750 int *pvolatilep;
4751 int *palignment;
4753 tree orig_exp = exp;
4754 tree size_tree = 0;
4755 enum machine_mode mode = VOIDmode;
4756 tree offset = integer_zero_node;
4757 unsigned int alignment = BIGGEST_ALIGNMENT;
4759 if (TREE_CODE (exp) == COMPONENT_REF)
4761 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4762 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4763 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4764 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4766 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4768 size_tree = TREE_OPERAND (exp, 1);
4769 *punsignedp = TREE_UNSIGNED (exp);
4771 else
4773 mode = TYPE_MODE (TREE_TYPE (exp));
4774 *pbitsize = GET_MODE_BITSIZE (mode);
4775 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4778 if (size_tree)
4780 if (TREE_CODE (size_tree) != INTEGER_CST)
4781 mode = BLKmode, *pbitsize = -1;
4782 else
4783 *pbitsize = TREE_INT_CST_LOW (size_tree);
4786 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4787 and find the ultimate containing object. */
4789 *pbitpos = 0;
4791 while (1)
4793 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4795 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4796 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4797 : TREE_OPERAND (exp, 2));
4798 tree constant = integer_zero_node, var = pos;
4800 /* If this field hasn't been filled in yet, don't go
4801 past it. This should only happen when folding expressions
4802 made during type construction. */
4803 if (pos == 0)
4804 break;
4806 /* Assume here that the offset is a multiple of a unit.
4807 If not, there should be an explicitly added constant. */
4808 if (TREE_CODE (pos) == PLUS_EXPR
4809 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4810 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4811 else if (TREE_CODE (pos) == INTEGER_CST)
4812 constant = pos, var = integer_zero_node;
4814 *pbitpos += TREE_INT_CST_LOW (constant);
4815 offset = size_binop (PLUS_EXPR, offset,
4816 size_binop (EXACT_DIV_EXPR, var,
4817 size_int (BITS_PER_UNIT)));
4820 else if (TREE_CODE (exp) == ARRAY_REF)
4822 /* This code is based on the code in case ARRAY_REF in expand_expr
4823 below. We assume here that the size of an array element is
4824 always an integral multiple of BITS_PER_UNIT. */
4826 tree index = TREE_OPERAND (exp, 1);
4827 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4828 tree low_bound
4829 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4830 tree index_type = TREE_TYPE (index);
4831 tree xindex;
4833 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4835 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4836 index);
4837 index_type = TREE_TYPE (index);
4840 /* Optimize the special case of a zero lower bound.
4842 We convert the low_bound to sizetype to avoid some problems
4843 with constant folding. (E.g. suppose the lower bound is 1,
4844 and its mode is QI. Without the conversion, (ARRAY
4845 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4846 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4848 But sizetype isn't quite right either (especially if
4849 the low bound is negative). FIXME */
4851 if (! integer_zerop (low_bound))
4852 index = fold (build (MINUS_EXPR, index_type, index,
4853 convert (sizetype, low_bound)));
4855 if (TREE_CODE (index) == INTEGER_CST)
4857 index = convert (sbitsizetype, index);
4858 index_type = TREE_TYPE (index);
4861 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
4862 convert (sbitsizetype,
4863 TYPE_SIZE (TREE_TYPE (exp)))));
4865 if (TREE_CODE (xindex) == INTEGER_CST
4866 && TREE_INT_CST_HIGH (xindex) == 0)
4867 *pbitpos += TREE_INT_CST_LOW (xindex);
4868 else
4870 /* Either the bit offset calculated above is not constant, or
4871 it overflowed. In either case, redo the multiplication
4872 against the size in units. This is especially important
4873 in the non-constant case to avoid a division at runtime. */
4874 xindex = fold (build (MULT_EXPR, ssizetype, index,
4875 convert (ssizetype,
4876 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
4878 if (contains_placeholder_p (xindex))
4879 xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
4881 offset = size_binop (PLUS_EXPR, offset, xindex);
4884 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4885 && ! ((TREE_CODE (exp) == NOP_EXPR
4886 || TREE_CODE (exp) == CONVERT_EXPR)
4887 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4888 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4889 != UNION_TYPE))
4890 && (TYPE_MODE (TREE_TYPE (exp))
4891 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4892 break;
4894 /* If any reference in the chain is volatile, the effect is volatile. */
4895 if (TREE_THIS_VOLATILE (exp))
4896 *pvolatilep = 1;
4898 /* If the offset is non-constant already, then we can't assume any
4899 alignment more than the alignment here. */
4900 if (! integer_zerop (offset))
4901 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4903 exp = TREE_OPERAND (exp, 0);
4906 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4907 alignment = MIN (alignment, DECL_ALIGN (exp));
4908 else if (TREE_TYPE (exp) != 0)
4909 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4911 if (integer_zerop (offset))
4912 offset = 0;
4914 if (offset != 0 && contains_placeholder_p (offset))
4915 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4917 *pmode = mode;
4918 *poffset = offset;
4919 *palignment = alignment / BITS_PER_UNIT;
4920 return exp;
4923 /* Subroutine of expand_expr: compute memory_usage from modifier. */
4924 static enum memory_use_mode
4925 get_memory_usage_from_modifier (modifier)
4926 enum expand_modifier modifier;
4928 switch (modifier)
4930 case EXPAND_NORMAL:
4931 case EXPAND_SUM:
4932 return MEMORY_USE_RO;
4933 break;
4934 case EXPAND_MEMORY_USE_WO:
4935 return MEMORY_USE_WO;
4936 break;
4937 case EXPAND_MEMORY_USE_RW:
4938 return MEMORY_USE_RW;
4939 break;
4940 case EXPAND_MEMORY_USE_DONT:
4941 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
4942 MEMORY_USE_DONT, because they are modifiers to a call of
4943 expand_expr in the ADDR_EXPR case of expand_expr. */
4944 case EXPAND_CONST_ADDRESS:
4945 case EXPAND_INITIALIZER:
4946 return MEMORY_USE_DONT;
4947 case EXPAND_MEMORY_USE_BAD:
4948 default:
4949 abort ();
4953 /* Given an rtx VALUE that may contain additions and multiplications,
4954 return an equivalent value that just refers to a register or memory.
4955 This is done by generating instructions to perform the arithmetic
4956 and returning a pseudo-register containing the value.
4958 The returned value may be a REG, SUBREG, MEM or constant. */
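/* Example of the intended use (sketch): given

       value = (plus:SI (reg:SI 100)
                        (mult:SI (reg:SI 101) (const_int 4)))

   force_operand emits the multiply and the addition via expand_mult
   and expand_binop and returns a pseudo register holding the sum,
   suitable wherever a general operand is required.  */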
4960 rtx
4961 force_operand (value, target)
4962 rtx value, target;
4964 register optab binoptab = 0;
4965 /* Use a temporary to force order of execution of calls to
4966 `force_operand'. */
4967 rtx tmp;
4968 register rtx op2;
4969 /* Use subtarget as the target for operand 0 of a binary operation. */
4970 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4972 /* Check for a PIC address load. */
4973 if (flag_pic
4974 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
4975 && XEXP (value, 0) == pic_offset_table_rtx
4976 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
4977 || GET_CODE (XEXP (value, 1)) == LABEL_REF
4978 || GET_CODE (XEXP (value, 1)) == CONST))
4980 if (!subtarget)
4981 subtarget = gen_reg_rtx (GET_MODE (value));
4982 emit_move_insn (subtarget, value);
4983 return subtarget;
4986 if (GET_CODE (value) == PLUS)
4987 binoptab = add_optab;
4988 else if (GET_CODE (value) == MINUS)
4989 binoptab = sub_optab;
4990 else if (GET_CODE (value) == MULT)
4992 op2 = XEXP (value, 1);
4993 if (!CONSTANT_P (op2)
4994 && !(GET_CODE (op2) == REG && op2 != subtarget))
4995 subtarget = 0;
4996 tmp = force_operand (XEXP (value, 0), subtarget);
4997 return expand_mult (GET_MODE (value), tmp,
4998 force_operand (op2, NULL_RTX),
4999 target, 0);
5002 if (binoptab)
5004 op2 = XEXP (value, 1);
5005 if (!CONSTANT_P (op2)
5006 && !(GET_CODE (op2) == REG && op2 != subtarget))
5007 subtarget = 0;
5008 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5010 binoptab = add_optab;
5011 op2 = negate_rtx (GET_MODE (value), op2);
5014 /* Check for an addition with OP2 a constant integer and our first
5015 operand a PLUS of a virtual register and something else. In that
5016 case, we want to emit the sum of the virtual register and the
5017 constant first and then add the other value. This allows virtual
5018 register instantiation to simply modify the constant rather than
5019 creating another one around this addition. */
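/* Illustration (sketch): for

       (plus (plus (reg virtual-stack-vars) (reg 102)) (const_int 8))

   the code below first forms virtual-stack-vars + 8, which register
   instantiation can later fold into a single frame offset, and only
   then adds (reg 102).  */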
5020 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5021 && GET_CODE (XEXP (value, 0)) == PLUS
5022 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5023 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5024 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5026 rtx temp = expand_binop (GET_MODE (value), binoptab,
5027 XEXP (XEXP (value, 0), 0), op2,
5028 subtarget, 0, OPTAB_LIB_WIDEN);
5029 return expand_binop (GET_MODE (value), binoptab, temp,
5030 force_operand (XEXP (XEXP (value, 0), 1), 0),
5031 target, 0, OPTAB_LIB_WIDEN);
5034 tmp = force_operand (XEXP (value, 0), subtarget);
5035 return expand_binop (GET_MODE (value), binoptab, tmp,
5036 force_operand (op2, NULL_RTX),
5037 target, 0, OPTAB_LIB_WIDEN);
5038 /* We give UNSIGNEDP = 0 to expand_binop
5039 because the only operations we are expanding here are signed ones. */
5041 return value;
5044 /* Subroutine of expand_expr:
5045 save the non-copied parts (LIST) of an expr (LHS), and return a list
5046 which can restore these values to their previous values,
5047 should something modify their storage. */
5049 static tree
5050 save_noncopied_parts (lhs, list)
5051 tree lhs;
5052 tree list;
5054 tree tail;
5055 tree parts = 0;
5057 for (tail = list; tail; tail = TREE_CHAIN (tail))
5058 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5059 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5060 else
5062 tree part = TREE_VALUE (tail);
5063 tree part_type = TREE_TYPE (part);
5064 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5065 rtx target = assign_temp (part_type, 0, 1, 1);
5066 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5067 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5068 parts = tree_cons (to_be_saved,
5069 build (RTL_EXPR, part_type, NULL_TREE,
5070 (tree) target),
5071 parts);
5072 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5074 return parts;
5077 /* Subroutine of expand_expr:
5078 record the non-copied parts (LIST) of an expr (LHS), and return a list
5079 which specifies the initial values of these parts. */
5081 static tree
5082 init_noncopied_parts (lhs, list)
5083 tree lhs;
5084 tree list;
5086 tree tail;
5087 tree parts = 0;
5089 for (tail = list; tail; tail = TREE_CHAIN (tail))
5090 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5091 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5092 else
5094 tree part = TREE_VALUE (tail);
5095 tree part_type = TREE_TYPE (part);
5096 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5097 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5099 return parts;
5102 /* Subroutine of expand_expr: return nonzero iff there is no way that
5103 EXP can reference X, which is being modified. TOP_P is nonzero if this
5104 call is going to be used to determine whether we need a temporary
5105 for EXP, as opposed to a recursive call to this function.
5107 It is always safe for this routine to return zero since it merely
5108 searches for optimization opportunities. */
5110 static int
5111 safe_from_p (x, exp, top_p)
5112 rtx x;
5113 tree exp;
5114 int top_p;
5116 rtx exp_rtl = 0;
5117 int i, nops;
5118 static int save_expr_count;
5119 static int save_expr_size = 0;
5120 static tree *save_expr_rewritten;
5121 static tree save_expr_trees[256];
5123 if (x == 0
5124 /* If EXP has varying size, we MUST use a target since we currently
5125 have no way of allocating temporaries of variable size
5126 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5127 So we assume here that something at a higher level has prevented a
5128 clash. This is somewhat bogus, but the best we can do. Only
5129 do this when X is BLKmode and when we are at the top level. */
5130 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5131 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5132 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5133 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5134 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5135 != INTEGER_CST)
5136 && GET_MODE (x) == BLKmode))
5137 return 1;
5139 if (top_p && save_expr_size == 0)
5141 int rtn;
5143 save_expr_count = 0;
5144 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5145 save_expr_rewritten = &save_expr_trees[0];
5147 rtn = safe_from_p (x, exp, 1);
5149 for (i = 0; i < save_expr_count; ++i)
5151 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5152 abort ();
5153 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5156 save_expr_size = 0;
5158 return rtn;
5161 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5162 find the underlying pseudo. */
5163 if (GET_CODE (x) == SUBREG)
5165 x = SUBREG_REG (x);
5166 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5167 return 0;
5170 /* If X is a location in the outgoing argument area, it is always safe. */
5171 if (GET_CODE (x) == MEM
5172 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5173 || (GET_CODE (XEXP (x, 0)) == PLUS
5174 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5175 return 1;
5177 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5179 case 'd':
5180 exp_rtl = DECL_RTL (exp);
5181 break;
5183 case 'c':
5184 return 1;
5186 case 'x':
5187 if (TREE_CODE (exp) == TREE_LIST)
5188 return ((TREE_VALUE (exp) == 0
5189 || safe_from_p (x, TREE_VALUE (exp), 0))
5190 && (TREE_CHAIN (exp) == 0
5191 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5192 else if (TREE_CODE (exp) == ERROR_MARK)
5193 return 1; /* An already-visited SAVE_EXPR? */
5194 else
5195 return 0;
5197 case '1':
5198 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5200 case '2':
5201 case '<':
5202 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5203 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5205 case 'e':
5206 case 'r':
5207 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5208 the expression. If it is set, we conflict iff we are that rtx or
5209 both are in memory. Otherwise, we check all operands of the
5210 expression recursively. */
5212 switch (TREE_CODE (exp))
5214 case ADDR_EXPR:
5215 return (staticp (TREE_OPERAND (exp, 0))
5216 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5217 || TREE_STATIC (exp));
5219 case INDIRECT_REF:
5220 if (GET_CODE (x) == MEM)
5221 return 0;
5222 break;
5224 case CALL_EXPR:
5225 exp_rtl = CALL_EXPR_RTL (exp);
5226 if (exp_rtl == 0)
5228 /* Assume that the call will clobber all hard registers and
5229 all of memory. */
5230 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5231 || GET_CODE (x) == MEM)
5232 return 0;
5235 break;
5237 case RTL_EXPR:
5238 /* If a sequence exists, we would have to scan every instruction
5239 in the sequence to see if it was safe. This is probably not
5240 worthwhile. */
5241 if (RTL_EXPR_SEQUENCE (exp))
5242 return 0;
5244 exp_rtl = RTL_EXPR_RTL (exp);
5245 break;
5247 case WITH_CLEANUP_EXPR:
5248 exp_rtl = RTL_EXPR_RTL (exp);
5249 break;
5251 case CLEANUP_POINT_EXPR:
5252 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5254 case SAVE_EXPR:
5255 exp_rtl = SAVE_EXPR_RTL (exp);
5256 if (exp_rtl)
5257 break;
5259 /* This SAVE_EXPR might appear many times in the top-level
5260 safe_from_p() expression, and if it has a complex
5261 subexpression, examining it multiple times could result
5262 in a combinatorial explosion. E.g. on an Alpha
5263 running at least 200MHz, a Fortran test case compiled with
5264 optimization took about 28 minutes to compile -- even though
5265 it was only a few lines long, and the complicated line causing
5266 so much time to be spent in the earlier version of safe_from_p()
5267 had only 293 or so unique nodes.
5269 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5270 where it is so we can turn it back in the top-level safe_from_p()
5271 when we're done. */
5273 /* For now, don't bother re-sizing the array. */
5274 if (save_expr_count >= save_expr_size)
5275 return 0;
5276 save_expr_rewritten[save_expr_count++] = exp;
5278 nops = tree_code_length[(int) SAVE_EXPR];
5279 for (i = 0; i < nops; i++)
5281 tree operand = TREE_OPERAND (exp, i);
5282 if (operand == NULL_TREE)
5283 continue;
5284 TREE_SET_CODE (exp, ERROR_MARK);
5285 if (!safe_from_p (x, operand, 0))
5286 return 0;
5287 TREE_SET_CODE (exp, SAVE_EXPR);
5289 TREE_SET_CODE (exp, ERROR_MARK);
5290 return 1;
5292 case BIND_EXPR:
5293 /* The only operand we look at is operand 1. The rest aren't
5294 part of the expression. */
5295 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5297 case METHOD_CALL_EXPR:
5298 /* This takes an rtx argument, but shouldn't appear here. */
5299 abort ();
5301 default:
5302 break;
5305 /* If we have an rtx, we do not need to scan our operands. */
5306 if (exp_rtl)
5307 break;
5309 nops = tree_code_length[(int) TREE_CODE (exp)];
5310 for (i = 0; i < nops; i++)
5311 if (TREE_OPERAND (exp, i) != 0
5312 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5313 return 0;
5316 /* If we have an rtl, find any enclosed object. Then see if we conflict
5317 with it. */
5318 if (exp_rtl)
5320 if (GET_CODE (exp_rtl) == SUBREG)
5322 exp_rtl = SUBREG_REG (exp_rtl);
5323 if (GET_CODE (exp_rtl) == REG
5324 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5325 return 0;
5328 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5329 are memory and EXP is not readonly. */
5330 return ! (rtx_equal_p (x, exp_rtl)
5331 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5332 && ! TREE_READONLY (exp)));
5335 /* If we reach here, it is safe. */
5336 return 1;
5339 /* Subroutine of expand_expr: return nonzero iff EXP is an
5340 expression whose type is statically determinable. */
5342 static int
5343 fixed_type_p (exp)
5344 tree exp;
5346 if (TREE_CODE (exp) == PARM_DECL
5347 || TREE_CODE (exp) == VAR_DECL
5348 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5349 || TREE_CODE (exp) == COMPONENT_REF
5350 || TREE_CODE (exp) == ARRAY_REF)
5351 return 1;
5352 return 0;
5355 /* Subroutine of expand_expr: return rtx if EXP is a
5356 variable or parameter; else return 0. */
5358 static rtx
5359 var_rtx (exp)
5360 tree exp;
5362 STRIP_NOPS (exp);
5363 switch (TREE_CODE (exp))
5365 case PARM_DECL:
5366 case VAR_DECL:
5367 return DECL_RTL (exp);
5368 default:
5369 return 0;
5373 #ifdef MAX_INTEGER_COMPUTATION_MODE
5374 void
5375 check_max_integer_computation_mode (exp)
5376 tree exp;
5378 enum tree_code code = TREE_CODE (exp);
5379 enum machine_mode mode;
5381 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5382 if (code == NOP_EXPR
5383 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5384 return;
5386 /* First check the type of the overall operation. We need only look at
5387 unary, binary and relational operations. */
5388 if (TREE_CODE_CLASS (code) == '1'
5389 || TREE_CODE_CLASS (code) == '2'
5390 || TREE_CODE_CLASS (code) == '<')
5392 mode = TYPE_MODE (TREE_TYPE (exp));
5393 if (GET_MODE_CLASS (mode) == MODE_INT
5394 && mode > MAX_INTEGER_COMPUTATION_MODE)
5395 fatal ("unsupported wide integer operation");
5398 /* Check operand of a unary op. */
5399 if (TREE_CODE_CLASS (code) == '1')
5401 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5402 if (GET_MODE_CLASS (mode) == MODE_INT
5403 && mode > MAX_INTEGER_COMPUTATION_MODE)
5404 fatal ("unsupported wide integer operation");
5407 /* Check operands of a binary/comparison op. */
5408 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5410 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5411 if (GET_MODE_CLASS (mode) == MODE_INT
5412 && mode > MAX_INTEGER_COMPUTATION_MODE)
5413 fatal ("unsupported wide integer operation");
5415 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5416 if (GET_MODE_CLASS (mode) == MODE_INT
5417 && mode > MAX_INTEGER_COMPUTATION_MODE)
5418 fatal ("unsupported wide integer operation");
5421 #endif
5424 /* expand_expr: generate code for computing expression EXP.
5425 An rtx for the computed value is returned. The value is never null.
5426 In the case of a void EXP, const0_rtx is returned.
5428 The value may be stored in TARGET if TARGET is nonzero.
5429 TARGET is just a suggestion; callers must assume that
5430 the rtx returned may not be the same as TARGET.
5432 If TARGET is CONST0_RTX, it means that the value will be ignored.
5434 If TMODE is not VOIDmode, it suggests generating the
5435 result in mode TMODE. But this is done only when convenient.
5436 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5437 TMODE is just a suggestion; callers must assume that
5438 the rtx returned may not have mode TMODE.
5440 Note that TARGET may have neither TMODE nor MODE. In that case, it
5441 probably will not be used.
5443 If MODIFIER is EXPAND_SUM then when EXP is an addition
5444 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5445 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5446 products as above, or REG or MEM, or constant.
5447 Ordinarily in such cases we would output mul or add instructions
5448 and then return a pseudo reg containing the sum.
5450 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5451 it also marks a label as absolutely required (it can't be dead).
5452 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5453 This is used for outputting expressions used in initializers.
5455 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5456 with a constant address even if that address is not normally legitimate.
5457 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
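/* Example (illustrative): expanding `a + b * 4' with EXPAND_SUM may
   return

       (plus:SI (reg:SI 100) (mult:SI (reg:SI 101) (const_int 4)))

   directly, leaving the caller (typically address computation) to use
   the composite as a memory address instead of forcing the sum into a
   new pseudo register.  */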
5459 rtx
5460 expand_expr (exp, target, tmode, modifier)
5461 register tree exp;
5462 rtx target;
5463 enum machine_mode tmode;
5464 enum expand_modifier modifier;
5466 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
5467 This is static so it will be accessible to our recursive callees. */
5468 static tree placeholder_list = 0;
5469 register rtx op0, op1, temp;
5470 tree type = TREE_TYPE (exp);
5471 int unsignedp = TREE_UNSIGNED (type);
5472 register enum machine_mode mode = TYPE_MODE (type);
5473 register enum tree_code code = TREE_CODE (exp);
5474 optab this_optab;
5475 /* Use subtarget as the target for operand 0 of a binary operation. */
5476 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5477 rtx original_target = target;
5478 int ignore = (target == const0_rtx
5479 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5480 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5481 || code == COND_EXPR)
5482 && TREE_CODE (type) == VOID_TYPE));
5483 tree context;
5484 /* Used by check-memory-usage to make modifier read only. */
5485 enum expand_modifier ro_modifier;
5487 /* Make a read-only version of the modifier. */
5488 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5489 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5490 ro_modifier = modifier;
5491 else
5492 ro_modifier = EXPAND_NORMAL;
5494 /* Don't use hard regs as subtargets, because the combiner
5495 can only handle pseudo regs. */
5496 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5497 subtarget = 0;
5498 /* Avoid subtargets inside loops,
5499 since they hide some invariant expressions. */
5500 if (preserve_subexpressions_p ())
5501 subtarget = 0;
5503 /* If we are going to ignore this result, we need only do something
5504 if there is a side-effect somewhere in the expression. If there
5505 is, short-circuit the most common cases here. Note that we must
5506 not call expand_expr with anything but const0_rtx in case this
5507 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5509 if (ignore)
5511 if (! TREE_SIDE_EFFECTS (exp))
5512 return const0_rtx;
5514 /* Ensure we reference a volatile object even if value is ignored. */
5515 if (TREE_THIS_VOLATILE (exp)
5516 && TREE_CODE (exp) != FUNCTION_DECL
5517 && mode != VOIDmode && mode != BLKmode)
5519 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5520 if (GET_CODE (temp) == MEM)
5521 temp = copy_to_reg (temp);
5522 return const0_rtx;
5525 if (TREE_CODE_CLASS (code) == '1')
5526 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5527 VOIDmode, ro_modifier);
5528 else if (TREE_CODE_CLASS (code) == '2'
5529 || TREE_CODE_CLASS (code) == '<')
5531 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5532 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5533 return const0_rtx;
5535 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5536 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5537 /* If the second operand has no side effects, just evaluate
5538 the first. */
5539 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5540 VOIDmode, ro_modifier);
5542 target = 0;
5545 #ifdef MAX_INTEGER_COMPUTATION_MODE
5546 if (target
5547 && TREE_CODE (exp) != INTEGER_CST
5548 && TREE_CODE (exp) != PARM_DECL
5549 && TREE_CODE (exp) != ARRAY_REF
5550 && TREE_CODE (exp) != COMPONENT_REF
5551 && TREE_CODE (exp) != BIT_FIELD_REF
5552 && TREE_CODE (exp) != INDIRECT_REF
5553 && TREE_CODE (exp) != VAR_DECL)
5555 enum machine_mode mode = GET_MODE (target);
5557 if (GET_MODE_CLASS (mode) == MODE_INT
5558 && mode > MAX_INTEGER_COMPUTATION_MODE)
5559 fatal ("unsupported wide integer operation");
5562 if (TREE_CODE (exp) != INTEGER_CST
5563 && TREE_CODE (exp) != PARM_DECL
5564 && TREE_CODE (exp) != ARRAY_REF
5565 && TREE_CODE (exp) != COMPONENT_REF
5566 && TREE_CODE (exp) != BIT_FIELD_REF
5567 && TREE_CODE (exp) != INDIRECT_REF
5568 && TREE_CODE (exp) != VAR_DECL
5569 && GET_MODE_CLASS (tmode) == MODE_INT
5570 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5571 fatal ("unsupported wide integer operation");
5573 check_max_integer_computation_mode (exp);
5574 #endif
5576 /* If we will do cse, generate all results into pseudo registers
5577 since 1) that allows cse to find more things
5578 and 2) otherwise cse could produce an insn the machine
5579 cannot support. */
5581 if (! cse_not_expected && mode != BLKmode && target
5582 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5583 target = subtarget;
5585 switch (code)
5587 case LABEL_DECL:
5589 tree function = decl_function_context (exp);
5590 /* Handle using a label in a containing function. */
5591 if (function != current_function_decl
5592 && function != inline_function_decl && function != 0)
5594 struct function *p = find_function_data (function);
5595 /* Allocate in the memory associated with the function
5596 that the label is in. */
5597 push_obstacks (p->function_obstack,
5598 p->function_maybepermanent_obstack);
5600 p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5601 label_rtx (exp),
5602 p->forced_labels);
5603 pop_obstacks ();
5605 else if (modifier == EXPAND_INITIALIZER)
5606 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5607 label_rtx (exp), forced_labels);
5608 temp = gen_rtx_MEM (FUNCTION_MODE,
5609 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5610 if (function != current_function_decl
5611 && function != inline_function_decl && function != 0)
5612 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5613 return temp;
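/* Illustrative note (added; not in the original source): with the usual C
   front end lowering, the LABEL_DECL case above is reached e.g. by the
   GNU C computed-goto extension, "void *p = &&lab;", where the ADDR_EXPR
   operand is a LABEL_DECL and the MEM/LABEL_REF built here supplies the
   label's address. */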
5616 case PARM_DECL:
5617 if (DECL_RTL (exp) == 0)
5619 error_with_decl (exp, "prior parameter's size depends on `%s'");
5620 return CONST0_RTX (mode);
5623 /* ... fall through ... */
5625 case VAR_DECL:
5626 /* If a static var's type was incomplete when the decl was written,
5627 but the type is complete now, lay out the decl now. */
5628 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5629 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5631 push_obstacks_nochange ();
5632 end_temporary_allocation ();
5633 layout_decl (exp, 0);
5634 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5635 pop_obstacks ();
5638 /* Although static-storage variables start off initialized, according to
5639 ANSI C, a memcpy could overwrite them with uninitialized values. So
5640 we check them too. This also lets us check for read-only variables
5641 accessed via a non-const declaration, in case it won't be detected
5642 any other way (e.g., in an embedded system or OS kernel without
5643 memory protection).
5645 Aggregates are not checked here; they're handled elsewhere. */
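/* Sketch (an assumption, not from the original source): the library call
   emitted below behaves roughly like a Checker runtime entry

       chkr_check_addr (&var, sizeof (var), memory_usage);

   where the exact function called is whatever chkr_check_addr_libfunc
   was initialized to. */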
5646 if (current_function_check_memory_usage && code == VAR_DECL
5647 && GET_CODE (DECL_RTL (exp)) == MEM
5648 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5650 enum memory_use_mode memory_usage;
5651 memory_usage = get_memory_usage_from_modifier (modifier);
5653 if (memory_usage != MEMORY_USE_DONT)
5654 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5655 XEXP (DECL_RTL (exp), 0), ptr_mode,
5656 GEN_INT (int_size_in_bytes (type)),
5657 TYPE_MODE (sizetype),
5658 GEN_INT (memory_usage),
5659 TYPE_MODE (integer_type_node));
5662 /* ... fall through ... */
5664 case FUNCTION_DECL:
5665 case RESULT_DECL:
5666 if (DECL_RTL (exp) == 0)
5667 abort ();
5669 /* Ensure the variable is marked as used even if it doesn't go through
5670 a parser. If it hasn't been used yet, write out an external
5671 definition. */
5672 if (! TREE_USED (exp))
5674 assemble_external (exp);
5675 TREE_USED (exp) = 1;
5678 /* Show we haven't gotten RTL for this yet. */
5679 temp = 0;
5681 /* Handle variables inherited from containing functions. */
5682 context = decl_function_context (exp);
5684 /* We treat inline_function_decl as an alias for the current function
5685 because that is the inline function whose vars, types, etc.
5686 are being merged into the current function.
5687 See expand_inline_function. */
5689 if (context != 0 && context != current_function_decl
5690 && context != inline_function_decl
5691 /* If var is static, we don't need a static chain to access it. */
5692 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5693 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5695 rtx addr;
5697 /* Mark as non-local and addressable. */
5698 DECL_NONLOCAL (exp) = 1;
5699 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5700 abort ();
5701 mark_addressable (exp);
5702 if (GET_CODE (DECL_RTL (exp)) != MEM)
5703 abort ();
5704 addr = XEXP (DECL_RTL (exp), 0);
5705 if (GET_CODE (addr) == MEM)
5706 addr = gen_rtx_MEM (Pmode,
5707 fix_lexical_addr (XEXP (addr, 0), exp));
5708 else
5709 addr = fix_lexical_addr (addr, exp);
5710 temp = change_address (DECL_RTL (exp), mode, addr);
5713 /* This is the case of an array whose size is to be determined
5714 from its initializer, while the initializer is still being parsed.
5715 See expand_decl. */
5717 else if (GET_CODE (DECL_RTL (exp)) == MEM
5718 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5719 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5720 XEXP (DECL_RTL (exp), 0));
5722 /* If DECL_RTL is memory, we are in the normal case: if either
5723 the address is not valid, or it is not a register and -fforce-addr
5724 is specified, get the address into a register. */
5726 else if (GET_CODE (DECL_RTL (exp)) == MEM
5727 && modifier != EXPAND_CONST_ADDRESS
5728 && modifier != EXPAND_SUM
5729 && modifier != EXPAND_INITIALIZER
5730 && (! memory_address_p (DECL_MODE (exp),
5731 XEXP (DECL_RTL (exp), 0))
5732 || (flag_force_addr
5733 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5734 temp = change_address (DECL_RTL (exp), VOIDmode,
5735 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5737 /* If we got something, return it. But first, set the alignment
5738 if the address is a register. */
5739 if (temp != 0)
5741 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5742 mark_reg_pointer (XEXP (temp, 0),
5743 DECL_ALIGN (exp) / BITS_PER_UNIT);
5745 return temp;
5748 /* If the mode of DECL_RTL does not match that of the decl, it
5749 must be a promoted value. We return a SUBREG of the wanted mode,
5750 but mark it so that we know that it was already extended. */
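/* Illustrative example (added; not in the original source): on a target
   whose PROMOTE_MODE widens sub-word types, a "short" variable may live
   in an SImode register while its tree mode is HImode; we then return
   (subreg:HI (reg:SI n) 0) with SUBREG_PROMOTED_VAR_P set, so later code
   knows the value is already sign- or zero-extended. */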
5752 if (GET_CODE (DECL_RTL (exp)) == REG
5753 && GET_MODE (DECL_RTL (exp)) != mode)
5755 /* Get the signedness used for this variable. Ensure we get the
5756 same mode we got when the variable was declared. */
5757 if (GET_MODE (DECL_RTL (exp))
5758 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5759 abort ();
5761 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5762 SUBREG_PROMOTED_VAR_P (temp) = 1;
5763 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5764 return temp;
5767 return DECL_RTL (exp);
5769 case INTEGER_CST:
5770 return immed_double_const (TREE_INT_CST_LOW (exp),
5771 TREE_INT_CST_HIGH (exp),
5772 mode);
5774 case CONST_DECL:
5775 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5776 EXPAND_MEMORY_USE_BAD);
5778 case REAL_CST:
5779 /* If optimized, generate immediate CONST_DOUBLE
5780 which will be turned into memory by reload if necessary.
5782 We used to force a register so that loop.c could see it. But
5783 this does not allow gen_* patterns to perform optimizations with
5784 the constants. It also produces two insns in cases like "x = 1.0;".
5785 On most machines, floating-point constants are not permitted in
5786 many insns, so we'd end up copying it to a register in any case.
5788 Now, we do the copying in expand_binop, if appropriate. */
5789 return immed_real_const (exp);
5791 case COMPLEX_CST:
5792 case STRING_CST:
5793 if (! TREE_CST_RTL (exp))
5794 output_constant_def (exp);
5796 /* TREE_CST_RTL probably contains a constant address.
5797 On RISC machines where a constant address isn't valid,
5798 make some insns to get that address into a register. */
5799 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5800 && modifier != EXPAND_CONST_ADDRESS
5801 && modifier != EXPAND_INITIALIZER
5802 && modifier != EXPAND_SUM
5803 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5804 || (flag_force_addr
5805 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5806 return change_address (TREE_CST_RTL (exp), VOIDmode,
5807 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5808 return TREE_CST_RTL (exp);
5810 case EXPR_WITH_FILE_LOCATION:
5812 rtx to_return;
5813 char *saved_input_filename = input_filename;
5814 int saved_lineno = lineno;
5815 input_filename = EXPR_WFL_FILENAME (exp);
5816 lineno = EXPR_WFL_LINENO (exp);
5817 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
5818 emit_line_note (input_filename, lineno);
5819 /* Possibly avoid switching back and forth here. */
5820 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
5821 input_filename = saved_input_filename;
5822 lineno = saved_lineno;
5823 return to_return;
5826 case SAVE_EXPR:
5827 context = decl_function_context (exp);
5829 /* If this SAVE_EXPR was at global context, assume we are an
5830 initialization function and move it into our context. */
5831 if (context == 0)
5832 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5834 /* We treat inline_function_decl as an alias for the current function
5835 because that is the inline function whose vars, types, etc.
5836 are being merged into the current function.
5837 See expand_inline_function. */
5838 if (context == current_function_decl || context == inline_function_decl)
5839 context = 0;
5841 /* If this is non-local, handle it. */
5842 if (context)
5844 /* The following call just exists to abort if the context is
5845 not of a containing function. */
5846 find_function_data (context);
5848 temp = SAVE_EXPR_RTL (exp);
5849 if (temp && GET_CODE (temp) == REG)
5851 put_var_into_stack (exp);
5852 temp = SAVE_EXPR_RTL (exp);
5854 if (temp == 0 || GET_CODE (temp) != MEM)
5855 abort ();
5856 return change_address (temp, mode,
5857 fix_lexical_addr (XEXP (temp, 0), exp));
5859 if (SAVE_EXPR_RTL (exp) == 0)
5861 if (mode == VOIDmode)
5862 temp = const0_rtx;
5863 else
5864 temp = assign_temp (type, 3, 0, 0);
5866 SAVE_EXPR_RTL (exp) = temp;
5867 if (!optimize && GET_CODE (temp) == REG)
5868 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
5869 save_expr_regs);
5871 /* If the mode of TEMP does not match that of the expression, it
5872 must be a promoted value. We pass store_expr a SUBREG of the
5873 wanted mode but mark it so that we know that it was already
5874 extended. Note that `unsignedp' was modified above in
5875 this case. */
5877 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5879 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5880 SUBREG_PROMOTED_VAR_P (temp) = 1;
5881 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5884 if (temp == const0_rtx)
5885 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5886 EXPAND_MEMORY_USE_BAD);
5887 else
5888 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5890 TREE_USED (exp) = 1;
5893 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5894 must be a promoted value. We return a SUBREG of the wanted mode,
5895 but mark it so that we know that it was already extended. */
5897 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5898 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5900 /* Compute the signedness and make the proper SUBREG. */
5901 promote_mode (type, mode, &unsignedp, 0);
5902 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5903 SUBREG_PROMOTED_VAR_P (temp) = 1;
5904 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5905 return temp;
5908 return SAVE_EXPR_RTL (exp);
5910 case UNSAVE_EXPR:
5912 rtx temp;
5913 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5914 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5915 return temp;
5918 case PLACEHOLDER_EXPR:
5920 tree placeholder_expr;
5922 /* If there is an object on the head of the placeholder list,
5923 see if some object in it is of type TYPE or a pointer to it. For
5924 further information, see tree.def. */
5925 for (placeholder_expr = placeholder_list;
5926 placeholder_expr != 0;
5927 placeholder_expr = TREE_CHAIN (placeholder_expr))
5929 tree need_type = TYPE_MAIN_VARIANT (type);
5930 tree object = 0;
5931 tree old_list = placeholder_list;
5932 tree elt;
5934 /* Find the outermost reference that is of the type we want.
5935 If none, see if any object has a type that is a pointer to
5936 the type we want. */
5937 for (elt = TREE_PURPOSE (placeholder_expr);
5938 elt != 0 && object == 0;
5939 elt
5940 = ((TREE_CODE (elt) == COMPOUND_EXPR
5941 || TREE_CODE (elt) == COND_EXPR)
5942 ? TREE_OPERAND (elt, 1)
5943 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5944 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5945 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5946 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5947 ? TREE_OPERAND (elt, 0) : 0))
5948 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5949 object = elt;
5951 for (elt = TREE_PURPOSE (placeholder_expr);
5952 elt != 0 && object == 0;
5953 elt
5954 = ((TREE_CODE (elt) == COMPOUND_EXPR
5955 || TREE_CODE (elt) == COND_EXPR)
5956 ? TREE_OPERAND (elt, 1)
5957 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5958 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5959 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5960 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5961 ? TREE_OPERAND (elt, 0) : 0))
5962 if (POINTER_TYPE_P (TREE_TYPE (elt))
5963 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
5964 == need_type))
5965 object = build1 (INDIRECT_REF, need_type, elt);
5967 if (object != 0)
5969 /* Expand this object, skipping the list entries before the one
5970 where it was found, in case it is also a PLACEHOLDER_EXPR.
5971 In that case, we want to translate it using subsequent
5972 entries. */
5973 placeholder_list = TREE_CHAIN (placeholder_expr);
5974 temp = expand_expr (object, original_target, tmode,
5975 ro_modifier);
5976 placeholder_list = old_list;
5977 return temp;
5982 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5983 abort ();
5985 case WITH_RECORD_EXPR:
5986 /* Put the object on the placeholder list, expand our first operand,
5987 and pop the list. */
5988 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5989 placeholder_list);
5990 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
5991 tmode, ro_modifier);
5992 placeholder_list = TREE_CHAIN (placeholder_list);
5993 return target;
5995 case GOTO_EXPR:
5996 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
5997 expand_goto (TREE_OPERAND (exp, 0));
5998 else
5999 expand_computed_goto (TREE_OPERAND (exp, 0));
6000 return const0_rtx;
6002 case EXIT_EXPR:
6003 expand_exit_loop_if_false (NULL_PTR,
6004 invert_truthvalue (TREE_OPERAND (exp, 0)));
6005 return const0_rtx;
6007 case LABELED_BLOCK_EXPR:
6008 if (LABELED_BLOCK_BODY (exp))
6009 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6010 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6011 return const0_rtx;
6013 case EXIT_BLOCK_EXPR:
6014 if (EXIT_BLOCK_RETURN (exp))
6015 really_sorry ("returned value in block_exit_expr");
6016 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6017 return const0_rtx;
6019 case LOOP_EXPR:
6020 push_temp_slots ();
6021 expand_start_loop (1);
6022 expand_expr_stmt (TREE_OPERAND (exp, 0));
6023 expand_end_loop ();
6024 pop_temp_slots ();
6026 return const0_rtx;
6028 case BIND_EXPR:
6030 tree vars = TREE_OPERAND (exp, 0);
6031 int vars_need_expansion = 0;
6033 /* Need to open a binding contour here because
6034 if there are any cleanups they must be contained here. */
6035 expand_start_bindings (0);
6037 /* Mark the corresponding BLOCK for output in its proper place. */
6038 if (TREE_OPERAND (exp, 2) != 0
6039 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6040 insert_block (TREE_OPERAND (exp, 2));
6042 /* If VARS have not yet been expanded, expand them now. */
6043 while (vars)
6045 if (DECL_RTL (vars) == 0)
6047 vars_need_expansion = 1;
6048 expand_decl (vars);
6050 expand_decl_init (vars);
6051 vars = TREE_CHAIN (vars);
6054 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6056 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6058 return temp;
6061 case RTL_EXPR:
6062 if (RTL_EXPR_SEQUENCE (exp))
6064 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6065 abort ();
6066 emit_insns (RTL_EXPR_SEQUENCE (exp));
6067 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6069 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6070 free_temps_for_rtl_expr (exp);
6071 return RTL_EXPR_RTL (exp);
6073 case CONSTRUCTOR:
6074 /* If we don't need the result, just ensure we evaluate any
6075 subexpressions. */
6076 if (ignore)
6078 tree elt;
6079 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6080 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6081 EXPAND_MEMORY_USE_BAD);
6082 return const0_rtx;
6085 /* All elts simple constants => refer to a constant in memory. But
6086 if this is a non-BLKmode mode, let it store a field at a time
6087 since that should make a CONST_INT or CONST_DOUBLE when we
6088 fold. Likewise, if we have a target we can use, it is best to
6089 store directly into the target unless the type is large enough
6090 that memcpy will be used. If we are making an initializer and
6091 all operands are constant, put it in memory as well. */
6092 else if ((TREE_STATIC (exp)
6093 && ((mode == BLKmode
6094 && ! (target != 0 && safe_from_p (target, exp, 1)))
6095 || TREE_ADDRESSABLE (exp)
6096 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
6097 && (!MOVE_BY_PIECES_P
6098 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
6099 TYPE_ALIGN (type) / BITS_PER_UNIT))
6100 && ! mostly_zeros_p (exp))))
6101 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6103 rtx constructor = output_constant_def (exp);
6104 if (modifier != EXPAND_CONST_ADDRESS
6105 && modifier != EXPAND_INITIALIZER
6106 && modifier != EXPAND_SUM
6107 && (! memory_address_p (GET_MODE (constructor),
6108 XEXP (constructor, 0))
6109 || (flag_force_addr
6110 && GET_CODE (XEXP (constructor, 0)) != REG)))
6111 constructor = change_address (constructor, VOIDmode,
6112 XEXP (constructor, 0));
6113 return constructor;
6116 else
6118 /* Handle calls that pass values in multiple non-contiguous
6119 locations. The Irix 6 ABI has examples of this. */
6120 if (target == 0 || ! safe_from_p (target, exp, 1)
6121 || GET_CODE (target) == PARALLEL)
6123 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6124 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6125 else
6126 target = assign_temp (type, 0, 1, 1);
6129 if (TREE_READONLY (exp))
6131 if (GET_CODE (target) == MEM)
6132 target = copy_rtx (target);
6134 RTX_UNCHANGING_P (target) = 1;
6137 store_constructor (exp, target, 0);
6138 return target;
6141 case INDIRECT_REF:
6143 tree exp1 = TREE_OPERAND (exp, 0);
6144 tree exp2;
6145 tree index;
6146 tree string = string_constant (exp1, &index);
6147 int i;
6149 /* Try to optimize reads from const strings. */
6150 if (string
6151 && TREE_CODE (string) == STRING_CST
6152 && TREE_CODE (index) == INTEGER_CST
6153 && !TREE_INT_CST_HIGH (index)
6154 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
6155 && GET_MODE_CLASS (mode) == MODE_INT
6156 && GET_MODE_SIZE (mode) == 1
6157 && modifier != EXPAND_MEMORY_USE_WO)
6158 return GEN_INT (TREE_STRING_POINTER (string)[i]);
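/* Illustrative example (added; not in the original source): a read such
   as "hello"[1], i.e. *("hello" + 1), satisfies the tests above and is
   expanded directly to GEN_INT ('e') with no memory reference. */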
6160 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6161 op0 = memory_address (mode, op0);
6163 if (current_function_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6165 enum memory_use_mode memory_usage;
6166 memory_usage = get_memory_usage_from_modifier (modifier);
6168 if (memory_usage != MEMORY_USE_DONT)
6170 in_check_memory_usage = 1;
6171 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6172 op0, ptr_mode,
6173 GEN_INT (int_size_in_bytes (type)),
6174 TYPE_MODE (sizetype),
6175 GEN_INT (memory_usage),
6176 TYPE_MODE (integer_type_node));
6177 in_check_memory_usage = 0;
6181 temp = gen_rtx_MEM (mode, op0);
6182 /* If address was computed by addition,
6183 mark this as an element of an aggregate. */
6184 if (TREE_CODE (exp1) == PLUS_EXPR
6185 || (TREE_CODE (exp1) == SAVE_EXPR
6186 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6187 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6188 || (TREE_CODE (exp1) == ADDR_EXPR
6189 && (exp2 = TREE_OPERAND (exp1, 0))
6190 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6191 MEM_SET_IN_STRUCT_P (temp, 1);
6193 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6194 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6196 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6197 here, because, in C and C++, the fact that a location is accessed
6198 through a pointer to const does not mean that the value there can
6199 never change. Languages where it can never change should
6200 also set TREE_STATIC. */
6201 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6202 return temp;
6205 case ARRAY_REF:
6206 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6207 abort ();
6210 tree array = TREE_OPERAND (exp, 0);
6211 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6212 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6213 tree index = TREE_OPERAND (exp, 1);
6214 tree index_type = TREE_TYPE (index);
6215 HOST_WIDE_INT i;
6217 /* Optimize the special case of a zero lower bound.
6219 We convert the low_bound to sizetype to avoid some problems
6220 with constant folding. (E.g. suppose the lower bound is 1,
6221 and its mode is QI. Without the conversion, (ARRAY
6222 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6223 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
6225 But sizetype isn't quite right either (especially if
6226 the lowbound is negative). FIXME */
6228 if (! integer_zerop (low_bound))
6229 index = fold (build (MINUS_EXPR, index_type, index,
6230 convert (sizetype, low_bound)));
6232 /* Fold an expression like: "foo"[2].
6233 This is not done in fold so it won't happen inside &.
6234 Don't fold if this is for wide characters since it's too
6235 difficult to do correctly and this is a very rare case. */
6237 if (TREE_CODE (array) == STRING_CST
6238 && TREE_CODE (index) == INTEGER_CST
6239 && !TREE_INT_CST_HIGH (index)
6240 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
6241 && GET_MODE_CLASS (mode) == MODE_INT
6242 && GET_MODE_SIZE (mode) == 1)
6243 return GEN_INT (TREE_STRING_POINTER (array)[i]);
6245 /* If this is a constant index into a constant array,
6246 just get the value from the array. Handle both the cases when
6247 we have an explicit constructor and when our operand is a variable
6248 that was declared const. */
6250 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
6252 if (TREE_CODE (index) == INTEGER_CST
6253 && TREE_INT_CST_HIGH (index) == 0)
6255 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
6257 i = TREE_INT_CST_LOW (index);
6258 while (elem && i--)
6259 elem = TREE_CHAIN (elem);
6260 if (elem)
6261 return expand_expr (fold (TREE_VALUE (elem)), target,
6262 tmode, ro_modifier);
6266 else if (optimize >= 1
6267 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6268 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6269 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6271 if (TREE_CODE (index) == INTEGER_CST)
6273 tree init = DECL_INITIAL (array);
6275 i = TREE_INT_CST_LOW (index);
6276 if (TREE_CODE (init) == CONSTRUCTOR)
6278 tree elem = CONSTRUCTOR_ELTS (init);
6280 while (elem
6281 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
6282 elem = TREE_CHAIN (elem);
6283 if (elem)
6284 return expand_expr (fold (TREE_VALUE (elem)), target,
6285 tmode, ro_modifier);
6287 else if (TREE_CODE (init) == STRING_CST
6288 && TREE_INT_CST_HIGH (index) == 0
6289 && (TREE_INT_CST_LOW (index)
6290 < TREE_STRING_LENGTH (init)))
6291 return (GEN_INT
6292 (TREE_STRING_POINTER
6293 (init)[TREE_INT_CST_LOW (index)]));
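/* Illustrative example (added; not in the original source): when
   optimizing, a read of t[2] where "static const int t[] = {0, 1, 4};"
   is folded here to the constant 4 straight from DECL_INITIAL, with no
   load from memory. */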
6298 /* ... fall through ... */
6300 case COMPONENT_REF:
6301 case BIT_FIELD_REF:
6302 /* If the operand is a CONSTRUCTOR, we can just extract the
6303 appropriate field if it is present. Don't do this if we have
6304 already written the data since we want to refer to that copy
6305 and varasm.c assumes that's what we'll do. */
6306 if (code != ARRAY_REF
6307 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6308 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6310 tree elt;
6312 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6313 elt = TREE_CHAIN (elt))
6314 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6315 /* We can normally use the value of the field in the
6316 CONSTRUCTOR. However, if this is a bitfield in
6317 an integral mode that we can fit in a HOST_WIDE_INT,
6318 we must mask only the number of bits in the bitfield,
6319 since this is done implicitly by the constructor. If
6320 the bitfield does not meet either of those conditions,
6321 we can't do this optimization. */
6322 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6323 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6324 == MODE_INT)
6325 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6326 <= HOST_BITS_PER_WIDE_INT))))
6328 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6329 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6331 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
6333 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6335 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6336 op0 = expand_and (op0, op1, target);
6338 else
6340 enum machine_mode imode
6341 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6342 tree count
6343 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6344 0);
6346 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6347 target, 0);
6348 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6349 target, 0);
6353 return op0;
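/* Illustrative example (added; not in the original source): extracting an
   unsigned 3-bit field from a CONSTRUCTOR masks the stored value with
   (1 << 3) - 1 = 7; a signed 3-bit field is instead shifted left and then
   arithmetically right by GET_MODE_BITSIZE (imode) - 3, so the value is
   correctly sign-extended, as the code above does. */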
6358 enum machine_mode mode1;
6359 int bitsize;
6360 int bitpos;
6361 tree offset;
6362 int volatilep = 0;
6363 int alignment;
6364 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6365 &mode1, &unsignedp, &volatilep,
6366 &alignment);
6368 /* If we got back the original object, something is wrong. Perhaps
6369 we are evaluating an expression too early. In any event, don't
6370 infinitely recurse. */
6371 if (tem == exp)
6372 abort ();
6374 /* If TEM's type is a union of variable size, pass TARGET to the inner
6375 computation, since it will need a temporary and TARGET is known
6376 to be safe to use. This occurs in unchecked conversion in Ada. */
6378 op0 = expand_expr (tem,
6379 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6380 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6381 != INTEGER_CST)
6382 ? target : NULL_RTX),
6383 VOIDmode,
6384 modifier == EXPAND_INITIALIZER
6385 ? modifier : EXPAND_NORMAL);
6387 /* If this is a constant, put it into a register if it is a
6388 legitimate constant and memory if it isn't. */
6389 if (CONSTANT_P (op0))
6391 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6392 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
6393 op0 = force_reg (mode, op0);
6394 else
6395 op0 = validize_mem (force_const_mem (mode, op0));
6398 if (offset != 0)
6400 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6402 if (GET_CODE (op0) != MEM)
6403 abort ();
6405 if (GET_MODE (offset_rtx) != ptr_mode)
6407 #ifdef POINTERS_EXTEND_UNSIGNED
6408 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6409 #else
6410 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6411 #endif
6414 if (GET_CODE (op0) == MEM
6415 && GET_MODE (op0) == BLKmode
6416 && bitsize
6417 && (bitpos % bitsize) == 0
6418 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6419 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6421 rtx temp = change_address (op0, mode1,
6422 plus_constant (XEXP (op0, 0),
6423 (bitpos /
6424 BITS_PER_UNIT)));
6425 if (GET_CODE (XEXP (temp, 0)) == REG)
6426 op0 = temp;
6427 else
6428 op0 = change_address (op0, mode1,
6429 force_reg (GET_MODE (XEXP (temp, 0)),
6430 XEXP (temp, 0)));
6431 bitpos = 0;
6435 op0 = change_address (op0, VOIDmode,
6436 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6437 force_reg (ptr_mode, offset_rtx)));
6440 /* Don't forget about volatility even if this is a bitfield. */
6441 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6443 op0 = copy_rtx (op0);
6444 MEM_VOLATILE_P (op0) = 1;
6447 /* Check the access. */
6448 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
6450 enum memory_use_mode memory_usage;
6451 memory_usage = get_memory_usage_from_modifier (modifier);
6453 if (memory_usage != MEMORY_USE_DONT)
6455 rtx to;
6456 int size;
6458 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6459 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6461 /* Check the access right of the pointer. */
6462 if (size > BITS_PER_UNIT)
6463 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6464 to, ptr_mode,
6465 GEN_INT (size / BITS_PER_UNIT),
6466 TYPE_MODE (sizetype),
6467 GEN_INT (memory_usage),
6468 TYPE_MODE (integer_type_node));
6472 /* In cases where an aligned union has an unaligned object
6473 as a field, we might be extracting a BLKmode value from
6474 an integer-mode (e.g., SImode) object. Handle this case
6475 by doing the extract into an object as wide as the field
6476 (which we know to be the width of a basic mode), then
6477 storing into memory, and changing the mode to BLKmode.
6478 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6479 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6480 if (mode1 == VOIDmode
6481 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6482 || (modifier != EXPAND_CONST_ADDRESS
6483 && modifier != EXPAND_INITIALIZER
6484 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6485 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6486 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6487 /* If the field isn't aligned enough to fetch as a memref,
6488 fetch it as a bit field. */
6489 || (SLOW_UNALIGNED_ACCESS
6490 && ((TYPE_ALIGN (TREE_TYPE (tem)) < (unsigned int) GET_MODE_ALIGNMENT (mode))
6491 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
6493 enum machine_mode ext_mode = mode;
6495 if (ext_mode == BLKmode)
6496 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6498 if (ext_mode == BLKmode)
6500 /* In this case, BITPOS must start at a byte boundary and
6501 TARGET, if specified, must be a MEM. */
6502 if (GET_CODE (op0) != MEM
6503 || (target != 0 && GET_CODE (target) != MEM)
6504 || bitpos % BITS_PER_UNIT != 0)
6505 abort ();
6507 op0 = change_address (op0, VOIDmode,
6508 plus_constant (XEXP (op0, 0),
6509 bitpos / BITS_PER_UNIT));
6510 if (target == 0)
6511 target = assign_temp (type, 0, 1, 1);
6513 emit_block_move (target, op0,
6514 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6515 / BITS_PER_UNIT),
6516 1);
6518 return target;
6521 op0 = validize_mem (op0);
6523 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6524 mark_reg_pointer (XEXP (op0, 0), alignment);
6526 op0 = extract_bit_field (op0, bitsize, bitpos,
6527 unsignedp, target, ext_mode, ext_mode,
6528 alignment,
6529 int_size_in_bytes (TREE_TYPE (tem)));
6531 /* If the result is a record type and BITSIZE is narrower than
6532 the mode of OP0, an integral mode, and this is a big endian
6533 machine, we must put the field into the high-order bits. */
6534 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6535 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6536 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6537 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6538 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6539 - bitsize),
6540 op0, 1);
6542 if (mode == BLKmode)
6544 rtx new = assign_stack_temp (ext_mode,
6545 bitsize / BITS_PER_UNIT, 0);
6547 emit_move_insn (new, op0);
6548 op0 = copy_rtx (new);
6549 PUT_MODE (op0, BLKmode);
6550 MEM_SET_IN_STRUCT_P (op0, 1);
6553 return op0;
6556 /* If the result is BLKmode, use that to access the object
6557 now as well. */
6558 if (mode == BLKmode)
6559 mode1 = BLKmode;
6561 /* Get a reference to just this component. */
6562 if (modifier == EXPAND_CONST_ADDRESS
6563 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6564 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6565 (bitpos / BITS_PER_UNIT)));
6566 else
6567 op0 = change_address (op0, mode1,
6568 plus_constant (XEXP (op0, 0),
6569 (bitpos / BITS_PER_UNIT)));
6571 if (GET_CODE (op0) == MEM)
6572 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6574 if (GET_CODE (XEXP (op0, 0)) == REG)
6575 mark_reg_pointer (XEXP (op0, 0), alignment);
6577 MEM_SET_IN_STRUCT_P (op0, 1);
6578 MEM_VOLATILE_P (op0) |= volatilep;
6579 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6580 || modifier == EXPAND_CONST_ADDRESS
6581 || modifier == EXPAND_INITIALIZER)
6582 return op0;
6583 else if (target == 0)
6584 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6586 convert_move (target, op0, unsignedp);
6587 return target;
6590 /* Intended for a reference to a buffer of a file-object in Pascal.
6591 But it's not certain that a special tree code will really be
6592 necessary for these. INDIRECT_REF might work for them. */
6593 case BUFFER_REF:
6594 abort ();
6596 case IN_EXPR:
6598 /* Pascal set IN expression.
6600 Algorithm:
6601 rlo = set_low - (set_low%bits_per_word);
6602 the_word = set [ (index - rlo)/bits_per_word ];
6603 bit_index = index % bits_per_word;
6604 bitmask = 1 << bit_index;
6605 return !!(the_word & bitmask); */
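/* Note (added; not in the original source): although the sketch above
   speaks of words, the expansion below actually works in BITS_PER_UNIT
   (byte) quanta (the expand_divmod calls use GEN_INT (BITS_PER_UNIT)),
   so "the_word" is really a byte load in byte_mode. */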
6607 tree set = TREE_OPERAND (exp, 0);
6608 tree index = TREE_OPERAND (exp, 1);
6609 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6610 tree set_type = TREE_TYPE (set);
6611 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6612 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6613 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6614 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6615 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6616 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6617 rtx setaddr = XEXP (setval, 0);
6618 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6619 rtx rlow;
6620 rtx diff, quo, rem, addr, bit, result;
6622 preexpand_calls (exp);
6624 /* If domain is empty, answer is no. Likewise if index is constant
6625 and out of bounds. */
6626 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6627 && TREE_CODE (set_low_bound) == INTEGER_CST
6628 && tree_int_cst_lt (set_high_bound, set_low_bound))
6629 || (TREE_CODE (index) == INTEGER_CST
6630 && TREE_CODE (set_low_bound) == INTEGER_CST
6631 && tree_int_cst_lt (index, set_low_bound))
6632 || (TREE_CODE (set_high_bound) == INTEGER_CST
6633 && TREE_CODE (index) == INTEGER_CST
6634 && tree_int_cst_lt (set_high_bound, index))))
6635 return const0_rtx;
6637 if (target == 0)
6638 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6640 /* If we get here, we have to generate the code for both cases
6641 (in range and out of range). */
6643 op0 = gen_label_rtx ();
6644 op1 = gen_label_rtx ();
6646 if (! (GET_CODE (index_val) == CONST_INT
6647 && GET_CODE (lo_r) == CONST_INT))
6649 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
6650 GET_MODE (index_val), iunsignedp, 0);
6651 emit_jump_insn (gen_blt (op1));
6654 if (! (GET_CODE (index_val) == CONST_INT
6655 && GET_CODE (hi_r) == CONST_INT))
6657 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
6658 GET_MODE (index_val), iunsignedp, 0);
6659 emit_jump_insn (gen_bgt (op1));
6662 /* Calculate the element number of bit zero in the first word
6663 of the set. */
6664 if (GET_CODE (lo_r) == CONST_INT)
6665 rlow = GEN_INT (INTVAL (lo_r)
6666 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6667 else
6668 rlow = expand_binop (index_mode, and_optab, lo_r,
6669 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6670 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6672 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6673 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6675 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6676 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6677 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6678 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6680 addr = memory_address (byte_mode,
6681 expand_binop (index_mode, add_optab, diff,
6682 setaddr, NULL_RTX, iunsignedp,
6683 OPTAB_LIB_WIDEN));
6685 /* Extract the bit we want to examine. */
6686 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6687 gen_rtx_MEM (byte_mode, addr),
6688 make_tree (TREE_TYPE (index), rem),
6689 NULL_RTX, 1);
6690 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6691 GET_MODE (target) == byte_mode ? target : 0,
6692 1, OPTAB_LIB_WIDEN);
6694 if (result != target)
6695 convert_move (target, result, 1);
6697 /* Output the code to handle the out-of-range case. */
6698 emit_jump (op0);
6699 emit_label (op1);
6700 emit_move_insn (target, const0_rtx);
6701 emit_label (op0);
6702 return target;
6705 case WITH_CLEANUP_EXPR:
6706 if (RTL_EXPR_RTL (exp) == 0)
6708 RTL_EXPR_RTL (exp)
6709 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6710 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6712 /* That's it for this cleanup. */
6713 TREE_OPERAND (exp, 2) = 0;
6715 return RTL_EXPR_RTL (exp);
6717 case CLEANUP_POINT_EXPR:
6719 extern int temp_slot_level;
6720 /* Start a new binding layer that will keep track of all cleanup
6721 actions to be performed. */
6722 expand_start_bindings (0);
6724 target_temp_slot_level = temp_slot_level;
6726 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6727 /* If we're going to use this value, load it up now. */
6728 if (! ignore)
6729 op0 = force_not_mem (op0);
6730 preserve_temp_slots (op0);
6731 expand_end_bindings (NULL_TREE, 0, 0);
6733 return op0;
6735 case CALL_EXPR:
6736 /* Check for a built-in function. */
6737 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6738 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6739 == FUNCTION_DECL)
6740 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6741 return expand_builtin (exp, target, subtarget, tmode, ignore);
6743 /* If this call was expanded already by preexpand_calls,
6744 just return the result we got. */
6745 if (CALL_EXPR_RTL (exp) != 0)
6746 return CALL_EXPR_RTL (exp);
6748 return expand_call (exp, target, ignore);
6750 case NON_LVALUE_EXPR:
6751 case NOP_EXPR:
6752 case CONVERT_EXPR:
6753 case REFERENCE_EXPR:
6754 if (TREE_CODE (type) == UNION_TYPE)
6756 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6757 if (target == 0)
6759 if (mode != BLKmode)
6760 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6761 else
6762 target = assign_temp (type, 0, 1, 1);
6765 if (GET_CODE (target) == MEM)
6766 /* Store data into beginning of memory target. */
6767 store_expr (TREE_OPERAND (exp, 0),
6768 change_address (target, TYPE_MODE (valtype), 0), 0);
6770 else if (GET_CODE (target) == REG)
6771 /* Store this field into a union of the proper type. */
6772 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6773 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6774 VOIDmode, 0, 1,
6775 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))),
6776 0);
6777 else
6778 abort ();
6780 /* Return the entire union. */
6781 return target;
6784 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6786 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
6787 ro_modifier);
6789 /* If the signedness of the conversion differs and OP0 is
6790 a promoted SUBREG, clear that indication since we now
6791 have to do the proper extension. */
6792 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6793 && GET_CODE (op0) == SUBREG)
6794 SUBREG_PROMOTED_VAR_P (op0) = 0;
6796 return op0;
6799 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6800 if (GET_MODE (op0) == mode)
6801 return op0;
6803 /* If OP0 is a constant, just convert it into the proper mode. */
6804 if (CONSTANT_P (op0))
6805 return
6806 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6807 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6809 if (modifier == EXPAND_INITIALIZER)
6810 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
6812 if (target == 0)
6813 return
6814 convert_to_mode (mode, op0,
6815 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6816 else
6817 convert_move (target, op0,
6818 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6819 return target;
6821 case PLUS_EXPR:
6822 /* We come here from MINUS_EXPR when the second operand is a
6823 constant. */
6824 plus_expr:
6825 this_optab = add_optab;
6827 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6828 something else, make sure we add the register to the constant and
6829 then to the other thing. This case can occur during strength
6830 reduction and doing it this way will produce better code if the
6831 frame pointer or argument pointer is eliminated.
6833 fold-const.c will ensure that the constant is always in the inner
6834 PLUS_EXPR, so the only case we need to do anything about is if
6835 sp, ap, or fp is our second argument, in which case we must swap
6836 the innermost first argument and our second argument. */
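/* Illustrative example (added; not in the original source): given
   (x + 4) + fp after strength reduction, the swap below rewrites it as
   (fp + 4) + x, so the constant can combine with the frame pointer when
   the frame pointer is eliminated. */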
6838 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6839 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6840 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6841 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6842 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6843 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6845 tree t = TREE_OPERAND (exp, 1);
6847 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6848 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6851 /* If the result is to be ptr_mode and we are adding an integer to
6852 something, we might be forming a constant. So try to use
6853 plus_constant. If it produces a sum and we can't accept it,
6854 use force_operand. This allows P = &ARR[const] to generate
6855 efficient code on machines where a SYMBOL_REF is not a valid
6856 address.
6858 If this is an EXPAND_SUM call, always return the sum. */
6859 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
6860 || mode == ptr_mode)
6862 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6863 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6864 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6866 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6867 EXPAND_SUM);
6868 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
6869 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6870 op1 = force_operand (op1, target);
6871 return op1;
6874 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6875 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
6876 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6878 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6879 EXPAND_SUM);
6880 if (! CONSTANT_P (op0))
6882 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6883 VOIDmode, modifier);
6884 /* Don't go to both_summands if modifier
6885 says it's not right to return a PLUS. */
6886 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6887 goto binop2;
6888 goto both_summands;
6890 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
6891 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6892 op0 = force_operand (op0, target);
6893 return op0;
6897 /* No sense saving up arithmetic to be done
6898 if it's all in the wrong mode to form part of an address.
6899 And force_operand won't know whether to sign-extend or
6900 zero-extend. */
6901 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6902 || mode != ptr_mode)
6903 goto binop;
6905 preexpand_calls (exp);
6906 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6907 subtarget = 0;
6909 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
6910 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
6912 both_summands:
6913 /* Make sure any term that's a sum with a constant comes last. */
6914 if (GET_CODE (op0) == PLUS
6915 && CONSTANT_P (XEXP (op0, 1)))
6917 temp = op0;
6918 op0 = op1;
6919 op1 = temp;
6921 /* If adding to a sum including a constant,
6922 associate it to put the constant outside. */
6923 if (GET_CODE (op1) == PLUS
6924 && CONSTANT_P (XEXP (op1, 1)))
6926 rtx constant_term = const0_rtx;
6928 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6929 if (temp != 0)
6930 op0 = temp;
6931 /* Ensure that MULT comes first if there is one. */
6932 else if (GET_CODE (op0) == MULT)
6933 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
6934 else
6935 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
6937 /* Let's also eliminate constants from op0 if possible. */
6938 op0 = eliminate_constant_term (op0, &constant_term);
6940 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6941 their sum should be a constant. Form it into OP1, since the
6942 result we want will then be OP0 + OP1. */
6944 temp = simplify_binary_operation (PLUS, mode, constant_term,
6945 XEXP (op1, 1));
6946 if (temp != 0)
6947 op1 = temp;
6948 else
6949 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
6952 /* Put a constant term last and put a multiplication first. */
6953 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6954 temp = op1, op1 = op0, op0 = temp;
6956 temp = simplify_binary_operation (PLUS, mode, op0, op1);
6957 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
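/* Illustrative example (added; not in the original source): under
   EXPAND_SUM, expanding (a + 3) + (b + 9) lets the code above associate
   the two constants, so the result can end up as (plus (plus a b) 12). */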
6959 case MINUS_EXPR:
6960 /* For initializers, we are allowed to return a MINUS of two
6961 symbolic constants. Here we handle all cases when both operands
6962 are constant. */
6963 /* Handle difference of two symbolic constants,
6964 for the sake of an initializer. */
6965 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6966 && really_constant_p (TREE_OPERAND (exp, 0))
6967 && really_constant_p (TREE_OPERAND (exp, 1)))
6969 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
6970 VOIDmode, ro_modifier);
6971 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6972 VOIDmode, ro_modifier);
6974 /* If the last operand is a CONST_INT, use plus_constant of
6975 the negated constant. Else make the MINUS. */
6976 if (GET_CODE (op1) == CONST_INT)
6977 return plus_constant (op0, - INTVAL (op1));
6978 else
6979 return gen_rtx_MINUS (mode, op0, op1);
6981 /* Convert A - const to A + (-const). */
6982 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6984 tree negated = fold (build1 (NEGATE_EXPR, type,
6985 TREE_OPERAND (exp, 1)));
6987 /* Deal with the case where we can't negate the constant
6988 in TYPE. */
6989 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6991 tree newtype = signed_type (type);
6992 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6993 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6994 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6996 if (! TREE_OVERFLOW (newneg))
6997 return expand_expr (convert (type,
6998 build (PLUS_EXPR, newtype,
6999 newop0, newneg)),
7000 target, tmode, ro_modifier);
7002 else
7004 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7005 goto plus_expr;
7008 this_optab = sub_optab;
7009 goto binop;
7011 case MULT_EXPR:
7012 preexpand_calls (exp);
7013 /* If first operand is constant, swap them.
7014 Thus the following special case checks need only
7015 check the second operand. */
7016 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7018 register tree t1 = TREE_OPERAND (exp, 0);
7019 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7020 TREE_OPERAND (exp, 1) = t1;
7023 /* Attempt to return something suitable for generating an
7024 indexed address, for machines that support that. */
7026 if (modifier == EXPAND_SUM && mode == ptr_mode
7027 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7028 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7030 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7031 EXPAND_SUM);
7033 /* Apply distributive law if OP0 is x+c. */
7034 if (GET_CODE (op0) == PLUS
7035 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7036 return gen_rtx_PLUS (mode,
7037 gen_rtx_MULT (mode, XEXP (op0, 0),
7038 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7039 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7040 * INTVAL (XEXP (op0, 1))));
7042 if (GET_CODE (op0) != REG)
7043 op0 = force_operand (op0, NULL_RTX);
7044 if (GET_CODE (op0) != REG)
7045 op0 = copy_to_mode_reg (mode, op0);
7047 return gen_rtx_MULT (mode, op0,
7048 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7051 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7052 subtarget = 0;
7054 /* Check for multiplying things that have been extended
7055 from a narrower type. If this machine supports multiplying
7056 in that narrower type with a result in the desired type,
7057 do it that way, and avoid the explicit type-conversion. */
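/* Illustrative example (added; not in the original source): for
   "short a, b; ... (int) a * (int) b", a target providing a widening
   multiply pattern such as mulhisi3 lets us emit one HImode-to-SImode
   multiply instead of extending both operands to SImode first. */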
7058 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7059 && TREE_CODE (type) == INTEGER_TYPE
7060 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7061 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7062 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7063 && int_fits_type_p (TREE_OPERAND (exp, 1),
7064 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7065 /* Don't use a widening multiply if a shift will do. */
7066 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7067 > HOST_BITS_PER_WIDE_INT)
7068 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7069 ||
7070 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7071 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7072 ==
7073 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7074 /* If both operands are extended, they must either both
7075 be zero-extended or both be sign-extended. */
7076 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7077 ==
7078 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7080 enum machine_mode innermode
7081 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7082 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7083 ? smul_widen_optab : umul_widen_optab);
7084 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7085 ? umul_widen_optab : smul_widen_optab);
7086 if (mode == GET_MODE_WIDER_MODE (innermode))
7088 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7090 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7091 NULL_RTX, VOIDmode, 0);
7092 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7093 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7094 VOIDmode, 0);
7095 else
7096 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7097 NULL_RTX, VOIDmode, 0);
7098 goto binop2;
7100 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7101 && innermode == word_mode)
7103 rtx htem;
7104 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7105 NULL_RTX, VOIDmode, 0);
7106 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7107 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7108 VOIDmode, 0);
7109 else
7110 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7111 NULL_RTX, VOIDmode, 0);
7112 temp = expand_binop (mode, other_optab, op0, op1, target,
7113 unsignedp, OPTAB_LIB_WIDEN);
7114 htem = expand_mult_highpart_adjust (innermode,
7115 gen_highpart (innermode, temp),
7116 op0, op1,
7117 gen_highpart (innermode, temp),
7118 unsignedp);
7119 emit_move_insn (gen_highpart (innermode, temp), htem);
7120 return temp;
7124 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7125 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7126 return expand_mult (mode, op0, op1, target, unsignedp);
7128 case TRUNC_DIV_EXPR:
7129 case FLOOR_DIV_EXPR:
7130 case CEIL_DIV_EXPR:
7131 case ROUND_DIV_EXPR:
7132 case EXACT_DIV_EXPR:
7133 preexpand_calls (exp);
7134 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7135 subtarget = 0;
7136 /* Possible optimization: compute the dividend with EXPAND_SUM;
7137 then, if the divisor is constant, we can optimize the case
7138 where some terms of the dividend have coefficients divisible by it. */
7139 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7140 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7141 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7143 case RDIV_EXPR:
7144 this_optab = flodiv_optab;
7145 goto binop;
7147 case TRUNC_MOD_EXPR:
7148 case FLOOR_MOD_EXPR:
7149 case CEIL_MOD_EXPR:
7150 case ROUND_MOD_EXPR:
7151 preexpand_calls (exp);
7152 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7153 subtarget = 0;
7154 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7155 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7156 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7158 case FIX_ROUND_EXPR:
7159 case FIX_FLOOR_EXPR:
7160 case FIX_CEIL_EXPR:
7161 abort (); /* Not used for C. */
7163 case FIX_TRUNC_EXPR:
7164 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7165 if (target == 0)
7166 target = gen_reg_rtx (mode);
7167 expand_fix (target, op0, unsignedp);
7168 return target;
7170 case FLOAT_EXPR:
7171 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7172 if (target == 0)
7173 target = gen_reg_rtx (mode);
7174 /* expand_float can't figure out what to do if FROM has VOIDmode.
7175 So give it the correct mode. With -O, cse will optimize this. */
7176 if (GET_MODE (op0) == VOIDmode)
7177 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7178 op0);
7179 expand_float (target, op0,
7180 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7181 return target;
7183 case NEGATE_EXPR:
7184 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7185 temp = expand_unop (mode, neg_optab, op0, target, 0);
7186 if (temp == 0)
7187 abort ();
7188 return temp;
7190 case ABS_EXPR:
7191 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7193 /* Handle complex values specially. */
7194 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7195 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7196 return expand_complex_abs (mode, op0, target, unsignedp);
7198 /* Unsigned abs is simply the operand. Testing here means we don't
7199 risk generating incorrect code below. */
7200 if (TREE_UNSIGNED (type))
7201 return op0;
7203 return expand_abs (mode, op0, target, unsignedp,
7204 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7206 case MAX_EXPR:
7207 case MIN_EXPR:
7208 target = original_target;
7209 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7210 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7211 || GET_MODE (target) != mode
7212 || (GET_CODE (target) == REG
7213 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7214 target = gen_reg_rtx (mode);
7215 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7216 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7218 /* First try to do it with a special MIN or MAX instruction.
7219 If that does not win, use a conditional jump to select the proper
7220 value. */
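/* Sketch of the fallback (added; not in the original source): for
   MAX_EXPR the jump sequence emitted below is conceptually

       target = op0;
       if (target >= op1) goto done;   (GEU when unsigned)
       target = op1;
     done:
*/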
7221 this_optab = (TREE_UNSIGNED (type)
7222 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7223 : (code == MIN_EXPR ? smin_optab : smax_optab));
7225 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7226 OPTAB_WIDEN);
7227 if (temp != 0)
7228 return temp;
7230 /* At this point, a MEM target is no longer useful; we will get better
7231 code without it. */
7233 if (GET_CODE (target) == MEM)
7234 target = gen_reg_rtx (mode);
7236 if (target != op0)
7237 emit_move_insn (target, op0);
7239 op0 = gen_label_rtx ();
7241 /* If this mode is an integer too wide to compare properly,
7242 compare word by word. Rely on cse to optimize constant cases. */
7243 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
7245 if (code == MAX_EXPR)
7246 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7247 target, op1, NULL_RTX, op0);
7248 else
7249 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7250 op1, target, NULL_RTX, op0);
7251 emit_move_insn (target, op1);
7253 else
7255 if (code == MAX_EXPR)
7256 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
7257 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
7258 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
7259 else
7260 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
7261 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
7262 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
7263 if (temp == const0_rtx)
7264 emit_move_insn (target, op1);
7265 else if (temp != const_true_rtx)
7267 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
7268 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
7269 else
7270 abort ();
7271 emit_move_insn (target, op1);
7274 emit_label (op0);
7275 return target;
7277 case BIT_NOT_EXPR:
7278 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7279 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7280 if (temp == 0)
7281 abort ();
7282 return temp;
7284 case FFS_EXPR:
7285 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7286 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7287 if (temp == 0)
7288 abort ();
7289 return temp;
7291 /* ??? Can optimize bitwise operations with one arg constant.
7292 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7293 and (a bitwise1 b) bitwise2 b (etc)
7294 but that is probably not worthwhile. */
7296 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7297 boolean values when we want in all cases to compute both of them. In
7298 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7299 as actual zero-or-1 values and then bitwise anding. In cases where
7300 there cannot be any side effects, better code would be made by
7301 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7302 how to recognize those cases. */
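	/* Illustration (editorial sketch): for "a && b" the two strategies
	   differ only in whether B is evaluated:

	       (a != 0) & (b != 0)      TRUTH_AND_EXPR: both sides computed
	       (a != 0) && (b != 0)     TRUTH_ANDIF_EXPR: B skipped if A is 0

	   so the shortcut form is only safe when B has no side effects.  */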
7304 case TRUTH_AND_EXPR:
7305 case BIT_AND_EXPR:
7306 this_optab = and_optab;
7307 goto binop;
7309 case TRUTH_OR_EXPR:
7310 case BIT_IOR_EXPR:
7311 this_optab = ior_optab;
7312 goto binop;
7314 case TRUTH_XOR_EXPR:
7315 case BIT_XOR_EXPR:
7316 this_optab = xor_optab;
7317 goto binop;
7319 case LSHIFT_EXPR:
7320 case RSHIFT_EXPR:
7321 case LROTATE_EXPR:
7322 case RROTATE_EXPR:
7323 preexpand_calls (exp);
7324 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7325 subtarget = 0;
7326 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7327 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7328 unsignedp);
7330 /* Could determine the answer when only additive constants differ. Also,
7331 the addition of one can be handled by changing the condition. */
7332 case LT_EXPR:
7333 case LE_EXPR:
7334 case GT_EXPR:
7335 case GE_EXPR:
7336 case EQ_EXPR:
7337 case NE_EXPR:
7338 preexpand_calls (exp);
7339 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7340 if (temp != 0)
7341 return temp;
7343 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7344 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7345 && original_target
7346 && GET_CODE (original_target) == REG
7347 && (GET_MODE (original_target)
7348 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7350 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7351 VOIDmode, 0);
7353 if (temp != original_target)
7354 temp = copy_to_reg (temp);
7356 op1 = gen_label_rtx ();
7357 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
7358 GET_MODE (temp), unsignedp, 0);
7359 emit_jump_insn (gen_beq (op1));
7360 emit_move_insn (temp, const1_rtx);
7361 emit_label (op1);
7362 return temp;
7365 /* If no set-flag instruction, must generate a conditional
7366 store into a temporary variable. Drop through
7367 and handle this like && and ||. */
7369 case TRUTH_ANDIF_EXPR:
7370 case TRUTH_ORIF_EXPR:
7371 if (! ignore
7372 && (target == 0 || ! safe_from_p (target, exp, 1)
7373 /* Make sure we don't have a hard reg (such as function's return
7374 value) live across basic blocks, if not optimizing. */
7375 || (!optimize && GET_CODE (target) == REG
7376 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7377 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7379 if (target)
7380 emit_clr_insn (target);
7382 op1 = gen_label_rtx ();
7383 jumpifnot (exp, op1);
7385 if (target)
7386 emit_0_to_1_insn (target);
7388 emit_label (op1);
7389 return ignore ? const0_rtx : target;
7391 case TRUTH_NOT_EXPR:
7392 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7393 /* The parser is careful to generate TRUTH_NOT_EXPR
7394 only with operands that are always zero or one. */
7395 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7396 target, 1, OPTAB_LIB_WIDEN);
7397 if (temp == 0)
7398 abort ();
7399 return temp;
7401 case COMPOUND_EXPR:
7402 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7403 emit_queue ();
7404 return expand_expr (TREE_OPERAND (exp, 1),
7405 (ignore ? const0_rtx : target),
7406 VOIDmode, 0);
7408 case COND_EXPR:
7409 /* If we would have a "singleton" (see below) were it not for a
7410 conversion in each arm, bring that conversion back out. */
7411 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7412 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7413 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7414 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7416 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7417 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7419 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7420 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7421 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7422 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7423 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7424 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7425 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7426 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7427 return expand_expr (build1 (NOP_EXPR, type,
7428 build (COND_EXPR, TREE_TYPE (true),
7429 TREE_OPERAND (exp, 0),
7430 true, false)),
7431 target, tmode, modifier);
7435 /* Note that COND_EXPRs whose type is a structure or union
7436 are required to be constructed to contain assignments of
7437 a temporary variable, so that we can evaluate them here
7438 for side effect only. If type is void, we must do likewise. */
7440 /* If an arm of the branch requires a cleanup,
7441 only that cleanup is performed. */
7443 tree singleton = 0;
7444 tree binary_op = 0, unary_op = 0;
7446 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7447 convert it to our mode, if necessary. */
7448 if (integer_onep (TREE_OPERAND (exp, 1))
7449 && integer_zerop (TREE_OPERAND (exp, 2))
7450 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7452 if (ignore)
7454 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7455 ro_modifier);
7456 return const0_rtx;
7459 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7460 if (GET_MODE (op0) == mode)
7461 return op0;
7463 if (target == 0)
7464 target = gen_reg_rtx (mode);
7465 convert_move (target, op0, unsignedp);
7466 return target;
7469 /* Check for X ? A + B : A. If we have this, we can copy A to the
7470 output and conditionally add B. Similarly for unary operations.
7471 Don't do this if X has side-effects because those side effects
7472 might affect A or B and the "?" operation is a sequence point in
7473 ANSI. (operand_equal_p tests for side effects.) */
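	/* Editorial example of the "singleton" case described above: for
	   "x ? a + b : a", A is the singleton and A + B the binary_op, and
	   the expansion below amounts to

	       temp = a;
	       if (x)
	         temp = temp + b;

	   instead of a full two-armed conditional.  */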
7475 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7476 && operand_equal_p (TREE_OPERAND (exp, 2),
7477 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7478 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7479 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7480 && operand_equal_p (TREE_OPERAND (exp, 1),
7481 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7482 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7483 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7484 && operand_equal_p (TREE_OPERAND (exp, 2),
7485 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7486 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7487 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7488 && operand_equal_p (TREE_OPERAND (exp, 1),
7489 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7490 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7492 /* If we are not to produce a result, we have no target. Otherwise,
7493 if a target was specified use it; it will not be used as an
7494 intermediate target unless it is safe. If no target, use a
7495 temporary. */
7497 if (ignore)
7498 temp = 0;
7499 else if (original_target
7500 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7501 || (singleton && GET_CODE (original_target) == REG
7502 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7503 && original_target == var_rtx (singleton)))
7504 && GET_MODE (original_target) == mode
7505 #ifdef HAVE_conditional_move
7506 && (! can_conditionally_move_p (mode)
7507 || GET_CODE (original_target) == REG
7508 || TREE_ADDRESSABLE (type))
7509 #endif
7510 && ! (GET_CODE (original_target) == MEM
7511 && MEM_VOLATILE_P (original_target)))
7512 temp = original_target;
7513 else if (TREE_ADDRESSABLE (type))
7514 abort ();
7515 else
7516 temp = assign_temp (type, 0, 0, 1);
7518 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7519 do the test of X as a store-flag operation, do this as
7520 A + ((X != 0) << log C). Similarly for other simple binary
7521 operators. Only do for C == 1 if BRANCH_COST is low. */
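	/* Editorial example, assuming BRANCH_COST >= 3: with C == 4, the
	   expression "x ? a + 4 : a" is computed branch-free as

	       a + ((x != 0) << 2)

	   and "x ? a : a + 4" likewise as a + ((x == 0) << 2), which is why
	   the condition is inverted below when the singleton is operand 1.  */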
7522 if (temp && singleton && binary_op
7523 && (TREE_CODE (binary_op) == PLUS_EXPR
7524 || TREE_CODE (binary_op) == MINUS_EXPR
7525 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7526 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7527 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7528 : integer_onep (TREE_OPERAND (binary_op, 1)))
7529 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7531 rtx result;
7532 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7533 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7534 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7535 : xor_optab);
7537 /* If we had X ? A : A + 1, do this as A + (X == 0).
7539 We have to invert the truth value here and then put it
7540 back later if do_store_flag fails. We cannot simply copy
7541 TREE_OPERAND (exp, 0) to another variable and modify that
7542 because invert_truthvalue can modify the tree pointed to
7543 by its argument. */
7544 if (singleton == TREE_OPERAND (exp, 1))
7545 TREE_OPERAND (exp, 0)
7546 = invert_truthvalue (TREE_OPERAND (exp, 0));
7548 result = do_store_flag (TREE_OPERAND (exp, 0),
7549 (safe_from_p (temp, singleton, 1)
7550 ? temp : NULL_RTX),
7551 mode, BRANCH_COST <= 1);
7553 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7554 result = expand_shift (LSHIFT_EXPR, mode, result,
7555 build_int_2 (tree_log2
7556 (TREE_OPERAND
7557 (binary_op, 1)),
7559 (safe_from_p (temp, singleton, 1)
7560 ? temp : NULL_RTX), 0);
7562 if (result)
7564 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7565 return expand_binop (mode, boptab, op1, result, temp,
7566 unsignedp, OPTAB_LIB_WIDEN);
7568 else if (singleton == TREE_OPERAND (exp, 1))
7569 TREE_OPERAND (exp, 0)
7570 = invert_truthvalue (TREE_OPERAND (exp, 0));
7573 do_pending_stack_adjust ();
7574 NO_DEFER_POP;
7575 op0 = gen_label_rtx ();
7577 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7579 if (temp != 0)
7581 /* If the target conflicts with the other operand of the
7582 binary op, we can't use it. Also, we can't use the target
7583 if it is a hard register, because evaluating the condition
7584 might clobber it. */
7585 if ((binary_op
7586 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7587 || (GET_CODE (temp) == REG
7588 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7589 temp = gen_reg_rtx (mode);
7590 store_expr (singleton, temp, 0);
7592 else
7593 expand_expr (singleton,
7594 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7595 if (singleton == TREE_OPERAND (exp, 1))
7596 jumpif (TREE_OPERAND (exp, 0), op0);
7597 else
7598 jumpifnot (TREE_OPERAND (exp, 0), op0);
7600 start_cleanup_deferral ();
7601 if (binary_op && temp == 0)
7602 /* Just touch the other operand. */
7603 expand_expr (TREE_OPERAND (binary_op, 1),
7604 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7605 else if (binary_op)
7606 store_expr (build (TREE_CODE (binary_op), type,
7607 make_tree (type, temp),
7608 TREE_OPERAND (binary_op, 1)),
7609 temp, 0);
7610 else
7611 store_expr (build1 (TREE_CODE (unary_op), type,
7612 make_tree (type, temp)),
7613 temp, 0);
7614 op1 = op0;
7616 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7617 comparison operator. If we have one of these cases, set the
7618 output to A, branch on A (cse will merge these two references),
7619 then set the output to FOO. */
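      /* Editorial example: "x > 0 ? x : y" matches this pattern, and the
	 code below emits roughly

	     temp = x;
	     if (x > 0) goto done;
	     temp = y;
	   done:

	 letting cse merge the two references to X.  */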
7620 else if (temp
7621 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7622 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7623 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7624 TREE_OPERAND (exp, 1), 0)
7625 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7626 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7627 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7629 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7630 temp = gen_reg_rtx (mode);
7631 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7632 jumpif (TREE_OPERAND (exp, 0), op0);
7634 start_cleanup_deferral ();
7635 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7636 op1 = op0;
7638 else if (temp
7639 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7640 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7641 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7642 TREE_OPERAND (exp, 2), 0)
7643 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7644 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7645 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7647 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7648 temp = gen_reg_rtx (mode);
7649 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7650 jumpifnot (TREE_OPERAND (exp, 0), op0);
7652 start_cleanup_deferral ();
7653 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7654 op1 = op0;
7656 else
7658 op1 = gen_label_rtx ();
7659 jumpifnot (TREE_OPERAND (exp, 0), op0);
7661 start_cleanup_deferral ();
7662 if (temp != 0)
7663 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7664 else
7665 expand_expr (TREE_OPERAND (exp, 1),
7666 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7667 end_cleanup_deferral ();
7668 emit_queue ();
7669 emit_jump_insn (gen_jump (op1));
7670 emit_barrier ();
7671 emit_label (op0);
7672 start_cleanup_deferral ();
7673 if (temp != 0)
7674 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7675 else
7676 expand_expr (TREE_OPERAND (exp, 2),
7677 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7680 end_cleanup_deferral ();
7682 emit_queue ();
7683 emit_label (op1);
7684 OK_DEFER_POP;
7686 return temp;
7689 case TARGET_EXPR:
7691 /* Something needs to be initialized, but we didn't know
7692 where that thing was when building the tree. For example,
7693 it could be the return value of a function, or a parameter
7694 to a function which is laid out on the stack, or a temporary
7695 variable which must be passed by reference.
7697 We guarantee that the expression will either be constructed
7698 or copied into our original target. */
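	 /* Editorial example (hypothetical): a C++ initialization such as
	    "S s = make_s ();", where make_s returns S by value, reaches
	    this code with SLOT being the VAR_DECL for s and operand 1 the
	    call, which is then expanded directly into s's storage.  */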
7700 tree slot = TREE_OPERAND (exp, 0);
7701 tree cleanups = NULL_TREE;
7702 tree exp1;
7704 if (TREE_CODE (slot) != VAR_DECL)
7705 abort ();
7707 if (! ignore)
7708 target = original_target;
7710 if (target == 0)
7712 if (DECL_RTL (slot) != 0)
7714 target = DECL_RTL (slot);
7715 /* If we have already expanded the slot, don't do
7716 it again. (mrs) */
7717 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7718 return target;
7720 else
7722 target = assign_temp (type, 2, 0, 1);
7723 /* All temp slots at this level must not conflict. */
7724 preserve_temp_slots (target);
7725 DECL_RTL (slot) = target;
7726 if (TREE_ADDRESSABLE (slot))
7728 TREE_ADDRESSABLE (slot) = 0;
7729 mark_addressable (slot);
7732 /* Since SLOT is not known to the called function
7733 to belong to its stack frame, we must build an explicit
7734 cleanup. This case occurs when we must build up a reference
7735 to pass the reference as an argument. In this case,
7736 it is very likely that such a reference need not be
7737 built here. */
7739 if (TREE_OPERAND (exp, 2) == 0)
7740 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
7741 cleanups = TREE_OPERAND (exp, 2);
7744 else
7746 /* This case does occur when expanding a parameter which
7747 needs to be constructed on the stack. The target
7748 is the actual stack address that we want to initialize.
7749 The function we call will perform the cleanup in this case. */
7751 /* If we have already assigned it space, use that space,
7752 not the target that we were passed in, as our target
7753 parameter is only a hint. */
7754 if (DECL_RTL (slot) != 0)
7756 target = DECL_RTL (slot);
7757 /* If we have already expanded the slot, don't do
7758 it again. (mrs) */
7759 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7760 return target;
7762 else
7764 DECL_RTL (slot) = target;
7765 /* If we must have an addressable slot, then make sure that
7766 the RTL that we just stored in slot is OK. */
7767 if (TREE_ADDRESSABLE (slot))
7769 TREE_ADDRESSABLE (slot) = 0;
7770 mark_addressable (slot);
7775 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
7776 /* Mark it as expanded. */
7777 TREE_OPERAND (exp, 1) = NULL_TREE;
7779 TREE_USED (slot) = 1;
7780 store_expr (exp1, target, 0);
7782 expand_decl_cleanup (NULL_TREE, cleanups);
7784 return target;
7787 case INIT_EXPR:
7789 tree lhs = TREE_OPERAND (exp, 0);
7790 tree rhs = TREE_OPERAND (exp, 1);
7791 tree noncopied_parts = 0;
7792 tree lhs_type = TREE_TYPE (lhs);
7794 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7795 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7796 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7797 TYPE_NONCOPIED_PARTS (lhs_type));
7798 while (noncopied_parts != 0)
7800 expand_assignment (TREE_VALUE (noncopied_parts),
7801 TREE_PURPOSE (noncopied_parts), 0, 0);
7802 noncopied_parts = TREE_CHAIN (noncopied_parts);
7804 return temp;
7807 case MODIFY_EXPR:
7809 /* If lhs is complex, expand calls in rhs before computing it.
7810 That's so we don't compute a pointer and save it over a call.
7811 If lhs is simple, compute it first so we can give it as a
7812 target if the rhs is just a call. This avoids an extra temp and copy
7813 and prevents a partial subsumption that makes bad code.
7814 Actually we could treat component_ref's of vars like vars. */
7816 tree lhs = TREE_OPERAND (exp, 0);
7817 tree rhs = TREE_OPERAND (exp, 1);
7818 tree noncopied_parts = 0;
7819 tree lhs_type = TREE_TYPE (lhs);
7821 temp = 0;
7823 if (TREE_CODE (lhs) != VAR_DECL
7824 && TREE_CODE (lhs) != RESULT_DECL
7825 && TREE_CODE (lhs) != PARM_DECL
7826 && ! (TREE_CODE (lhs) == INDIRECT_REF
7827 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7828 preexpand_calls (exp);
7830 /* Check for |= or &= of a bitfield of size one into another bitfield
7831 of size one. In this case (unless we need the result of the
7832 assignment) we can do this more efficiently with a
7833 test followed by an assignment, if necessary.
7835 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7836 things change so we do, this code should be enhanced to
7837 support it. */
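      /* Editorial sketch: given one-bit fields, "s.b |= t.c" is emitted
	 below as

	     if (t.c) s.b = 1;

	 and "s.b &= t.c" as

	     if (! t.c) s.b = 0;

	 avoiding a read-modify-write of the destination bitfield.  */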
7838 if (ignore
7839 && TREE_CODE (lhs) == COMPONENT_REF
7840 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7841 || TREE_CODE (rhs) == BIT_AND_EXPR)
7842 && TREE_OPERAND (rhs, 0) == lhs
7843 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7844 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7845 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7847 rtx label = gen_label_rtx ();
7849 do_jump (TREE_OPERAND (rhs, 1),
7850 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7851 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7852 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7853 (TREE_CODE (rhs) == BIT_IOR_EXPR
7854 ? integer_one_node
7855 : integer_zero_node)),
7856 0, 0);
7857 do_pending_stack_adjust ();
7858 emit_label (label);
7859 return const0_rtx;
7862 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7863 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7864 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7865 TYPE_NONCOPIED_PARTS (lhs_type));
7867 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7868 while (noncopied_parts != 0)
7870 expand_assignment (TREE_PURPOSE (noncopied_parts),
7871 TREE_VALUE (noncopied_parts), 0, 0);
7872 noncopied_parts = TREE_CHAIN (noncopied_parts);
7874 return temp;
7877 case RETURN_EXPR:
7878 if (!TREE_OPERAND (exp, 0))
7879 expand_null_return ();
7880 else
7881 expand_return (TREE_OPERAND (exp, 0));
7882 return const0_rtx;
7884 case PREINCREMENT_EXPR:
7885 case PREDECREMENT_EXPR:
7886 return expand_increment (exp, 0, ignore);
7888 case POSTINCREMENT_EXPR:
7889 case POSTDECREMENT_EXPR:
7890 /* Faster to treat as pre-increment if result is not used. */
7891 return expand_increment (exp, ! ignore, ignore);
7893 case ADDR_EXPR:
7894 /* If nonzero, TEMP will be set to the address of something that might
7895 be a MEM corresponding to a stack slot. */
7896 temp = 0;
7898 /* Are we taking the address of a nested function? */
7899 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7900 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7901 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
7902 && ! TREE_STATIC (exp))
7904 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7905 op0 = force_operand (op0, target);
7907 /* If we are taking the address of something erroneous, just
7908 return a zero. */
7909 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7910 return const0_rtx;
7911 else
7913 /* We make sure to pass const0_rtx down if we came in with
7914 ignore set, to avoid doing the cleanups twice for the same expression. */
7915 op0 = expand_expr (TREE_OPERAND (exp, 0),
7916 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7917 (modifier == EXPAND_INITIALIZER
7918 ? modifier : EXPAND_CONST_ADDRESS));
7920 /* If we are going to ignore the result, OP0 will have been set
7921 to const0_rtx, so just return it. Don't get confused and
7922 think we are taking the address of the constant. */
7923 if (ignore)
7924 return op0;
7926 op0 = protect_from_queue (op0, 0);
7928 /* We would like the object in memory. If it is a constant,
7929 we can have it be statically allocated into memory. For
7930 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7931 memory and store the value into it. */
7933 if (CONSTANT_P (op0))
7934 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7935 op0);
7936 else if (GET_CODE (op0) == MEM)
7938 mark_temp_addr_taken (op0);
7939 temp = XEXP (op0, 0);
7942 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7943 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7945 /* If this object is in a register, it must not
7946 be BLKmode. */
7947 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7948 rtx memloc = assign_temp (inner_type, 1, 1, 1);
7950 mark_temp_addr_taken (memloc);
7951 emit_move_insn (memloc, op0);
7952 op0 = memloc;
7955 if (GET_CODE (op0) != MEM)
7956 abort ();
7958 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7960 temp = XEXP (op0, 0);
7961 #ifdef POINTERS_EXTEND_UNSIGNED
7962 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7963 && mode == ptr_mode)
7964 temp = convert_memory_address (ptr_mode, temp);
7965 #endif
7966 return temp;
7969 op0 = force_operand (XEXP (op0, 0), target);
7972 if (flag_force_addr && GET_CODE (op0) != REG)
7973 op0 = force_reg (Pmode, op0);
7975 if (GET_CODE (op0) == REG
7976 && ! REG_USERVAR_P (op0))
7977 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
7979 /* If we might have had a temp slot, add an equivalent address
7980 for it. */
7981 if (temp != 0)
7982 update_temp_slot_address (temp, op0);
7984 #ifdef POINTERS_EXTEND_UNSIGNED
7985 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7986 && mode == ptr_mode)
7987 op0 = convert_memory_address (ptr_mode, op0);
7988 #endif
7990 return op0;
7992 case ENTRY_VALUE_EXPR:
7993 abort ();
7995 /* COMPLEX type for Extended Pascal & Fortran */
7996 case COMPLEX_EXPR:
7998 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7999 rtx insns;
8001 /* Get the rtx for the two operands. */
8002 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8003 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8005 if (! target)
8006 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8008 start_sequence ();
8010 /* Move the real (op0) and imaginary (op1) parts to their location. */
8011 emit_move_insn (gen_realpart (mode, target), op0);
8012 emit_move_insn (gen_imagpart (mode, target), op1);
8014 insns = get_insns ();
8015 end_sequence ();
8017 /* Complex construction should appear as a single unit. */
8018 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8019 each with a separate pseudo as destination.
8020 It's not correct for flow to treat them as a unit. */
8021 if (GET_CODE (target) != CONCAT)
8022 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8023 else
8024 emit_insns (insns);
8026 return target;
8029 case REALPART_EXPR:
8030 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8031 return gen_realpart (mode, op0);
8033 case IMAGPART_EXPR:
8034 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8035 return gen_imagpart (mode, op0);
8037 case CONJ_EXPR:
8039 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8040 rtx imag_t;
8041 rtx insns;
8043 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8045 if (! target)
8046 target = gen_reg_rtx (mode);
8048 start_sequence ();
8050 /* Store the realpart and the negated imagpart to target. */
8051 emit_move_insn (gen_realpart (partmode, target),
8052 gen_realpart (partmode, op0));
8054 imag_t = gen_imagpart (partmode, target);
8055 temp = expand_unop (partmode, neg_optab,
8056 gen_imagpart (partmode, op0), imag_t, 0);
8057 if (temp != imag_t)
8058 emit_move_insn (imag_t, temp);
8060 insns = get_insns ();
8061 end_sequence ();
8063 /* Conjugate should appear as a single unit.
8064 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8065 each with a separate pseudo as destination.
8066 It's not correct for flow to treat them as a unit. */
8067 if (GET_CODE (target) != CONCAT)
8068 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8069 else
8070 emit_insns (insns);
8072 return target;
8075 case TRY_CATCH_EXPR:
8077 tree handler = TREE_OPERAND (exp, 1);
8079 expand_eh_region_start ();
8081 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8083 expand_eh_region_end (handler);
8085 return op0;
8088 case POPDCC_EXPR:
8090 rtx dcc = get_dynamic_cleanup_chain ();
8091 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8092 return const0_rtx;
8095 case POPDHC_EXPR:
8097 rtx dhc = get_dynamic_handler_chain ();
8098 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8099 return const0_rtx;
8102 case ERROR_MARK:
8103 op0 = CONST0_RTX (tmode);
8104 if (op0 != 0)
8105 return op0;
8106 return const0_rtx;
8108 default:
8109 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8112 /* Here to do an ordinary binary operator, generating an instruction
8113 from the optab already placed in `this_optab'. */
8114 binop:
8115 preexpand_calls (exp);
8116 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8117 subtarget = 0;
8118 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8119 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8120 binop2:
8121 temp = expand_binop (mode, this_optab, op0, op1, target,
8122 unsignedp, OPTAB_LIB_WIDEN);
8123 if (temp == 0)
8124 abort ();
8125 return temp;
8130 /* Return the alignment in bits of EXP, a pointer valued expression.
8131 But don't return more than MAX_ALIGN no matter what.
8132 The alignment returned is, by default, the alignment of the thing that
8133 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
8135 Otherwise, look at the expression to see if we can do better, i.e., if the
8136 expression is actually pointing at an object whose alignment is tighter. */
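/* Editorial example: for "double d;" and the expression
   (char *) &d + 2, the PLUS_EXPR case below clamps max_align to
   16 bits (the 2-byte offset), the NOP_EXPR case looks through the
   cast, and the ADDR_EXPR case supplies DECL_ALIGN (d); the result
   is the MIN of the two, i.e. 16 bits.  */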
8138 static int
8139 get_pointer_alignment (exp, max_align)
8140 tree exp;
8141 unsigned max_align;
8143 unsigned align, inner;
8145 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
8146 return 0;
8148 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8149 align = MIN (align, max_align);
8151 while (1)
8153 switch (TREE_CODE (exp))
8155 case NOP_EXPR:
8156 case CONVERT_EXPR:
8157 case NON_LVALUE_EXPR:
8158 exp = TREE_OPERAND (exp, 0);
8159 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
8160 return align;
8161 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8162 align = MIN (inner, max_align);
8163 break;
8165 case PLUS_EXPR:
8166 /* If sum of pointer + int, restrict our maximum alignment to that
8167 imposed by the integer. If not, we can't do any better than
8168 ALIGN. */
8169 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
8170 return align;
8172 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
8173 & (max_align - 1))
8174 != 0)
8175 max_align >>= 1;
8177 exp = TREE_OPERAND (exp, 0);
8178 break;
8180 case ADDR_EXPR:
8181 /* See what we are pointing at and look at its alignment. */
8182 exp = TREE_OPERAND (exp, 0);
8183 if (TREE_CODE (exp) == FUNCTION_DECL)
8184 align = FUNCTION_BOUNDARY;
8185 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
8186 align = DECL_ALIGN (exp);
8187 #ifdef CONSTANT_ALIGNMENT
8188 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
8189 align = CONSTANT_ALIGNMENT (exp, align);
8190 #endif
8191 return MIN (align, max_align);
8193 default:
8194 return align;
8199 /* Return the tree node and offset if a given argument corresponds to
8200 a string constant. */
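/* Editorial example: for the argument "foo" + 2 (a PLUS_EXPR of an
   ADDR_EXPR of the STRING_CST and the constant 2), this returns the
   STRING_CST "foo" and sets *ptr_offset to 2; a bare "foo" returns
   the STRING_CST with a zero offset.  */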
8202 static tree
8203 string_constant (arg, ptr_offset)
8204 tree arg;
8205 tree *ptr_offset;
8207 STRIP_NOPS (arg);
8209 if (TREE_CODE (arg) == ADDR_EXPR
8210 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8212 *ptr_offset = integer_zero_node;
8213 return TREE_OPERAND (arg, 0);
8215 else if (TREE_CODE (arg) == PLUS_EXPR)
8217 tree arg0 = TREE_OPERAND (arg, 0);
8218 tree arg1 = TREE_OPERAND (arg, 1);
8220 STRIP_NOPS (arg0);
8221 STRIP_NOPS (arg1);
8223 if (TREE_CODE (arg0) == ADDR_EXPR
8224 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8226 *ptr_offset = arg1;
8227 return TREE_OPERAND (arg0, 0);
8229 else if (TREE_CODE (arg1) == ADDR_EXPR
8230 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8232 *ptr_offset = arg0;
8233 return TREE_OPERAND (arg1, 0);
8237 return 0;
8240 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
8241 way, because the array could contain a zero byte in the middle.
8242 TREE_STRING_LENGTH is the size of the character array, not the string.
8244 Unfortunately, string_constant can't access the values of const char
8245 arrays with initializers, so neither can we here. */
8247 static tree
8248 c_strlen (src)
8249 tree src;
8251 tree offset_node;
8252 int offset, max;
8253 char *ptr;
8255 src = string_constant (src, &offset_node);
8256 if (src == 0)
8257 return 0;
8258 max = TREE_STRING_LENGTH (src);
8259 ptr = TREE_STRING_POINTER (src);
8260 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
8262 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
8263 compute the offset to the following null if we don't know where to
8264 start searching for it. */
8265 int i;
8266 for (i = 0; i < max; i++)
8267 if (ptr[i] == 0)
8268 return 0;
8269 /* We don't know the starting offset, but we do know that the string
8270 has no internal zero bytes. We can assume that the offset falls
8271 within the bounds of the string; otherwise, the programmer deserves
8272 what he gets. Subtract the offset from the length of the string,
8273 and return that. */
8274 /* This would perhaps not be valid if we were dealing with named
8275 arrays in addition to literal string constants. */
8276 return size_binop (MINUS_EXPR, size_int (max), offset_node);
8279 /* We have a known offset into the string. Start searching there for
8280 a null character. */
8281 if (offset_node == 0)
8282 offset = 0;
8283 else
8285 /* Did we get a long long offset? If so, punt. */
8286 if (TREE_INT_CST_HIGH (offset_node) != 0)
8287 return 0;
8288 offset = TREE_INT_CST_LOW (offset_node);
8290 /* If the offset is known to be out of bounds, warn, and call strlen at
8291 runtime. */
8292 if (offset < 0 || offset > max)
8294 warning ("offset outside bounds of constant string");
8295 return 0;
8297 /* Use strlen to search for the first zero byte. Since any strings
8298 constructed with build_string will have nulls appended, we win even
8299 if we get handed something like (char[4])"abcd".
8301 Since OFFSET is our starting index into the string, no further
8302 calculation is needed. */
8303 return size_int (strlen (ptr + offset));
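/* Editorial examples: c_strlen of "abcd" + 1 returns size_int (3),
   via strlen (ptr + 1); c_strlen of "foo\0bar" with a non-constant
   offset returns 0, because the internal zero byte makes the length
   depend on where the search starts, so strlen is called at run time
   instead.  */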
rtx
8307 expand_builtin_return_addr (fndecl_code, count, tem)
8308 enum built_in_function fndecl_code;
8309 int count;
8310 rtx tem;
8312 int i;
8314 /* Some machines need special handling before we can access
8315 arbitrary frames. For example, on the sparc, we must first flush
8316 all register windows to the stack. */
8317 #ifdef SETUP_FRAME_ADDRESSES
8318 if (count > 0)
8319 SETUP_FRAME_ADDRESSES ();
8320 #endif
8322 /* On the sparc, the return address is not in the frame, it is in a
8323 register. There is no way to access it off of the current frame
8324 pointer, but it can be accessed off the previous frame pointer by
8325 reading the value from the register window save area. */
8326 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
8327 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
8328 count--;
8329 #endif
8331 /* Scan back COUNT frames to the specified frame. */
8332 for (i = 0; i < count; i++)
8334 /* Assume the dynamic chain pointer is in the word that the
8335 frame address points to, unless otherwise specified. */
8336 #ifdef DYNAMIC_CHAIN_ADDRESS
8337 tem = DYNAMIC_CHAIN_ADDRESS (tem);
8338 #endif
8339 tem = memory_address (Pmode, tem);
8340 tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
8343 /* For __builtin_frame_address, return what we've got. */
8344 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
8345 return tem;
8347 /* For __builtin_return_address, Get the return address from that
8348 frame. */
8349 #ifdef RETURN_ADDR_RTX
8350 tem = RETURN_ADDR_RTX (count, tem);
8351 #else
8352 tem = memory_address (Pmode,
8353 plus_constant (tem, GET_MODE_SIZE (Pmode)));
8354 tem = gen_rtx_MEM (Pmode, tem);
8355 #endif
8356 return tem;
8359 /* __builtin_setjmp is passed a pointer to an array of five words (not
8360 all will be used on all machines). It operates similarly to the C
8361 library function of the same name, but is more efficient. Much of
8362 the code below (and for longjmp) is copied from the handling of
8363 non-local gotos.
8365 NOTE: This is intended for use by GNAT and the exception handling
8366 scheme in the compiler, and will only work when used in the way
8367 they use it. */
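/* Editorial sketch of the buffer layout produced below, in Pmode
   words:

       buf[0]    frame pointer (BUILTIN_SETJMP_FRAME_VALUE)
       buf[1]    address of the receiver label LAB1
       buf[2..]  machine-dependent stack save area (sa_mode)

   The expansion returns 0 via FIRST_LABEL on the initial call and 1
   via NEXT_LABEL when longjmp'd to.  */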
rtx
8370 expand_builtin_setjmp (buf_addr, target, first_label, next_label)
8371 rtx buf_addr;
8372 rtx target;
8373 rtx first_label, next_label;
8375 rtx lab1 = gen_label_rtx ();
8376 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
8377 enum machine_mode value_mode;
8378 rtx stack_save;
8380 value_mode = TYPE_MODE (integer_type_node);
8382 #ifdef POINTERS_EXTEND_UNSIGNED
8383 buf_addr = convert_memory_address (Pmode, buf_addr);
8384 #endif
8386 buf_addr = force_reg (Pmode, buf_addr);
8388 if (target == 0 || GET_CODE (target) != REG
8389 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8390 target = gen_reg_rtx (value_mode);
8392 emit_queue ();
8394 /* We store the frame pointer and the address of lab1 in the buffer
8395 and use the rest of it for the stack save area, which is
8396 machine-dependent. */
8398 #ifndef BUILTIN_SETJMP_FRAME_VALUE
8399 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
8400 #endif
8402 emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
8403 BUILTIN_SETJMP_FRAME_VALUE);
8404 emit_move_insn (validize_mem
8405 (gen_rtx_MEM (Pmode,
8406 plus_constant (buf_addr,
8407 GET_MODE_SIZE (Pmode)))),
8408 gen_rtx_LABEL_REF (Pmode, lab1));
8410 stack_save = gen_rtx_MEM (sa_mode,
8411 plus_constant (buf_addr,
8412 2 * GET_MODE_SIZE (Pmode)));
8413 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8415 /* If there is further processing to do, do it. */
8416 #ifdef HAVE_builtin_setjmp_setup
8417 if (HAVE_builtin_setjmp_setup)
8418 emit_insn (gen_builtin_setjmp_setup (buf_addr));
8419 #endif
8421 /* Set TARGET to zero and branch to the first-time-through label. */
8422 emit_move_insn (target, const0_rtx);
8423 emit_jump_insn (gen_jump (first_label));
8424 emit_barrier ();
8425 emit_label (lab1);
8427 /* Tell flow about the strange goings on. */
8428 current_function_has_nonlocal_label = 1;
8430 /* Clobber the FP when we get here, so we have to make sure it's
8431 marked as used by this function. */
8432 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8434 /* Mark the static chain as clobbered here so life information
8435 doesn't get messed up for it. */
8436 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
8438 /* Now put in the code to restore the frame pointer, and argument
8439 pointer, if needed. The code below is from expand_end_bindings
8440 in stmt.c; see detailed documentation there. */
8441 #ifdef HAVE_nonlocal_goto
8442 if (! HAVE_nonlocal_goto)
8443 #endif
8444 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8446 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8447 if (fixed_regs[ARG_POINTER_REGNUM])
8449 #ifdef ELIMINABLE_REGS
8450 size_t i;
8451 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8453 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8454 if (elim_regs[i].from == ARG_POINTER_REGNUM
8455 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8456 break;
8458 if (i == sizeof elim_regs / sizeof elim_regs [0])
8459 #endif
8461 /* Now restore our arg pointer from the address at which it
8462 was saved in our stack frame.
8463 If space hasn't been allocated for it yet, make
8464 some now. */
8465 if (arg_pointer_save_area == 0)
8466 arg_pointer_save_area
8467 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8468 emit_move_insn (virtual_incoming_args_rtx,
8469 copy_to_reg (arg_pointer_save_area));
8472 #endif
8474 #ifdef HAVE_builtin_setjmp_receiver
8475 if (HAVE_builtin_setjmp_receiver)
8476 emit_insn (gen_builtin_setjmp_receiver (lab1));
8477 else
8478 #endif
8479 #ifdef HAVE_nonlocal_goto_receiver
8480 if (HAVE_nonlocal_goto_receiver)
8481 emit_insn (gen_nonlocal_goto_receiver ());
8482 else
8483 #endif
8485 ; /* Nothing */
8488 /* Set TARGET, and branch to the next-time-through label. */
8489 emit_move_insn (target, const1_rtx);
8490 emit_jump_insn (gen_jump (next_label));
8491 emit_barrier ();
8493 return target;
8496 void
8497 expand_builtin_longjmp (buf_addr, value)
8498 rtx buf_addr, value;
8500 rtx fp, lab, stack;
8501 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
8503 #ifdef POINTERS_EXTEND_UNSIGNED
8504 buf_addr = convert_memory_address (Pmode, buf_addr);
8505 #endif
8506 buf_addr = force_reg (Pmode, buf_addr);
8508 /* We used to store value in static_chain_rtx, but that fails if pointers
8509 are smaller than integers. We instead require that the user must pass
8510 a second argument of 1, because that is what builtin_setjmp will
8511 return. This also makes EH slightly more efficient, since we are no
8512 longer copying around a value that we don't care about. */
8513 if (value != const1_rtx)
8514 abort ();
8516 #ifdef HAVE_builtin_longjmp
8517 if (HAVE_builtin_longjmp)
8518 emit_insn (gen_builtin_longjmp (buf_addr));
8519 else
8520 #endif
8522 fp = gen_rtx_MEM (Pmode, buf_addr);
8523 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
8524 GET_MODE_SIZE (Pmode)));
8526 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
8527 2 * GET_MODE_SIZE (Pmode)));
8529 /* Pick up FP, label, and SP from the block and jump. This code is
8530 from expand_goto in stmt.c; see there for detailed comments. */
8531 #if HAVE_nonlocal_goto
8532 if (HAVE_nonlocal_goto)
8533 /* We have to pass a value to the nonlocal_goto pattern that will
8534 get copied into the static_chain pointer, but it does not matter
8535 what that value is, because builtin_setjmp does not use it. */
8536 emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
8537 else
8538 #endif
8540 lab = copy_to_reg (lab);
8542 emit_move_insn (hard_frame_pointer_rtx, fp);
8543 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
8545 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8546 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
8547 emit_indirect_jump (lab);
8552 static rtx
8553 get_memory_rtx (exp)
8554 tree exp;
8556 rtx mem;
8557 int is_aggregate;
8559 mem = gen_rtx_MEM (BLKmode,
8560 memory_address (BLKmode,
8561 expand_expr (exp, NULL_RTX,
8562 ptr_mode, EXPAND_SUM)));
8564 RTX_UNCHANGING_P (mem) = TREE_READONLY (exp);
8566 /* Figure out the type of the object pointed to. Set MEM_IN_STRUCT_P
8567 if the value is the address of a structure or if the expression is
8568 cast to a pointer to structure type. */
8569 is_aggregate = 0;
8571 while (TREE_CODE (exp) == NOP_EXPR)
8573 tree cast_type = TREE_TYPE (exp);
8574 if (TREE_CODE (cast_type) == POINTER_TYPE
8575 && AGGREGATE_TYPE_P (TREE_TYPE (cast_type)))
8577 is_aggregate = 1;
8578 break;
8580 exp = TREE_OPERAND (exp, 0);
8583 if (is_aggregate == 0)
8585 tree type;
8587 if (TREE_CODE (exp) == ADDR_EXPR)
8588 /* If this is the address of an object, check whether the
8589 object is an array. */
8590 type = TREE_TYPE (TREE_OPERAND (exp, 0));
8591 else
8592 type = TREE_TYPE (TREE_TYPE (exp));
8593 is_aggregate = AGGREGATE_TYPE_P (type);
8596 MEM_SET_IN_STRUCT_P (mem, is_aggregate);
8597 return mem;
8601 /* Expand an expression EXP that calls a built-in function,
8602 with result going to TARGET if that's convenient
8603 (and in mode MODE if that's convenient).
8604 SUBTARGET may be used as the target for computing one of EXP's operands.
8605 IGNORE is nonzero if the value is to be ignored. */
8607 #define CALLED_AS_BUILT_IN(NODE) \
8608 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
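/* Editorial note: this test lets an explicit "__builtin_" spelling
   force inline expansion even at -O0; e.g. __builtin_strlen is
   expanded below even when not optimizing, while plain strlen falls
   back to the library call (see the BUILT_IN_FFS and BUILT_IN_STRLEN
   cases).  */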
8610 static rtx
8611 expand_builtin (exp, target, subtarget, mode, ignore)
8612 tree exp;
8613 rtx target;
8614 rtx subtarget;
8615 enum machine_mode mode;
8616 int ignore;
8618 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8619 tree arglist = TREE_OPERAND (exp, 1);
8620 rtx op0;
8621 rtx lab1, insns;
8622 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
8623 optab builtin_optab;
8625 switch (DECL_FUNCTION_CODE (fndecl))
8627 case BUILT_IN_ABS:
8628 case BUILT_IN_LABS:
8629 case BUILT_IN_FABS:
8630 /* build_function_call changes these into ABS_EXPR. */
8631 abort ();
8633 case BUILT_IN_SIN:
8634 case BUILT_IN_COS:
8635 /* Treat these like sqrt, but only if the user asks for them. */
8636 if (! flag_fast_math)
8637 break;
8638 case BUILT_IN_FSQRT:
8639 /* If not optimizing, call the library function. */
8640 if (! optimize)
8641 break;
8643 if (arglist == 0
8644 /* Arg could be wrong type if user redeclared this fcn wrong. */
8645 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
8646 break;
8648 /* Stabilize and compute the argument. */
8649 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8650 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8652 exp = copy_node (exp);
8653 arglist = copy_node (arglist);
8654 TREE_OPERAND (exp, 1) = arglist;
8655 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8657 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8659 /* Make a suitable register to place result in. */
8660 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8662 emit_queue ();
8663 start_sequence ();
8665 switch (DECL_FUNCTION_CODE (fndecl))
8667 case BUILT_IN_SIN:
8668 builtin_optab = sin_optab; break;
8669 case BUILT_IN_COS:
8670 builtin_optab = cos_optab; break;
8671 case BUILT_IN_FSQRT:
8672 builtin_optab = sqrt_optab; break;
8673 default:
8674 abort ();
8677 /* Compute into TARGET.
8678 Set TARGET to wherever the result comes back. */
8679 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8680 builtin_optab, op0, target, 0);
8682 /* If we were unable to expand via the builtin, stop the
8683 sequence (without outputting the insns) and break, causing
8684 a call to the library function. */
8685 if (target == 0)
8687 end_sequence ();
8688 break;
8691 /* Check the results by default. But if flag_fast_math is turned on,
8692 then assume sqrt will always be called with valid arguments. */
8694 if (! flag_fast_math)
8696 /* Don't define the builtin FP instructions
8697 if your machine is not IEEE. */
8698 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8699 abort ();
8701 lab1 = gen_label_rtx ();
8703 /* Test the result; if it is NaN, set errno=EDOM because
8704 the argument was not in the domain. */
8705 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
8706 emit_jump_insn (gen_beq (lab1));
8708 #ifdef TARGET_EDOM
8710 #ifdef GEN_ERRNO_RTX
8711 rtx errno_rtx = GEN_ERRNO_RTX;
8712 #else
8713 rtx errno_rtx
8714 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
8715 #endif
8717 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8719 #else
8720 /* We can't set errno=EDOM directly; let the library call do it.
8721 Pop the arguments right away in case the call gets deleted. */
8722 NO_DEFER_POP;
8723 expand_call (exp, target, 0);
8724 OK_DEFER_POP;
8725 #endif
8727 emit_label (lab1);
8730 /* Output the entire sequence. */
8731 insns = get_insns ();
8732 end_sequence ();
8733 emit_insns (insns);
8735 return target;
8737 case BUILT_IN_FMOD:
8738 break;
8740 /* __builtin_apply_args returns block of memory allocated on
8741 the stack into which is stored the arg pointer, structure
8742 value address, static chain, and all the registers that might
8743 possibly be used in performing a function call. The code is
8744 moved to the start of the function so the incoming values are
8745 saved. */
8746 case BUILT_IN_APPLY_ARGS:
8747 /* Don't do __builtin_apply_args more than once in a function.
8748 Save the result of the first call and reuse it. */
8749 if (apply_args_value != 0)
8750 return apply_args_value;
8752 /* When this function is called, it means that registers must be
8753 saved on entry to this function. So we migrate the
8754 call to the first insn of this function. */
8755 rtx temp;
8756 rtx seq;
8758 start_sequence ();
8759 temp = expand_builtin_apply_args ();
8760 seq = get_insns ();
8761 end_sequence ();
8763 apply_args_value = temp;
8765 /* Put the sequence after the NOTE that starts the function.
8766 If this is inside a SEQUENCE, make the outer-level insn
8767 chain current, so the code is placed at the start of the
8768 function. */
8769 push_topmost_sequence ();
8770 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8771 pop_topmost_sequence ();
8772 return temp;
8775 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8776 FUNCTION with a copy of the parameters described by
8777 ARGUMENTS, and ARGSIZE. It returns a block of memory
8778 allocated on the stack into which is stored all the registers
8779 that might possibly be used for returning the result of a
8780 function. ARGUMENTS is the value returned by
8781 __builtin_apply_args. ARGSIZE is the number of bytes of
8782 arguments that must be copied. ??? How should this value be
8783 computed? We'll also need a safe worst case value for varargs
8784 functions. */
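    /* Editorial sketch of the intended usage (hypothetical wrapper):

	   void *args = __builtin_apply_args ();
	   void *result = __builtin_apply ((void (*) ()) target_fn,
					   args, 64);
	   __builtin_return (result);

       where target_fn and the 64-byte argument-size guess are made up
       for illustration.  */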
8785 case BUILT_IN_APPLY:
8786 if (arglist == 0
8787 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8788 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
8789 || TREE_CHAIN (arglist) == 0
8790 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8791 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8792 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8793 return const0_rtx;
8794 else
8796 int i;
8797 tree t;
8798 rtx ops[3];
8800 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8801 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8803 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8806 /* __builtin_return (RESULT) causes the function to return the
8807 value described by RESULT. RESULT is address of the block of
8808 memory returned by __builtin_apply. */
8809 case BUILT_IN_RETURN:
8810 if (arglist
8811 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8812 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8813 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8814 NULL_RTX, VOIDmode, 0));
8815 return const0_rtx;
8817 case BUILT_IN_SAVEREGS:
8818 /* Don't do __builtin_saveregs more than once in a function.
8819 Save the result of the first call and reuse it. */
8820 if (saveregs_value != 0)
8821 return saveregs_value;
8823 /* When this function is called, it means that registers must be
8824 saved on entry to this function. So we migrate the
8825 call to the first insn of this function. */
8826 rtx temp;
8827 rtx seq;
8829 /* Now really call the function. `expand_call' does not call
8830 expand_builtin, so there is no danger of infinite recursion here. */
8831 start_sequence ();
8833 #ifdef EXPAND_BUILTIN_SAVEREGS
8834 /* Do whatever the machine needs done in this case. */
8835 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8836 #else
8837 /* The register where the function returns its value
8838 is likely to have something else in it, such as an argument.
8839 So preserve that register around the call. */
8841 if (value_mode != VOIDmode)
8843 rtx valreg = hard_libcall_value (value_mode);
8844 rtx saved_valreg = gen_reg_rtx (value_mode);
8846 emit_move_insn (saved_valreg, valreg);
8847 temp = expand_call (exp, target, ignore);
8848 emit_move_insn (valreg, saved_valreg);
8850 else
8851 /* Generate the call, putting the value in a pseudo. */
8852 temp = expand_call (exp, target, ignore);
8853 #endif
8855 seq = get_insns ();
8856 end_sequence ();
8858 saveregs_value = temp;
8860 /* Put the sequence after the NOTE that starts the function.
8861 If this is inside a SEQUENCE, make the outer-level insn
8862 chain current, so the code is placed at the start of the
8863 function. */
8864 push_topmost_sequence ();
8865 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8866 pop_topmost_sequence ();
8867 return temp;
8870 /* __builtin_args_info (N) returns word N of the arg space info
8871 for the current function. The number and meaning of the words
8872 are controlled by the definition of CUMULATIVE_ARGS. */
8873 case BUILT_IN_ARGS_INFO:
8875 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8876 int *word_ptr = (int *) &current_function_args_info;
8877 #if 0
8878 /* These are used by the code below that is #if-0'ed away. */
8879 int i;
8880 tree type, elts, result;
8881 #endif
8883 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8884 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8885 __FILE__, __LINE__);
8887 if (arglist != 0)
8889 tree arg = TREE_VALUE (arglist);
8890 if (TREE_CODE (arg) != INTEGER_CST)
8891 error ("argument of `__builtin_args_info' must be constant");
8892 else
8894 int wordnum = TREE_INT_CST_LOW (arg);
8896 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8897 error ("argument of `__builtin_args_info' out of range");
8898 else
8899 return GEN_INT (word_ptr[wordnum]);
8902 else
8903 error ("missing argument in `__builtin_args_info'");
8905 return const0_rtx;
8907 #if 0
8908 for (i = 0; i < nwords; i++)
8909 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
8911 type = build_array_type (integer_type_node,
8912 build_index_type (build_int_2 (nwords, 0)));
8913 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8914 TREE_CONSTANT (result) = 1;
8915 TREE_STATIC (result) = 1;
8916 result = build (INDIRECT_REF, build_pointer_type (type), result);
8917 TREE_CONSTANT (result) = 1;
8918 return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
8919 #endif
8922 /* Return the address of the first anonymous stack arg. */
8923 case BUILT_IN_NEXT_ARG:
8925 tree fntype = TREE_TYPE (current_function_decl);
8927 if ((TYPE_ARG_TYPES (fntype) == 0
8928 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8929 == void_type_node))
8930 && ! current_function_varargs)
8932 error ("`va_start' used in function with fixed args");
8933 return const0_rtx;
8936 if (arglist)
8938 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8939 tree arg = TREE_VALUE (arglist);
8941 /* Strip off all nops for the sake of the comparison. This
8942 is not quite the same as STRIP_NOPS. It does more.
8943 We must also strip off INDIRECT_EXPR for C++ reference
8944 parameters. */
8945 while (TREE_CODE (arg) == NOP_EXPR
8946 || TREE_CODE (arg) == CONVERT_EXPR
8947 || TREE_CODE (arg) == NON_LVALUE_EXPR
8948 || TREE_CODE (arg) == INDIRECT_REF)
8949 arg = TREE_OPERAND (arg, 0);
8950 if (arg != last_parm)
8951 warning ("second parameter of `va_start' not last named argument");
8953 else if (! current_function_varargs)
8954 /* Evidently an out-of-date version of <stdarg.h>; can't validate
8955 va_start's second argument, but can still work as intended. */
8956 warning ("`__builtin_next_arg' called without an argument");
8959 return expand_binop (Pmode, add_optab,
8960 current_function_internal_arg_pointer,
8961 current_function_arg_offset_rtx,
8962 NULL_RTX, 0, OPTAB_LIB_WIDEN);
8964 case BUILT_IN_CLASSIFY_TYPE:
8965 if (arglist != 0)
8967 tree type = TREE_TYPE (TREE_VALUE (arglist));
8968 enum tree_code code = TREE_CODE (type);
8969 if (code == VOID_TYPE)
8970 return GEN_INT (void_type_class);
8971 if (code == INTEGER_TYPE)
8972 return GEN_INT (integer_type_class);
8973 if (code == CHAR_TYPE)
8974 return GEN_INT (char_type_class);
8975 if (code == ENUMERAL_TYPE)
8976 return GEN_INT (enumeral_type_class);
8977 if (code == BOOLEAN_TYPE)
8978 return GEN_INT (boolean_type_class);
8979 if (code == POINTER_TYPE)
8980 return GEN_INT (pointer_type_class);
8981 if (code == REFERENCE_TYPE)
8982 return GEN_INT (reference_type_class);
8983 if (code == OFFSET_TYPE)
8984 return GEN_INT (offset_type_class);
8985 if (code == REAL_TYPE)
8986 return GEN_INT (real_type_class);
8987 if (code == COMPLEX_TYPE)
8988 return GEN_INT (complex_type_class);
8989 if (code == FUNCTION_TYPE)
8990 return GEN_INT (function_type_class);
8991 if (code == METHOD_TYPE)
8992 return GEN_INT (method_type_class);
8993 if (code == RECORD_TYPE)
8994 return GEN_INT (record_type_class);
8995 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8996 return GEN_INT (union_type_class);
8997 if (code == ARRAY_TYPE)
8999 if (TYPE_STRING_FLAG (type))
9000 return GEN_INT (string_type_class);
9001 else
9002 return GEN_INT (array_type_class);
9004 if (code == SET_TYPE)
9005 return GEN_INT (set_type_class);
9006 if (code == FILE_TYPE)
9007 return GEN_INT (file_type_class);
9008 if (code == LANG_TYPE)
9009 return GEN_INT (lang_type_class);
9011 return GEN_INT (no_type_class);
9013 case BUILT_IN_CONSTANT_P:
9014 if (arglist == 0)
9015 return const0_rtx;
9016 else
9018 tree arg = TREE_VALUE (arglist);
9019 rtx tmp;
9021 /* We return 1 for a numeric type that's known to be a constant
9022 value at compile-time or for an aggregate type that's a
9023 literal constant. */
9024 STRIP_NOPS (arg);
9026 /* If we know this is a constant, emit the constant 1. */
9027 if (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
9028 || (TREE_CODE (arg) == CONSTRUCTOR
9029 && TREE_CONSTANT (arg))
9030 || (TREE_CODE (arg) == ADDR_EXPR
9031 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST))
9032 return const1_rtx;
9034 /* If we aren't going to be running CSE or this expression
9035 has side effects, show we don't know it to be a constant.
9036 Likewise if it's a pointer or aggregate type since in those
9037 cases we only want literals, since those are only optimized
9038 when generating RTL, not later. */
9039 if (TREE_SIDE_EFFECTS (arg) || cse_not_expected
9040 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
9041 || POINTER_TYPE_P (TREE_TYPE (arg)))
9042 return const0_rtx;
9044 /* Otherwise, emit (constant_p_rtx (ARG)) and let CSE get a
9045 chance to see if it can deduce whether ARG is constant. */
9047 tmp = expand_expr (arg, NULL_RTX, VOIDmode, 0);
9048 tmp = gen_rtx_CONSTANT_P_RTX (value_mode, tmp);
9049 return tmp;
9052 case BUILT_IN_FRAME_ADDRESS:
9053 /* The argument must be a nonnegative integer constant.
9054 It counts the number of frames to scan up the stack.
9055 The value is the address of that frame. */
9056 case BUILT_IN_RETURN_ADDRESS:
9057 /* The argument must be a nonnegative integer constant.
9058 It counts the number of frames to scan up the stack.
9059 The value is the return address saved in that frame. */
9060 if (arglist == 0)
9061 /* Warning about missing arg was already issued. */
9062 return const0_rtx;
9063 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
9064 || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
9066 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9067 error ("invalid arg to `__builtin_frame_address'");
9068 else
9069 error ("invalid arg to `__builtin_return_address'");
9070 return const0_rtx;
9072 else
9074 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
9075 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
9076 hard_frame_pointer_rtx);
9078 /* Some ports cannot access arbitrary stack frames. */
9079 if (tem == NULL)
9081 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9082 warning ("unsupported arg to `__builtin_frame_address'");
9083 else
9084 warning ("unsupported arg to `__builtin_return_address'");
9085 return const0_rtx;
9088 /* For __builtin_frame_address, return what we've got. */
9089 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9090 return tem;
9092 if (GET_CODE (tem) != REG)
9093 tem = copy_to_reg (tem);
9094 return tem;
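/* Usage sketch (illustrative): `__builtin_return_address (0)' yields the
return address of the current function, and `__builtin_frame_address (0)'
its frame address; a larger count walks further up the stack, and on
ports that cannot access arbitrary frames expand_builtin_return_addr
comes back null, producing the warnings above. */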
9097 /* Returns the address of the area where the structure is returned.
9098 0 otherwise. */
9099 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
9100 if (arglist != 0
9101 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
9102 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
9103 return const0_rtx;
9104 else
9105 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
9107 case BUILT_IN_ALLOCA:
9108 if (arglist == 0
9109 /* Arg could be non-integer if user redeclared this fcn wrong. */
9110 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
9111 break;
9113 /* Compute the argument. */
9114 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
9116 /* Allocate the desired space. */
9117 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
9119 case BUILT_IN_FFS:
9120 /* If not optimizing, call the library function. */
9121 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9122 break;
9124 if (arglist == 0
9125 /* Arg could be non-integer if user redeclared this fcn wrong. */
9126 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
9127 break;
9129 /* Compute the argument. */
9130 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
9131 /* Compute ffs, into TARGET if possible.
9132 Set TARGET to wherever the result comes back. */
9133 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
9134 ffs_optab, op0, target, 1);
9135 if (target == 0)
9136 abort ();
9137 return target;
9139 case BUILT_IN_STRLEN:
9140 /* If not optimizing, call the library function. */
9141 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9142 break;
9144 if (arglist == 0
9145 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9146 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9147 break;
9148 else
9150 tree src = TREE_VALUE (arglist);
9151 tree len = c_strlen (src);
9153 int align
9154 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9156 rtx result, src_rtx, char_rtx;
9157 enum machine_mode insn_mode = value_mode, char_mode;
9158 enum insn_code icode;
9160 /* If the length is known, just return it. */
9161 if (len != 0)
9162 return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
9164 /* If SRC is not a pointer type, don't do this operation inline. */
9165 if (align == 0)
9166 break;
9168 /* Call a function if we can't compute strlen in the right mode. */
9170 while (insn_mode != VOIDmode)
9172 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
9173 if (icode != CODE_FOR_nothing)
9174 break;
9176 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
9178 if (insn_mode == VOIDmode)
9179 break;
9181 /* Make a place to write the result of the instruction. */
9182 result = target;
9183 if (! (result != 0
9184 && GET_CODE (result) == REG
9185 && GET_MODE (result) == insn_mode
9186 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9187 result = gen_reg_rtx (insn_mode);
9189 /* Make sure the operands are acceptable to the predicates. */
9191 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
9192 result = gen_reg_rtx (insn_mode);
9193 src_rtx = memory_address (BLKmode,
9194 expand_expr (src, NULL_RTX, ptr_mode,
9195 EXPAND_NORMAL));
9197 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
9198 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
9200 /* Check the string is readable and has an end. */
9201 if (current_function_check_memory_usage)
9202 emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
9203 src_rtx, ptr_mode,
9204 GEN_INT (MEMORY_USE_RO),
9205 TYPE_MODE (integer_type_node));
9207 char_rtx = const0_rtx;
9208 char_mode = insn_operand_mode[(int)icode][2];
9209 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
9210 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
9212 emit_insn (GEN_FCN (icode) (result,
9213 gen_rtx_MEM (BLKmode, src_rtx),
9214 char_rtx, GEN_INT (align)));
9216 /* Return the value in the proper mode for this function. */
9217 if (GET_MODE (result) == value_mode)
9218 return result;
9219 else if (target != 0)
9221 convert_move (target, result, 0);
9222 return target;
9224 else
9225 return convert_to_mode (value_mode, result, 0);
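/* Example (illustrative): `strlen ("abcd")' is folded by c_strlen above
to the constant 4, so no code is emitted for it at all; a non-constant
string is handled with the target's strlen insn pattern when one exists
in a suitable mode, and otherwise falls back to the library call. */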
9228 case BUILT_IN_STRCPY:
9229 /* If not optimizing, call the library function. */
9230 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9231 break;
9233 if (arglist == 0
9234 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9235 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9236 || TREE_CHAIN (arglist) == 0
9237 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9238 break;
9239 else
9241 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
9243 if (len == 0)
9244 break;
9246 len = size_binop (PLUS_EXPR, len, integer_one_node);
9248 chainon (arglist, build_tree_list (NULL_TREE, len));
9251 /* Falls through. */
9252 case BUILT_IN_MEMCPY:
9253 /* If not optimizing, call the library function. */
9254 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9255 break;
9257 if (arglist == 0
9258 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9259 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9260 || TREE_CHAIN (arglist) == 0
9261 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9262 != POINTER_TYPE)
9263 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9264 || (TREE_CODE (TREE_TYPE (TREE_VALUE
9265 (TREE_CHAIN (TREE_CHAIN (arglist)))))
9266 != INTEGER_TYPE))
9267 break;
9268 else
9270 tree dest = TREE_VALUE (arglist);
9271 tree src = TREE_VALUE (TREE_CHAIN (arglist));
9272 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9274 int src_align
9275 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9276 int dest_align
9277 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9278 rtx dest_mem, src_mem, dest_addr, len_rtx;
9280 /* If either SRC or DEST is not a pointer type, don't do
9281 this operation in-line. */
9282 if (src_align == 0 || dest_align == 0)
9284 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
9285 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9286 break;
9289 dest_mem = get_memory_rtx (dest);
9290 src_mem = get_memory_rtx (src);
9291 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9293 /* Just copy the access rights of SRC to DEST. */
9294 if (current_function_check_memory_usage)
9295 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
9296 XEXP (dest_mem, 0), ptr_mode,
9297 XEXP (src_mem, 0), ptr_mode,
9298 len_rtx, TYPE_MODE (sizetype));
9300 /* Copy the block as expediently as possible. */
9301 dest_addr
9302 = emit_block_move (dest_mem, src_mem, len_rtx,
9303 MIN (src_align, dest_align));
9305 if (dest_addr == 0)
9306 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
9308 return dest_addr;
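/* Example (illustrative): `strcpy (buf, "hi")' arrives here from the
BUILT_IN_STRCPY case above with the constant length 2 + 1 = 3 appended
to its argument list, and so is expanded exactly like
`memcpy (buf, "hi", 3)' through emit_block_move. */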
9311 case BUILT_IN_MEMSET:
9312 /* If not optimizing, call the library function. */
9313 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9314 break;
9316 if (arglist == 0
9317 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9318 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9319 || TREE_CHAIN (arglist) == 0
9320 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9321 != INTEGER_TYPE)
9322 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9323 || (INTEGER_TYPE
9324 != (TREE_CODE (TREE_TYPE
9325 (TREE_VALUE
9326 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
9327 break;
9328 else
9330 tree dest = TREE_VALUE (arglist);
9331 tree val = TREE_VALUE (TREE_CHAIN (arglist));
9332 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9334 int dest_align
9335 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9336 rtx dest_mem, dest_addr, len_rtx;
9338 /* If DEST is not a pointer type, don't do this
9339 operation in-line. */
9340 if (dest_align == 0)
9341 break;
9343 /* If the arguments have side-effects, then we can only evaluate
9344 them at most once. The following code evaluates them twice if
9345 they are not constants because we break out to expand_call
9346 in that case. They can't be constants if they have side-effects
9347 so we can check for that first. Alternatively, we could call
9348 save_expr to make multiple evaluation safe. */
9349 if (TREE_SIDE_EFFECTS (val) || TREE_SIDE_EFFECTS (len))
9350 break;
9352 /* If VAL is not 0, don't do this operation in-line. */
9353 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
9354 break;
9356 /* If LEN does not expand to a constant, don't do this
9357 operation in-line. */
9358 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9359 if (GET_CODE (len_rtx) != CONST_INT)
9360 break;
9362 dest_mem = get_memory_rtx (dest);
9364 /* Just check that DEST is writable and mark it as readable. */
9365 if (current_function_check_memory_usage)
9366 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
9367 XEXP (dest_mem, 0), ptr_mode,
9368 len_rtx, TYPE_MODE (sizetype),
9369 GEN_INT (MEMORY_USE_WO),
9370 TYPE_MODE (integer_type_node));
9373 dest_addr = clear_storage (dest_mem, len_rtx, dest_align);
9375 if (dest_addr == 0)
9376 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
9378 return dest_addr;
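/* Example (illustrative): `memset (p, 0, 32)' with P of known pointer
type is expanded in line through clear_storage; a nonzero fill value or
a non-constant length fails the checks above and goes through the
library call instead. */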
9381 /* These comparison functions need an instruction that returns an actual
9382 result value. An ordinary compare that just sets the condition codes
9383 is not enough. */
9384 #ifdef HAVE_cmpstrsi
9385 case BUILT_IN_STRCMP:
9386 /* If not optimizing, call the library function. */
9387 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9388 break;
9390 /* If we need to check memory accesses, call the library function. */
9391 if (current_function_check_memory_usage)
9392 break;
9394 if (arglist == 0
9395 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9396 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9397 || TREE_CHAIN (arglist) == 0
9398 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9399 break;
9400 else if (!HAVE_cmpstrsi)
9401 break;
9403 tree arg1 = TREE_VALUE (arglist);
9404 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9405 tree len, len2;
9407 len = c_strlen (arg1);
9408 if (len)
9409 len = size_binop (PLUS_EXPR, integer_one_node, len);
9410 len2 = c_strlen (arg2);
9411 if (len2)
9412 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
9414 /* If we don't have a constant length for the first, use the length
9415 of the second, if we know it. We don't require a constant for
9416 this case; some cost analysis could be done if both are available
9417 but neither is constant. For now, assume they're equally cheap.
9419 If both strings have constant lengths, use the smaller. This
9420 could arise if optimization results in strcmp being called with
9421 two fixed strings, or if the code was machine-generated. We should
9422 add some code to the `memcmp' handler below to deal with such
9423 situations, someday. */
9424 if (!len || TREE_CODE (len) != INTEGER_CST)
9426 if (len2)
9427 len = len2;
9428 else if (len == 0)
9429 break;
9431 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
9433 if (tree_int_cst_lt (len2, len))
9434 len = len2;
9437 chainon (arglist, build_tree_list (NULL_TREE, len));
9440 /* Falls through. */
9441 case BUILT_IN_MEMCMP:
9442 /* If not optimizing, call the library function. */
9443 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9444 break;
9446 /* If we need to check memory accesses, call the library function. */
9447 if (current_function_check_memory_usage)
9448 break;
9450 if (arglist == 0
9451 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9452 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9453 || TREE_CHAIN (arglist) == 0
9454 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
9455 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9456 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
9457 break;
9458 else if (!HAVE_cmpstrsi)
9459 break;
9461 tree arg1 = TREE_VALUE (arglist);
9462 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9463 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9464 rtx result;
9466 int arg1_align
9467 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9468 int arg2_align
9469 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9470 enum machine_mode insn_mode
9471 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
9473 /* If either argument is not a pointer, call the library function. */
9474 if (arg1_align == 0 || arg2_align == 0)
9476 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
9477 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9478 break;
9481 /* Make a place to write the result of the instruction. */
9482 result = target;
9483 if (! (result != 0
9484 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
9485 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9486 result = gen_reg_rtx (insn_mode);
9488 emit_insn (gen_cmpstrsi (result, get_memory_rtx (arg1),
9489 get_memory_rtx (arg2),
9490 expand_expr (len, NULL_RTX, VOIDmode, 0),
9491 GEN_INT (MIN (arg1_align, arg2_align))));
9493 /* Return the value in the proper mode for this function. */
9494 mode = TYPE_MODE (TREE_TYPE (exp));
9495 if (GET_MODE (result) == mode)
9496 return result;
9497 else if (target != 0)
9499 convert_move (target, result, 0);
9500 return target;
9502 else
9503 return convert_to_mode (mode, result, 0);
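/* Example (illustrative): for `strcmp (s, "abc")' the length bound
3 + 1 = 4 is taken from the constant operand above, so on a target with
a cmpstrsi pattern the comparison is emitted as a single insn whose
result is then converted to the mode of the call's type. */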
9505 #else
9506 case BUILT_IN_STRCMP:
9507 case BUILT_IN_MEMCMP:
9508 break;
9509 #endif
9511 case BUILT_IN_SETJMP:
9512 if (arglist == 0
9513 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9514 break;
9515 else
9517 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9518 VOIDmode, 0);
9519 rtx lab = gen_label_rtx ();
9520 rtx ret = expand_builtin_setjmp (buf_addr, target, lab, lab);
9521 emit_label (lab);
9522 return ret;
9525 /* __builtin_longjmp is passed a pointer to an array of five words.
9526 It's similar to the C library longjmp function but works with
9527 __builtin_setjmp above. */
9528 case BUILT_IN_LONGJMP:
9529 if (arglist == 0 || TREE_CHAIN (arglist) == 0
9530 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9531 break;
9532 else
9534 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9535 VOIDmode, 0);
9536 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
9537 NULL_RTX, VOIDmode, 0);
9539 if (value != const1_rtx)
9541 error ("__builtin_longjmp second argument must be 1");
9542 return const0_rtx;
9545 expand_builtin_longjmp (buf_addr, value);
9546 return const0_rtx;
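/* Usage sketch (illustrative): these builtins pair only with each
other, never with the C library setjmp/longjmp:

  void *buf[5];
  if (__builtin_setjmp (buf) == 0)
    do_work ();
  else
    handle_unwind ();

where do_work and handle_unwind are hypothetical; a call to
__builtin_longjmp (buf, 1) from within do_work transfers control to the
else arm. The buffer is the five-word array described above, and the
second argument of __builtin_longjmp must be the literal 1, as enforced
here. */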
9549 case BUILT_IN_TRAP:
9550 #ifdef HAVE_trap
9551 if (HAVE_trap)
9552 emit_insn (gen_trap ());
9553 else
9554 #endif
9555 error ("__builtin_trap not supported by this target");
9556 emit_barrier ();
9557 return const0_rtx;
9559 /* Various hooks for the DWARF 2 __throw routine. */
9560 case BUILT_IN_UNWIND_INIT:
9561 expand_builtin_unwind_init ();
9562 return const0_rtx;
9563 case BUILT_IN_DWARF_CFA:
9564 return virtual_cfa_rtx;
9565 #ifdef DWARF2_UNWIND_INFO
9566 case BUILT_IN_DWARF_FP_REGNUM:
9567 return expand_builtin_dwarf_fp_regnum ();
9568 case BUILT_IN_DWARF_REG_SIZE:
9569 return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
9570 #endif
9571 case BUILT_IN_FROB_RETURN_ADDR:
9572 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
9573 case BUILT_IN_EXTRACT_RETURN_ADDR:
9574 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
9575 case BUILT_IN_EH_RETURN:
9576 expand_builtin_eh_return (TREE_VALUE (arglist),
9577 TREE_VALUE (TREE_CHAIN (arglist)),
9578 TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))));
9579 return const0_rtx;
9581 default: /* just do library call, if unknown builtin */
9582 error ("built-in function `%s' not currently supported",
9583 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9586 /* The switch statement above can drop through to cause the function
9587 to be called normally. */
9589 return expand_call (exp, target, ignore);
9592 /* Built-in functions to perform an untyped call and return. */
9594 /* For each register that may be used for calling a function, this
9595 gives a mode used to copy the register's value. VOIDmode indicates
9596 the register is not used for calling a function. If the machine
9597 has register windows, this gives only the outbound registers.
9598 INCOMING_REGNO gives the corresponding inbound register. */
9599 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
9601 /* For each register that may be used for returning values, this gives
9602 a mode used to copy the register's value. VOIDmode indicates the
9603 register is not used for returning values. If the machine has
9604 register windows, this gives only the outbound registers.
9605 INCOMING_REGNO gives the corresponding inbound register. */
9606 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
9608 /* For each register that may be used for calling a function, this
9609 gives the offset of that register into the block returned by
9610 __builtin_apply_args. 0 indicates that the register is not
9611 used for calling a function. */
9612 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9614 /* Return the offset of register REGNO into the block returned by
9615 __builtin_apply_args. This is not declared static, since it is
9616 needed in objc-act.c. */
9618 int
9619 apply_args_register_offset (regno)
9620 int regno;
9622 apply_args_size ();
9624 /* Arguments are always put in outgoing registers (in the argument
9625 block) when that makes sense. */
9626 #ifdef OUTGOING_REGNO
9627 regno = OUTGOING_REGNO(regno);
9628 #endif
9629 return apply_args_reg_offset[regno];
9632 /* Return the size required for the block returned by __builtin_apply_args,
9633 and initialize apply_args_mode. */
9635 static int
9636 apply_args_size ()
9638 static int size = -1;
9639 int align, regno;
9640 enum machine_mode mode;
9642 /* The values computed by this function never change. */
9643 if (size < 0)
9645 /* The first value is the incoming arg-pointer. */
9646 size = GET_MODE_SIZE (Pmode);
9648 /* The second value is the structure value address unless this is
9649 passed as an "invisible" first argument. */
9650 if (struct_value_rtx)
9651 size += GET_MODE_SIZE (Pmode);
9653 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9654 if (FUNCTION_ARG_REGNO_P (regno))
9656 /* Search for the proper mode for copying this register's
9657 value. I'm not sure this is right, but it works so far. */
9658 enum machine_mode best_mode = VOIDmode;
9660 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9661 mode != VOIDmode;
9662 mode = GET_MODE_WIDER_MODE (mode))
9663 if (HARD_REGNO_MODE_OK (regno, mode)
9664 && HARD_REGNO_NREGS (regno, mode) == 1)
9665 best_mode = mode;
9667 if (best_mode == VOIDmode)
9668 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9669 mode != VOIDmode;
9670 mode = GET_MODE_WIDER_MODE (mode))
9671 if (HARD_REGNO_MODE_OK (regno, mode)
9672 && (mov_optab->handlers[(int) mode].insn_code
9673 != CODE_FOR_nothing))
9674 best_mode = mode;
9676 mode = best_mode;
9677 if (mode == VOIDmode)
9678 abort ();
9680 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9681 if (size % align != 0)
9682 size = CEIL (size, align) * align;
9683 apply_args_reg_offset[regno] = size;
9684 size += GET_MODE_SIZE (mode);
9685 apply_args_mode[regno] = mode;
9687 else
9689 apply_args_mode[regno] = VOIDmode;
9690 apply_args_reg_offset[regno] = 0;
9693 return size;
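/* A worked example of the rounding above (illustrative): with size = 5
and a register whose mode requires align = 4, CEIL (5, 4) = 2, so size
is rounded up to 2 * 4 = 8; that register's slot then starts at offset
8 and a 4-byte register advances size to 12. The same CEIL-based
round-up recurs in apply_result_size and result_vector below. */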
9696 /* Return the size required for the block returned by __builtin_apply,
9697 and initialize apply_result_mode. */
9699 static int
9700 apply_result_size ()
9702 static int size = -1;
9703 int align, regno;
9704 enum machine_mode mode;
9706 /* The values computed by this function never change. */
9707 if (size < 0)
9709 size = 0;
9711 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9712 if (FUNCTION_VALUE_REGNO_P (regno))
9714 /* Search for the proper mode for copying this register's
9715 value. I'm not sure this is right, but it works so far. */
9716 enum machine_mode best_mode = VOIDmode;
9718 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9719 mode != TImode;
9720 mode = GET_MODE_WIDER_MODE (mode))
9721 if (HARD_REGNO_MODE_OK (regno, mode))
9722 best_mode = mode;
9724 if (best_mode == VOIDmode)
9725 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9726 mode != VOIDmode;
9727 mode = GET_MODE_WIDER_MODE (mode))
9728 if (HARD_REGNO_MODE_OK (regno, mode)
9729 && (mov_optab->handlers[(int) mode].insn_code
9730 != CODE_FOR_nothing))
9731 best_mode = mode;
9733 mode = best_mode;
9734 if (mode == VOIDmode)
9735 abort ();
9737 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9738 if (size % align != 0)
9739 size = CEIL (size, align) * align;
9740 size += GET_MODE_SIZE (mode);
9741 apply_result_mode[regno] = mode;
9743 else
9744 apply_result_mode[regno] = VOIDmode;
9746 /* Allow targets that use untyped_call and untyped_return to override
9747 the size so that machine-specific information can be stored here. */
9748 #ifdef APPLY_RESULT_SIZE
9749 size = APPLY_RESULT_SIZE;
9750 #endif
9752 return size;
9755 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9756 /* Create a vector describing the result block RESULT. If SAVEP is true,
9757 the result block is used to save the values; otherwise it is used to
9758 restore the values. */
9760 static rtx
9761 result_vector (savep, result)
9762 int savep;
9763 rtx result;
9765 int regno, size, align, nelts;
9766 enum machine_mode mode;
9767 rtx reg, mem;
9768 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9770 size = nelts = 0;
9771 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9772 if ((mode = apply_result_mode[regno]) != VOIDmode)
9774 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9775 if (size % align != 0)
9776 size = CEIL (size, align) * align;
9777 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
9778 mem = change_address (result, mode,
9779 plus_constant (XEXP (result, 0), size));
9780 savevec[nelts++] = (savep
9781 ? gen_rtx_SET (VOIDmode, mem, reg)
9782 : gen_rtx_SET (VOIDmode, reg, mem));
9783 size += GET_MODE_SIZE (mode);
9785 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
9787 #endif /* HAVE_untyped_call or HAVE_untyped_return */
9789 /* Save the state required to perform an untyped call with the same
9790 arguments as were passed to the current function. */
9792 static rtx
9793 expand_builtin_apply_args ()
9795 rtx registers;
9796 int size, align, regno;
9797 enum machine_mode mode;
9799 /* Create a block where the arg-pointer, structure value address,
9800 and argument registers can be saved. */
9801 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9803 /* Walk past the arg-pointer and structure value address. */
9804 size = GET_MODE_SIZE (Pmode);
9805 if (struct_value_rtx)
9806 size += GET_MODE_SIZE (Pmode);
9808 /* Save each register used in calling a function to the block. */
9809 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9810 if ((mode = apply_args_mode[regno]) != VOIDmode)
9812 rtx tem;
9814 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9815 if (size % align != 0)
9816 size = CEIL (size, align) * align;
9818 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
9820 #ifdef STACK_REGS
9821 /* For reg-stack.c's stack register housekeeping.
9822 Compare with a similar piece of code in function.c. */
9824 emit_insn (gen_rtx_USE (mode, tem));
9825 #endif
9827 emit_move_insn (change_address (registers, mode,
9828 plus_constant (XEXP (registers, 0),
9829 size)),
9830 tem);
9831 size += GET_MODE_SIZE (mode);
9834 /* Save the arg pointer to the block. */
9835 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9836 copy_to_reg (virtual_incoming_args_rtx));
9837 size = GET_MODE_SIZE (Pmode);
9839 /* Save the structure value address unless this is passed as an
9840 "invisible" first argument. */
9841 if (struct_value_incoming_rtx)
9843 emit_move_insn (change_address (registers, Pmode,
9844 plus_constant (XEXP (registers, 0),
9845 size)),
9846 copy_to_reg (struct_value_incoming_rtx));
9847 size += GET_MODE_SIZE (Pmode);
9850 /* Return the address of the block. */
9851 return copy_addr_to_reg (XEXP (registers, 0));
9854 /* Perform an untyped call and save the state required to perform an
9855 untyped return of whatever value was returned by the given function. */
9857 static rtx
9858 expand_builtin_apply (function, arguments, argsize)
9859 rtx function, arguments, argsize;
9861 int size, align, regno;
9862 enum machine_mode mode;
9863 rtx incoming_args, result, reg, dest, call_insn;
9864 rtx old_stack_level = 0;
9865 rtx call_fusage = 0;
9867 /* Create a block where the return registers can be saved. */
9868 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9870 /* ??? The argsize value should be adjusted here. */
9872 /* Fetch the arg pointer from the ARGUMENTS block. */
9873 incoming_args = gen_reg_rtx (Pmode);
9874 emit_move_insn (incoming_args,
9875 gen_rtx_MEM (Pmode, arguments));
9876 #ifndef STACK_GROWS_DOWNWARD
9877 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9878 incoming_args, 0, OPTAB_LIB_WIDEN);
9879 #endif
9881 /* Perform postincrements before actually calling the function. */
9882 emit_queue ();
9884 /* Push a new argument block and copy the arguments. */
9885 do_pending_stack_adjust ();
9887 /* Save the stack, using the nonlocal save mechanism if available. */
9888 #ifdef HAVE_save_stack_nonlocal
9889 if (HAVE_save_stack_nonlocal)
9890 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
9891 else
9892 #endif
9893 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9895 /* Push a block of memory onto the stack to store the memory arguments.
9896 Save the address in a register, and copy the memory arguments. ??? I
9897 haven't figured out how the calling convention macros affect this,
9898 but it's likely that the source and/or destination addresses in
9899 the block copy will need updating in machine-specific ways. */
9900 dest = allocate_dynamic_stack_space (argsize, 0, 0);
9901 emit_block_move (gen_rtx_MEM (BLKmode, dest),
9902 gen_rtx_MEM (BLKmode, incoming_args),
9903 argsize,
9904 PARM_BOUNDARY / BITS_PER_UNIT);
9906 /* Refer to the argument block. */
9907 apply_args_size ();
9908 arguments = gen_rtx_MEM (BLKmode, arguments);
9910 /* Walk past the arg-pointer and structure value address. */
9911 size = GET_MODE_SIZE (Pmode);
9912 if (struct_value_rtx)
9913 size += GET_MODE_SIZE (Pmode);
9915 /* Restore each of the registers previously saved. Make USE insns
9916 for each of these registers for use in making the call. */
9917 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9918 if ((mode = apply_args_mode[regno]) != VOIDmode)
9920 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9921 if (size % align != 0)
9922 size = CEIL (size, align) * align;
9923 reg = gen_rtx_REG (mode, regno);
9924 emit_move_insn (reg,
9925 change_address (arguments, mode,
9926 plus_constant (XEXP (arguments, 0),
9927 size)));
9929 use_reg (&call_fusage, reg);
9930 size += GET_MODE_SIZE (mode);
9933 /* Restore the structure value address unless this is passed as an
9934 "invisible" first argument. */
9935 size = GET_MODE_SIZE (Pmode);
9936 if (struct_value_rtx)
9938 rtx value = gen_reg_rtx (Pmode);
9939 emit_move_insn (value,
9940 change_address (arguments, Pmode,
9941 plus_constant (XEXP (arguments, 0),
9942 size)));
9943 emit_move_insn (struct_value_rtx, value);
9944 if (GET_CODE (struct_value_rtx) == REG)
9945 use_reg (&call_fusage, struct_value_rtx);
9946 size += GET_MODE_SIZE (Pmode);
9949 /* All arguments and registers used for the call are set up by now! */
9950 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9952 /* Ensure the address is valid. A SYMBOL_REF is already valid, and we
9953 don't want to load it into a register as an optimization, because
9954 prepare_call_address has already done that if it should be done. */
9955 if (GET_CODE (function) != SYMBOL_REF)
9956 function = memory_address (FUNCTION_MODE, function);
9958 /* Generate the actual call instruction and save the return value. */
9959 #ifdef HAVE_untyped_call
9960 if (HAVE_untyped_call)
9961 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
9962 result, result_vector (1, result)));
9963 else
9964 #endif
9965 #ifdef HAVE_call_value
9966 if (HAVE_call_value)
9968 rtx valreg = 0;
9970 /* Locate the unique return register. It is not possible to
9971 express a call that sets more than one return register using
9972 call_value; use untyped_call for that. In fact, untyped_call
9973 only needs to save the return registers in the given block. */
9974 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9975 if ((mode = apply_result_mode[regno]) != VOIDmode)
9977 if (valreg)
9978 abort (); /* HAVE_untyped_call required. */
9979 valreg = gen_rtx_REG (mode, regno);
9982 emit_call_insn (gen_call_value (valreg,
9983 gen_rtx_MEM (FUNCTION_MODE, function),
9984 const0_rtx, NULL_RTX, const0_rtx));
9986 emit_move_insn (change_address (result, GET_MODE (valreg),
9987 XEXP (result, 0)),
9988 valreg);
9990 else
9991 #endif
9992 abort ();
9994 /* Find the CALL insn we just emitted. */
9995 for (call_insn = get_last_insn ();
9996 call_insn && GET_CODE (call_insn) != CALL_INSN;
9997 call_insn = PREV_INSN (call_insn))
10000 if (! call_insn)
10001 abort ();
10003 /* Put the register usage information on the CALL. If there is already
10004 some usage information, put ours at the end. */
10005 if (CALL_INSN_FUNCTION_USAGE (call_insn))
10007 rtx link;
10009 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
10010 link = XEXP (link, 1))
10013 XEXP (link, 1) = call_fusage;
10015 else
10016 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
10018 /* Restore the stack. */
10019 #ifdef HAVE_save_stack_nonlocal
10020 if (HAVE_save_stack_nonlocal)
10021 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
10022 else
10023 #endif
10024 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
10026 /* Return the address of the result block. */
10027 return copy_addr_to_reg (XEXP (result, 0));
10030 /* Perform an untyped return. */
10032 static void
10033 expand_builtin_return (result)
10034 rtx result;
10036 int size, align, regno;
10037 enum machine_mode mode;
10038 rtx reg;
10039 rtx call_fusage = 0;
10041 apply_result_size ();
10042 result = gen_rtx_MEM (BLKmode, result);
10044 #ifdef HAVE_untyped_return
10045 if (HAVE_untyped_return)
10047 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
10048 emit_barrier ();
10049 return;
10051 #endif
10053 /* Restore the return value and note that each value is used. */
10054 size = 0;
10055 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10056 if ((mode = apply_result_mode[regno]) != VOIDmode)
10058 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
10059 if (size % align != 0)
10060 size = CEIL (size, align) * align;
10061 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
10062 emit_move_insn (reg,
10063 change_address (result, mode,
10064 plus_constant (XEXP (result, 0),
10065 size)));
10067 push_to_sequence (call_fusage);
10068 emit_insn (gen_rtx_USE (VOIDmode, reg));
10069 call_fusage = get_insns ();
10070 end_sequence ();
10071 size += GET_MODE_SIZE (mode);
10074 /* Put the USE insns before the return. */
10075 emit_insns (call_fusage);
10077 /* Return whatever values were restored by jumping directly to the end
10078 of the function. */
10079 expand_null_return ();
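/* Usage sketch (illustrative, not from this file): the three builtins
expanded above combine into an argument-forwarding wrapper:

  void wrapper (void)
  {
    void *args = __builtin_apply_args ();
    void *result = __builtin_apply ((void (*)()) target_fn, args, 64);
    __builtin_return (result);
  }

where target_fn is a hypothetical callee and 64 is a caller-supplied
guess at the size of the argument block (see the ??? note about
adjusting argsize in expand_builtin_apply above). */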
10082 /* Expand code for a post- or pre-increment or decrement
10083 and return the RTX for the result.
10084 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
10086 static rtx
10087 expand_increment (exp, post, ignore)
10088 register tree exp;
10089 int post, ignore;
10091 register rtx op0, op1;
10092 register rtx temp, value;
10093 register tree incremented = TREE_OPERAND (exp, 0);
10094 optab this_optab = add_optab;
10095 int icode;
10096 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
10097 int op0_is_copy = 0;
10098 int single_insn = 0;
10099 /* 1 means we can't store into OP0 directly,
10100 because it is a subreg narrower than a word,
10101 and we don't dare clobber the rest of the word. */
10102 int bad_subreg = 0;
10104 /* Stabilize any component ref that might need to be
10105 evaluated more than once below. */
10106 if (!post
10107 || TREE_CODE (incremented) == BIT_FIELD_REF
10108 || (TREE_CODE (incremented) == COMPONENT_REF
10109 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
10110 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
10111 incremented = stabilize_reference (incremented);
10112 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
10113 ones into save exprs so that they don't accidentally get evaluated
10114 more than once by the code below. */
10115 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
10116 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
10117 incremented = save_expr (incremented);
10119 /* Compute the operands as RTX.
10120 Note whether OP0 is the actual lvalue or a copy of it:
10121 I believe it is a copy iff it is a register or subreg
10122 and insns were generated in computing it. */
10124 temp = get_last_insn ();
10125 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
10127 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
10128 in place but instead must do sign- or zero-extension during assignment,
10129 so we copy it into a new register and let the code below use it as
10130 a copy.
10132 Note that we can safely modify this SUBREG since it is known not to be
10133 shared (it was made by the expand_expr call above). */
10135 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
10137 if (post)
10138 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
10139 else
10140 bad_subreg = 1;
10142 else if (GET_CODE (op0) == SUBREG
10143 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
10145 /* We cannot increment this SUBREG in place. If we are
10146 post-incrementing, get a copy of the old value. Otherwise,
10147 just mark that we cannot increment in place. */
10148 if (post)
10149 op0 = copy_to_reg (op0);
10150 else
10151 bad_subreg = 1;
10154 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
10155 && temp != get_last_insn ());
10156 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
10157 EXPAND_MEMORY_USE_BAD);
10159 /* Decide whether incrementing or decrementing. */
10160 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
10161 || TREE_CODE (exp) == PREDECREMENT_EXPR)
10162 this_optab = sub_optab;
10164 /* Convert decrement by a constant into a negative increment. */
10165 if (this_optab == sub_optab
10166 && GET_CODE (op1) == CONST_INT)
10168 op1 = GEN_INT (- INTVAL (op1));
10169 this_optab = add_optab;
10172 /* For a preincrement, see if we can do this with a single instruction. */
10173 if (!post)
10175 icode = (int) this_optab->handlers[(int) mode].insn_code;
10176 if (icode != (int) CODE_FOR_nothing
10177 /* Make sure that OP0 is valid for operands 0 and 1
10178 of the insn we want to queue. */
10179 && (*insn_operand_predicate[icode][0]) (op0, mode)
10180 && (*insn_operand_predicate[icode][1]) (op0, mode)
10181 && (*insn_operand_predicate[icode][2]) (op1, mode))
10182 single_insn = 1;
10185 /* If OP0 is not the actual lvalue, but rather a copy in a register,
10186 then we cannot just increment OP0. We must therefore contrive to
10187 increment the original value. Then, for postincrement, we can return
10188 OP0 since it is a copy of the old value. For preincrement, expand here
10189 unless we can do it with a single insn.
10191 Likewise if storing directly into OP0 would clobber high bits
10192 we need to preserve (bad_subreg). */
10193 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
10195 /* This is the easiest way to increment the value wherever it is.
10196 Problems with multiple evaluation of INCREMENTED are prevented
10197 because either (1) it is a component_ref or preincrement,
10198 in which case it was stabilized above, or (2) it is an array_ref
10199 with constant index in an array in a register, which is
10200 safe to reevaluate. */
10201 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
10202 || TREE_CODE (exp) == PREDECREMENT_EXPR)
10203 ? MINUS_EXPR : PLUS_EXPR),
10204 TREE_TYPE (exp),
10205 incremented,
10206 TREE_OPERAND (exp, 1));
10208 while (TREE_CODE (incremented) == NOP_EXPR
10209 || TREE_CODE (incremented) == CONVERT_EXPR)
10211 newexp = convert (TREE_TYPE (incremented), newexp);
10212 incremented = TREE_OPERAND (incremented, 0);
10215 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
10216 return post ? op0 : temp;
10219 if (post)
10221 /* We have a true reference to the value in OP0.
10222 If there is an insn to add or subtract in this mode, queue it.
10223 Queueing the increment insn avoids the register shuffling
10224 that often results if we must increment now and first save
10225 the old value for subsequent use. */
10227 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
10228 op0 = stabilize (op0);
10229 #endif
10231 icode = (int) this_optab->handlers[(int) mode].insn_code;
10232 if (icode != (int) CODE_FOR_nothing
10233 /* Make sure that OP0 is valid for operands 0 and 1
10234 of the insn we want to queue. */
10235 && (*insn_operand_predicate[icode][0]) (op0, mode)
10236 && (*insn_operand_predicate[icode][1]) (op0, mode))
10238 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10239 op1 = force_reg (mode, op1);
10241 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
10243 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
10245 rtx addr = (general_operand (XEXP (op0, 0), mode)
10246 ? force_reg (Pmode, XEXP (op0, 0))
10247 : copy_to_reg (XEXP (op0, 0)));
10248 rtx temp, result;
10250 op0 = change_address (op0, VOIDmode, addr);
10251 temp = force_reg (GET_MODE (op0), op0);
10252 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10253 op1 = force_reg (mode, op1);
10255 /* The increment queue is LIFO, thus we have to `queue'
10256 the instructions in reverse order. */
10257 enqueue_insn (op0, gen_move_insn (op0, temp));
10258 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
10259 return result;
10263 /* Preincrement, or we can't increment with one simple insn. */
10264 if (post)
10265 /* Save a copy of the value before inc or dec, to return it later. */
10266 temp = value = copy_to_reg (op0);
10267 else
10268 /* Arrange to return the incremented value. */
10269 /* Copy the rtx because expand_binop will protect from the queue,
10270 and the results of that would be invalid for us to return
10271 if our caller does emit_queue before using our result. */
10272 temp = copy_rtx (value = op0);
10274 /* Increment however we can. */
10275 op1 = expand_binop (mode, this_optab, value, op1,
10276 current_function_check_memory_usage ? NULL_RTX : op0,
10277 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
10278 /* Make sure the value is stored into OP0. */
10279 if (op1 != op0)
10280 emit_move_insn (op0, op1);
10282 return temp;
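/* Example of the distinction handled above (illustrative): for
`b = a++;' (post = 1) the caller receives A's old value, captured
before the addition, while for `b = ++a;' (post = 0) the incremented
value itself is returned; in the fall-through path just above, the
emit_move_insn stores the new value back into A whenever expand_binop
delivered its result somewhere else. */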
10285 /* Expand all function calls contained within EXP, innermost ones first.
10286 But don't look within expressions that have sequence points.
10287 For each CALL_EXPR, record the rtx for its value
10288 in the CALL_EXPR_RTL field. */
10290 static void
10291 preexpand_calls (exp)
10292 tree exp;
10294 register int nops, i;
10295 int type = TREE_CODE_CLASS (TREE_CODE (exp));
10297 if (! do_preexpand_calls)
10298 return;
10300 /* Only expressions and references can contain calls. */
10302 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
10303 return;
10305 switch (TREE_CODE (exp))
10307 case CALL_EXPR:
10308 /* Do nothing if already expanded. */
10309 if (CALL_EXPR_RTL (exp) != 0
10310 /* Do nothing if the call returns a variable-sized object. */
10311 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
10312 /* Do nothing to built-in functions. */
10313 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
10314 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
10315 == FUNCTION_DECL)
10316 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
10317 return;
10319 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
10320 return;
10322 case COMPOUND_EXPR:
10323 case COND_EXPR:
10324 case TRUTH_ANDIF_EXPR:
10325 case TRUTH_ORIF_EXPR:
10326 /* If we find one of these, then we can be sure
10327 the adjust will be done for it (since it makes jumps).
10328 Do it now, so that if this is inside an argument
10329 of a function, we don't get the stack adjustment
10330 after some other args have already been pushed. */
10331 do_pending_stack_adjust ();
10332 return;
10334 case BLOCK:
10335 case RTL_EXPR:
10336 case WITH_CLEANUP_EXPR:
10337 case CLEANUP_POINT_EXPR:
10338 case TRY_CATCH_EXPR:
10339 return;
10341 case SAVE_EXPR:
10342 if (SAVE_EXPR_RTL (exp) != 0)
10343 return;
10345 default:
10346 break;
10349 nops = tree_code_length[(int) TREE_CODE (exp)];
10350 for (i = 0; i < nops; i++)
10351 if (TREE_OPERAND (exp, i) != 0)
10353 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
10354 if (type == 'e' || type == '<' || type == '1' || type == '2'
10355 || type == 'r')
10356 preexpand_calls (TREE_OPERAND (exp, i));
10360 /* At the start of a function, record that we have no previously-pushed
10361 arguments waiting to be popped. */
10363 void
10364 init_pending_stack_adjust ()
10366 pending_stack_adjust = 0;
10369 /* When exiting from the function, if safe, clear out any pending stack adjust
10370 so the adjustment won't get done.
10372 Note, if the current function calls alloca, then it must have a
10373 frame pointer regardless of the value of flag_omit_frame_pointer. */
10375 void
10376 clear_pending_stack_adjust ()
10378 #ifdef EXIT_IGNORE_STACK
10379 if (optimize > 0
10380 && (! flag_omit_frame_pointer || current_function_calls_alloca)
10381 && EXIT_IGNORE_STACK
10382 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
10383 && ! flag_inline_functions)
10384 pending_stack_adjust = 0;
10385 #endif
10388 /* Pop any previously-pushed arguments that have not been popped yet. */
10390 void
10391 do_pending_stack_adjust ()
10393 if (inhibit_defer_pop == 0)
10395 if (pending_stack_adjust != 0)
10396 adjust_stack (GEN_INT (pending_stack_adjust));
10397 pending_stack_adjust = 0;
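/* Worked example (illustrative): if two earlier calls each left 8 bytes
of arguments to be popped, pending_stack_adjust is 16 by this point and
the two deferred pops collapse into a single stack-pointer adjustment
here, rather than one adjustment after each call. */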
10401 /* Expand conditional expressions. */
10403 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
10404 LABEL is an rtx of code CODE_LABEL, in this function and all the
10405 functions here. */
10407 void
10408 jumpifnot (exp, label)
10409 tree exp;
10410 rtx label;
10412 do_jump (exp, label, NULL_RTX);
10415 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
10417 void
10418 jumpif (exp, label)
10419 tree exp;
10420 rtx label;
10422 do_jump (exp, NULL_RTX, label);
10425 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
10426 the result is zero, or IF_TRUE_LABEL if the result is one.
10427 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
10428 meaning fall through in that case.
10430 do_jump always does any pending stack adjust except when it does not
10431 actually perform a jump. An example where there is no jump
10432 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
10434 This function is responsible for optimizing cases such as
10435 &&, || and comparison operators in EXP. */
10437 void
10438 do_jump (exp, if_false_label, if_true_label)
10439 tree exp;
10440 rtx if_false_label, if_true_label;
10442 register enum tree_code code = TREE_CODE (exp);
10443 /* Some cases need to create a label to jump to
10444 in order to properly fall through.
10445 These cases set DROP_THROUGH_LABEL nonzero. */
10446 rtx drop_through_label = 0;
10447 rtx temp;
10448 rtx comparison = 0;
10449 int i;
10450 tree type;
10451 enum machine_mode mode;
10453 #ifdef MAX_INTEGER_COMPUTATION_MODE
10454 check_max_integer_computation_mode (exp);
10455 #endif
10457 emit_queue ();
10459 switch (code)
10461 case ERROR_MARK:
10462 break;
10464 case INTEGER_CST:
10465 temp = integer_zerop (exp) ? if_false_label : if_true_label;
10466 if (temp)
10467 emit_jump (temp);
10468 break;
10470 #if 0
10471 /* This is not true with #pragma weak */
10472 case ADDR_EXPR:
10473 /* The address of something can never be zero. */
10474 if (if_true_label)
10475 emit_jump (if_true_label);
10476 break;
10477 #endif
10479 case NOP_EXPR:
10480 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
10481 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
10482 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
10483 goto normal;
10484 case CONVERT_EXPR:
10485 /* If we are narrowing the operand, we have to do the compare in the
10486 narrower mode. */
10487 if ((TYPE_PRECISION (TREE_TYPE (exp))
10488 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10489 goto normal;
10490 case NON_LVALUE_EXPR:
10491 case REFERENCE_EXPR:
10492 case ABS_EXPR:
10493 case NEGATE_EXPR:
10494 case LROTATE_EXPR:
10495 case RROTATE_EXPR:
10496 /* These cannot change zero->non-zero or vice versa. */
10497 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10498 break;
10500 #if 0
10501 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
10502 a test, and can be longer if the test is eliminated. */
10503 case PLUS_EXPR:
10504 /* Reduce to minus. */
10505 exp = build (MINUS_EXPR, TREE_TYPE (exp),
10506 TREE_OPERAND (exp, 0),
10507 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
10508 TREE_OPERAND (exp, 1))));
10509 /* Process as MINUS. */
10510 #endif
10512 case MINUS_EXPR:
10513 /* Non-zero iff operands of minus differ. */
10514 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
10515 TREE_OPERAND (exp, 0),
10516 TREE_OPERAND (exp, 1)),
10517 NE, NE);
10518 break;
10520 case BIT_AND_EXPR:
10521 /* If we are AND'ing with a small constant, do this comparison in the
10522 smallest type that fits. If the machine doesn't have comparisons
10523 that small, it will be converted back to the wider comparison.
10524 This helps if we are testing the sign bit of a narrower object.
10525 combine can't do this for us because it can't know whether a
10526 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
10528 if (! SLOW_BYTE_ACCESS
10529 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10530 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10531 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
10532 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10533 && (type = type_for_mode (mode, 1)) != 0
10534 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10535 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10536 != CODE_FOR_nothing))
10538 do_jump (convert (type, exp), if_false_label, if_true_label);
10539 break;
10541 goto normal;
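/* Worked example of the narrowing above (illustrative): for
`if (x & 0x80)' with a 32-bit int X, floor_log2 (0x80) = 7, so
mode_for_size asks for an 8-bit integer mode; on a target with
comparisons in that mode the test becomes a one-byte sign-bit check
instead of a full-word AND and compare. */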
10543 case TRUTH_NOT_EXPR:
10544 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10545 break;
10547 case TRUTH_ANDIF_EXPR:
10548 if (if_false_label == 0)
10549 if_false_label = drop_through_label = gen_label_rtx ();
10550 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10551 start_cleanup_deferral ();
10552 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10553 end_cleanup_deferral ();
10554 break;
10556 case TRUTH_ORIF_EXPR:
10557 if (if_true_label == 0)
10558 if_true_label = drop_through_label = gen_label_rtx ();
10559 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10560 start_cleanup_deferral ();
10561 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10562 end_cleanup_deferral ();
10563 break;
10565 case COMPOUND_EXPR:
10566 push_temp_slots ();
10567 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10568 preserve_temp_slots (NULL_RTX);
10569 free_temp_slots ();
10570 pop_temp_slots ();
10571 emit_queue ();
10572 do_pending_stack_adjust ();
10573 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10574 break;
10576 case COMPONENT_REF:
10577 case BIT_FIELD_REF:
10578 case ARRAY_REF:
10580 int bitsize, bitpos, unsignedp;
10581 enum machine_mode mode;
10582 tree type;
10583 tree offset;
10584 int volatilep = 0;
10585 int alignment;
10587 /* Get description of this reference. We don't actually care
10588 about the underlying object here. */
10589 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10590 &mode, &unsignedp, &volatilep,
10591 &alignment);
10593 type = type_for_size (bitsize, unsignedp);
10594 if (! SLOW_BYTE_ACCESS
10595 && type != 0 && bitsize >= 0
10596 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10597 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10598 != CODE_FOR_nothing))
10600 do_jump (convert (type, exp), if_false_label, if_true_label);
10601 break;
10603 goto normal;
10606 case COND_EXPR:
10607 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10608 if (integer_onep (TREE_OPERAND (exp, 1))
10609 && integer_zerop (TREE_OPERAND (exp, 2)))
10610 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10612 else if (integer_zerop (TREE_OPERAND (exp, 1))
10613 && integer_onep (TREE_OPERAND (exp, 2)))
10614 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10616 else
10618 register rtx label1 = gen_label_rtx ();
10619 drop_through_label = gen_label_rtx ();
10621 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10623 start_cleanup_deferral ();
10624 /* Now the THEN-expression. */
10625 do_jump (TREE_OPERAND (exp, 1),
10626 if_false_label ? if_false_label : drop_through_label,
10627 if_true_label ? if_true_label : drop_through_label);
10628 /* In case the do_jump just above never jumps. */
10629 do_pending_stack_adjust ();
10630 emit_label (label1);
10632 /* Now the ELSE-expression. */
10633 do_jump (TREE_OPERAND (exp, 2),
10634 if_false_label ? if_false_label : drop_through_label,
10635 if_true_label ? if_true_label : drop_through_label);
10636 end_cleanup_deferral ();
10638 break;
10640 case EQ_EXPR:
10642 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10644 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10645 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10647 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10648 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10649 do_jump
10650 (fold
10651 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10652 fold (build (EQ_EXPR, TREE_TYPE (exp),
10653 fold (build1 (REALPART_EXPR,
10654 TREE_TYPE (inner_type),
10655 exp0)),
10656 fold (build1 (REALPART_EXPR,
10657 TREE_TYPE (inner_type),
10658 exp1)))),
10659 fold (build (EQ_EXPR, TREE_TYPE (exp),
10660 fold (build1 (IMAGPART_EXPR,
10661 TREE_TYPE (inner_type),
10662 exp0)),
10663 fold (build1 (IMAGPART_EXPR,
10664 TREE_TYPE (inner_type),
10665 exp1)))))),
10666 if_false_label, if_true_label);
10669 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10670 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10672 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10673 && !can_compare_p (TYPE_MODE (inner_type)))
10674 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10675 else
10676 comparison = compare (exp, EQ, EQ);
10677 break;
10680 case NE_EXPR:
10682 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10684 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10685 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10687 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10688 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10689 do_jump
10690 (fold
10691 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10692 fold (build (NE_EXPR, TREE_TYPE (exp),
10693 fold (build1 (REALPART_EXPR,
10694 TREE_TYPE (inner_type),
10695 exp0)),
10696 fold (build1 (REALPART_EXPR,
10697 TREE_TYPE (inner_type),
10698 exp1)))),
10699 fold (build (NE_EXPR, TREE_TYPE (exp),
10700 fold (build1 (IMAGPART_EXPR,
10701 TREE_TYPE (inner_type),
10702 exp0)),
10703 fold (build1 (IMAGPART_EXPR,
10704 TREE_TYPE (inner_type),
10705 exp1)))))),
10706 if_false_label, if_true_label);
10709 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10710 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10712 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10713 && !can_compare_p (TYPE_MODE (inner_type)))
10714 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10715 else
10716 comparison = compare (exp, NE, NE);
10717 break;
10720 case LT_EXPR:
10721 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10722 == MODE_INT)
10723 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10724 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10725 else
10726 comparison = compare (exp, LT, LTU);
10727 break;
10729 case LE_EXPR:
10730 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10731 == MODE_INT)
10732 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10733 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10734 else
10735 comparison = compare (exp, LE, LEU);
10736 break;
10738 case GT_EXPR:
10739 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10740 == MODE_INT)
10741 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10742 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10743 else
10744 comparison = compare (exp, GT, GTU);
10745 break;
10747 case GE_EXPR:
10748 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10749 == MODE_INT)
10750 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10751 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10752 else
10753 comparison = compare (exp, GE, GEU);
10754 break;
    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is no longer needed and produces poor code, since it makes
	 comparisons and tests from non-SI objects use different code
	 sequences.  */
      /* Copy to register to avoid generating bad insns by cse
	 from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
	temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      if (GET_CODE (temp) == CONST_INT)
	comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
      else if (GET_CODE (temp) == LABEL_REF)
	comparison = const_true_rtx;
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	       && !can_compare_p (GET_MODE (temp)))
	/* Note swapping the labels gives us not-equal.  */
	do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
	comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
				       NE, TREE_UNSIGNED (TREE_TYPE (exp)),
				       GET_MODE (temp), NULL_RTX, 0);
      else
	abort ();
    }

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  /* If COMPARISON is nonzero here, it is an rtx that can be substituted
     straight into a conditional jump instruction as the jump condition.
     Otherwise, all the work has been done already.  */

  if (comparison == const_true_rtx)
    {
      if (if_true_label)
	emit_jump (if_true_label);
    }
  else if (comparison == const0_rtx)
    {
      if (if_false_label)
	emit_jump (if_false_label);
    }
  else if (comparison)
    do_jump_for_compare (comparison, if_false_label, if_true_label);

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
	 do any stack adjusts from that code, before the place
	 where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */
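
/* For instance, for a signed DImode test "op0 > op1" on a 32-bit target
   with op0 = 0x00000001ffffffff and op1 = 0x0000000200000000, the
   generated code first compares the high-order words: 1 > 2 fails and
   1 != 2 succeeds, so control reaches the false label without the
   low-order words ever being examined.  Only the high-order word is
   compared with a signed GT; the lower words are mere digit positions
   and so are always compared with GTU.  */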
static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
			       (unsignedp || i > 0) ? GTU : GT,
			       unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_true_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
			       (unsignedp || i > 0) ? GTU : GT,
			       unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_true_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
				   operand_subword_force (op1, i, mode),
				   EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
				   word_mode, NULL_RTX, 0);
      /* A word pair known to be unequal decides the whole comparison;
	 a pair known to be equal tells us nothing.  */
      if (comp == const0_rtx)
	emit_jump (if_false_label);
      else if (comp != const_true_rtx)
	do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */
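
  /* For a DImode value on a 32-bit host, for instance, this emits a
     single IOR of the two words followed by one compare of the result
     against zero, rather than two separate compare-and-branch pairs.  */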
  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
			 operand_subword_force (op0, i, GET_MODE (op0)),
			 part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
				   NULL_RTX, 0);

      /* If the comparison folded to a constant, the result is already
	 known: PART known to be zero means OP0 is zero.  */
      if (comp == const_true_rtx)
	{
	  if (if_true_label)
	    emit_jump (if_true_label);
	}
      else if (comp == const0_rtx)
	{
	  if (if_false_label)
	    emit_jump (if_false_label);
	}
      else
	do_jump_for_compare (comp, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
							  GET_MODE (op0)),
				   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
      /* A word known to be nonzero decides the test; a word known to be
	 zero tells us nothing.  */
      if (comp == const0_rtx)
	emit_jump (if_false_label);
      else if (comp != const_true_rtx)
	do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Given a comparison expression in rtl form, output conditional branches to
   IF_TRUE_LABEL, IF_FALSE_LABEL, or both.  */

static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])
			(if_true_label));
      else
	abort ();

      if (if_false_label)
	emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx first = get_last_insn (), insn, branch;
      int br_count;

      /* Output the branch with the opposite condition.  Then try to invert
	 what is generated.  If the sequence ends in exactly one branch,
	 we try to invert that branch; if the inversion fails, or if more
	 than one branch was emitted (as for a multi-word comparison), we
	 make a true label, redirect the branches aimed at the false label
	 to it, emit a jump to the false label and define the true label.  */
      /* ??? Note that we wouldn't have to do any of this nonsense if
	 we passed both labels into a combined compare-and-branch.
	 Ah well, jump threading does a good job of repairing the damage.  */

      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])
			(if_false_label));
      else
	abort ();

      /* Here we get the first insn that was just emitted.  It used to be the
	 case that, on some machines, emitting the branch would discard
	 the previous compare insn and emit a replacement.  This isn't
	 done anymore, but abort if we see that FIRST is deleted.  */

      if (first == 0)
	first = get_insns ();
      else if (INSN_DELETED_P (first))
	abort ();
      else
	first = NEXT_INSN (first);

      /* Look for multiple branches in this sequence, as might be generated
	 for a multi-word integer comparison.  */

      br_count = 0;
      branch = NULL_RTX;
      for (insn = first; insn ; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == JUMP_INSN)
	  {
	    branch = insn;
	    br_count += 1;
	  }

      /* If we've got one branch at the end of the sequence,
	 we can try to reverse it.  */

      if (br_count == 1 && NEXT_INSN (branch) == NULL_RTX)
	{
	  rtx insn_label;
	  insn_label = XEXP (condjump_label (branch), 0);
	  JUMP_LABEL (branch) = insn_label;

	  if (insn_label != if_false_label)
	    abort ();

	  if (invert_jump (branch, if_false_label))
	    return;
	}

      /* Multiple branches, or inversion failed.  Convert to branches
	 around an unconditional jump.  */

      if_true_label = gen_label_rtx ();
      for (insn = first; insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == JUMP_INSN)
	  {
	    rtx insn_label;
	    insn_label = XEXP (condjump_label (insn), 0);
	    JUMP_LABEL (insn) = insn_label;

	    if (insn_label == if_false_label)
	      redirect_jump (insn, if_true_label);
	  }

      emit_jump (if_false_label);
      emit_label (if_true_label);
    }
}
/* Generate code for a comparison expression EXP
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */
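
/* For instance, the LT_EXPR case of do_jump calls this as
   compare (exp, LT, LTU); TREE_UNSIGNED of the operands' type then
   selects which of the two codes is actually used.  */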
static rtx
compare (exp, signed_code, unsigned_code)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
{
  register rtx op0
    = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  register rtx op1
    = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
  register enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);
  enum rtx_code code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
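  /* On some targets a function pointer does not point directly at the
     function's code but at a descriptor or stub, so two pointers to the
     same function need not be bit-for-bit identical; the target's
     canonicalize pattern reduces each one to a directly comparable
     form.  */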
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
	  == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  return compare_from_rtx (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}
/* Like compare but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
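
  /* For instance, "x < 1" becomes "x <= 0", and for signed X,
     "x > -1" becomes "x >= 0"; see the conversions in the switch
     below.  */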
  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
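
  /* For instance, "(x & 8) != 0" becomes "(x >> 3) & 1", and
     "(x & 8) == 0" becomes "((x >> 3) & 1) ^ 1".  */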
  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
	      < TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );

      if (subtarget == 0 || GET_CODE (subtarget) != REG
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner, 1))
	subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
			    size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
			    ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if emit_store_flag does anything, it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the number of entries in the jump table, minus one
   (the largest valid INDEX after the subtraction).
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
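
  /* For example, for case values 3 ... 8 the table has six entries,
     RANGE is 5 and INDEX has already had 3 subtracted: an original
     index of 2 wraps around to a huge unsigned value, and an original
     index of 9 yields 6, so the single unsigned test INDEX > RANGE
     catches both ends.  */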
  emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
  emit_jump_insn (gen_bgtu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
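  /* The address computed below is TABLE_LABEL + INDEX * (size of one
     vector entry); CASE_VECTOR_MODE supplies the entry size.  */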
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

#endif /* HAVE_tablejump */