1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92-98, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-flags.h"
34 #include "insn-codes.h"
35 #include "insn-config.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "recog.h"
39 #include "output.h"
40 #include "typeclass.h"
41 #include "defaults.h"
42 #include "toplev.h"
44 #define CEIL(x,y) (((x) + (y) - 1) / (y))
46 /* Decide whether a function's arguments should be processed
47 from first to last or from last to first.
49 They should if the stack and args grow in opposite directions, but
50 only if we have push insns. */
52 #ifdef PUSH_ROUNDING
54 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
55 #define PUSH_ARGS_REVERSED /* If it's last to first */
56 #endif
58 #endif
60 #ifndef STACK_PUSH_CODE
61 #ifdef STACK_GROWS_DOWNWARD
62 #define STACK_PUSH_CODE PRE_DEC
63 #else
64 #define STACK_PUSH_CODE PRE_INC
65 #endif
66 #endif
68 /* Assume that case vectors are not pc-relative. */
69 #ifndef CASE_VECTOR_PC_RELATIVE
70 #define CASE_VECTOR_PC_RELATIVE 0
71 #endif
73 /* If this is nonzero, we do not bother generating VOLATILE
74 around volatile memory references, and we are willing to
75 output indirect addresses. If cse is to follow, we reject
76 indirect addresses so a useful potential cse is generated;
77 if it is used only once, instruction combination will produce
78 the same indirect address eventually. */
79 int cse_not_expected;
81 /* Nonzero to generate code for all the subroutines within an
82 expression before generating the upper levels of the expression.
83 Nowadays this is never zero. */
84 int do_preexpand_calls = 1;
86 /* Number of units that we should eventually pop off the stack.
87 These are the arguments to function calls that have already returned. */
88 int pending_stack_adjust;
90 /* Under some ABIs, it is the caller's responsibility to pop arguments
91 pushed for function calls. A naive implementation would simply pop
92 the arguments immediately after each call. However, if several
93 function calls are made in a row, it is typically cheaper to pop
94 all the arguments after all of the calls are complete since a
95 single pop instruction can be used. Therefore, GCC attempts to
96 defer popping the arguments until absolutely necessary. (For
97 example, at the end of a conditional, the arguments must be popped,
98 since code outside the conditional won't know whether or not the
99 arguments need to be popped.)
101 When INHIBIT_DEFER_POP is non-zero, however, the compiler does not
102 attempt to defer pops. Instead, the stack is popped immediately
103 after each call. Rather than setting this variable directly, use
104 NO_DEFER_POP and OK_DEFER_POP. */
105 int inhibit_defer_pop;
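/* Illustrative sketch, not part of this file: NO_DEFER_POP and OK_DEFER_POP
   are conventionally defined in expr.h as a matched increment and decrement
   of inhibit_defer_pop, so regions that need the stack pointer to be exact
   can nest. COND_ARM below is a hypothetical tree. */
#if 0
  NO_DEFER_POP;				/* pops now happen right after
					   each call */
  expand_expr (cond_arm, const0_rtx, VOIDmode, 0);
  OK_DEFER_POP;				/* deferred, merged pops are
					   allowed again */
#endif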
107 /* Nonzero means __builtin_saveregs has already been done in this function.
108 The value is the pseudoreg containing the value __builtin_saveregs
109 returned. */
110 static rtx saveregs_value;
112 /* Similarly for __builtin_apply_args. */
113 static rtx apply_args_value;
115 /* Don't check memory usage, since code is being emitted to check memory
116 usage. Used when current_function_check_memory_usage is true, to avoid
117 infinite recursion. */
118 static int in_check_memory_usage;
120 /* Postincrements that still need to be expanded. */
121 static rtx pending_chain;
123 /* This structure is used by move_by_pieces to describe the move to
124 be performed. */
125 struct move_by_pieces
127 rtx to;
128 rtx to_addr;
129 int autinc_to;
130 int explicit_inc_to;
131 int to_struct;
132 rtx from;
133 rtx from_addr;
134 int autinc_from;
135 int explicit_inc_from;
136 int from_struct;
137 int len;
138 int offset;
139 int reverse;
142 /* This structure is used by clear_by_pieces to describe the clear to
143 be performed. */
145 struct clear_by_pieces
147 rtx to;
148 rtx to_addr;
149 int autinc_to;
150 int explicit_inc_to;
151 int to_struct;
152 int len;
153 int offset;
154 int reverse;
157 extern struct obstack permanent_obstack;
158 extern rtx arg_pointer_save_area;
160 static rtx get_push_address PROTO ((int));
162 static rtx enqueue_insn PROTO((rtx, rtx));
163 static void init_queue PROTO((void));
164 static int move_by_pieces_ninsns PROTO((unsigned int, int));
165 static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
166 struct move_by_pieces *));
167 static void clear_by_pieces PROTO((rtx, int, int));
168 static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
169 struct clear_by_pieces *));
170 static int is_zeros_p PROTO((tree));
171 static int mostly_zeros_p PROTO((tree));
172 static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
173 tree, tree, int));
174 static void store_constructor PROTO((tree, rtx, int));
175 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
176 enum machine_mode, int, int,
177 int, int));
178 static enum memory_use_mode
179 get_memory_usage_from_modifier PROTO((enum expand_modifier));
180 static tree save_noncopied_parts PROTO((tree, tree));
181 static tree init_noncopied_parts PROTO((tree, tree));
182 static int safe_from_p PROTO((rtx, tree, int));
183 static int fixed_type_p PROTO((tree));
184 static rtx var_rtx PROTO((tree));
185 static int get_pointer_alignment PROTO((tree, unsigned));
186 static tree string_constant PROTO((tree, tree *));
187 static tree c_strlen PROTO((tree));
188 static rtx get_memory_rtx PROTO((tree));
189 static rtx expand_builtin PROTO((tree, rtx, rtx,
190 enum machine_mode, int));
191 static int apply_args_size PROTO((void));
192 static int apply_result_size PROTO((void));
193 static rtx result_vector PROTO((int, rtx));
194 static rtx expand_builtin_apply_args PROTO((void));
195 static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
196 static void expand_builtin_return PROTO((rtx));
197 static rtx expand_increment PROTO((tree, int, int));
198 static void preexpand_calls PROTO((tree));
199 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
200 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
201 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
202 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
203 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
205 /* Record for each mode whether we can move a register directly to or
206 from an object of that mode in memory. If we can't, we won't try
207 to use that mode directly when accessing a field of that mode. */
209 static char direct_load[NUM_MACHINE_MODES];
210 static char direct_store[NUM_MACHINE_MODES];
212 /* If a memory-to-memory move would take MOVE_RATIO or more simple
213 move-instruction sequences, we will do a movstr or libcall instead. */
215 #ifndef MOVE_RATIO
216 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
217 #define MOVE_RATIO 2
218 #else
219 /* If we are optimizing for space (-Os), cut down the default move ratio. */
220 #define MOVE_RATIO (optimize_size ? 3 : 15)
221 #endif
222 #endif
224 /* This macro is used to determine whether move_by_pieces should be called
225 to perform a structure copy. */
226 #ifndef MOVE_BY_PIECES_P
227 #define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns \
228 (SIZE, ALIGN) < MOVE_RATIO)
229 #endif
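/* Illustrative sketch of how this macro is consulted; it mirrors the
   constant-size test in emit_block_move further down in this file. */
#if 0
  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);  /* expand the copy inline */
  else
    ;	/* otherwise try a movstrM pattern or fall back to a library call */
#endif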
231 /* This array records the insn_code of insns to perform block moves. */
232 enum insn_code movstr_optab[NUM_MACHINE_MODES];
234 /* This array records the insn_code of insns to perform block clears. */
235 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
237 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
239 #ifndef SLOW_UNALIGNED_ACCESS
240 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
241 #endif
243 /* Register mappings for target machines without register windows. */
244 #ifndef INCOMING_REGNO
245 #define INCOMING_REGNO(OUT) (OUT)
246 #endif
247 #ifndef OUTGOING_REGNO
248 #define OUTGOING_REGNO(IN) (IN)
249 #endif
251 /* This is run once per compilation to set up which modes can be used
252 directly in memory and to initialize the block move optab. */
254 void
255 init_expr_once ()
257 rtx insn, pat;
258 enum machine_mode mode;
259 int num_clobbers;
260 rtx mem, mem1;
261 char *free_point;
263 start_sequence ();
265 /* Since we are on the permanent obstack, we must be sure we save this
266 spot AFTER we call start_sequence, since it will reuse the rtl it
267 makes. */
268 free_point = (char *) oballoc (0);
270 /* Try indexing by frame ptr and try by stack ptr.
271 It is known that on the Convex the stack ptr isn't a valid index.
272 With luck, one or the other is valid on any machine. */
273 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
274 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
276 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
277 pat = PATTERN (insn);
279 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
280 mode = (enum machine_mode) ((int) mode + 1))
282 int regno;
283 rtx reg;
285 direct_load[(int) mode] = direct_store[(int) mode] = 0;
286 PUT_MODE (mem, mode);
287 PUT_MODE (mem1, mode);
289 /* See if there is some register that can be used in this mode and
290 directly loaded or stored from memory. */
292 if (mode != VOIDmode && mode != BLKmode)
293 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
294 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
295 regno++)
297 if (! HARD_REGNO_MODE_OK (regno, mode))
298 continue;
300 reg = gen_rtx_REG (mode, regno);
302 SET_SRC (pat) = mem;
303 SET_DEST (pat) = reg;
304 if (recog (pat, insn, &num_clobbers) >= 0)
305 direct_load[(int) mode] = 1;
307 SET_SRC (pat) = mem1;
308 SET_DEST (pat) = reg;
309 if (recog (pat, insn, &num_clobbers) >= 0)
310 direct_load[(int) mode] = 1;
312 SET_SRC (pat) = reg;
313 SET_DEST (pat) = mem;
314 if (recog (pat, insn, &num_clobbers) >= 0)
315 direct_store[(int) mode] = 1;
317 SET_SRC (pat) = reg;
318 SET_DEST (pat) = mem1;
319 if (recog (pat, insn, &num_clobbers) >= 0)
320 direct_store[(int) mode] = 1;
324 end_sequence ();
325 obfree (free_point);
328 /* This is run at the start of compiling a function. */
330 void
331 init_expr ()
333 init_queue ();
335 pending_stack_adjust = 0;
336 inhibit_defer_pop = 0;
337 saveregs_value = 0;
338 apply_args_value = 0;
339 forced_labels = 0;
342 /* Save all variables describing the current status into the structure *P.
343 This is used before starting a nested function. */
345 void
346 save_expr_status (p)
347 struct function *p;
349 p->pending_chain = pending_chain;
350 p->pending_stack_adjust = pending_stack_adjust;
351 p->inhibit_defer_pop = inhibit_defer_pop;
352 p->saveregs_value = saveregs_value;
353 p->apply_args_value = apply_args_value;
354 p->forced_labels = forced_labels;
356 pending_chain = NULL_RTX;
357 pending_stack_adjust = 0;
358 inhibit_defer_pop = 0;
359 saveregs_value = 0;
360 apply_args_value = 0;
361 forced_labels = 0;
364 /* Restore all variables describing the current status from the structure *P.
365 This is used after a nested function. */
367 void
368 restore_expr_status (p)
369 struct function *p;
371 pending_chain = p->pending_chain;
372 pending_stack_adjust = p->pending_stack_adjust;
373 inhibit_defer_pop = p->inhibit_defer_pop;
374 saveregs_value = p->saveregs_value;
375 apply_args_value = p->apply_args_value;
376 forced_labels = p->forced_labels;
379 /* Manage the queue of increment instructions to be output
380 for POSTINCREMENT_EXPR expressions, etc. */
382 /* Queue up to increment (or change) VAR later. BODY says how:
383 BODY should be the same thing you would pass to emit_insn
384 to increment right away. It will go to emit_insn later on.
386 The value is a QUEUED expression to be used in place of VAR
387 where you want to guarantee the pre-incrementation value of VAR. */
389 static rtx
390 enqueue_insn (var, body)
391 rtx var, body;
393 pending_chain = gen_rtx_QUEUED (GET_MODE (var),
394 var, NULL_RTX, NULL_RTX, body,
395 pending_chain);
396 return pending_chain;
399 /* Use protect_from_queue to convert a QUEUED expression
400 into something that you can put immediately into an instruction.
401 If the queued incrementation has not happened yet,
402 protect_from_queue returns the variable itself.
403 If the incrementation has happened, protect_from_queue returns a temp
404 that contains a copy of the old value of the variable.
406 Any time an rtx which might possibly be a QUEUED is to be put
407 into an instruction, it must be passed through protect_from_queue first.
408 QUEUED expressions are not meaningful in instructions.
410 Do not pass a value through protect_from_queue and then hold
411 on to it for a while before putting it in an instruction!
412 If the queue is flushed in between, incorrect code will result. */
414 rtx
415 protect_from_queue (x, modify)
416 register rtx x;
417 int modify;
419 register RTX_CODE code = GET_CODE (x);
421 #if 0 /* A QUEUED can hang around after the queue is forced out. */
422 /* Shortcut for most common case. */
423 if (pending_chain == 0)
424 return x;
425 #endif
427 if (code != QUEUED)
429 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
430 use of autoincrement. Make a copy of the contents of the memory
431 location rather than a copy of the address, but not if the value is
432 of mode BLKmode. Don't modify X in place since it might be
433 shared. */
434 if (code == MEM && GET_MODE (x) != BLKmode
435 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
437 register rtx y = XEXP (x, 0);
438 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
440 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
441 MEM_COPY_ATTRIBUTES (new, x);
442 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
444 if (QUEUED_INSN (y))
446 register rtx temp = gen_reg_rtx (GET_MODE (new));
447 emit_insn_before (gen_move_insn (temp, new),
448 QUEUED_INSN (y));
449 return temp;
451 return new;
453 /* Otherwise, recursively protect the subexpressions of all
454 the kinds of rtx's that can contain a QUEUED. */
455 if (code == MEM)
457 rtx tem = protect_from_queue (XEXP (x, 0), 0);
458 if (tem != XEXP (x, 0))
460 x = copy_rtx (x);
461 XEXP (x, 0) = tem;
464 else if (code == PLUS || code == MULT)
466 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
467 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
468 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
470 x = copy_rtx (x);
471 XEXP (x, 0) = new0;
472 XEXP (x, 1) = new1;
475 return x;
477 /* If the increment has not happened, use the variable itself. */
478 if (QUEUED_INSN (x) == 0)
479 return QUEUED_VAR (x);
480 /* If the increment has happened and a pre-increment copy exists,
481 use that copy. */
482 if (QUEUED_COPY (x) != 0)
483 return QUEUED_COPY (x);
484 /* The increment has happened but we haven't set up a pre-increment copy.
485 Set one up now, and use it. */
486 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
487 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
488 QUEUED_INSN (x));
489 return QUEUED_COPY (x);
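/* A minimal sketch of the intended discipline (OP0 and TARGET are
   hypothetical rtxen): protect each operand immediately before placing
   it in an insn, then flush the queued side effects with emit_queue. */
#if 0
  op0 = protect_from_queue (op0, 0);		/* operand is only read */
  target = protect_from_queue (target, 1);	/* operand will be written */
  emit_move_insn (target, op0);			/* no QUEUED may reach an insn */
  emit_queue ();				/* perform pending increments */
#endif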
492 /* Return nonzero if X contains a QUEUED expression:
493 if it contains anything that will be altered by a queued increment.
494 We handle only combinations of MEM, PLUS, MINUS and MULT operators
495 since memory addresses generally contain only those. */
497 int
498 queued_subexp_p (x)
499 rtx x;
501 register enum rtx_code code = GET_CODE (x);
502 switch (code)
504 case QUEUED:
505 return 1;
506 case MEM:
507 return queued_subexp_p (XEXP (x, 0));
508 case MULT:
509 case PLUS:
510 case MINUS:
511 return (queued_subexp_p (XEXP (x, 0))
512 || queued_subexp_p (XEXP (x, 1)));
513 default:
514 return 0;
518 /* Perform all the pending incrementations. */
520 void
521 emit_queue ()
523 register rtx p;
524 while ((p = pending_chain))
526 rtx body = QUEUED_BODY (p);
528 if (GET_CODE (body) == SEQUENCE)
530 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
531 emit_insn (QUEUED_BODY (p));
533 else
534 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
535 pending_chain = QUEUED_NEXT (p);
539 static void
540 init_queue ()
542 if (pending_chain)
543 abort ();
546 /* Copy data from FROM to TO, where the machine modes are not the same.
547 Both modes may be integer, or both may be floating.
548 UNSIGNEDP should be nonzero if FROM is an unsigned type.
549 This causes zero-extension instead of sign-extension. */
551 void
552 convert_move (to, from, unsignedp)
553 register rtx to, from;
554 int unsignedp;
556 enum machine_mode to_mode = GET_MODE (to);
557 enum machine_mode from_mode = GET_MODE (from);
558 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
559 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
560 enum insn_code code;
561 rtx libcall;
563 /* rtx code for making an equivalent value. */
564 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
566 to = protect_from_queue (to, 1);
567 from = protect_from_queue (from, 0);
569 if (to_real != from_real)
570 abort ();
572 /* If FROM is a SUBREG that indicates that we have already done at least
573 the required extension, strip it. We don't handle such SUBREGs as
574 TO here. */
576 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
577 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
578 >= GET_MODE_SIZE (to_mode))
579 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
580 from = gen_lowpart (to_mode, from), from_mode = to_mode;
582 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
583 abort ();
585 if (to_mode == from_mode
586 || (from_mode == VOIDmode && CONSTANT_P (from)))
588 emit_move_insn (to, from);
589 return;
592 if (to_real)
594 rtx value;
596 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
598 /* Try converting directly if the insn is supported. */
599 if ((code = can_extend_p (to_mode, from_mode, 0))
600 != CODE_FOR_nothing)
602 emit_unop_insn (code, to, from, UNKNOWN);
603 return;
607 #ifdef HAVE_trunchfqf2
608 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
610 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
611 return;
613 #endif
614 #ifdef HAVE_trunctqfqf2
615 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
617 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
618 return;
620 #endif
621 #ifdef HAVE_truncsfqf2
622 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
624 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
625 return;
627 #endif
628 #ifdef HAVE_truncdfqf2
629 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
631 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
632 return;
634 #endif
635 #ifdef HAVE_truncxfqf2
636 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
638 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
639 return;
641 #endif
642 #ifdef HAVE_trunctfqf2
643 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
645 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
646 return;
648 #endif
650 #ifdef HAVE_trunctqfhf2
651 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
653 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
654 return;
656 #endif
657 #ifdef HAVE_truncsfhf2
658 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
660 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
661 return;
663 #endif
664 #ifdef HAVE_truncdfhf2
665 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
667 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
668 return;
670 #endif
671 #ifdef HAVE_truncxfhf2
672 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
674 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
675 return;
677 #endif
678 #ifdef HAVE_trunctfhf2
679 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
681 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
682 return;
684 #endif
686 #ifdef HAVE_truncsftqf2
687 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
689 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
690 return;
692 #endif
693 #ifdef HAVE_truncdftqf2
694 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
696 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
697 return;
699 #endif
700 #ifdef HAVE_truncxftqf2
701 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
703 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
704 return;
706 #endif
707 #ifdef HAVE_trunctftqf2
708 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
710 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
711 return;
713 #endif
715 #ifdef HAVE_truncdfsf2
716 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
718 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
719 return;
721 #endif
722 #ifdef HAVE_truncxfsf2
723 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
725 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
726 return;
728 #endif
729 #ifdef HAVE_trunctfsf2
730 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
732 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
733 return;
735 #endif
736 #ifdef HAVE_truncxfdf2
737 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
739 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
740 return;
742 #endif
743 #ifdef HAVE_trunctfdf2
744 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
746 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
747 return;
749 #endif
751 libcall = (rtx) 0;
752 switch (from_mode)
754 case SFmode:
755 switch (to_mode)
757 case DFmode:
758 libcall = extendsfdf2_libfunc;
759 break;
761 case XFmode:
762 libcall = extendsfxf2_libfunc;
763 break;
765 case TFmode:
766 libcall = extendsftf2_libfunc;
767 break;
769 default:
770 break;
772 break;
774 case DFmode:
775 switch (to_mode)
777 case SFmode:
778 libcall = truncdfsf2_libfunc;
779 break;
781 case XFmode:
782 libcall = extenddfxf2_libfunc;
783 break;
785 case TFmode:
786 libcall = extenddftf2_libfunc;
787 break;
789 default:
790 break;
792 break;
794 case XFmode:
795 switch (to_mode)
797 case SFmode:
798 libcall = truncxfsf2_libfunc;
799 break;
801 case DFmode:
802 libcall = truncxfdf2_libfunc;
803 break;
805 default:
806 break;
808 break;
810 case TFmode:
811 switch (to_mode)
813 case SFmode:
814 libcall = trunctfsf2_libfunc;
815 break;
817 case DFmode:
818 libcall = trunctfdf2_libfunc;
819 break;
821 default:
822 break;
824 break;
826 default:
827 break;
830 if (libcall == (rtx) 0)
831 /* This conversion is not implemented yet. */
832 abort ();
834 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
835 1, from, from_mode);
836 emit_move_insn (to, value);
837 return;
840 /* Now both modes are integers. */
842 /* Handle expanding beyond a word. */
843 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
844 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
846 rtx insns;
847 rtx lowpart;
848 rtx fill_value;
849 rtx lowfrom;
850 int i;
851 enum machine_mode lowpart_mode;
852 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
854 /* Try converting directly if the insn is supported. */
855 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
856 != CODE_FOR_nothing)
858 /* If FROM is a SUBREG, put it into a register. Do this
859 so that we always generate the same set of insns for
860 better cse'ing; if an intermediate assignment occurred,
861 we won't be doing the operation directly on the SUBREG. */
862 if (optimize > 0 && GET_CODE (from) == SUBREG)
863 from = force_reg (from_mode, from);
864 emit_unop_insn (code, to, from, equiv_code);
865 return;
867 /* Next, try converting via full word. */
868 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
869 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
870 != CODE_FOR_nothing))
872 if (GET_CODE (to) == REG)
873 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
874 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
875 emit_unop_insn (code, to,
876 gen_lowpart (word_mode, to), equiv_code);
877 return;
880 /* No special multiword conversion insn; do it by hand. */
881 start_sequence ();
883 /* Since we will turn this into a no conflict block, we must ensure
884 that the source does not overlap the target. */
886 if (reg_overlap_mentioned_p (to, from))
887 from = force_reg (from_mode, from);
889 /* Get a copy of FROM widened to a word, if necessary. */
890 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
891 lowpart_mode = word_mode;
892 else
893 lowpart_mode = from_mode;
895 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
897 lowpart = gen_lowpart (lowpart_mode, to);
898 emit_move_insn (lowpart, lowfrom);
900 /* Compute the value to put in each remaining word. */
901 if (unsignedp)
902 fill_value = const0_rtx;
903 else
905 #ifdef HAVE_slt
906 if (HAVE_slt
907 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
908 && STORE_FLAG_VALUE == -1)
910 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
911 lowpart_mode, 0, 0);
912 fill_value = gen_reg_rtx (word_mode);
913 emit_insn (gen_slt (fill_value));
915 else
916 #endif
918 fill_value
919 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
920 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
921 NULL_RTX, 0);
922 fill_value = convert_to_mode (word_mode, fill_value, 1);
926 /* Fill the remaining words. */
927 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
929 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
930 rtx subword = operand_subword (to, index, 1, to_mode);
932 if (subword == 0)
933 abort ();
935 if (fill_value != subword)
936 emit_move_insn (subword, fill_value);
939 insns = get_insns ();
940 end_sequence ();
942 emit_no_conflict_block (insns, to, from, NULL_RTX,
943 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
944 return;
947 /* Truncating multi-word to a word or less. */
948 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
949 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
951 if (!((GET_CODE (from) == MEM
952 && ! MEM_VOLATILE_P (from)
953 && direct_load[(int) to_mode]
954 && ! mode_dependent_address_p (XEXP (from, 0)))
955 || GET_CODE (from) == REG
956 || GET_CODE (from) == SUBREG))
957 from = force_reg (from_mode, from);
958 convert_move (to, gen_lowpart (word_mode, from), 0);
959 return;
962 /* Handle pointer conversion */ /* SPEE 900220 */
963 if (to_mode == PQImode)
965 if (from_mode != QImode)
966 from = convert_to_mode (QImode, from, unsignedp);
968 #ifdef HAVE_truncqipqi2
969 if (HAVE_truncqipqi2)
971 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
972 return;
974 #endif /* HAVE_truncqipqi2 */
975 abort ();
978 if (from_mode == PQImode)
980 if (to_mode != QImode)
982 from = convert_to_mode (QImode, from, unsignedp);
983 from_mode = QImode;
985 else
987 #ifdef HAVE_extendpqiqi2
988 if (HAVE_extendpqiqi2)
990 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
991 return;
993 #endif /* HAVE_extendpqiqi2 */
994 abort ();
998 if (to_mode == PSImode)
1000 if (from_mode != SImode)
1001 from = convert_to_mode (SImode, from, unsignedp);
1003 #ifdef HAVE_truncsipsi2
1004 if (HAVE_truncsipsi2)
1006 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1007 return;
1009 #endif /* HAVE_truncsipsi2 */
1010 abort ();
1013 if (from_mode == PSImode)
1015 if (to_mode != SImode)
1017 from = convert_to_mode (SImode, from, unsignedp);
1018 from_mode = SImode;
1020 else
1022 #ifdef HAVE_extendpsisi2
1023 if (HAVE_extendpsisi2)
1025 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1026 return;
1028 #endif /* HAVE_extendpsisi2 */
1029 abort ();
1033 if (to_mode == PDImode)
1035 if (from_mode != DImode)
1036 from = convert_to_mode (DImode, from, unsignedp);
1038 #ifdef HAVE_truncdipdi2
1039 if (HAVE_truncdipdi2)
1041 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1042 return;
1044 #endif /* HAVE_truncdipdi2 */
1045 abort ();
1048 if (from_mode == PDImode)
1050 if (to_mode != DImode)
1052 from = convert_to_mode (DImode, from, unsignedp);
1053 from_mode = DImode;
1055 else
1057 #ifdef HAVE_extendpdidi2
1058 if (HAVE_extendpdidi2)
1060 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1061 return;
1063 #endif /* HAVE_extendpdidi2 */
1064 abort ();
1068 /* Now follow all the conversions between integers
1069 no more than a word long. */
1071 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1072 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1073 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1074 GET_MODE_BITSIZE (from_mode)))
1076 if (!((GET_CODE (from) == MEM
1077 && ! MEM_VOLATILE_P (from)
1078 && direct_load[(int) to_mode]
1079 && ! mode_dependent_address_p (XEXP (from, 0)))
1080 || GET_CODE (from) == REG
1081 || GET_CODE (from) == SUBREG))
1082 from = force_reg (from_mode, from);
1083 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1084 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1085 from = copy_to_reg (from);
1086 emit_move_insn (to, gen_lowpart (to_mode, from));
1087 return;
1090 /* Handle extension. */
1091 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1093 /* Convert directly if that works. */
1094 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1095 != CODE_FOR_nothing)
1097 emit_unop_insn (code, to, from, equiv_code);
1098 return;
1100 else
1102 enum machine_mode intermediate;
1103 rtx tmp;
1104 tree shift_amount;
1106 /* Search for a mode to convert via. */
1107 for (intermediate = from_mode; intermediate != VOIDmode;
1108 intermediate = GET_MODE_WIDER_MODE (intermediate))
1109 if (((can_extend_p (to_mode, intermediate, unsignedp)
1110 != CODE_FOR_nothing)
1111 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1112 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode), GET_MODE_BITSIZE (intermediate))))
1113 && (can_extend_p (intermediate, from_mode, unsignedp)
1114 != CODE_FOR_nothing))
1116 convert_move (to, convert_to_mode (intermediate, from,
1117 unsignedp), unsignedp);
1118 return;
1121 /* No suitable intermediate mode.
1122 Generate what we need with shifts. */
1123 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1124 - GET_MODE_BITSIZE (from_mode), 0);
1125 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1126 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1127 to, unsignedp);
1128 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1129 to, unsignedp);
1130 if (tmp != to)
1131 emit_move_insn (to, tmp);
1132 return;
1136 /* Support special truncate insns for certain modes. */
1138 if (from_mode == DImode && to_mode == SImode)
1140 #ifdef HAVE_truncdisi2
1141 if (HAVE_truncdisi2)
1143 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1144 return;
1146 #endif
1147 convert_move (to, force_reg (from_mode, from), unsignedp);
1148 return;
1151 if (from_mode == DImode && to_mode == HImode)
1153 #ifdef HAVE_truncdihi2
1154 if (HAVE_truncdihi2)
1156 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1157 return;
1159 #endif
1160 convert_move (to, force_reg (from_mode, from), unsignedp);
1161 return;
1164 if (from_mode == DImode && to_mode == QImode)
1166 #ifdef HAVE_truncdiqi2
1167 if (HAVE_truncdiqi2)
1169 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1170 return;
1172 #endif
1173 convert_move (to, force_reg (from_mode, from), unsignedp);
1174 return;
1177 if (from_mode == SImode && to_mode == HImode)
1179 #ifdef HAVE_truncsihi2
1180 if (HAVE_truncsihi2)
1182 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1183 return;
1185 #endif
1186 convert_move (to, force_reg (from_mode, from), unsignedp);
1187 return;
1190 if (from_mode == SImode && to_mode == QImode)
1192 #ifdef HAVE_truncsiqi2
1193 if (HAVE_truncsiqi2)
1195 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1196 return;
1198 #endif
1199 convert_move (to, force_reg (from_mode, from), unsignedp);
1200 return;
1203 if (from_mode == HImode && to_mode == QImode)
1205 #ifdef HAVE_trunchiqi2
1206 if (HAVE_trunchiqi2)
1208 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1209 return;
1211 #endif
1212 convert_move (to, force_reg (from_mode, from), unsignedp);
1213 return;
1216 if (from_mode == TImode && to_mode == DImode)
1218 #ifdef HAVE_trunctidi2
1219 if (HAVE_trunctidi2)
1221 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1222 return;
1224 #endif
1225 convert_move (to, force_reg (from_mode, from), unsignedp);
1226 return;
1229 if (from_mode == TImode && to_mode == SImode)
1231 #ifdef HAVE_trunctisi2
1232 if (HAVE_trunctisi2)
1234 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1235 return;
1237 #endif
1238 convert_move (to, force_reg (from_mode, from), unsignedp);
1239 return;
1242 if (from_mode == TImode && to_mode == HImode)
1244 #ifdef HAVE_trunctihi2
1245 if (HAVE_trunctihi2)
1247 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1248 return;
1250 #endif
1251 convert_move (to, force_reg (from_mode, from), unsignedp);
1252 return;
1255 if (from_mode == TImode && to_mode == QImode)
1257 #ifdef HAVE_trunctiqi2
1258 if (HAVE_trunctiqi2)
1260 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1261 return;
1263 #endif
1264 convert_move (to, force_reg (from_mode, from), unsignedp);
1265 return;
1268 /* Handle truncation of volatile memrefs, and so on;
1269 the things that couldn't be truncated directly,
1270 and for which there was no special instruction. */
1271 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1273 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1274 emit_move_insn (to, temp);
1275 return;
1278 /* Mode combination is not recognized. */
1279 abort ();
1282 /* Return an rtx for a value that would result
1283 from converting X to mode MODE.
1284 Both X and MODE may be floating, or both integer.
1285 UNSIGNEDP is nonzero if X is an unsigned value.
1286 This can be done by referring to a part of X in place
1287 or by copying to a new temporary with conversion.
1289 This function *must not* call protect_from_queue
1290 except when putting X into an insn (in which case convert_move does it). */
1292 rtx
1293 convert_to_mode (mode, x, unsignedp)
1294 enum machine_mode mode;
1295 rtx x;
1296 int unsignedp;
1298 return convert_modes (mode, VOIDmode, x, unsignedp);
1301 /* Return an rtx for a value that would result
1302 from converting X from mode OLDMODE to mode MODE.
1303 Both modes may be floating, or both integer.
1304 UNSIGNEDP is nonzero if X is an unsigned value.
1306 This can be done by referring to a part of X in place
1307 or by copying to a new temporary with conversion.
1309 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1311 This function *must not* call protect_from_queue
1312 except when putting X into an insn (in which case convert_move does it). */
1314 rtx
1315 convert_modes (mode, oldmode, x, unsignedp)
1316 enum machine_mode mode, oldmode;
1317 rtx x;
1318 int unsignedp;
1320 register rtx temp;
1322 /* If FROM is a SUBREG that indicates that we have already done at least
1323 the required extension, strip it. */
1325 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1326 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1327 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1328 x = gen_lowpart (mode, x);
1330 if (GET_MODE (x) != VOIDmode)
1331 oldmode = GET_MODE (x);
1333 if (mode == oldmode)
1334 return x;
1336 /* There is one case that we must handle specially: If we are converting
1337 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1338 we are to interpret the constant as unsigned, gen_lowpart will do
1339 the wrong thing if the constant appears negative. What we want to do is
1340 make the high-order word of the constant zero, not all ones. */
1342 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1343 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1344 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1346 HOST_WIDE_INT val = INTVAL (x);
1348 if (oldmode != VOIDmode
1349 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1351 int width = GET_MODE_BITSIZE (oldmode);
1353 /* We need to zero extend VAL. */
1354 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1357 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1360 /* We can do this with a gen_lowpart if both desired and current modes
1361 are integer, and this is either a constant integer, a register, or a
1362 non-volatile MEM. Except for the constant case where MODE is no
1363 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1365 if ((GET_CODE (x) == CONST_INT
1366 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1367 || (GET_MODE_CLASS (mode) == MODE_INT
1368 && GET_MODE_CLASS (oldmode) == MODE_INT
1369 && (GET_CODE (x) == CONST_DOUBLE
1370 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1371 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1372 && direct_load[(int) mode])
1373 || (GET_CODE (x) == REG
1374 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1375 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1377 /* ?? If we don't know OLDMODE, we have to assume here that
1378 X does not need sign- or zero-extension. This may not be
1379 the case, but it's the best we can do. */
1380 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1381 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1383 HOST_WIDE_INT val = INTVAL (x);
1384 int width = GET_MODE_BITSIZE (oldmode);
1386 /* We must sign or zero-extend in this case. Start by
1387 zero-extending, then sign extend if we need to. */
1388 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1389 if (! unsignedp
1390 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1391 val |= (HOST_WIDE_INT) (-1) << width;
1393 return GEN_INT (val);
1396 return gen_lowpart (mode, x);
1399 temp = gen_reg_rtx (mode);
1400 convert_move (temp, x, unsignedp);
1401 return temp;
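/* Example (hypothetical pseudos): widening a QImode value to SImode.
   convert_to_mode either refers to X in the new mode in place or
   allocates a fresh pseudo and lets convert_move emit the extension. */
#if 0
  rtx byte = gen_reg_rtx (QImode);
  rtx word = convert_to_mode (SImode, byte, 1);	/* unsignedp: zero-extend */
  /* With unsignedp == 0 a sign extension would be generated instead. */
#endif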
1405 /* This macro is used to determine the largest unit size that
1406 move_by_pieces can use. */
1408 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1409 move efficiently, as opposed to MOVE_MAX, which is the maximum
1410 number of bytes we can move with a single instruction. */
1412 #ifndef MOVE_MAX_PIECES
1413 #define MOVE_MAX_PIECES MOVE_MAX
1414 #endif
1416 /* Generate several move instructions to copy LEN bytes
1417 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1418 The caller must pass FROM and TO
1419 through protect_from_queue before calling.
1420 ALIGN (in bytes) is maximum alignment we can assume. */
1422 void
1423 move_by_pieces (to, from, len, align)
1424 rtx to, from;
1425 int len, align;
1427 struct move_by_pieces data;
1428 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1429 int max_size = MOVE_MAX_PIECES + 1;
1430 enum machine_mode mode = VOIDmode, tmode;
1431 enum insn_code icode;
1433 data.offset = 0;
1434 data.to_addr = to_addr;
1435 data.from_addr = from_addr;
1436 data.to = to;
1437 data.from = from;
1438 data.autinc_to
1439 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1440 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1441 data.autinc_from
1442 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1443 || GET_CODE (from_addr) == POST_INC
1444 || GET_CODE (from_addr) == POST_DEC);
1446 data.explicit_inc_from = 0;
1447 data.explicit_inc_to = 0;
1448 data.reverse
1449 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1450 if (data.reverse) data.offset = len;
1451 data.len = len;
1453 data.to_struct = MEM_IN_STRUCT_P (to);
1454 data.from_struct = MEM_IN_STRUCT_P (from);
1456 /* If copying requires more than two move insns,
1457 copy addresses to registers (to make displacements shorter)
1458 and use post-increment if available. */
1459 if (!(data.autinc_from && data.autinc_to)
1460 && move_by_pieces_ninsns (len, align) > 2)
1462 /* Find the mode of the largest move... */
1463 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1464 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1465 if (GET_MODE_SIZE (tmode) < max_size)
1466 mode = tmode;
1468 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1470 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1471 data.autinc_from = 1;
1472 data.explicit_inc_from = -1;
1474 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1476 data.from_addr = copy_addr_to_reg (from_addr);
1477 data.autinc_from = 1;
1478 data.explicit_inc_from = 1;
1480 if (!data.autinc_from && CONSTANT_P (from_addr))
1481 data.from_addr = copy_addr_to_reg (from_addr);
1482 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1484 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1485 data.autinc_to = 1;
1486 data.explicit_inc_to = -1;
1488 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1490 data.to_addr = copy_addr_to_reg (to_addr);
1491 data.autinc_to = 1;
1492 data.explicit_inc_to = 1;
1494 if (!data.autinc_to && CONSTANT_P (to_addr))
1495 data.to_addr = copy_addr_to_reg (to_addr);
1498 if (! SLOW_UNALIGNED_ACCESS
1499 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1500 align = MOVE_MAX;
1502 /* First move what we can in the largest integer mode, then go to
1503 successively smaller modes. */
1505 while (max_size > 1)
1507 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1508 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1509 if (GET_MODE_SIZE (tmode) < max_size)
1510 mode = tmode;
1512 if (mode == VOIDmode)
1513 break;
1515 icode = mov_optab->handlers[(int) mode].insn_code;
1516 if (icode != CODE_FOR_nothing
1517 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1518 GET_MODE_SIZE (mode)))
1519 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1521 max_size = GET_MODE_SIZE (mode);
1524 /* The code above should have handled everything. */
1525 if (data.len > 0)
1526 abort ();
1529 /* Return number of insns required to move L bytes by pieces.
1530 ALIGN (in bytes) is maximum alignment we can assume. */
1532 static int
1533 move_by_pieces_ninsns (l, align)
1534 unsigned int l;
1535 int align;
1537 register int n_insns = 0;
1538 int max_size = MOVE_MAX + 1;
1540 if (! SLOW_UNALIGNED_ACCESS
1541 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1542 align = MOVE_MAX;
1544 while (max_size > 1)
1546 enum machine_mode mode = VOIDmode, tmode;
1547 enum insn_code icode;
1549 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1550 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1551 if (GET_MODE_SIZE (tmode) < max_size)
1552 mode = tmode;
1554 if (mode == VOIDmode)
1555 break;
1557 icode = mov_optab->handlers[(int) mode].insn_code;
1558 if (icode != CODE_FOR_nothing
1559 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1560 GET_MODE_SIZE (mode)))
1561 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1563 max_size = GET_MODE_SIZE (mode);
1566 return n_insns;
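/* Worked example, assuming MOVE_MAX == 4 and word-aligned operands: for
   L == 11 the loop above counts 11/4 = 2 SImode moves (3 bytes left),
   3/2 = 1 HImode move (1 byte left) and one final QImode move, so it
   returns 4. A plain-C restatement of that greedy count: */
#if 0
static int
ninsns_sketch (unsigned int l)
{
  int n_insns = 0;
  int size;

  for (size = 4; size >= 1; size /= 2)	/* SImode, HImode, QImode */
    n_insns += l / size, l %= size;
  return n_insns;			/* ninsns_sketch (11) == 4 */
}
#endif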
1569 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1570 with move instructions for mode MODE. GENFUN is the gen_... function
1571 to make a move insn for that mode. DATA has all the other info. */
1573 static void
1574 move_by_pieces_1 (genfun, mode, data)
1575 rtx (*genfun) PROTO ((rtx, ...));
1576 enum machine_mode mode;
1577 struct move_by_pieces *data;
1579 register int size = GET_MODE_SIZE (mode);
1580 register rtx to1, from1;
1582 while (data->len >= size)
1584 if (data->reverse) data->offset -= size;
1586 to1 = (data->autinc_to
1587 ? gen_rtx_MEM (mode, data->to_addr)
1588 : copy_rtx (change_address (data->to, mode,
1589 plus_constant (data->to_addr,
1590 data->offset))));
1591 MEM_IN_STRUCT_P (to1) = data->to_struct;
1593 from1
1594 = (data->autinc_from
1595 ? gen_rtx_MEM (mode, data->from_addr)
1596 : copy_rtx (change_address (data->from, mode,
1597 plus_constant (data->from_addr,
1598 data->offset))));
1599 MEM_IN_STRUCT_P (from1) = data->from_struct;
1601 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1602 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1603 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1604 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1606 emit_insn ((*genfun) (to1, from1));
1607 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1608 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1609 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1610 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1612 if (! data->reverse) data->offset += size;
1614 data->len -= size;
1618 /* Emit code to move a block Y to a block X.
1619 This may be done with string-move instructions,
1620 with multiple scalar move instructions, or with a library call.
1622 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1623 with mode BLKmode.
1624 SIZE is an rtx that says how long they are.
1625 ALIGN is the maximum alignment we can assume they have,
1626 measured in bytes.
1628 Return the address of the new block, if memcpy is called and returns it,
1629 0 otherwise. */
1631 rtx
1632 emit_block_move (x, y, size, align)
1633 rtx x, y;
1634 rtx size;
1635 int align;
1637 rtx retval = 0;
1638 #ifdef TARGET_MEM_FUNCTIONS
1639 static tree fn;
1640 tree call_expr, arg_list;
1641 #endif
1643 if (GET_MODE (x) != BLKmode)
1644 abort ();
1646 if (GET_MODE (y) != BLKmode)
1647 abort ();
1649 x = protect_from_queue (x, 1);
1650 y = protect_from_queue (y, 0);
1651 size = protect_from_queue (size, 0);
1653 if (GET_CODE (x) != MEM)
1654 abort ();
1655 if (GET_CODE (y) != MEM)
1656 abort ();
1657 if (size == 0)
1658 abort ();
1660 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1661 move_by_pieces (x, y, INTVAL (size), align);
1662 else
1664 /* Try the most limited insn first, because there's no point
1665 including more than one in the machine description unless
1666 the more limited one has some advantage. */
1668 rtx opalign = GEN_INT (align);
1669 enum machine_mode mode;
1671 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1672 mode = GET_MODE_WIDER_MODE (mode))
1674 enum insn_code code = movstr_optab[(int) mode];
1676 if (code != CODE_FOR_nothing
1677 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1678 here because if SIZE is less than the mode mask, as it is
1679 returned by the macro, it will definitely be less than the
1680 actual mode mask. */
1681 && ((GET_CODE (size) == CONST_INT
1682 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1683 <= (GET_MODE_MASK (mode) >> 1)))
1684 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1685 && (insn_operand_predicate[(int) code][0] == 0
1686 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1687 && (insn_operand_predicate[(int) code][1] == 0
1688 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1689 && (insn_operand_predicate[(int) code][3] == 0
1690 || (*insn_operand_predicate[(int) code][3]) (opalign,
1691 VOIDmode)))
1693 rtx op2;
1694 rtx last = get_last_insn ();
1695 rtx pat;
1697 op2 = convert_to_mode (mode, size, 1);
1698 if (insn_operand_predicate[(int) code][2] != 0
1699 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1700 op2 = copy_to_mode_reg (mode, op2);
1702 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1703 if (pat)
1705 emit_insn (pat);
1706 return 0;
1708 else
1709 delete_insns_since (last);
1713 /* X, Y, or SIZE may have been passed through protect_from_queue.
1715 It is unsafe to save the value generated by protect_from_queue
1716 and reuse it later. Consider what happens if emit_queue is
1717 called before the return value from protect_from_queue is used.
1719 Expansion of the CALL_EXPR below will call emit_queue before
1720 we are finished emitting RTL for argument setup. So if we are
1721 not careful we could get the wrong value for an argument.
1723 To avoid this problem we go ahead and emit code to copy X, Y &
1724 SIZE into new pseudos. We can then place those new pseudos
1725 into an RTL_EXPR and use them later, even after a call to
1726 emit_queue.
1728 Note this is not strictly needed for library calls since they
1729 do not call emit_queue before loading their arguments. However,
1730 we may need to have library calls call emit_queue in the future
1731 since failing to do so could cause problems for targets which
1732 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1733 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1734 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1736 #ifdef TARGET_MEM_FUNCTIONS
1737 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1738 #else
1739 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1740 TREE_UNSIGNED (integer_type_node));
1741 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1742 #endif
1744 #ifdef TARGET_MEM_FUNCTIONS
1745 /* It is incorrect to use the libcall calling conventions to call
1746 memcpy in this context.
1748 This could be a user call to memcpy and the user may wish to
1749 examine the return value from memcpy.
1751 For targets where libcalls and normal calls have different conventions
1752 for returning pointers, we could end up generating incorrect code.
1754 So instead of using a libcall sequence we build up a suitable
1755 CALL_EXPR and expand the call in the normal fashion. */
1756 if (fn == NULL_TREE)
1758 tree fntype;
1760 /* This was copied from except.c, I don't know if all this is
1761 necessary in this context or not. */
1762 fn = get_identifier ("memcpy");
1763 push_obstacks_nochange ();
1764 end_temporary_allocation ();
1765 fntype = build_pointer_type (void_type_node);
1766 fntype = build_function_type (fntype, NULL_TREE);
1767 fn = build_decl (FUNCTION_DECL, fn, fntype);
1768 DECL_EXTERNAL (fn) = 1;
1769 TREE_PUBLIC (fn) = 1;
1770 DECL_ARTIFICIAL (fn) = 1;
1771 make_decl_rtl (fn, NULL_PTR, 1);
1772 assemble_external (fn);
1773 pop_obstacks ();
1776 /* We need to make an argument list for the function call.
1778 memcpy has three arguments, the first two are void * addresses and
1779 the last is a size_t byte count for the copy. */
1780 arg_list
1781 = build_tree_list (NULL_TREE,
1782 make_tree (build_pointer_type (void_type_node), x));
1783 TREE_CHAIN (arg_list)
1784 = build_tree_list (NULL_TREE,
1785 make_tree (build_pointer_type (void_type_node), y));
1786 TREE_CHAIN (TREE_CHAIN (arg_list))
1787 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1788 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1790 /* Now we have to build up the CALL_EXPR itself. */
1791 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1792 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1793 call_expr, arg_list, NULL_TREE);
1794 TREE_SIDE_EFFECTS (call_expr) = 1;
1796 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1797 #else
1798 emit_library_call (bcopy_libfunc, 0,
1799 VOIDmode, 3, y, Pmode, x, Pmode,
1800 convert_to_mode (TYPE_MODE (integer_type_node), size,
1801 TREE_UNSIGNED (integer_type_node)),
1802 TYPE_MODE (integer_type_node));
1803 #endif
1806 return retval;
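/* Example call (X_ADDR, Y_ADDR and LEN are hypothetical): both operands
   must be BLKmode MEMs and ALIGN is measured in bytes; emit_block_move
   then chooses between move_by_pieces, a movstrM expander and a
   memcpy/bcopy call as above. */
#if 0
  rtx blk_x = gen_rtx_MEM (BLKmode, x_addr);
  rtx blk_y = gen_rtx_MEM (BLKmode, y_addr);

  emit_block_move (blk_x, blk_y, GEN_INT (len), UNITS_PER_WORD);
#endif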
1809 /* Copy all or part of a value X into registers starting at REGNO.
1810 The number of registers to be filled is NREGS. */
1812 void
1813 move_block_to_reg (regno, x, nregs, mode)
1814 int regno;
1815 rtx x;
1816 int nregs;
1817 enum machine_mode mode;
1819 int i;
1820 #ifdef HAVE_load_multiple
1821 rtx pat;
1822 rtx last;
1823 #endif
1825 if (nregs == 0)
1826 return;
1828 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1829 x = validize_mem (force_const_mem (mode, x));
1831 /* See if the machine can do this with a load multiple insn. */
1832 #ifdef HAVE_load_multiple
1833 if (HAVE_load_multiple)
1835 last = get_last_insn ();
1836 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1837 GEN_INT (nregs));
1838 if (pat)
1840 emit_insn (pat);
1841 return;
1843 else
1844 delete_insns_since (last);
1846 #endif
1848 for (i = 0; i < nregs; i++)
1849 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1850 operand_subword_force (x, i, mode));
1853 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1854 The number of registers to be filled is NREGS. SIZE indicates the number
1855 of bytes in the object X. */
1858 void
1859 move_block_from_reg (regno, x, nregs, size)
1860 int regno;
1861 rtx x;
1862 int nregs;
1863 int size;
1865 int i;
1866 #ifdef HAVE_store_multiple
1867 rtx pat;
1868 rtx last;
1869 #endif
1870 enum machine_mode mode;
1872 /* If SIZE is that of a mode no bigger than a word, just use that
1873 mode's store operation. */
1874 if (size <= UNITS_PER_WORD
1875 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1877 emit_move_insn (change_address (x, mode, NULL),
1878 gen_rtx_REG (mode, regno));
1879 return;
1882 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1883 to the left before storing to memory. Note that the previous test
1884 doesn't handle all cases (e.g. SIZE == 3). */
1885 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1887 rtx tem = operand_subword (x, 0, 1, BLKmode);
1888 rtx shift;
1890 if (tem == 0)
1891 abort ();
1893 shift = expand_shift (LSHIFT_EXPR, word_mode,
1894 gen_rtx_REG (word_mode, regno),
1895 build_int_2 ((UNITS_PER_WORD - size)
1896 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1897 emit_move_insn (tem, shift);
1898 return;
1901 /* See if the machine can do this with a store multiple insn. */
1902 #ifdef HAVE_store_multiple
1903 if (HAVE_store_multiple)
1905 last = get_last_insn ();
1906 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1907 GEN_INT (nregs));
1908 if (pat)
1910 emit_insn (pat);
1911 return;
1913 else
1914 delete_insns_since (last);
1916 #endif
1918 for (i = 0; i < nregs; i++)
1920 rtx tem = operand_subword (x, i, 1, BLKmode);
1922 if (tem == 0)
1923 abort ();
1925 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1929 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1930 registers represented by a PARALLEL. SSIZE represents the total size of
1931 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1932 SRC in bits. */
1933 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1934 the balance will be in what would be the low-order memory addresses, i.e.
1935 left justified for big endian, right justified for little endian. This
1936 happens to be true for the targets currently using this support. If this
1937 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1938 would be needed. */
1940 void
1941 emit_group_load (dst, orig_src, ssize, align)
1942 rtx dst, orig_src;
1943 int align, ssize;
1945 rtx *tmps, src;
1946 int start, i;
1948 if (GET_CODE (dst) != PARALLEL)
1949 abort ();
1951 /* Check for a NULL entry, used to indicate that the parameter goes
1952 both on the stack and in registers. */
1953 if (XEXP (XVECEXP (dst, 0, 0), 0))
1954 start = 0;
1955 else
1956 start = 1;
1958 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1960 /* If we won't be loading directly from memory, protect the real source
1961 from strange tricks we might play. */
1962 src = orig_src;
1963 if (GET_CODE (src) != MEM)
1965 src = gen_reg_rtx (GET_MODE (orig_src));
1966 emit_move_insn (src, orig_src);
1969 /* Process the pieces. */
1970 for (i = start; i < XVECLEN (dst, 0); i++)
1972 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1973 int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1974 int bytelen = GET_MODE_SIZE (mode);
1975 int shift = 0;
1977 /* Handle trailing fragments that run over the size of the struct. */
1978 if (ssize >= 0 && bytepos + bytelen > ssize)
1980 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1981 bytelen = ssize - bytepos;
1982 if (bytelen <= 0)
1983 abort();
1986 /* Optimize the access just a bit. */
1987 if (GET_CODE (src) == MEM
1988 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1989 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1990 && bytelen == GET_MODE_SIZE (mode))
1992 tmps[i] = gen_reg_rtx (mode);
1993 emit_move_insn (tmps[i],
1994 change_address (src, mode,
1995 plus_constant (XEXP (src, 0),
1996 bytepos)));
1998 else
2000 tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
2001 bytepos*BITS_PER_UNIT, 1, NULL_RTX,
2002 mode, mode, align, ssize);
2005 if (BYTES_BIG_ENDIAN && shift)
2007 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2008 tmps[i], 0, OPTAB_WIDEN);
2011 emit_queue ();
2013 /* Copy the extracted pieces into the proper (probable) hard regs. */
2014 for (i = start; i < XVECLEN (dst, 0); i++)
2015 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
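/* Illustrative sketch (not from the surrounding code): the shape of
   the PARALLEL this routine consumes.  The register numbers, modes,
   sizes, and SRC_MEM below are hypothetical, chosen only to show the
   (register, byte-offset) pairs; they do not describe any real ABI.
   Kept under #if 0.  */
#if 0
extern rtx src_mem;             /* assumed BLKmode MEM operand */

/* A 16-byte value split across two hypothetical 8-byte hard regs.  */
rtx dst
  = gen_rtx_PARALLEL (VOIDmode,
                      gen_rtvec (2,
                                 gen_rtx_EXPR_LIST (VOIDmode,
                                                    gen_rtx_REG (DImode, 4),
                                                    GEN_INT (0)),
                                 gen_rtx_EXPR_LIST (VOIDmode,
                                                    gen_rtx_REG (DImode, 5),
                                                    GEN_INT (8))));

/* Load bytes 0-7 of SRC_MEM into reg 4 and bytes 8-15 into reg 5,
   assuming 8-byte alignment of the source.  */
emit_group_load (dst, src_mem, 16, 8);
#endif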
2018 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2019 registers represented by a PARALLEL. SSIZE represents the total size of
2020 block DST, or -1 if not known. ALIGN is the known alignment of DST in bytes. */
2022 void
2023 emit_group_store (orig_dst, src, ssize, align)
2024 rtx orig_dst, src;
2025 int ssize, align;
2027 rtx *tmps, dst;
2028 int start, i;
2030 if (GET_CODE (src) != PARALLEL)
2031 abort ();
2033 /* Check for a NULL entry, used to indicate that the parameter goes
2034 both on the stack and in registers. */
2035 if (XEXP (XVECEXP (src, 0, 0), 0))
2036 start = 0;
2037 else
2038 start = 1;
2040 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2042 /* Copy the (probable) hard regs into pseudos. */
2043 for (i = start; i < XVECLEN (src, 0); i++)
2045 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2046 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2047 emit_move_insn (tmps[i], reg);
2049 emit_queue ();
2051 /* If we won't be storing directly into memory, protect the real destination
2052 from strange tricks we might play. */
2053 dst = orig_dst;
2054 if (GET_CODE (dst) == PARALLEL)
2056 rtx temp;
2058 /* We can get a PARALLEL dst if there is a conditional expression in
2059 a return statement. In that case, the dst and src are the same,
2060 so no action is necessary. */
2061 if (rtx_equal_p (dst, src))
2062 return;
2064 /* It is unclear if we can ever reach here, but we may as well handle
2065 it. Allocate a temporary, and split this into a store/load to/from
2066 the temporary. */
2068 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2069 emit_group_store (temp, src, ssize, align);
2070 emit_group_load (dst, temp, ssize, align);
2071 return;
2073 else if (GET_CODE (dst) != MEM)
2075 dst = gen_reg_rtx (GET_MODE (orig_dst));
2076 /* Make life a bit easier for combine. */
2077 emit_move_insn (dst, const0_rtx);
2079 else if (! MEM_IN_STRUCT_P (dst))
2081 /* store_bit_field requires that memory operations have
2082 mem_in_struct_p set; we might not. */
2084 dst = copy_rtx (orig_dst);
2085 MEM_SET_IN_STRUCT_P (dst, 1);
2088 /* Process the pieces. */
2089 for (i = start; i < XVECLEN (src, 0); i++)
2091 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2092 enum machine_mode mode = GET_MODE (tmps[i]);
2093 int bytelen = GET_MODE_SIZE (mode);
2095 /* Handle trailing fragments that run over the size of the struct. */
2096 if (ssize >= 0 && bytepos + bytelen > ssize)
2098 if (BYTES_BIG_ENDIAN)
2100 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2101 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2102 tmps[i], 0, OPTAB_WIDEN);
2104 bytelen = ssize - bytepos;
2107 /* Optimize the access just a bit. */
2108 if (GET_CODE (dst) == MEM
2109 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2110 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2111 && bytelen == GET_MODE_SIZE (mode))
2113 emit_move_insn (change_address (dst, mode,
2114 plus_constant (XEXP (dst, 0),
2115 bytepos)),
2116 tmps[i]);
2118 else
2120 store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT,
2121 mode, tmps[i], align, ssize);
2124 emit_queue ();
2126 /* Copy from the pseudo into the (probable) hard reg. */
2127 if (GET_CODE (dst) == REG)
2128 emit_move_insn (orig_dst, dst);
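/* Illustrative sketch (not from the surrounding code): the
   trailing-fragment arithmetic above, with assumed example numbers
   (an 8-byte piece against a 6-byte struct).  Standalone C, kept
   under #if 0.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int ssize = 6;                 /* total size of the block */
  int bytepos = 0, bytelen = 8;  /* one 8-byte piece, overrunning by 2 */
  int bits_per_unit = 8;

  if (bytepos + bytelen > ssize)
    {
      /* Big endian: shift the value right so the useful bytes end up
         in the low-order memory addresses, as the code above does.  */
      int shift = (bytelen - (ssize - bytepos)) * bits_per_unit;
      bytelen = ssize - bytepos;
      printf ("shift right %d bits, then store %d bytes\n", shift, bytelen);
    }
  return 0;
}
#endif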
2131 /* Generate code to copy a BLKmode object of TYPE out of a
2132 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2133 is null, a stack temporary is created. TGTBLK is returned.
2135 The primary purpose of this routine is to handle functions
2136 that return BLKmode structures in registers. Some machines
2137 (the PA for example) want to return all small structures
2138 in registers regardless of the structure's alignment.
2141 rtx
2142 copy_blkmode_from_reg (tgtblk, srcreg, type)
2143 rtx tgtblk;
2144 rtx srcreg;
2145 tree type;
2147 int bytes = int_size_in_bytes (type);
2148 rtx src = NULL, dst = NULL;
2149 int bitsize = MIN (TYPE_ALIGN (type), (unsigned int) BITS_PER_WORD);
2150 int bitpos, xbitpos, big_endian_correction = 0;
2152 if (tgtblk == 0)
2154 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2155 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2156 preserve_temp_slots (tgtblk);
2159 /* This code assumes srcreg is at least a full word. If it isn't,
2160 copy it into a new pseudo which is a full word. */
2161 if (GET_MODE (srcreg) != BLKmode
2162 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2163 srcreg = convert_to_mode (word_mode, srcreg,
2164 TREE_UNSIGNED (type));
2166 /* Structures whose size is not a multiple of a word are aligned
2167 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2168 machine, this means we must skip the empty high order bytes when
2169 calculating the bit offset. */
2170 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2171 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2172 * BITS_PER_UNIT));
2174 Copy the structure BITSIZE bits at a time.
2176 We could probably emit more efficient code for machines
2177 which do not use strict alignment, but it doesn't seem
2178 worth the effort at the current time. */
2179 for (bitpos = 0, xbitpos = big_endian_correction;
2180 bitpos < bytes * BITS_PER_UNIT;
2181 bitpos += bitsize, xbitpos += bitsize)
2184 /* We need a new source operand each time xbitpos is on a
2185 word boundary and when xbitpos == big_endian_correction
2186 (the first time through). */
2187 if (xbitpos % BITS_PER_WORD == 0
2188 || xbitpos == big_endian_correction)
2189 src = operand_subword_force (srcreg,
2190 xbitpos / BITS_PER_WORD,
2191 BLKmode);
2193 /* We need a new destination operand each time bitpos is on
2194 a word boundary. */
2195 if (bitpos % BITS_PER_WORD == 0)
2196 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2198 /* Use xbitpos for the source extraction (right justified) and
2199 bitpos for the destination store (left justified). */
2200 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2201 extract_bit_field (src, bitsize,
2202 xbitpos % BITS_PER_WORD, 1,
2203 NULL_RTX, word_mode,
2204 word_mode,
2205 bitsize / BITS_PER_UNIT,
2206 BITS_PER_WORD),
2207 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2209 return tgtblk;
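/* Illustrative sketch (not from the surrounding code): the big-endian
   correction computed above, for an assumed 6-byte structure on an
   assumed 32-bit big-endian target.  Standalone C, kept under
   #if 0.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int bytes = 6;                   /* structure size */
  int units_per_word = 4, bits_per_word = 32, bits_per_unit = 8;
  int correction = 0;

  /* Same formula as above: skip the empty high-order bytes of the
     last, partially filled register.  */
  if (bytes % units_per_word)
    correction = bits_per_word - (bytes % units_per_word) * bits_per_unit;

  printf ("start extracting at bit %d of the word\n", correction); /* 16 */
  return 0;
}
#endif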
2213 /* Add a USE expression for REG to the (possibly empty) list pointed
2214 to by CALL_FUSAGE. REG must denote a hard register. */
2216 void
2217 use_reg (call_fusage, reg)
2218 rtx *call_fusage, reg;
2220 if (GET_CODE (reg) != REG
2221 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2222 abort ();
2224 *call_fusage
2225 = gen_rtx_EXPR_LIST (VOIDmode,
2226 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2229 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2230 starting at REGNO. All of these registers must be hard registers. */
2232 void
2233 use_regs (call_fusage, regno, nregs)
2234 rtx *call_fusage;
2235 int regno;
2236 int nregs;
2238 int i;
2240 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2241 abort ();
2243 for (i = 0; i < nregs; i++)
2244 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2247 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2248 PARALLEL REGS. This is for calls that pass values in multiple
2249 non-contiguous locations. The Irix 6 ABI has examples of this. */
2251 void
2252 use_group_regs (call_fusage, regs)
2253 rtx *call_fusage;
2254 rtx regs;
2256 int i;
2258 for (i = 0; i < XVECLEN (regs, 0); i++)
2260 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2262 /* A NULL entry means the parameter goes both on the stack and in
2263 registers. This can also be a MEM for targets that pass values
2264 partially on the stack and partially in registers. */
2265 if (reg != 0 && GET_CODE (reg) == REG)
2266 use_reg (call_fusage, reg);
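/* Illustrative sketch (not from the surrounding code): what a
   caller-built CALL_FUSAGE list looks like.  Register 3 and SImode
   are hypothetical choices, not any real ABI's argument register.
   Kept under #if 0.  */
#if 0
rtx call_fusage = NULL_RTX;

/* After this, call_fusage is roughly
   (expr_list (use (reg:SI 3)) (nil)),
   ready to be attached to a CALL_INSN's usage list.  */
use_reg (&call_fusage, gen_rtx_REG (SImode, 3));
#endif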
2270 /* Generate several move instructions to clear LEN bytes of block TO.
2271 (A MEM rtx with BLKmode). The caller must pass TO through
2272 protect_from_queue before calling. ALIGN (in bytes) is the maximum alignment
2273 we can assume. */
2275 static void
2276 clear_by_pieces (to, len, align)
2277 rtx to;
2278 int len, align;
2280 struct clear_by_pieces data;
2281 rtx to_addr = XEXP (to, 0);
2282 int max_size = MOVE_MAX_PIECES + 1;
2283 enum machine_mode mode = VOIDmode, tmode;
2284 enum insn_code icode;
2286 data.offset = 0;
2287 data.to_addr = to_addr;
2288 data.to = to;
2289 data.autinc_to
2290 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2291 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2293 data.explicit_inc_to = 0;
2294 data.reverse
2295 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2296 if (data.reverse) data.offset = len;
2297 data.len = len;
2299 data.to_struct = MEM_IN_STRUCT_P (to);
2301 /* If copying requires more than two move insns,
2302 copy addresses to registers (to make displacements shorter)
2303 and use post-increment if available. */
2304 if (!data.autinc_to
2305 && move_by_pieces_ninsns (len, align) > 2)
2307 /* Determine the main mode we'll be using. */
2308 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2309 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2310 if (GET_MODE_SIZE (tmode) < max_size)
2311 mode = tmode;
2313 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2315 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2316 data.autinc_to = 1;
2317 data.explicit_inc_to = -1;
2319 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2321 data.to_addr = copy_addr_to_reg (to_addr);
2322 data.autinc_to = 1;
2323 data.explicit_inc_to = 1;
2325 if (!data.autinc_to && CONSTANT_P (to_addr))
2326 data.to_addr = copy_addr_to_reg (to_addr);
2329 if (! SLOW_UNALIGNED_ACCESS
2330 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2331 align = MOVE_MAX;
2333 /* First move what we can in the largest integer mode, then go to
2334 successively smaller modes. */
2336 while (max_size > 1)
2338 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2339 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2340 if (GET_MODE_SIZE (tmode) < max_size)
2341 mode = tmode;
2343 if (mode == VOIDmode)
2344 break;
2346 icode = mov_optab->handlers[(int) mode].insn_code;
2347 if (icode != CODE_FOR_nothing
2348 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2349 GET_MODE_SIZE (mode)))
2350 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2352 max_size = GET_MODE_SIZE (mode);
2355 /* The code above should have handled everything. */
2356 if (data.len != 0)
2357 abort ();
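/* Illustrative sketch (not from the surrounding code): the
   widest-mode-first walk above, as standalone C over assumed 1/2/4-byte
   integer modes with MOVE_MAX_PIECES assumed to be 4.  For an 11-byte
   block it emits 4+4+2+1-byte clears.  Kept under #if 0.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int len = 11;
  int max_size = 4 + 1;            /* MOVE_MAX_PIECES + 1, assumed */

  while (max_size > 1)
    {
      int mode_size = 0, s;

      /* Widest available mode strictly narrower than max_size.  */
      for (s = 1; s <= 4; s <<= 1)
        if (s < max_size)
          mode_size = s;
      if (mode_size == 0)
        break;

      while (len >= mode_size)
        {
          printf ("clear %d bytes\n", mode_size);
          len -= mode_size;
        }
      max_size = mode_size;
    }
  return 0;
}
#endif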
2360 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2361 with move instructions for mode MODE. GENFUN is the gen_... function
2362 to make a move insn for that mode. DATA has all the other info. */
2364 static void
2365 clear_by_pieces_1 (genfun, mode, data)
2366 rtx (*genfun) PROTO ((rtx, ...));
2367 enum machine_mode mode;
2368 struct clear_by_pieces *data;
2370 register int size = GET_MODE_SIZE (mode);
2371 register rtx to1;
2373 while (data->len >= size)
2375 if (data->reverse) data->offset -= size;
2377 to1 = (data->autinc_to
2378 ? gen_rtx_MEM (mode, data->to_addr)
2379 : copy_rtx (change_address (data->to, mode,
2380 plus_constant (data->to_addr,
2381 data->offset))));
2382 MEM_IN_STRUCT_P (to1) = data->to_struct;
2384 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2385 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2387 emit_insn ((*genfun) (to1, const0_rtx));
2388 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2389 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2391 if (! data->reverse) data->offset += size;
2393 data->len -= size;
2397 /* Write zeros through the storage of OBJECT.
2398 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2399 the maximum alignment we can assume it has, measured in bytes.
2401 If we call a function that returns the length of the block, return it. */
2403 rtx
2404 clear_storage (object, size, align)
2405 rtx object;
2406 rtx size;
2407 int align;
2409 #ifdef TARGET_MEM_FUNCTIONS
2410 static tree fn;
2411 tree call_expr, arg_list;
2412 #endif
2413 rtx retval = 0;
2415 if (GET_MODE (object) == BLKmode)
2417 object = protect_from_queue (object, 1);
2418 size = protect_from_queue (size, 0);
2420 if (GET_CODE (size) == CONST_INT
2421 && MOVE_BY_PIECES_P (INTVAL (size), align))
2422 clear_by_pieces (object, INTVAL (size), align);
2424 else
2426 /* Try the most limited insn first, because there's no point
2427 including more than one in the machine description unless
2428 the more limited one has some advantage. */
2430 rtx opalign = GEN_INT (align);
2431 enum machine_mode mode;
2433 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2434 mode = GET_MODE_WIDER_MODE (mode))
2436 enum insn_code code = clrstr_optab[(int) mode];
2438 if (code != CODE_FOR_nothing
2439 /* We don't need MODE to be narrower than
2440 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2441 the mode mask, as it is returned by the macro, it will
2442 definitely be less than the actual mode mask. */
2443 && ((GET_CODE (size) == CONST_INT
2444 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2445 <= (GET_MODE_MASK (mode) >> 1)))
2446 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2447 && (insn_operand_predicate[(int) code][0] == 0
2448 || (*insn_operand_predicate[(int) code][0]) (object,
2449 BLKmode))
2450 && (insn_operand_predicate[(int) code][2] == 0
2451 || (*insn_operand_predicate[(int) code][2]) (opalign,
2452 VOIDmode)))
2454 rtx op1;
2455 rtx last = get_last_insn ();
2456 rtx pat;
2458 op1 = convert_to_mode (mode, size, 1);
2459 if (insn_operand_predicate[(int) code][1] != 0
2460 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2461 mode))
2462 op1 = copy_to_mode_reg (mode, op1);
2464 pat = GEN_FCN ((int) code) (object, op1, opalign);
2465 if (pat)
2467 emit_insn (pat);
2468 return 0;
2470 else
2471 delete_insns_since (last);
2475 /* OBJECT or SIZE may have been passed through protect_from_queue.
2477 It is unsafe to save the value generated by protect_from_queue
2478 and reuse it later. Consider what happens if emit_queue is
2479 called before the return value from protect_from_queue is used.
2481 Expansion of the CALL_EXPR below will call emit_queue before
2482 we are finished emitting RTL for argument setup. So if we are
2483 not careful we could get the wrong value for an argument.
2485 To avoid this problem we go ahead and emit code to copy OBJECT
2486 and SIZE into new pseudos. We can then place those new pseudos
2487 into an RTL_EXPR and use them later, even after a call to
2488 emit_queue.
2490 Note this is not strictly needed for library calls since they
2491 do not call emit_queue before loading their arguments. However,
2492 we may need to have library calls call emit_queue in the future
2493 since failing to do so could cause problems for targets which
2494 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2495 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2497 #ifdef TARGET_MEM_FUNCTIONS
2498 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2499 #else
2500 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2501 TREE_UNSIGNED (integer_type_node));
2502 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2503 #endif
2506 #ifdef TARGET_MEM_FUNCTIONS
2507 /* It is incorrect to use the libcall calling conventions to call
2508 memset in this context.
2510 This could be a user call to memset and the user may wish to
2511 examine the return value from memset.
2513 For targets where libcalls and normal calls have different
2514 conventions for returning pointers, we could end up generating
2515 incorrect code.
2517 So instead of using a libcall sequence we build up a suitable
2518 CALL_EXPR and expand the call in the normal fashion. */
2519 if (fn == NULL_TREE)
2521 tree fntype;
2523 /* This was copied from except.c; I don't know if all this is
2524 necessary in this context or not. */
2525 fn = get_identifier ("memset");
2526 push_obstacks_nochange ();
2527 end_temporary_allocation ();
2528 fntype = build_pointer_type (void_type_node);
2529 fntype = build_function_type (fntype, NULL_TREE);
2530 fn = build_decl (FUNCTION_DECL, fn, fntype);
2531 DECL_EXTERNAL (fn) = 1;
2532 TREE_PUBLIC (fn) = 1;
2533 DECL_ARTIFICIAL (fn) = 1;
2534 make_decl_rtl (fn, NULL_PTR, 1);
2535 assemble_external (fn);
2536 pop_obstacks ();
2539 /* We need to make an argument list for the function call.
2541 memset has three arguments: the first is a void * address, the
2542 second an integer with the initialization value, and the last a
2543 size_t byte count to set. */
2544 arg_list
2545 = build_tree_list (NULL_TREE,
2546 make_tree (build_pointer_type (void_type_node),
2547 object));
2548 TREE_CHAIN (arg_list)
2549 = build_tree_list (NULL_TREE,
2550 make_tree (integer_type_node, const0_rtx));
2551 TREE_CHAIN (TREE_CHAIN (arg_list))
2552 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2553 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2555 /* Now we have to build up the CALL_EXPR itself. */
2556 call_expr = build1 (ADDR_EXPR,
2557 build_pointer_type (TREE_TYPE (fn)), fn);
2558 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2559 call_expr, arg_list, NULL_TREE);
2560 TREE_SIDE_EFFECTS (call_expr) = 1;
2562 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2563 #else
2564 emit_library_call (bzero_libfunc, 0,
2565 VOIDmode, 2, object, Pmode, size,
2566 TYPE_MODE (integer_type_node));
2567 #endif
2570 else
2571 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2573 return retval;
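/* Illustrative sketch (not from the surrounding code): what the
   TARGET_MEM_FUNCTIONS branch above amounts to at the source level.
   The struct is hypothetical; the point is that a real call to memset
   is expanded, so its return value remains available to the caller.
   Kept under #if 0.  */
#if 0
#include <string.h>

struct big { char b[64]; };

void *
clear_example (struct big *obj)
{
  /* clear_storage on a BLKmode object of known size behaves like this
     call, modulo the clrstr and by-pieces fast paths above.  */
  return memset (obj, 0, sizeof *obj);
}
#endif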
2576 /* Generate code to copy Y into X.
2577 Both Y and X must have the same mode, except that
2578 Y can be a constant with VOIDmode.
2579 This mode cannot be BLKmode; use emit_block_move for that.
2581 Return the last instruction emitted. */
2583 rtx
2584 emit_move_insn (x, y)
2585 rtx x, y;
2587 enum machine_mode mode = GET_MODE (x);
2589 x = protect_from_queue (x, 1);
2590 y = protect_from_queue (y, 0);
2592 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2593 abort ();
2595 /* Never force constant_p_rtx to memory. */
2596 if (GET_CODE (y) == CONSTANT_P_RTX)
2598 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2599 y = force_const_mem (mode, y);
2601 /* If X or Y are memory references, verify that their addresses are valid
2602 for the machine. */
2603 if (GET_CODE (x) == MEM
2604 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2605 && ! push_operand (x, GET_MODE (x)))
2606 || (flag_force_addr
2607 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2608 x = change_address (x, VOIDmode, XEXP (x, 0));
2610 if (GET_CODE (y) == MEM
2611 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2612 || (flag_force_addr
2613 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2614 y = change_address (y, VOIDmode, XEXP (y, 0));
2616 if (mode == BLKmode)
2617 abort ();
2619 return emit_move_insn_1 (x, y);
2622 /* Low level part of emit_move_insn.
2623 Called just like emit_move_insn, but assumes X and Y
2624 are basically valid. */
2626 rtx
2627 emit_move_insn_1 (x, y)
2628 rtx x, y;
2630 enum machine_mode mode = GET_MODE (x);
2631 enum machine_mode submode;
2632 enum mode_class class = GET_MODE_CLASS (mode);
2633 int i;
2635 if (mode >= MAX_MACHINE_MODE)
2636 abort ();
2638 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2639 return
2640 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2642 /* Expand complex moves by moving real part and imag part, if possible. */
2643 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2644 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2645 * BITS_PER_UNIT),
2646 (class == MODE_COMPLEX_INT
2647 ? MODE_INT : MODE_FLOAT),
2649 && (mov_optab->handlers[(int) submode].insn_code
2650 != CODE_FOR_nothing))
2652 /* Don't split destination if it is a stack push. */
2653 int stack = push_operand (x, GET_MODE (x));
2655 /* If this is a stack push, push the highpart first, so it
2656 will be in the argument order.
2658 In that case, change_address is used only to convert
2659 the mode, not to change the address. */
2660 if (stack)
2662 /* Note that the real part always precedes the imag part in memory
2663 regardless of machine's endianness. */
2664 #ifdef STACK_GROWS_DOWNWARD
2665 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2666 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2667 gen_imagpart (submode, y)));
2668 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2669 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2670 gen_realpart (submode, y)));
2671 #else
2672 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2673 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2674 gen_realpart (submode, y)));
2675 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2676 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2677 gen_imagpart (submode, y)));
2678 #endif
2680 else
2682 /* Show the output dies here. This is necessary for pseudos;
2683 hard regs shouldn't appear here except as return values.
2684 We never want to emit such a clobber after reload. */
2685 if (x != y
2686 && ! (reload_in_progress || reload_completed))
2688 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2691 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2692 (gen_realpart (submode, x), gen_realpart (submode, y)));
2693 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2694 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2697 return get_last_insn ();
2700 /* This will handle any multi-word mode that lacks a move_insn pattern.
2701 However, you will get better code if you define such patterns,
2702 even if they must turn into multiple assembler instructions. */
2703 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2705 rtx last_insn = 0;
2707 #ifdef PUSH_ROUNDING
2709 /* If X is a push on the stack, do the push now and replace
2710 X with a reference to the stack pointer. */
2711 if (push_operand (x, GET_MODE (x)))
2713 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2714 x = change_address (x, VOIDmode, stack_pointer_rtx);
2716 #endif
2718 /* Show the output dies here. This is necessary for pseudos;
2719 hard regs shouldn't appear here except as return values.
2720 We never want to emit such a clobber after reload. */
2721 if (x != y
2722 && ! (reload_in_progress || reload_completed))
2724 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2727 for (i = 0;
2728 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2729 i++)
2731 rtx xpart = operand_subword (x, i, 1, mode);
2732 rtx ypart = operand_subword (y, i, 1, mode);
2734 /* If we can't get a part of Y, put Y into memory if it is a
2735 constant. Otherwise, force it into a register. If we still
2736 can't get a part of Y, abort. */
2737 if (ypart == 0 && CONSTANT_P (y))
2739 y = force_const_mem (mode, y);
2740 ypart = operand_subword (y, i, 1, mode);
2742 else if (ypart == 0)
2743 ypart = operand_subword_force (y, i, mode);
2745 if (xpart == 0 || ypart == 0)
2746 abort ();
2748 last_insn = emit_move_insn (xpart, ypart);
2751 return last_insn;
2753 else
2754 abort ();
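/* Illustrative sketch (not from the surrounding code): the word count
   used by the multi-word fallback above, for an assumed 8-byte mode on
   an assumed 32-bit target.  Standalone C, kept under #if 0.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int mode_size = 8, units_per_word = 4;   /* assumed example values */

  /* Same rounding-up division as the loop bound above.  */
  int nwords = (mode_size + units_per_word - 1) / units_per_word;

  printf ("%d subword moves\n", nwords);   /* prints 2 */
  return 0;
}
#endif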
2757 /* Pushing data onto the stack. */
2759 /* Push a block of length SIZE (perhaps variable)
2760 and return an rtx to address the beginning of the block.
2761 Note that it is not possible for the value returned to be a QUEUED.
2762 The value may be virtual_outgoing_args_rtx.
2764 EXTRA is the number of bytes of padding to push in addition to SIZE.
2765 BELOW nonzero means this padding comes at low addresses;
2766 otherwise, the padding comes at high addresses. */
2768 rtx
2769 push_block (size, extra, below)
2770 rtx size;
2771 int extra, below;
2773 register rtx temp;
2775 size = convert_modes (Pmode, ptr_mode, size, 1);
2776 if (CONSTANT_P (size))
2777 anti_adjust_stack (plus_constant (size, extra));
2778 else if (GET_CODE (size) == REG && extra == 0)
2779 anti_adjust_stack (size);
2780 else
2782 rtx temp = copy_to_mode_reg (Pmode, size);
2783 if (extra != 0)
2784 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2785 temp, 0, OPTAB_LIB_WIDEN);
2786 anti_adjust_stack (temp);
2789 #if defined (STACK_GROWS_DOWNWARD) \
2790 || (defined (ARGS_GROW_DOWNWARD) \
2791 && !defined (ACCUMULATE_OUTGOING_ARGS))
2793 /* Return the lowest stack address when STACK or ARGS grow downward and
2794 we are not accumulating outgoing arguments (the c4x port uses such
2795 conventions). */
2796 temp = virtual_outgoing_args_rtx;
2797 if (extra != 0 && below)
2798 temp = plus_constant (temp, extra);
2799 #else
2800 if (GET_CODE (size) == CONST_INT)
2801 temp = plus_constant (virtual_outgoing_args_rtx,
2802 - INTVAL (size) - (below ? 0 : extra));
2803 else if (extra != 0 && !below)
2804 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2805 negate_rtx (Pmode, plus_constant (size, extra)));
2806 else
2807 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2808 negate_rtx (Pmode, size));
2809 #endif
2811 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
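/* Illustrative sketch (not from the surrounding code): the
   constant-size address arithmetic in the downward-growing #else
   branch above, with assumed example numbers.  Standalone C, kept
   under #if 0.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int size = 16, extra = 8;
  int below;

  for (below = 0; below <= 1; below++)
    /* plus_constant (virtual_outgoing_args_rtx,
                      - INTVAL (size) - (below ? 0 : extra))  */
    printf ("below=%d: block starts at base%+d\n",
            below, -size - (below ? 0 : extra));

  /* below=0: base-24, so the padding ends up at the high addresses;
     below=1: base-16, so the padding sits below the block.  */
  return 0;
}
#endif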
2814 rtx
2815 gen_push_operand ()
2817 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2820 /* Return an rtx for the address of the beginning of an as-if-it-were-pushed
2821 block of SIZE bytes. */
2823 static rtx
2824 get_push_address (size)
2825 int size;
2827 register rtx temp;
2829 if (STACK_PUSH_CODE == POST_DEC)
2830 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2831 else if (STACK_PUSH_CODE == POST_INC)
2832 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2833 else
2834 temp = stack_pointer_rtx;
2836 return copy_to_reg (temp);
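/* Illustrative sketch (not from the surrounding code): why the
   POST_DEC case above adds SIZE back.  With a post-decrement push the
   data lands at the old stack pointer, so after the push the block
   begins at sp + size.  Assumed example numbers; standalone C under
   #if 0.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int sp = 100, size = 8;

  int block = sp;    /* POST_DEC stores at the current sp...  */
  sp -= size;        /* ...then decrements it.  */

  printf ("block %d == sp + size %d\n", block, sp + size);
  return 0;
}
#endif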
2839 /* Generate code to push X onto the stack, assuming it has mode MODE and
2840 type TYPE.
2841 MODE is redundant except when X is a CONST_INT (since they don't
2842 carry mode info).
2843 SIZE is an rtx for the size of data to be copied (in bytes),
2844 needed only if X is BLKmode.
2846 ALIGN (in bytes) is maximum alignment we can assume.
2848 If PARTIAL and REG are both nonzero, then copy that many of the first
2849 words of X into registers starting with REG, and push the rest of X.
2850 The amount of space pushed is decreased by PARTIAL words,
2851 rounded *down* to a multiple of PARM_BOUNDARY.
2852 REG must be a hard register in this case.
2853 If REG is zero but PARTIAL is not, take all other actions for an
2854 argument partially in registers, but do not actually load any
2855 registers.
2857 EXTRA is the amount in bytes of extra space to leave next to this arg.
2858 This is ignored if an argument block has already been allocated.
2860 On a machine that lacks real push insns, ARGS_ADDR is the address of
2861 the bottom of the argument block for this call. We use indexing off there
2862 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2863 argument block has not been preallocated.
2865 ARGS_SO_FAR is the size of args previously pushed for this call.
2867 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2868 for arguments passed in registers. If nonzero, it will be the number
2869 of bytes required. */
2871 void
2872 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2873 args_addr, args_so_far, reg_parm_stack_space)
2874 register rtx x;
2875 enum machine_mode mode;
2876 tree type;
2877 rtx size;
2878 int align;
2879 int partial;
2880 rtx reg;
2881 int extra;
2882 rtx args_addr;
2883 rtx args_so_far;
2884 int reg_parm_stack_space;
2886 rtx xinner;
2887 enum direction stack_direction
2888 #ifdef STACK_GROWS_DOWNWARD
2889 = downward;
2890 #else
2891 = upward;
2892 #endif
2894 /* Decide where to pad the argument: `downward' for below,
2895 `upward' for above, or `none' for don't pad it.
2896 Default is below for small data on big-endian machines; else above. */
2897 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2899 /* Invert direction if stack is post-update. */
2900 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2901 if (where_pad != none)
2902 where_pad = (where_pad == downward ? upward : downward);
2904 xinner = x = protect_from_queue (x, 0);
2906 if (mode == BLKmode)
2908 /* Copy a block into the stack, entirely or partially. */
2910 register rtx temp;
2911 int used = partial * UNITS_PER_WORD;
2912 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2913 int skip;
2915 if (size == 0)
2916 abort ();
2918 used -= offset;
2920 /* USED is now the # of bytes we need not copy to the stack
2921 because registers will take care of them. */
2923 if (partial != 0)
2924 xinner = change_address (xinner, BLKmode,
2925 plus_constant (XEXP (xinner, 0), used));
2927 /* If the partial register-part of the arg counts in its stack size,
2928 skip the part of stack space corresponding to the registers.
2929 Otherwise, start copying to the beginning of the stack space,
2930 by setting SKIP to 0. */
2931 skip = (reg_parm_stack_space == 0) ? 0 : used;
2933 #ifdef PUSH_ROUNDING
2934 /* Do it with several push insns if that doesn't take lots of insns
2935 and if there is no difficulty with push insns that skip bytes
2936 on the stack for alignment purposes. */
2937 if (args_addr == 0
2938 && GET_CODE (size) == CONST_INT
2939 && skip == 0
2940 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
2941 /* Here we avoid the case of a structure whose weak alignment
2942 forces many pushes of a small amount of data,
2943 and such small pushes do rounding that causes trouble. */
2944 && ((! SLOW_UNALIGNED_ACCESS)
2945 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2946 || PUSH_ROUNDING (align) == align)
2947 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2949 /* Push padding now if padding above and stack grows down,
2950 or if padding below and stack grows up.
2951 But if space already allocated, this has already been done. */
2952 if (extra && args_addr == 0
2953 && where_pad != none && where_pad != stack_direction)
2954 anti_adjust_stack (GEN_INT (extra));
2956 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2957 INTVAL (size) - used, align);
2959 if (current_function_check_memory_usage && ! in_check_memory_usage)
2961 rtx temp;
2963 in_check_memory_usage = 1;
2964 temp = get_push_address (INTVAL (size) - used);
2965 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2966 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2967 temp, Pmode,
2968 XEXP (xinner, 0), Pmode,
2969 GEN_INT (INTVAL (size) - used),
2970 TYPE_MODE (sizetype));
2971 else
2972 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2973 temp, Pmode,
2974 GEN_INT (INTVAL (size) - used),
2975 TYPE_MODE (sizetype),
2976 GEN_INT (MEMORY_USE_RW),
2977 TYPE_MODE (integer_type_node));
2978 in_check_memory_usage = 0;
2981 else
2982 #endif /* PUSH_ROUNDING */
2984 /* Otherwise make space on the stack and copy the data
2985 to the address of that space. */
2987 /* Deduct words put into registers from the size we must copy. */
2988 if (partial != 0)
2990 if (GET_CODE (size) == CONST_INT)
2991 size = GEN_INT (INTVAL (size) - used);
2992 else
2993 size = expand_binop (GET_MODE (size), sub_optab, size,
2994 GEN_INT (used), NULL_RTX, 0,
2995 OPTAB_LIB_WIDEN);
2998 /* Get the address of the stack space.
2999 In this case, we do not deal with EXTRA separately.
3000 A single stack adjust will do. */
3001 if (! args_addr)
3003 temp = push_block (size, extra, where_pad == downward);
3004 extra = 0;
3006 else if (GET_CODE (args_so_far) == CONST_INT)
3007 temp = memory_address (BLKmode,
3008 plus_constant (args_addr,
3009 skip + INTVAL (args_so_far)));
3010 else
3011 temp = memory_address (BLKmode,
3012 plus_constant (gen_rtx_PLUS (Pmode,
3013 args_addr,
3014 args_so_far),
3015 skip));
3016 if (current_function_check_memory_usage && ! in_check_memory_usage)
3018 rtx target;
3020 in_check_memory_usage = 1;
3021 target = copy_to_reg (temp);
3022 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3023 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3024 target, Pmode,
3025 XEXP (xinner, 0), Pmode,
3026 size, TYPE_MODE (sizetype));
3027 else
3028 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3029 target, Pmode,
3030 size, TYPE_MODE (sizetype),
3031 GEN_INT (MEMORY_USE_RW),
3032 TYPE_MODE (integer_type_node));
3033 in_check_memory_usage = 0;
3036 /* TEMP is the address of the block. Copy the data there. */
3037 if (GET_CODE (size) == CONST_INT
3038 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align)))
3040 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
3041 INTVAL (size), align);
3042 goto ret;
3044 else
3046 rtx opalign = GEN_INT (align);
3047 enum machine_mode mode;
3048 rtx target = gen_rtx_MEM (BLKmode, temp);
3050 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3051 mode != VOIDmode;
3052 mode = GET_MODE_WIDER_MODE (mode))
3054 enum insn_code code = movstr_optab[(int) mode];
3056 if (code != CODE_FOR_nothing
3057 && ((GET_CODE (size) == CONST_INT
3058 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3059 <= (GET_MODE_MASK (mode) >> 1)))
3060 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3061 && (insn_operand_predicate[(int) code][0] == 0
3062 || ((*insn_operand_predicate[(int) code][0])
3063 (target, BLKmode)))
3064 && (insn_operand_predicate[(int) code][1] == 0
3065 || ((*insn_operand_predicate[(int) code][1])
3066 (xinner, BLKmode)))
3067 && (insn_operand_predicate[(int) code][3] == 0
3068 || ((*insn_operand_predicate[(int) code][3])
3069 (opalign, VOIDmode))))
3071 rtx op2 = convert_to_mode (mode, size, 1);
3072 rtx last = get_last_insn ();
3073 rtx pat;
3075 if (insn_operand_predicate[(int) code][2] != 0
3076 && ! ((*insn_operand_predicate[(int) code][2])
3077 (op2, mode)))
3078 op2 = copy_to_mode_reg (mode, op2);
3080 pat = GEN_FCN ((int) code) (target, xinner,
3081 op2, opalign);
3082 if (pat)
3084 emit_insn (pat);
3085 goto ret;
3087 else
3088 delete_insns_since (last);
3093 #ifndef ACCUMULATE_OUTGOING_ARGS
3094 /* If the source is referenced relative to the stack pointer,
3095 copy it to another register to stabilize it. We do not need
3096 to do this if we know that we won't be changing sp. */
3098 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3099 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3100 temp = copy_to_reg (temp);
3101 #endif
3103 /* Make inhibit_defer_pop nonzero around the library call
3104 to force it to pop the bcopy-arguments right away. */
3105 NO_DEFER_POP;
3106 #ifdef TARGET_MEM_FUNCTIONS
3107 emit_library_call (memcpy_libfunc, 0,
3108 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3109 convert_to_mode (TYPE_MODE (sizetype),
3110 size, TREE_UNSIGNED (sizetype)),
3111 TYPE_MODE (sizetype));
3112 #else
3113 emit_library_call (bcopy_libfunc, 0,
3114 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3115 convert_to_mode (TYPE_MODE (integer_type_node),
3116 size,
3117 TREE_UNSIGNED (integer_type_node)),
3118 TYPE_MODE (integer_type_node));
3119 #endif
3120 OK_DEFER_POP;
3123 else if (partial > 0)
3125 /* Scalar partly in registers. */
3127 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3128 int i;
3129 int not_stack;
3130 /* # words of start of argument
3131 that we must make space for but need not store. */
3132 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3133 int args_offset = INTVAL (args_so_far);
3134 int skip;
3136 /* Push padding now if padding above and stack grows down,
3137 or if padding below and stack grows up.
3138 But if space already allocated, this has already been done. */
3139 if (extra && args_addr == 0
3140 && where_pad != none && where_pad != stack_direction)
3141 anti_adjust_stack (GEN_INT (extra));
3143 /* If we make space by pushing it, we might as well push
3144 the real data. Otherwise, we can leave OFFSET nonzero
3145 and leave the space uninitialized. */
3146 if (args_addr == 0)
3147 offset = 0;
3149 /* Now NOT_STACK gets the number of words that we don't need to
3150 allocate on the stack. */
3151 not_stack = partial - offset;
3153 /* If the partial register-part of the arg counts in its stack size,
3154 skip the part of stack space corresponding to the registers.
3155 Otherwise, start copying to the beginning of the stack space,
3156 by setting SKIP to 0. */
3157 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3159 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3160 x = validize_mem (force_const_mem (mode, x));
3162 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3163 SUBREGs of such registers are not allowed. */
3164 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3165 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3166 x = copy_to_reg (x);
3168 /* Loop over all the words allocated on the stack for this arg. */
3169 /* We can do it by words, because any scalar bigger than a word
3170 has a size that is a multiple of a word. */
3171 #ifndef PUSH_ARGS_REVERSED
3172 for (i = not_stack; i < size; i++)
3173 #else
3174 for (i = size - 1; i >= not_stack; i--)
3175 #endif
3176 if (i >= not_stack + offset)
3177 emit_push_insn (operand_subword_force (x, i, mode),
3178 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3179 0, args_addr,
3180 GEN_INT (args_offset + ((i - not_stack + skip)
3181 * UNITS_PER_WORD)),
3182 reg_parm_stack_space);
3184 else
3186 rtx addr;
3187 rtx target = NULL_RTX;
3189 /* Push padding now if padding above and stack grows down,
3190 or if padding below and stack grows up.
3191 But if space already allocated, this has already been done. */
3192 if (extra && args_addr == 0
3193 && where_pad != none && where_pad != stack_direction)
3194 anti_adjust_stack (GEN_INT (extra));
3196 #ifdef PUSH_ROUNDING
3197 if (args_addr == 0)
3198 addr = gen_push_operand ();
3199 else
3200 #endif
3202 if (GET_CODE (args_so_far) == CONST_INT)
3203 addr
3204 = memory_address (mode,
3205 plus_constant (args_addr,
3206 INTVAL (args_so_far)));
3207 else
3208 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3209 args_so_far));
3210 target = addr;
3213 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3215 if (current_function_check_memory_usage && ! in_check_memory_usage)
3217 in_check_memory_usage = 1;
3218 if (target == 0)
3219 target = get_push_address (GET_MODE_SIZE (mode));
3221 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3222 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3223 target, Pmode,
3224 XEXP (x, 0), Pmode,
3225 GEN_INT (GET_MODE_SIZE (mode)),
3226 TYPE_MODE (sizetype));
3227 else
3228 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3229 target, Pmode,
3230 GEN_INT (GET_MODE_SIZE (mode)),
3231 TYPE_MODE (sizetype),
3232 GEN_INT (MEMORY_USE_RW),
3233 TYPE_MODE (integer_type_node));
3234 in_check_memory_usage = 0;
3238 ret:
3239 /* If part should go in registers, copy that part
3240 into the appropriate registers. Do this now, at the end,
3241 since mem-to-mem copies above may do function calls. */
3242 if (partial > 0 && reg != 0)
3244 /* Handle calls that pass values in multiple non-contiguous locations.
3245 The Irix 6 ABI has examples of this. */
3246 if (GET_CODE (reg) == PARALLEL)
3247 emit_group_load (reg, x, -1, align); /* ??? size? */
3248 else
3249 move_block_to_reg (REGNO (reg), x, partial, mode);
3252 if (extra && args_addr == 0 && where_pad == stack_direction)
3253 anti_adjust_stack (GEN_INT (extra));
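/* Illustrative sketch (not from the surrounding code): the
   register/stack split computed at the top of emit_push_insn, with
   assumed example numbers (4-byte words, 32-bit PARM_BOUNDARY, a
   24-byte argument with 2 words in registers).  Standalone C, kept
   under #if 0.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int units_per_word = 4, parm_boundary_bytes = 4;  /* assumed */
  int partial = 2, size = 24;

  int used = partial * units_per_word;       /* bytes registers cover */
  int offset = used % parm_boundary_bytes;   /* round down to boundary */
  used -= offset;

  printf ("%d bytes in registers, %d bytes pushed\n", used, size - used);
  return 0;
}
#endif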
3256 /* Expand an assignment that stores the value of FROM into TO.
3257 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3258 (This may contain a QUEUED rtx;
3259 if the value is constant, this rtx is a constant.)
3260 Otherwise, the returned value is NULL_RTX.
3262 SUGGEST_REG is no longer actually used.
3263 It used to mean, copy the value through a register
3264 and return that register, if that is possible.
3265 We now use WANT_VALUE to decide whether to do this. */
3267 rtx
3268 expand_assignment (to, from, want_value, suggest_reg)
3269 tree to, from;
3270 int want_value;
3271 int suggest_reg;
3273 register rtx to_rtx = 0;
3274 rtx result;
3276 /* Don't crash if the lhs of the assignment was erroneous. */
3278 if (TREE_CODE (to) == ERROR_MARK)
3280 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3281 return want_value ? result : NULL_RTX;
3284 /* Assignment of a structure component needs special treatment
3285 if the structure component's rtx is not simply a MEM.
3286 Assignment of an array element at a constant index, and assignment of
3287 an array element in an unaligned packed structure field, has the same
3288 problem. */
3290 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3291 || TREE_CODE (to) == ARRAY_REF)
3293 enum machine_mode mode1;
3294 int bitsize;
3295 int bitpos;
3296 tree offset;
3297 int unsignedp;
3298 int volatilep = 0;
3299 tree tem;
3300 int alignment;
3302 push_temp_slots ();
3303 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3304 &unsignedp, &volatilep, &alignment);
3306 /* If we are going to use store_bit_field and extract_bit_field,
3307 make sure to_rtx will be safe for multiple use. */
3309 if (mode1 == VOIDmode && want_value)
3310 tem = stabilize_reference (tem);
3312 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3313 if (offset != 0)
3315 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3317 if (GET_CODE (to_rtx) != MEM)
3318 abort ();
3320 if (GET_MODE (offset_rtx) != ptr_mode)
3322 #ifdef POINTERS_EXTEND_UNSIGNED
3323 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3324 #else
3325 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3326 #endif
3329 /* A constant address in TO_RTX can have VOIDmode, we must not try
3330 to call force_reg for that case. Avoid that case. */
3331 if (GET_CODE (to_rtx) == MEM
3332 && GET_MODE (to_rtx) == BLKmode
3333 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3334 && bitsize
3335 && (bitpos % bitsize) == 0
3336 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3337 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3339 rtx temp = change_address (to_rtx, mode1,
3340 plus_constant (XEXP (to_rtx, 0),
3341 (bitpos /
3342 BITS_PER_UNIT)));
3343 if (GET_CODE (XEXP (temp, 0)) == REG)
3344 to_rtx = temp;
3345 else
3346 to_rtx = change_address (to_rtx, mode1,
3347 force_reg (GET_MODE (XEXP (temp, 0)),
3348 XEXP (temp, 0)));
3349 bitpos = 0;
3352 to_rtx = change_address (to_rtx, VOIDmode,
3353 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3354 force_reg (ptr_mode, offset_rtx)));
3356 if (volatilep)
3358 if (GET_CODE (to_rtx) == MEM)
3360 /* When the offset is zero, to_rtx is the address of the
3361 structure we are storing into, and hence may be shared.
3362 We must make a new MEM before setting the volatile bit. */
3363 if (offset == 0)
3364 to_rtx = copy_rtx (to_rtx);
3366 MEM_VOLATILE_P (to_rtx) = 1;
3368 #if 0 /* This was turned off because, when a field is volatile
3369 in an object which is not volatile, the object may be in a register,
3370 and then we would abort over here. */
3371 else
3372 abort ();
3373 #endif
3376 if (TREE_CODE (to) == COMPONENT_REF
3377 && TREE_READONLY (TREE_OPERAND (to, 1)))
3379 if (offset == 0)
3380 to_rtx = copy_rtx (to_rtx);
3382 RTX_UNCHANGING_P (to_rtx) = 1;
3385 /* Check the access. */
3386 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3388 rtx to_addr;
3389 int size;
3390 int best_mode_size;
3391 enum machine_mode best_mode;
3393 best_mode = get_best_mode (bitsize, bitpos,
3394 TYPE_ALIGN (TREE_TYPE (tem)),
3395 mode1, volatilep);
3396 if (best_mode == VOIDmode)
3397 best_mode = QImode;
3399 best_mode_size = GET_MODE_BITSIZE (best_mode);
3400 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3401 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3402 size *= GET_MODE_SIZE (best_mode);
3404 /* Check the access right of the pointer. */
3405 if (size)
3406 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3407 to_addr, Pmode,
3408 GEN_INT (size), TYPE_MODE (sizetype),
3409 GEN_INT (MEMORY_USE_WO),
3410 TYPE_MODE (integer_type_node));
3413 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3414 (want_value
3415 /* Spurious cast makes HPUX compiler happy. */
3416 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3417 : VOIDmode),
3418 unsignedp,
3419 /* Required alignment of containing datum. */
3420 alignment,
3421 int_size_in_bytes (TREE_TYPE (tem)),
3422 get_alias_set (to));
3423 preserve_temp_slots (result);
3424 free_temp_slots ();
3425 pop_temp_slots ();
3427 /* If the value is meaningful, convert RESULT to the proper mode.
3428 Otherwise, return nothing. */
3429 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3430 TYPE_MODE (TREE_TYPE (from)),
3431 result,
3432 TREE_UNSIGNED (TREE_TYPE (to)))
3433 : NULL_RTX);
3436 /* If the rhs is a function call and its value is not an aggregate,
3437 call the function before we start to compute the lhs.
3438 This is needed for correct code for cases such as
3439 val = setjmp (buf) on machines where reference to val
3440 requires loading up part of an address in a separate insn.
3442 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3443 a promoted variable where the zero- or sign-extension needs to be done.
3444 Handling this in the normal way is safe because no computation is done
3445 before the call. */
3446 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3447 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3448 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3450 rtx value;
3452 push_temp_slots ();
3453 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3454 if (to_rtx == 0)
3455 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3457 /* Handle calls that return values in multiple non-contiguous locations.
3458 The Irix 6 ABI has examples of this. */
3459 if (GET_CODE (to_rtx) == PARALLEL)
3460 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3461 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3462 else if (GET_MODE (to_rtx) == BLKmode)
3463 emit_block_move (to_rtx, value, expr_size (from),
3464 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3465 else
3467 #ifdef POINTERS_EXTEND_UNSIGNED
3468 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3469 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3470 value = convert_memory_address (GET_MODE (to_rtx), value);
3471 #endif
3472 emit_move_insn (to_rtx, value);
3474 preserve_temp_slots (to_rtx);
3475 free_temp_slots ();
3476 pop_temp_slots ();
3477 return want_value ? to_rtx : NULL_RTX;
3480 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3481 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3483 if (to_rtx == 0)
3485 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3486 if (GET_CODE (to_rtx) == MEM)
3487 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3490 /* Don't move directly into a return register. */
3491 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3493 rtx temp;
3495 push_temp_slots ();
3496 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3497 emit_move_insn (to_rtx, temp);
3498 preserve_temp_slots (to_rtx);
3499 free_temp_slots ();
3500 pop_temp_slots ();
3501 return want_value ? to_rtx : NULL_RTX;
3504 /* In case we are returning the contents of an object which overlaps
3505 the place the value is being stored, use a safe function when copying
3506 a value through a pointer into a structure value return block. */
3507 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3508 && current_function_returns_struct
3509 && !current_function_returns_pcc_struct)
3511 rtx from_rtx, size;
3513 push_temp_slots ();
3514 size = expr_size (from);
3515 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3516 EXPAND_MEMORY_USE_DONT);
3518 /* Copy the rights of the bitmap. */
3519 if (current_function_check_memory_usage)
3520 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3521 XEXP (to_rtx, 0), Pmode,
3522 XEXP (from_rtx, 0), Pmode,
3523 convert_to_mode (TYPE_MODE (sizetype),
3524 size, TREE_UNSIGNED (sizetype)),
3525 TYPE_MODE (sizetype));
3527 #ifdef TARGET_MEM_FUNCTIONS
3528 emit_library_call (memcpy_libfunc, 0,
3529 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3530 XEXP (from_rtx, 0), Pmode,
3531 convert_to_mode (TYPE_MODE (sizetype),
3532 size, TREE_UNSIGNED (sizetype)),
3533 TYPE_MODE (sizetype));
3534 #else
3535 emit_library_call (bcopy_libfunc, 0,
3536 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3537 XEXP (to_rtx, 0), Pmode,
3538 convert_to_mode (TYPE_MODE (integer_type_node),
3539 size, TREE_UNSIGNED (integer_type_node)),
3540 TYPE_MODE (integer_type_node));
3541 #endif
3543 preserve_temp_slots (to_rtx);
3544 free_temp_slots ();
3545 pop_temp_slots ();
3546 return want_value ? to_rtx : NULL_RTX;
3549 /* Compute FROM and store the value in the rtx we got. */
3551 push_temp_slots ();
3552 result = store_expr (from, to_rtx, want_value);
3553 preserve_temp_slots (result);
3554 free_temp_slots ();
3555 pop_temp_slots ();
3556 return want_value ? result : NULL_RTX;
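/* Illustrative sketch (not from the surrounding code): the in-word
   masking that the store_field / store_bit_field path above ultimately
   performs for a simple bit field.  The positions and widths are
   hypothetical.  Standalone C, kept under #if 0.  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned word = 0xffffffffu;     /* existing contents */
  unsigned bitpos = 3, bitsize = 5;
  unsigned v = 0x12;               /* value to store, fits in 5 bits */
  unsigned mask = (1u << bitsize) - 1;

  /* Clear the field, then or in the new value.  */
  word = (word & ~(mask << bitpos)) | ((v & mask) << bitpos);

  printf ("%#x\n", word);          /* prints 0xffffff97 */
  return 0;
}
#endif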
3559 /* Generate code for computing expression EXP,
3560 and storing the value into TARGET.
3561 TARGET may contain a QUEUED rtx.
3563 If WANT_VALUE is nonzero, return a copy of the value
3564 not in TARGET, so that we can be sure to use the proper
3565 value in a containing expression even if TARGET has something
3566 else stored in it. If possible, we copy the value through a pseudo
3567 and return that pseudo. Or, if the value is constant, we try to
3568 return the constant. In some cases, we return a pseudo
3569 copied *from* TARGET.
3571 If the mode is BLKmode then we may return TARGET itself.
3572 It turns out that in BLKmode it doesn't cause a problem,
3573 because C has no operators that could combine two different
3574 assignments into the same BLKmode object with different values
3575 with no sequence point. Will other languages need this to
3576 be more thorough?
3578 If WANT_VALUE is 0, we return NULL, to make sure
3579 to catch quickly any cases where the caller uses the value
3580 and fails to set WANT_VALUE. */
3582 rtx
3583 store_expr (exp, target, want_value)
3584 register tree exp;
3585 register rtx target;
3586 int want_value;
3588 register rtx temp;
3589 int dont_return_target = 0;
3591 if (TREE_CODE (exp) == COMPOUND_EXPR)
3593 /* Perform first part of compound expression, then assign from second
3594 part. */
3595 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3596 emit_queue ();
3597 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3599 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3601 /* For conditional expression, get safe form of the target. Then
3602 test the condition, doing the appropriate assignment on either
3603 side. This avoids the creation of unnecessary temporaries.
3604 For non-BLKmode, it is more efficient not to do this. */
3606 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3608 emit_queue ();
3609 target = protect_from_queue (target, 1);
3611 do_pending_stack_adjust ();
3612 NO_DEFER_POP;
3613 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3614 start_cleanup_deferral ();
3615 store_expr (TREE_OPERAND (exp, 1), target, 0);
3616 end_cleanup_deferral ();
3617 emit_queue ();
3618 emit_jump_insn (gen_jump (lab2));
3619 emit_barrier ();
3620 emit_label (lab1);
3621 start_cleanup_deferral ();
3622 store_expr (TREE_OPERAND (exp, 2), target, 0);
3623 end_cleanup_deferral ();
3624 emit_queue ();
3625 emit_label (lab2);
3626 OK_DEFER_POP;
3628 return want_value ? target : NULL_RTX;
3630 else if (queued_subexp_p (target))
3631 /* If target contains a postincrement, let's not risk
3632 using it as the place to generate the rhs. */
3634 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3636 /* Expand EXP into a new pseudo. */
3637 temp = gen_reg_rtx (GET_MODE (target));
3638 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3640 else
3641 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3643 /* If target is volatile, ANSI requires accessing the value
3644 *from* the target, if it is accessed. So make that happen.
3645 In no case return the target itself. */
3646 if (! MEM_VOLATILE_P (target) && want_value)
3647 dont_return_target = 1;
3649 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3650 && GET_MODE (target) != BLKmode)
3651 /* If target is in memory and caller wants value in a register instead,
3652 arrange that. Pass TARGET as target for expand_expr so that,
3653 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3654 We know expand_expr will not use the target in that case.
3655 Don't do this if TARGET is volatile because we are supposed
3656 to write it and then read it. */
3658 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3659 GET_MODE (target), 0);
3660 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3661 temp = copy_to_reg (temp);
3662 dont_return_target = 1;
3664 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3665 /* If this is a scalar in a register that is stored in a wider mode
3666 than the declared mode, compute the result into its declared mode
3667 and then convert to the wider mode. Our value is the computed
3668 expression. */
3670 /* If we don't want a value, we can do the conversion inside EXP,
3671 which will often result in some optimizations. Do the conversion
3672 in two steps: first change the signedness, if needed, then
3673 the extend. But don't do this if the type of EXP is a subtype
3674 of something else since then the conversion might involve
3675 more than just converting modes. */
3676 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3677 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3679 if (TREE_UNSIGNED (TREE_TYPE (exp))
3680 != SUBREG_PROMOTED_UNSIGNED_P (target))
3681 exp
3682 = convert
3683 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3684 TREE_TYPE (exp)),
3685 exp);
3687 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3688 SUBREG_PROMOTED_UNSIGNED_P (target)),
3689 exp);
3692 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3694 /* If TEMP is a volatile MEM and we want a result value, make
3695 the access now so it gets done only once. Likewise if
3696 it contains TARGET. */
3697 if (GET_CODE (temp) == MEM && want_value
3698 && (MEM_VOLATILE_P (temp)
3699 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3700 temp = copy_to_reg (temp);
3702 /* If TEMP is a VOIDmode constant, use convert_modes to make
3703 sure that we properly convert it. */
3704 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3705 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3706 TYPE_MODE (TREE_TYPE (exp)), temp,
3707 SUBREG_PROMOTED_UNSIGNED_P (target));
3709 convert_move (SUBREG_REG (target), temp,
3710 SUBREG_PROMOTED_UNSIGNED_P (target));
3712 /* If we promoted a constant, change the mode back down to match
3713 target. Otherwise, the caller might get confused by a result whose
3714 mode is larger than expected. */
3716 if (want_value && GET_MODE (temp) != GET_MODE (target)
3717 && GET_MODE (temp) != VOIDmode)
3719 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3720 SUBREG_PROMOTED_VAR_P (temp) = 1;
3721 SUBREG_PROMOTED_UNSIGNED_P (temp)
3722 = SUBREG_PROMOTED_UNSIGNED_P (target);
3725 return want_value ? temp : NULL_RTX;
3727 else
3729 temp = expand_expr (exp, target, GET_MODE (target), 0);
3730 /* Return TARGET if it's a specified hardware register.
3731 If TARGET is a volatile mem ref, either return TARGET
3732 or return a reg copied *from* TARGET; ANSI requires this.
3734 Otherwise, if TEMP is not TARGET, return TEMP
3735 if it is constant (for efficiency),
3736 or if we really want the correct value. */
3737 if (!(target && GET_CODE (target) == REG
3738 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3739 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3740 && ! rtx_equal_p (temp, target)
3741 && (CONSTANT_P (temp) || want_value))
3742 dont_return_target = 1;
3745 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3746 the same as that of TARGET, adjust the constant. This is needed, for
3747 example, in case it is a CONST_DOUBLE and we want only a word-sized
3748 value. */
3749 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3750 && TREE_CODE (exp) != ERROR_MARK
3751 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3752 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3753 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3755 if (current_function_check_memory_usage
3756 && GET_CODE (target) == MEM
3757 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3759 if (GET_CODE (temp) == MEM)
3760 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3761 XEXP (target, 0), Pmode,
3762 XEXP (temp, 0), Pmode,
3763 expr_size (exp), TYPE_MODE (sizetype));
3764 else
3765 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3766 XEXP (target, 0), Pmode,
3767 expr_size (exp), TYPE_MODE (sizetype),
3768 GEN_INT (MEMORY_USE_WO),
3769 TYPE_MODE (integer_type_node));
3772 /* If value was not generated in the target, store it there.
3773 Convert the value to TARGET's type first if necessary. */
3774 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3775 one or both of them are volatile memory refs, we have to distinguish
3776 two cases:
3777 - expand_expr has used TARGET. In this case, we must not generate
3778 another copy. This can be detected by TARGET being equal according
3779 to == .
3780 - expand_expr has not used TARGET - that means that the source just
3781 happens to have the same RTX form. Since temp will have been created
3782 by expand_expr, it will compare unequal according to == .
3783 We must generate a copy in this case, to reach the correct number
3784 of volatile memory references. */
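/* For instance (an illustrative case): for a volatile int V, expanding
   the assignment `V = V;' can yield a TEMP that is a freshly built MEM
   for V. TEMP is then rtx_equal_p to TARGET but not == to it, so the
   copy below is emitted and both the read and the write of V occur. */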
3786 if ((! rtx_equal_p (temp, target)
3787 || (temp != target && (side_effects_p (temp)
3788 || side_effects_p (target))))
3789 && TREE_CODE (exp) != ERROR_MARK)
3791 target = protect_from_queue (target, 1);
3792 if (GET_MODE (temp) != GET_MODE (target)
3793 && GET_MODE (temp) != VOIDmode)
3795 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3796 if (dont_return_target)
3798 /* In this case, we will return TEMP,
3799 so make sure it has the proper mode.
3800 But don't forget to store the value into TARGET. */
3801 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3802 emit_move_insn (target, temp);
3804 else
3805 convert_move (target, temp, unsignedp);
3808 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3810 /* Handle copying a string constant into an array.
3811 The string constant may be shorter than the array.
3812 So copy just the string's actual length, and clear the rest. */
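/* For example (a sketch): with `char buf[10] = "hi";' the string data
   as recorded by the front end (TREE_STRING_LENGTH, normally including
   the terminating null) is 3 bytes, so 3 bytes are block-copied and
   the remaining 7 bytes of BUF are cleared by the code below. */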
3813 rtx size;
3814 rtx addr;
3816 /* Get the size of the data type of the string,
3817 which is actually the size of the target. */
3818 size = expr_size (exp);
3819 if (GET_CODE (size) == CONST_INT
3820 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3821 emit_block_move (target, temp, size,
3822 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3823 else
3825 /* Compute the size of the data to copy from the string. */
3826 tree copy_size
3827 = size_binop (MIN_EXPR,
3828 make_tree (sizetype, size),
3829 convert (sizetype,
3830 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3831 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3832 VOIDmode, 0);
3833 rtx label = 0;
3835 /* Copy that much. */
3836 emit_block_move (target, temp, copy_size_rtx,
3837 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3839 /* Figure out how much is left in TARGET that we have to clear.
3840 Do all calculations in ptr_mode. */
3842 addr = XEXP (target, 0);
3843 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3845 if (GET_CODE (copy_size_rtx) == CONST_INT)
3847 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3848 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3850 else
3852 addr = force_reg (ptr_mode, addr);
3853 addr = expand_binop (ptr_mode, add_optab, addr,
3854 copy_size_rtx, NULL_RTX, 0,
3855 OPTAB_LIB_WIDEN);
3857 size = expand_binop (ptr_mode, sub_optab, size,
3858 copy_size_rtx, NULL_RTX, 0,
3859 OPTAB_LIB_WIDEN);
3861 label = gen_label_rtx ();
3862 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3863 GET_MODE (size), 0, 0, label);
3866 if (size != const0_rtx)
3868 /* Be sure we can write on ADDR. */
3869 if (current_function_check_memory_usage)
3870 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3871 addr, Pmode,
3872 size, TYPE_MODE (sizetype),
3873 GEN_INT (MEMORY_USE_WO),
3874 TYPE_MODE (integer_type_node));
3875 #ifdef TARGET_MEM_FUNCTIONS
3876 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3877 addr, ptr_mode,
3878 const0_rtx, TYPE_MODE (integer_type_node),
3879 convert_to_mode (TYPE_MODE (sizetype),
3880 size,
3881 TREE_UNSIGNED (sizetype)),
3882 TYPE_MODE (sizetype));
3883 #else
3884 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3885 addr, ptr_mode,
3886 convert_to_mode (TYPE_MODE (integer_type_node),
3887 size,
3888 TREE_UNSIGNED (integer_type_node)),
3889 TYPE_MODE (integer_type_node));
3890 #endif
3893 if (label)
3894 emit_label (label);
3897 /* Handle calls that return values in multiple non-contiguous locations.
3898 The Irix 6 ABI has examples of this. */
3899 else if (GET_CODE (target) == PARALLEL)
3900 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3901 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3902 else if (GET_MODE (temp) == BLKmode)
3903 emit_block_move (target, temp, expr_size (exp),
3904 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3905 else
3906 emit_move_insn (target, temp);
3909 /* If we don't want a value, return NULL_RTX. */
3910 if (! want_value)
3911 return NULL_RTX;
3913 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3914 ??? The latter test doesn't seem to make sense. */
3915 else if (dont_return_target && GET_CODE (temp) != MEM)
3916 return temp;
3918 /* Return TARGET itself if it is a hard register. */
3919 else if (want_value && GET_MODE (target) != BLKmode
3920 && ! (GET_CODE (target) == REG
3921 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3922 return copy_to_reg (target);
3924 else
3925 return target;
3928 /* Return 1 if EXP just contains zeros. */
3930 static int
3931 is_zeros_p (exp)
3932 tree exp;
3934 tree elt;
3936 switch (TREE_CODE (exp))
3938 case CONVERT_EXPR:
3939 case NOP_EXPR:
3940 case NON_LVALUE_EXPR:
3941 return is_zeros_p (TREE_OPERAND (exp, 0));
3943 case INTEGER_CST:
3944 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3946 case COMPLEX_CST:
3947 return
3948 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3950 case REAL_CST:
3951 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
3953 case CONSTRUCTOR:
3954 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3955 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3956 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3957 if (! is_zeros_p (TREE_VALUE (elt)))
3958 return 0;
3960 return 1;
3962 default:
3963 return 0;
3967 /* Return 1 if EXP contains mostly (3/4) zeros. */
3969 static int
3970 mostly_zeros_p (exp)
3971 tree exp;
3973 if (TREE_CODE (exp) == CONSTRUCTOR)
3975 int elts = 0, zeros = 0;
3976 tree elt = CONSTRUCTOR_ELTS (exp);
3977 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3979 /* If there are no ranges of true bits, it is all zero. */
3980 return elt == NULL_TREE;
3982 for (; elt; elt = TREE_CHAIN (elt))
3984 /* We do not handle the case where the index is a RANGE_EXPR,
3985 so the statistic will be somewhat inaccurate.
3986 We do make a more accurate count in store_constructor itself,
3987 so since this function is only used for nested array elements,
3988 this should be close enough. */
3989 if (mostly_zeros_p (TREE_VALUE (elt)))
3990 zeros++;
3991 elts++;
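/* Illustrative numbers: a nested constructor {0, 0, 0, 9} gives
   ZEROS == 3 and ELTS == 4; 4*3 >= 3*4 holds, so it counts as
   mostly zero and favors clearing the containing object first. */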
3994 return 4 * zeros >= 3 * elts;
3997 return is_zeros_p (exp);
4000 /* Helper function for store_constructor.
4001 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4002 TYPE is the type of the CONSTRUCTOR, not the element type.
4003 CLEARED is as for store_constructor.
4005 This provides a recursive shortcut back to store_constructor when it isn't
4006 necessary to go through store_field. This is so that we can pass through
4007 the cleared field to let store_constructor know that we may not have to
4008 clear a substructure if the outer structure has already been cleared. */
4010 static void
4011 store_constructor_field (target, bitsize, bitpos,
4012 mode, exp, type, cleared)
4013 rtx target;
4014 int bitsize, bitpos;
4015 enum machine_mode mode;
4016 tree exp, type;
4017 int cleared;
4019 if (TREE_CODE (exp) == CONSTRUCTOR
4020 && bitpos % BITS_PER_UNIT == 0
4021 /* If we have a non-zero bitpos for a register target, then we just
4022 let store_field do the bitfield handling. This is unlikely to
4023 generate unnecessary clear instructions anyway. */
4024 && (bitpos == 0 || GET_CODE (target) == MEM))
4026 if (bitpos != 0)
4027 target = change_address (target, VOIDmode,
4028 plus_constant (XEXP (target, 0),
4029 bitpos / BITS_PER_UNIT));
4030 store_constructor (exp, target, cleared);
4032 else
4033 store_field (target, bitsize, bitpos, mode, exp,
4034 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
4035 int_size_in_bytes (type), 0);
4038 /* Store the value of constructor EXP into the rtx TARGET.
4039 TARGET is either a REG or a MEM.
4040 CLEARED is true if TARGET is known to have been zero'd. */
4042 static void
4043 store_constructor (exp, target, cleared)
4044 tree exp;
4045 rtx target;
4046 int cleared;
4048 tree type = TREE_TYPE (exp);
4049 rtx exp_size = expr_size (exp);
4051 /* We know our target cannot conflict, since safe_from_p has been called. */
4052 #if 0
4053 /* Don't try copying piece by piece into a hard register
4054 since that is vulnerable to being clobbered by EXP.
4055 Instead, construct in a pseudo register and then copy it all. */
4056 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4058 rtx temp = gen_reg_rtx (GET_MODE (target));
4059 store_constructor (exp, temp, 0);
4060 emit_move_insn (target, temp);
4061 return;
4063 #endif
4065 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4066 || TREE_CODE (type) == QUAL_UNION_TYPE)
4068 register tree elt;
4070 /* Inform later passes that the whole union value is dead. */
4071 if (TREE_CODE (type) == UNION_TYPE
4072 || TREE_CODE (type) == QUAL_UNION_TYPE)
4073 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4075 /* If we are building a static constructor into a register,
4076 set the initial value as zero so we can fold the value into
4077 a constant. But if more than one register is involved,
4078 this probably loses. */
4079 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4080 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4082 if (! cleared)
4083 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4085 cleared = 1;
4088 /* If the constructor has fewer fields than the structure
4089 or if we are initializing the structure to mostly zeros,
4090 clear the whole structure first. */
4091 else if ((list_length (CONSTRUCTOR_ELTS (exp))
4092 != list_length (TYPE_FIELDS (type)))
4093 || mostly_zeros_p (exp))
4095 if (! cleared)
4096 clear_storage (target, expr_size (exp),
4097 TYPE_ALIGN (type) / BITS_PER_UNIT);
4099 cleared = 1;
4101 else
4102 /* Inform later passes that the old value is dead. */
4103 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4105 /* Store each element of the constructor into
4106 the corresponding field of TARGET. */
4108 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4110 register tree field = TREE_PURPOSE (elt);
4111 tree value = TREE_VALUE (elt);
4112 register enum machine_mode mode;
4113 int bitsize;
4114 int bitpos = 0;
4115 int unsignedp;
4116 tree pos, constant = 0, offset = 0;
4117 rtx to_rtx = target;
4119 /* Just ignore missing fields.
4120 We cleared the whole structure, above,
4121 if any fields are missing. */
4122 if (field == 0)
4123 continue;
4125 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4126 continue;
4128 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
4129 unsignedp = TREE_UNSIGNED (field);
4130 mode = DECL_MODE (field);
4131 if (DECL_BIT_FIELD (field))
4132 mode = VOIDmode;
4134 pos = DECL_FIELD_BITPOS (field);
4135 if (TREE_CODE (pos) == INTEGER_CST)
4136 constant = pos;
4137 else if (TREE_CODE (pos) == PLUS_EXPR
4138 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4139 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
4140 else
4141 offset = pos;
4143 if (constant)
4144 bitpos = TREE_INT_CST_LOW (constant);
4146 if (offset)
4148 rtx offset_rtx;
4150 if (contains_placeholder_p (offset))
4151 offset = build (WITH_RECORD_EXPR, sizetype,
4152 offset, make_tree (TREE_TYPE (exp), target));
4154 offset = size_binop (FLOOR_DIV_EXPR, offset,
4155 size_int (BITS_PER_UNIT));
4157 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4158 if (GET_CODE (to_rtx) != MEM)
4159 abort ();
4161 if (GET_MODE (offset_rtx) != ptr_mode)
4163 #ifdef POINTERS_EXTEND_UNSIGNED
4164 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4165 #else
4166 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4167 #endif
4170 to_rtx
4171 = change_address (to_rtx, VOIDmode,
4172 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4173 force_reg (ptr_mode, offset_rtx)));
4175 if (TREE_READONLY (field))
4177 if (GET_CODE (to_rtx) == MEM)
4178 to_rtx = copy_rtx (to_rtx);
4180 RTX_UNCHANGING_P (to_rtx) = 1;
4183 #ifdef WORD_REGISTER_OPERATIONS
4184 /* If this initializes a field that is smaller than a word, at the
4185 start of a word, try to widen it to a full word.
4186 This special case allows us to output C++ member function
4187 initializations in a form that the optimizers can understand. */
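/* A sketch of the effect: storing the constant 5 into an 8-bit field
   at bit position 0 of a word-sized structure held in a register
   becomes a full-word store of 5 (first shifted to the top of the
   word on a big-endian target, as below), rather than a
   read-modify-write bit-field insertion. */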
4188 if (constant
4189 && GET_CODE (target) == REG
4190 && bitsize < BITS_PER_WORD
4191 && bitpos % BITS_PER_WORD == 0
4192 && GET_MODE_CLASS (mode) == MODE_INT
4193 && TREE_CODE (value) == INTEGER_CST
4194 && GET_CODE (exp_size) == CONST_INT
4195 && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4197 tree type = TREE_TYPE (value);
4198 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4200 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4201 value = convert (type, value);
4203 if (BYTES_BIG_ENDIAN)
4204 value
4205 = fold (build (LSHIFT_EXPR, type, value,
4206 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4207 bitsize = BITS_PER_WORD;
4208 mode = word_mode;
4210 #endif
4211 store_constructor_field (to_rtx, bitsize, bitpos,
4212 mode, value, type, cleared);
4215 else if (TREE_CODE (type) == ARRAY_TYPE)
4217 register tree elt;
4218 register int i;
4219 int need_to_clear;
4220 tree domain = TYPE_DOMAIN (type);
4221 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4222 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4223 tree elttype = TREE_TYPE (type);
4225 /* If the constructor has fewer elements than the array,
4226 clear the whole array first. Similarly if this is
4227 a static constructor of a non-BLKmode object. */
4228 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4229 need_to_clear = 1;
4230 else
4232 HOST_WIDE_INT count = 0, zero_count = 0;
4233 need_to_clear = 0;
4234 /* This loop is a more accurate version of the loop in
4235 mostly_zeros_p (it handles RANGE_EXPR in an index).
4236 It is also needed to check for missing elements. */
4237 for (elt = CONSTRUCTOR_ELTS (exp);
4238 elt != NULL_TREE;
4239 elt = TREE_CHAIN (elt))
4241 tree index = TREE_PURPOSE (elt);
4242 HOST_WIDE_INT this_node_count;
4243 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4245 tree lo_index = TREE_OPERAND (index, 0);
4246 tree hi_index = TREE_OPERAND (index, 1);
4247 if (TREE_CODE (lo_index) != INTEGER_CST
4248 || TREE_CODE (hi_index) != INTEGER_CST)
4250 need_to_clear = 1;
4251 break;
4253 this_node_count = TREE_INT_CST_LOW (hi_index)
4254 - TREE_INT_CST_LOW (lo_index) + 1;
4256 else
4257 this_node_count = 1;
4258 count += this_node_count;
4259 if (mostly_zeros_p (TREE_VALUE (elt)))
4260 zero_count += this_node_count;
4262 /* Clear the entire array first if there are any missing elements,
4263 or if the incidence of zero elements is >= 75%. */
4264 if (count < maxelt - minelt + 1
4265 || 4 * zero_count >= 3 * count)
4266 need_to_clear = 1;
4268 if (need_to_clear)
4270 if (! cleared)
4271 clear_storage (target, expr_size (exp),
4272 TYPE_ALIGN (type) / BITS_PER_UNIT);
4273 cleared = 1;
4275 else
4276 /* Inform later passes that the old value is dead. */
4277 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4279 /* Store each element of the constructor into
4280 the corresponding element of TARGET, determined
4281 by counting the elements. */
4282 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4283 elt;
4284 elt = TREE_CHAIN (elt), i++)
4286 register enum machine_mode mode;
4287 int bitsize;
4288 int bitpos;
4289 int unsignedp;
4290 tree value = TREE_VALUE (elt);
4291 tree index = TREE_PURPOSE (elt);
4292 rtx xtarget = target;
4294 if (cleared && is_zeros_p (value))
4295 continue;
4297 mode = TYPE_MODE (elttype);
4298 bitsize = GET_MODE_BITSIZE (mode);
4299 unsignedp = TREE_UNSIGNED (elttype);
4301 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4303 tree lo_index = TREE_OPERAND (index, 0);
4304 tree hi_index = TREE_OPERAND (index, 1);
4305 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4306 struct nesting *loop;
4307 HOST_WIDE_INT lo, hi, count;
4308 tree position;
4310 /* If the range is constant and "small", unroll the loop. */
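/* Here "small" means the target is not in memory, or there are at
   most two iterations, or the total size stored is at most 40 * 8
   bits; see the condition below. */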
4311 if (TREE_CODE (lo_index) == INTEGER_CST
4312 && TREE_CODE (hi_index) == INTEGER_CST
4313 && (lo = TREE_INT_CST_LOW (lo_index),
4314 hi = TREE_INT_CST_LOW (hi_index),
4315 count = hi - lo + 1,
4316 (GET_CODE (target) != MEM
4317 || count <= 2
4318 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4319 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4320 <= 40 * 8))))
4322 lo -= minelt; hi -= minelt;
4323 for (; lo <= hi; lo++)
4325 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4326 store_constructor_field (target, bitsize, bitpos,
4327 mode, value, type, cleared);
4330 else
4332 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4333 loop_top = gen_label_rtx ();
4334 loop_end = gen_label_rtx ();
4336 unsignedp = TREE_UNSIGNED (domain);
4338 index = build_decl (VAR_DECL, NULL_TREE, domain);
4340 DECL_RTL (index) = index_r
4341 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4342 &unsignedp, 0));
4344 if (TREE_CODE (value) == SAVE_EXPR
4345 && SAVE_EXPR_RTL (value) == 0)
4347 /* Make sure value gets expanded once before the
4348 loop. */
4349 expand_expr (value, const0_rtx, VOIDmode, 0);
4350 emit_queue ();
4352 store_expr (lo_index, index_r, 0);
4353 loop = expand_start_loop (0);
4355 /* Assign value to element index. */
4356 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4357 size_int (BITS_PER_UNIT));
4358 position = size_binop (MULT_EXPR,
4359 size_binop (MINUS_EXPR, index,
4360 TYPE_MIN_VALUE (domain)),
4361 position);
4362 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4363 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4364 xtarget = change_address (target, mode, addr);
4365 if (TREE_CODE (value) == CONSTRUCTOR)
4366 store_constructor (value, xtarget, cleared);
4367 else
4368 store_expr (value, xtarget, 0);
4370 expand_exit_loop_if_false (loop,
4371 build (LT_EXPR, integer_type_node,
4372 index, hi_index));
4374 expand_increment (build (PREINCREMENT_EXPR,
4375 TREE_TYPE (index),
4376 index, integer_one_node), 0, 0);
4377 expand_end_loop ();
4378 emit_label (loop_end);
4380 /* Needed by stupid register allocation, to extend the
4381 lifetime of pseudo-regs used by target past the end
4382 of the loop. */
4383 emit_insn (gen_rtx_USE (GET_MODE (target), target));
4386 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
4387 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
4389 rtx pos_rtx, addr;
4390 tree position;
4392 if (index == 0)
4393 index = size_int (i);
4395 if (minelt)
4396 index = size_binop (MINUS_EXPR, index,
4397 TYPE_MIN_VALUE (domain));
4398 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4399 size_int (BITS_PER_UNIT));
4400 position = size_binop (MULT_EXPR, index, position);
4401 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4402 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4403 xtarget = change_address (target, mode, addr);
4404 store_expr (value, xtarget, 0);
4406 else
4408 if (index != 0)
4409 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4410 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4411 else
4412 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4413 store_constructor_field (target, bitsize, bitpos,
4414 mode, value, type, cleared);
4418 /* set constructor assignments */
4419 else if (TREE_CODE (type) == SET_TYPE)
4421 tree elt = CONSTRUCTOR_ELTS (exp);
4422 int nbytes = int_size_in_bytes (type), nbits;
4423 tree domain = TYPE_DOMAIN (type);
4424 tree domain_min, domain_max, bitlength;
4426 /* The default implementation strategy is to extract the constant
4427 parts of the constructor, use that to initialize the target,
4428 and then "or" in whatever non-constant ranges we need in addition.
4430 If a large set is all zero or all ones, it is
4431 probably better to set it using memset (if available) or bzero.
4432 Also, if a large set has just a single range, it may also be
4433 better to first clear the set (using bzero/memset), and
4434 then set the bits we want. */
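/* As an illustration (SET_TYPE comes from languages with set types,
   such as Pascal or CHILL): a constructor like [1, 3..4] over a small
   domain is emitted as a store of a single constant word, while a
   non-constant range [lo..hi] falls through to the loop further down
   and is set via memset or the __setbits library routine. */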
4436 /* Check for all zeros. */
4437 if (elt == NULL_TREE)
4439 if (!cleared)
4440 clear_storage (target, expr_size (exp),
4441 TYPE_ALIGN (type) / BITS_PER_UNIT);
4442 return;
4445 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4446 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4447 bitlength = size_binop (PLUS_EXPR,
4448 size_binop (MINUS_EXPR, domain_max, domain_min),
4449 size_one_node);
4451 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4452 abort ();
4453 nbits = TREE_INT_CST_LOW (bitlength);
4455 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4456 are "complicated" (more than one range), initialize (the
4457 constant parts) by copying from a constant. */
4458 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4459 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4461 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4462 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4463 char *bit_buffer = (char *) alloca (nbits);
4464 HOST_WIDE_INT word = 0;
4465 int bit_pos = 0;
4466 int ibit = 0;
4467 int offset = 0; /* In bytes from beginning of set. */
4468 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4469 for (;;)
4471 if (bit_buffer[ibit])
4473 if (BYTES_BIG_ENDIAN)
4474 word |= (1 << (set_word_size - 1 - bit_pos));
4475 else
4476 word |= 1 << bit_pos;
4478 bit_pos++; ibit++;
4479 if (bit_pos >= set_word_size || ibit == nbits)
4481 if (word != 0 || ! cleared)
4483 rtx datum = GEN_INT (word);
4484 rtx to_rtx;
4485 /* The assumption here is that it is safe to use
4486 XEXP if the set is multi-word, but not if
4487 it's single-word. */
4488 if (GET_CODE (target) == MEM)
4490 to_rtx = plus_constant (XEXP (target, 0), offset);
4491 to_rtx = change_address (target, mode, to_rtx);
4493 else if (offset == 0)
4494 to_rtx = target;
4495 else
4496 abort ();
4497 emit_move_insn (to_rtx, datum);
4499 if (ibit == nbits)
4500 break;
4501 word = 0;
4502 bit_pos = 0;
4503 offset += set_word_size / BITS_PER_UNIT;
4507 else if (!cleared)
4509 /* Don't bother clearing storage if the set is all ones. */
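/* That is, clear unless the constructor is a single element (when
   TREE_PURPOSE is null and NBITS is 1) or a single constant range
   that covers all NBITS bits of the set. */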
4510 if (TREE_CHAIN (elt) != NULL_TREE
4511 || (TREE_PURPOSE (elt) == NULL_TREE
4512 ? nbits != 1
4513 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4514 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4515 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4516 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4517 != nbits))))
4518 clear_storage (target, expr_size (exp),
4519 TYPE_ALIGN (type) / BITS_PER_UNIT);
4522 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4524 /* start of range of element or NULL */
4525 tree startbit = TREE_PURPOSE (elt);
4526 /* end of range of element, or element value */
4527 tree endbit = TREE_VALUE (elt);
4528 #ifdef TARGET_MEM_FUNCTIONS
4529 HOST_WIDE_INT startb, endb;
4530 #endif
4531 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4533 bitlength_rtx = expand_expr (bitlength,
4534 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4536 /* handle non-range tuple element like [ expr ] */
4537 if (startbit == NULL_TREE)
4539 startbit = save_expr (endbit);
4540 endbit = startbit;
4542 startbit = convert (sizetype, startbit);
4543 endbit = convert (sizetype, endbit);
4544 if (! integer_zerop (domain_min))
4546 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4547 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4549 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4550 EXPAND_CONST_ADDRESS);
4551 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4552 EXPAND_CONST_ADDRESS);
4554 if (REG_P (target))
4556 targetx = assign_stack_temp (GET_MODE (target),
4557 GET_MODE_SIZE (GET_MODE (target)),
4558 0);
4559 emit_move_insn (targetx, target);
4561 else if (GET_CODE (target) == MEM)
4562 targetx = target;
4563 else
4564 abort ();
4566 #ifdef TARGET_MEM_FUNCTIONS
4567 /* Optimization: If startbit and endbit are
4568 constants divisible by BITS_PER_UNIT,
4569 call memset instead. */
4570 if (TREE_CODE (startbit) == INTEGER_CST
4571 && TREE_CODE (endbit) == INTEGER_CST
4572 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4573 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4575 emit_library_call (memset_libfunc, 0,
4576 VOIDmode, 3,
4577 plus_constant (XEXP (targetx, 0),
4578 startb / BITS_PER_UNIT),
4579 Pmode,
4580 constm1_rtx, TYPE_MODE (integer_type_node),
4581 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4582 TYPE_MODE (sizetype));
4584 else
4585 #endif
4587 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4588 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4589 bitlength_rtx, TYPE_MODE (sizetype),
4590 startbit_rtx, TYPE_MODE (sizetype),
4591 endbit_rtx, TYPE_MODE (sizetype));
4593 if (REG_P (target))
4594 emit_move_insn (target, targetx);
4598 else
4599 abort ();
4602 /* Store the value of EXP (an expression tree)
4603 into a subfield of TARGET which has mode MODE and occupies
4604 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4605 If MODE is VOIDmode, it means that we are storing into a bit-field.
4607 If VALUE_MODE is VOIDmode, return nothing in particular.
4608 UNSIGNEDP is not used in this case.
4610 Otherwise, return an rtx for the value stored. This rtx
4611 has mode VALUE_MODE if that is convenient to do.
4612 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4614 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4615 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4617 ALIAS_SET is the alias set for the destination. This value will
4618 (in general) be different from that for TARGET, since TARGET is a
4619 reference to the containing structure. */
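/* For example (an illustrative call): storing V into F in
   struct s { int a; unsigned f : 5; } x;  x.f = v;
   reaches here with BITSIZE 5, BITPOS giving the field's bit offset
   within X, and MODE VOIDmode, selecting the bit-field path below. */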
4621 static rtx
4622 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4623 unsignedp, align, total_size, alias_set)
4624 rtx target;
4625 int bitsize, bitpos;
4626 enum machine_mode mode;
4627 tree exp;
4628 enum machine_mode value_mode;
4629 int unsignedp;
4630 int align;
4631 int total_size;
4632 int alias_set;
4634 HOST_WIDE_INT width_mask = 0;
4636 if (TREE_CODE (exp) == ERROR_MARK)
4637 return const0_rtx;
4639 if (bitsize < HOST_BITS_PER_WIDE_INT)
4640 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4642 /* If we are storing into an unaligned field of an aligned union that is
4643 in a register, we may have the mode of TARGET being an integer mode but
4644 MODE == BLKmode. In that case, get an aligned object whose size and
4645 alignment are the same as TARGET and store TARGET into it (we can avoid
4646 the store if the field being stored is the entire width of TARGET). Then
4647 call ourselves recursively to store the field into a BLKmode version of
4648 that object. Finally, load from the object into TARGET. This is not
4649 very efficient in general, but should only be slightly more expensive
4650 than the otherwise-required unaligned accesses. Perhaps this can be
4651 cleaned up later. */
4653 if (mode == BLKmode
4654 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4656 rtx object = assign_stack_temp (GET_MODE (target),
4657 GET_MODE_SIZE (GET_MODE (target)), 0);
4658 rtx blk_object = copy_rtx (object);
4660 MEM_SET_IN_STRUCT_P (object, 1);
4661 MEM_SET_IN_STRUCT_P (blk_object, 1);
4662 PUT_MODE (blk_object, BLKmode);
4664 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4665 emit_move_insn (object, target);
4667 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4668 align, total_size, alias_set);
4670 /* Even though we aren't returning target, we need to
4671 give it the updated value. */
4672 emit_move_insn (target, object);
4674 return blk_object;
4677 /* If the structure is in a register or if the component
4678 is a bit field, we cannot use addressing to access it.
4679 Use bit-field techniques or SUBREG to store in it. */
4681 if (mode == VOIDmode
4682 || (mode != BLKmode && ! direct_store[(int) mode]
4683 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4684 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4685 || GET_CODE (target) == REG
4686 || GET_CODE (target) == SUBREG
4687 /* If the field isn't aligned enough to store as an ordinary memref,
4688 store it as a bit field. */
4689 || (SLOW_UNALIGNED_ACCESS
4690 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4691 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4693 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4695 /* If BITSIZE is narrower than the size of the type of EXP
4696 we will be narrowing TEMP. Normally, what's wanted are the
4697 low-order bits. However, if EXP's type is a record and this is
4698 a big-endian machine, we want the upper BITSIZE bits. */
4699 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4700 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4701 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4702 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4703 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4704 - bitsize),
4705 temp, 1);
4707 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4708 MODE. */
4709 if (mode != VOIDmode && mode != BLKmode
4710 && mode != TYPE_MODE (TREE_TYPE (exp)))
4711 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4713 /* If the modes of TARGET and TEMP are both BLKmode, both
4714 must be in memory and BITPOS must be aligned on a byte
4715 boundary. If so, we simply do a block copy. */
4716 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4718 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4719 || bitpos % BITS_PER_UNIT != 0)
4720 abort ();
4722 target = change_address (target, VOIDmode,
4723 plus_constant (XEXP (target, 0),
4724 bitpos / BITS_PER_UNIT));
4726 emit_block_move (target, temp,
4727 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4728 / BITS_PER_UNIT),
4729 1);
4731 return value_mode == VOIDmode ? const0_rtx : target;
4734 /* Store the value in the bitfield. */
4735 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4736 if (value_mode != VOIDmode)
4738 /* The caller wants an rtx for the value. */
4739 /* If possible, avoid refetching from the bitfield itself. */
4740 if (width_mask != 0
4741 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4743 tree count;
4744 enum machine_mode tmode;
4746 if (unsignedp)
4747 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4748 tmode = GET_MODE (temp);
4749 if (tmode == VOIDmode)
4750 tmode = value_mode;
4751 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4752 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4753 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4755 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4756 NULL_RTX, value_mode, 0, align,
4757 total_size);
4759 return const0_rtx;
4761 else
4763 rtx addr = XEXP (target, 0);
4764 rtx to_rtx;
4766 /* If a value is wanted, it must be the lhs;
4767 so make the address stable for multiple use. */
4769 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4770 && ! CONSTANT_ADDRESS_P (addr)
4771 /* A frame-pointer reference is already stable. */
4772 && ! (GET_CODE (addr) == PLUS
4773 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4774 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4775 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4776 addr = copy_to_reg (addr);
4778 /* Now build a reference to just the desired component. */
4780 to_rtx = copy_rtx (change_address (target, mode,
4781 plus_constant (addr,
4782 (bitpos
4783 / BITS_PER_UNIT))));
4784 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4785 MEM_ALIAS_SET (to_rtx) = alias_set;
4787 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4791 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4792 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4793 ARRAY_REFs and find the ultimate containing object, which we return.
4795 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4796 bit position, and *PUNSIGNEDP to the signedness of the field.
4797 If the position of the field is variable, we store a tree
4798 giving the variable offset (in units) in *POFFSET.
4799 This offset is in addition to the bit position.
4800 If the position is not variable, we store 0 in *POFFSET.
4801 We set *PALIGNMENT to the alignment in bytes of the address that will be
4802 computed. This is the alignment of the thing we return if *POFFSET
4803 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4805 If any of the extraction expressions is volatile,
4806 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4808 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4809 is a mode that can be used to access the field. In that case, *PBITSIZE
4810 is redundant.
4812 If the field describes a variable-sized object, *PMODE is set to
4813 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4814 this case, but the address of the object can be found. */
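/* For example (a sketch): for the reference `a.b[i].c' this returns
   the object `a'; *PBITPOS accumulates the constant bit offsets of
   `b' and `c', while *POFFSET receives a tree for the variable byte
   offset contributed by the index `i'. */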
4816 tree
4817 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4818 punsignedp, pvolatilep, palignment)
4819 tree exp;
4820 int *pbitsize;
4821 int *pbitpos;
4822 tree *poffset;
4823 enum machine_mode *pmode;
4824 int *punsignedp;
4825 int *pvolatilep;
4826 int *palignment;
4828 tree orig_exp = exp;
4829 tree size_tree = 0;
4830 enum machine_mode mode = VOIDmode;
4831 tree offset = integer_zero_node;
4832 unsigned int alignment = BIGGEST_ALIGNMENT;
4834 if (TREE_CODE (exp) == COMPONENT_REF)
4836 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4837 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4838 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4839 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4841 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4843 size_tree = TREE_OPERAND (exp, 1);
4844 *punsignedp = TREE_UNSIGNED (exp);
4846 else
4848 mode = TYPE_MODE (TREE_TYPE (exp));
4849 if (mode == BLKmode)
4850 size_tree = TYPE_SIZE (TREE_TYPE (exp));
4852 *pbitsize = GET_MODE_BITSIZE (mode);
4853 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4856 if (size_tree)
4858 if (TREE_CODE (size_tree) != INTEGER_CST)
4859 mode = BLKmode, *pbitsize = -1;
4860 else
4861 *pbitsize = TREE_INT_CST_LOW (size_tree);
4864 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4865 and find the ultimate containing object. */
4867 *pbitpos = 0;
4869 while (1)
4871 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4873 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4874 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4875 : TREE_OPERAND (exp, 2));
4876 tree constant = integer_zero_node, var = pos;
4878 /* If this field hasn't been filled in yet, don't go
4879 past it. This should only happen when folding expressions
4880 made during type construction. */
4881 if (pos == 0)
4882 break;
4884 /* Assume here that the offset is a multiple of a unit.
4885 If not, there should be an explicitly added constant. */
4886 if (TREE_CODE (pos) == PLUS_EXPR
4887 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4888 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4889 else if (TREE_CODE (pos) == INTEGER_CST)
4890 constant = pos, var = integer_zero_node;
4892 *pbitpos += TREE_INT_CST_LOW (constant);
4893 offset = size_binop (PLUS_EXPR, offset,
4894 size_binop (EXACT_DIV_EXPR, var,
4895 size_int (BITS_PER_UNIT)));
4898 else if (TREE_CODE (exp) == ARRAY_REF)
4900 /* This code is based on the code in case ARRAY_REF in expand_expr
4901 below. We assume here that the size of an array element is
4902 always an integral multiple of BITS_PER_UNIT. */
4904 tree index = TREE_OPERAND (exp, 1);
4905 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4906 tree low_bound
4907 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4908 tree index_type = TREE_TYPE (index);
4909 tree xindex;
4911 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4913 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4914 index);
4915 index_type = TREE_TYPE (index);
4918 /* Optimize the special-case of a zero lower bound.
4920 We convert the low_bound to sizetype to avoid some problems
4921 with constant folding. (E.g. suppose the lower bound is 1,
4922 and its mode is QI. Without the conversion, (ARRAY
4923 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4924 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4926 But sizetype isn't quite right either (especially if
4927 the lowbound is negative). FIXME */
4929 if (! integer_zerop (low_bound))
4930 index = fold (build (MINUS_EXPR, index_type, index,
4931 convert (sizetype, low_bound)));
4933 if (TREE_CODE (index) == INTEGER_CST)
4935 index = convert (sbitsizetype, index);
4936 index_type = TREE_TYPE (index);
4939 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
4940 convert (sbitsizetype,
4941 TYPE_SIZE (TREE_TYPE (exp)))));
4943 if (TREE_CODE (xindex) == INTEGER_CST
4944 && TREE_INT_CST_HIGH (xindex) == 0)
4945 *pbitpos += TREE_INT_CST_LOW (xindex);
4946 else
4948 /* Either the bit offset calculated above is not constant, or
4949 it overflowed. In either case, redo the multiplication
4950 against the size in units. This is especially important
4951 in the non-constant case to avoid a division at runtime. */
4952 xindex = fold (build (MULT_EXPR, ssizetype, index,
4953 convert (ssizetype,
4954 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
4956 if (contains_placeholder_p (xindex))
4957 xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
4959 offset = size_binop (PLUS_EXPR, offset, xindex);
4962 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4963 && ! ((TREE_CODE (exp) == NOP_EXPR
4964 || TREE_CODE (exp) == CONVERT_EXPR)
4965 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4966 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4967 != UNION_TYPE))
4968 && (TYPE_MODE (TREE_TYPE (exp))
4969 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4970 break;
4972 /* If any reference in the chain is volatile, the effect is volatile. */
4973 if (TREE_THIS_VOLATILE (exp))
4974 *pvolatilep = 1;
4976 /* If the offset is non-constant already, then we can't assume any
4977 alignment more than the alignment here. */
4978 if (! integer_zerop (offset))
4979 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4981 exp = TREE_OPERAND (exp, 0);
4984 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4985 alignment = MIN (alignment, DECL_ALIGN (exp));
4986 else if (TREE_TYPE (exp) != 0)
4987 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4989 if (integer_zerop (offset))
4990 offset = 0;
4992 if (offset != 0 && contains_placeholder_p (offset))
4993 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4995 *pmode = mode;
4996 *poffset = offset;
4997 *palignment = alignment / BITS_PER_UNIT;
4998 return exp;
5001 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5002 static enum memory_use_mode
5003 get_memory_usage_from_modifier (modifier)
5004 enum expand_modifier modifier;
5006 switch (modifier)
5008 case EXPAND_NORMAL:
5009 case EXPAND_SUM:
5010 return MEMORY_USE_RO;
5011 break;
5012 case EXPAND_MEMORY_USE_WO:
5013 return MEMORY_USE_WO;
5014 break;
5015 case EXPAND_MEMORY_USE_RW:
5016 return MEMORY_USE_RW;
5017 break;
5018 case EXPAND_MEMORY_USE_DONT:
5019 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5020 MEMORY_USE_DONT, because they are modifiers to a call of
5021 expand_expr in the ADDR_EXPR case of expand_expr. */
5022 case EXPAND_CONST_ADDRESS:
5023 case EXPAND_INITIALIZER:
5024 return MEMORY_USE_DONT;
5025 case EXPAND_MEMORY_USE_BAD:
5026 default:
5027 abort ();
5031 /* Given an rtx VALUE that may contain additions and multiplications,
5032 return an equivalent value that just refers to a register or memory.
5033 This is done by generating instructions to perform the arithmetic
5034 and returning a pseudo-register containing the value.
5036 The returned value may be a REG, SUBREG, MEM or constant. */
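/* For example (a sketch): given (PLUS (REG 100) (CONST_INT 4)) this
   emits an add and returns the pseudo holding the sum, while a bare
   REG, MEM or constant is returned unchanged. */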
5038 rtx
5039 force_operand (value, target)
5040 rtx value, target;
5042 register optab binoptab = 0;
5043 /* Use a temporary to force order of execution of calls to
5044 `force_operand'. */
5045 rtx tmp;
5046 register rtx op2;
5047 /* Use subtarget as the target for operand 0 of a binary operation. */
5048 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5050 /* Check for a PIC address load. */
5051 if (flag_pic
5052 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5053 && XEXP (value, 0) == pic_offset_table_rtx
5054 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5055 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5056 || GET_CODE (XEXP (value, 1)) == CONST))
5058 if (!subtarget)
5059 subtarget = gen_reg_rtx (GET_MODE (value));
5060 emit_move_insn (subtarget, value);
5061 return subtarget;
5064 if (GET_CODE (value) == PLUS)
5065 binoptab = add_optab;
5066 else if (GET_CODE (value) == MINUS)
5067 binoptab = sub_optab;
5068 else if (GET_CODE (value) == MULT)
5070 op2 = XEXP (value, 1);
5071 if (!CONSTANT_P (op2)
5072 && !(GET_CODE (op2) == REG && op2 != subtarget))
5073 subtarget = 0;
5074 tmp = force_operand (XEXP (value, 0), subtarget);
5075 return expand_mult (GET_MODE (value), tmp,
5076 force_operand (op2, NULL_RTX),
5077 target, 0);
5080 if (binoptab)
5082 op2 = XEXP (value, 1);
5083 if (!CONSTANT_P (op2)
5084 && !(GET_CODE (op2) == REG && op2 != subtarget))
5085 subtarget = 0;
5086 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5088 binoptab = add_optab;
5089 op2 = negate_rtx (GET_MODE (value), op2);
5092 /* Check for an addition with OP2 a constant integer and our first
5093 operand a PLUS of a virtual register and something else. In that
5094 case, we want to emit the sum of the virtual register and the
5095 constant first and then add the other value. This allows virtual
5096 register instantiation to simply modify the constant rather than
5097 creating another one around this addition. */
5098 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5099 && GET_CODE (XEXP (value, 0)) == PLUS
5100 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5101 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5102 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5104 rtx temp = expand_binop (GET_MODE (value), binoptab,
5105 XEXP (XEXP (value, 0), 0), op2,
5106 subtarget, 0, OPTAB_LIB_WIDEN);
5107 return expand_binop (GET_MODE (value), binoptab, temp,
5108 force_operand (XEXP (XEXP (value, 0), 1), 0),
5109 target, 0, OPTAB_LIB_WIDEN);
5112 tmp = force_operand (XEXP (value, 0), subtarget);
5113 return expand_binop (GET_MODE (value), binoptab, tmp,
5114 force_operand (op2, NULL_RTX),
5115 target, 0, OPTAB_LIB_WIDEN);
5116 /* We give UNSIGNEDP = 0 to expand_binop
5117 because the only operations we are expanding here are signed ones. */
5119 return value;
5122 /* Subroutine of expand_expr:
5123 save the non-copied parts (LIST) of an expr (LHS), and return a list
5124 which can restore these values to their previous values,
5125 should something modify their storage. */
5127 static tree
5128 save_noncopied_parts (lhs, list)
5129 tree lhs;
5130 tree list;
5132 tree tail;
5133 tree parts = 0;
5135 for (tail = list; tail; tail = TREE_CHAIN (tail))
5136 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5137 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5138 else
5140 tree part = TREE_VALUE (tail);
5141 tree part_type = TREE_TYPE (part);
5142 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5143 rtx target = assign_temp (part_type, 0, 1, 1);
5144 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5145 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5146 parts = tree_cons (to_be_saved,
5147 build (RTL_EXPR, part_type, NULL_TREE,
5148 (tree) target),
5149 parts);
5150 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5152 return parts;
5155 /* Subroutine of expand_expr:
5156 record the non-copied parts (LIST) of an expr (LHS), and return a list
5157 which specifies the initial values of these parts. */
5159 static tree
5160 init_noncopied_parts (lhs, list)
5161 tree lhs;
5162 tree list;
5164 tree tail;
5165 tree parts = 0;
5167 for (tail = list; tail; tail = TREE_CHAIN (tail))
5168 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5169 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5170 else if (TREE_PURPOSE (tail))
5172 tree part = TREE_VALUE (tail);
5173 tree part_type = TREE_TYPE (part);
5174 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5175 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5177 return parts;
5180 /* Subroutine of expand_expr: return nonzero iff there is no way that
5181 EXP can reference X, which is being modified. TOP_P is nonzero if this
5182 call is going to be used to determine whether we need a temporary
5183 for EXP, as opposed to a recursive call to this function.
5185 It is always safe for this routine to return zero since it merely
5186 searches for optimization opportunities. */
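/* For example: when expanding the right-hand side of `a = b / a',
   X would be A's rtx; since EXP mentions A, this returns zero and
   the caller arranges a temporary rather than computing the result
   directly into A. */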
5188 static int
5189 safe_from_p (x, exp, top_p)
5190 rtx x;
5191 tree exp;
5192 int top_p;
5194 rtx exp_rtl = 0;
5195 int i, nops;
5196 static int save_expr_count;
5197 static int save_expr_size = 0;
5198 static tree *save_expr_rewritten;
5199 static tree save_expr_trees[256];
5201 if (x == 0
5202 /* If EXP has varying size, we MUST use a target since we currently
5203 have no way of allocating temporaries of variable size
5204 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5205 So we assume here that something at a higher level has prevented a
5206 clash. This is somewhat bogus, but the best we can do. Only
5207 do this when X is BLKmode and when we are at the top level. */
5208 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5209 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5210 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5211 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5212 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5213 != INTEGER_CST)
5214 && GET_MODE (x) == BLKmode))
5215 return 1;
5217 if (top_p && save_expr_size == 0)
5219 int rtn;
5221 save_expr_count = 0;
5222 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5223 save_expr_rewritten = &save_expr_trees[0];
5225 rtn = safe_from_p (x, exp, 1);
5227 for (i = 0; i < save_expr_count; ++i)
5229 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5230 abort ();
5231 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5234 save_expr_size = 0;
5236 return rtn;
5239 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5240 find the underlying pseudo. */
5241 if (GET_CODE (x) == SUBREG)
5243 x = SUBREG_REG (x);
5244 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5245 return 0;
5248 /* If X is a location in the outgoing argument area, it is always safe. */
5249 if (GET_CODE (x) == MEM
5250 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5251 || (GET_CODE (XEXP (x, 0)) == PLUS
5252 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5253 return 1;
5255 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5257 case 'd':
5258 exp_rtl = DECL_RTL (exp);
5259 break;
5261 case 'c':
5262 return 1;
5264 case 'x':
5265 if (TREE_CODE (exp) == TREE_LIST)
5266 return ((TREE_VALUE (exp) == 0
5267 || safe_from_p (x, TREE_VALUE (exp), 0))
5268 && (TREE_CHAIN (exp) == 0
5269 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5270 else if (TREE_CODE (exp) == ERROR_MARK)
5271 return 1; /* An already-visited SAVE_EXPR? */
5272 else
5273 return 0;
5275 case '1':
5276 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5278 case '2':
5279 case '<':
5280 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5281 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5283 case 'e':
5284 case 'r':
5285 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5286 the expression. If it is set, we conflict iff we are that rtx or
5287 both are in memory. Otherwise, we check all operands of the
5288 expression recursively. */
5290 switch (TREE_CODE (exp))
5292 case ADDR_EXPR:
5293 return (staticp (TREE_OPERAND (exp, 0))
5294 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5295 || TREE_STATIC (exp));
5297 case INDIRECT_REF:
5298 if (GET_CODE (x) == MEM)
5299 return 0;
5300 break;
5302 case CALL_EXPR:
5303 exp_rtl = CALL_EXPR_RTL (exp);
5304 if (exp_rtl == 0)
5306 /* Assume that the call will clobber all hard registers and
5307 all of memory. */
5308 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5309 || GET_CODE (x) == MEM)
5310 return 0;
5313 break;
5315 case RTL_EXPR:
5316 /* If a sequence exists, we would have to scan every instruction
5317 in the sequence to see if it was safe. This is probably not
5318 worthwhile. */
5319 if (RTL_EXPR_SEQUENCE (exp))
5320 return 0;
5322 exp_rtl = RTL_EXPR_RTL (exp);
5323 break;
5325 case WITH_CLEANUP_EXPR:
5326 exp_rtl = RTL_EXPR_RTL (exp);
5327 break;
5329 case CLEANUP_POINT_EXPR:
5330 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5332 case SAVE_EXPR:
5333 exp_rtl = SAVE_EXPR_RTL (exp);
5334 if (exp_rtl)
5335 break;
5337 /* This SAVE_EXPR might appear many times in the top-level
5338 safe_from_p() expression, and if it has a complex
5339 subexpression, examining it multiple times could result
5340 in a combinatorial explosion. E.g. on an Alpha
5341 running at least 200MHz, a Fortran test case compiled with
5342 optimization took about 28 minutes to compile -- even though
5343 it was only a few lines long, and the complicated line causing
5344 so much time to be spent in the earlier version of safe_from_p()
5345 had only 293 or so unique nodes.
5347 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5348 where it is so we can turn it back in the top-level safe_from_p()
5349 when we're done. */
5351 /* For now, don't bother re-sizing the array. */
5352 if (save_expr_count >= save_expr_size)
5353 return 0;
5354 save_expr_rewritten[save_expr_count++] = exp;
5356 nops = tree_code_length[(int) SAVE_EXPR];
5357 for (i = 0; i < nops; i++)
5359 tree operand = TREE_OPERAND (exp, i);
5360 if (operand == NULL_TREE)
5361 continue;
5362 TREE_SET_CODE (exp, ERROR_MARK);
5363 if (!safe_from_p (x, operand, 0))
5364 return 0;
5365 TREE_SET_CODE (exp, SAVE_EXPR);
5367 TREE_SET_CODE (exp, ERROR_MARK);
5368 return 1;
5370 case BIND_EXPR:
5371 /* The only operand we look at is operand 1. The rest aren't
5372 part of the expression. */
5373 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5375 case METHOD_CALL_EXPR:
5376 /* This takes an rtx argument, but shouldn't appear here. */
5377 abort ();
5379 default:
5380 break;
5383 /* If we have an rtx, we do not need to scan our operands. */
5384 if (exp_rtl)
5385 break;
5387 nops = tree_code_length[(int) TREE_CODE (exp)];
5388 for (i = 0; i < nops; i++)
5389 if (TREE_OPERAND (exp, i) != 0
5390 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5391 return 0;
5394 /* If we have an rtl, find any enclosed object. Then see if we conflict
5395 with it. */
5396 if (exp_rtl)
5398 if (GET_CODE (exp_rtl) == SUBREG)
5400 exp_rtl = SUBREG_REG (exp_rtl);
5401 if (GET_CODE (exp_rtl) == REG
5402 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5403 return 0;
5406 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5407 are memory and EXP is not readonly. */
5408 return ! (rtx_equal_p (x, exp_rtl)
5409 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5410 && ! TREE_READONLY (exp)));
5413 /* If we reach here, it is safe. */
5414 return 1;
5417 /* Subroutine of expand_expr: return nonzero iff EXP is an
5418 expression whose type is statically determinable. */
5420 static int
5421 fixed_type_p (exp)
5422 tree exp;
5424 if (TREE_CODE (exp) == PARM_DECL
5425 || TREE_CODE (exp) == VAR_DECL
5426 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5427 || TREE_CODE (exp) == COMPONENT_REF
5428 || TREE_CODE (exp) == ARRAY_REF)
5429 return 1;
5430 return 0;
5433 /* Subroutine of expand_expr: return rtx if EXP is a
5434 variable or parameter; else return 0. */
5436 static rtx
5437 var_rtx (exp)
5438 tree exp;
5440 STRIP_NOPS (exp);
5441 switch (TREE_CODE (exp))
5443 case PARM_DECL:
5444 case VAR_DECL:
5445 return DECL_RTL (exp);
5446 default:
5447 return 0;
5451 #ifdef MAX_INTEGER_COMPUTATION_MODE
5452 void
5453 check_max_integer_computation_mode (exp)
5454 tree exp;
5456 enum tree_code code;
5457 enum machine_mode mode;
5459 /* Strip any NOPs that don't change the mode. */
5460 STRIP_NOPS (exp);
5461 code = TREE_CODE (exp);
5463 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5464 if (code == NOP_EXPR
5465 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5466 return;
5468 /* First check the type of the overall operation. We need only look at
5469 unary, binary and relational operations. */
5470 if (TREE_CODE_CLASS (code) == '1'
5471 || TREE_CODE_CLASS (code) == '2'
5472 || TREE_CODE_CLASS (code) == '<')
5474 mode = TYPE_MODE (TREE_TYPE (exp));
5475 if (GET_MODE_CLASS (mode) == MODE_INT
5476 && mode > MAX_INTEGER_COMPUTATION_MODE)
5477 fatal ("unsupported wide integer operation");
5480 /* Check operand of a unary op. */
5481 if (TREE_CODE_CLASS (code) == '1')
5483 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5484 if (GET_MODE_CLASS (mode) == MODE_INT
5485 && mode > MAX_INTEGER_COMPUTATION_MODE)
5486 fatal ("unsupported wide integer operation");
5489 /* Check operands of a binary/comparison op. */
5490 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5492 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5493 if (GET_MODE_CLASS (mode) == MODE_INT
5494 && mode > MAX_INTEGER_COMPUTATION_MODE)
5495 fatal ("unsupported wide integer operation");
5497 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5498 if (GET_MODE_CLASS (mode) == MODE_INT
5499 && mode > MAX_INTEGER_COMPUTATION_MODE)
5500 fatal ("unsupported wide integer operation");
5503 #endif
5506 /* expand_expr: generate code for computing expression EXP.
5507 An rtx for the computed value is returned. The value is never null.
5508 In the case of a void EXP, const0_rtx is returned.
5510 The value may be stored in TARGET if TARGET is nonzero.
5511 TARGET is just a suggestion; callers must assume that
5512 the rtx returned may not be the same as TARGET.
5514 If TARGET is CONST0_RTX, it means that the value will be ignored.
5516 If TMODE is not VOIDmode, it suggests generating the
5517 result in mode TMODE. But this is done only when convenient.
5518 Otherwise, TMODE is ignored and the value generated in its natural mode.
5519 TMODE is just a suggestion; callers must assume that
5520 the rtx returned may not have mode TMODE.
5522 Note that TARGET may have neither TMODE nor MODE. In that case, it
5523 probably will not be used.
5525 If MODIFIER is EXPAND_SUM then when EXP is an addition
5526 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5527 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5528 products as above, or REG or MEM, or constant.
5529 Ordinarily in such cases we would output mul or add instructions
5530 and then return a pseudo reg containing the sum.
5532 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5533 it also marks a label as absolutely required (it can't be dead).
5534 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5535 This is used for outputting expressions used in initializers.
5537 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5538 with a constant address even if that address is not normally legitimate.
5539 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
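/* Minimal usage sketch (hypothetical callers).  To evaluate EXP with no
   preference for target or mode:

       rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   To evaluate EXP only for its side effects, ignoring the value:

       expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);

   As documented above, TARGET and TMODE are only suggestions.  */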
5541 rtx
5542 expand_expr (exp, target, tmode, modifier)
5543 register tree exp;
5544 rtx target;
5545 enum machine_mode tmode;
5546 enum expand_modifier modifier;
5548 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
5549 This is static so it will be accessible to our recursive callees. */
5550 static tree placeholder_list = 0;
5551 register rtx op0, op1, temp;
5552 tree type = TREE_TYPE (exp);
5553 int unsignedp = TREE_UNSIGNED (type);
5554 register enum machine_mode mode;
5555 register enum tree_code code = TREE_CODE (exp);
5556 optab this_optab;
5557 rtx subtarget, original_target;
5558 int ignore;
5559 tree context;
5560 /* Used by check-memory-usage to make modifier read only. */
5561 enum expand_modifier ro_modifier;
5563 /* Handle ERROR_MARK before anybody tries to access its type. */
5564 if (TREE_CODE (exp) == ERROR_MARK)
5566 op0 = CONST0_RTX (tmode);
5567 if (op0 != 0)
5568 return op0;
5569 return const0_rtx;
5572 mode = TYPE_MODE (type);
5573 /* Use subtarget as the target for operand 0 of a binary operation. */
5574 subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5575 original_target = target;
5576 ignore = (target == const0_rtx
5577 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5578 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5579 || code == COND_EXPR)
5580 && TREE_CODE (type) == VOID_TYPE));
5582 /* Make a read-only version of the modifier. */
5583 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5584 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5585 ro_modifier = modifier;
5586 else
5587 ro_modifier = EXPAND_NORMAL;
5589 /* Don't use hard regs as subtargets, because the combiner
5590 can only handle pseudo regs. */
5591 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5592 subtarget = 0;
5593 /* Avoid subtargets inside loops,
5594 since they hide some invariant expressions. */
5595 if (preserve_subexpressions_p ())
5596 subtarget = 0;
5598 /* If we are going to ignore this result, we need only do something
5599 if there is a side-effect somewhere in the expression. If there
5600 is, short-circuit the most common cases here. Note that we must
5601 not call expand_expr with anything but const0_rtx in case this
5602 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5604 if (ignore)
5606 if (! TREE_SIDE_EFFECTS (exp))
5607 return const0_rtx;
5609 /* Ensure we reference a volatile object even if value is ignored. */
5610 if (TREE_THIS_VOLATILE (exp)
5611 && TREE_CODE (exp) != FUNCTION_DECL
5612 && mode != VOIDmode && mode != BLKmode)
5614 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5615 if (GET_CODE (temp) == MEM)
5616 temp = copy_to_reg (temp);
5617 return const0_rtx;
5620 if (TREE_CODE_CLASS (code) == '1')
5621 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5622 VOIDmode, ro_modifier);
5623 else if (TREE_CODE_CLASS (code) == '2'
5624 || TREE_CODE_CLASS (code) == '<')
5626 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5627 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5628 return const0_rtx;
5630 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5631 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5632 /* If the second operand has no side effects, just evaluate
5633 the first. */
5634 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5635 VOIDmode, ro_modifier);
5637 target = 0;
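/* Illustrative example: for a statement such as "(void) (a + f ());"
   the result is ignored, so the code above expands both operands with
   const0_rtx as the target; f () is still called for its side effect,
   but no addition is ever emitted.  */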
5640 #ifdef MAX_INTEGER_COMPUTATION_MODE
5641 /* Only check stuff here if the mode we want is different from the mode
5642 of the expression; if it's the same, check_max_integer_computation_mode
5643 will handle it. Do we really need to check this stuff at all? */
5645 if (target
5646 && GET_MODE (target) != mode
5647 && TREE_CODE (exp) != INTEGER_CST
5648 && TREE_CODE (exp) != PARM_DECL
5649 && TREE_CODE (exp) != ARRAY_REF
5650 && TREE_CODE (exp) != COMPONENT_REF
5651 && TREE_CODE (exp) != BIT_FIELD_REF
5652 && TREE_CODE (exp) != INDIRECT_REF
5653 && TREE_CODE (exp) != CALL_EXPR
5654 && TREE_CODE (exp) != VAR_DECL
5655 && TREE_CODE (exp) != RTL_EXPR)
5657 enum machine_mode mode = GET_MODE (target);
5659 if (GET_MODE_CLASS (mode) == MODE_INT
5660 && mode > MAX_INTEGER_COMPUTATION_MODE)
5661 fatal ("unsupported wide integer operation");
5664 if (tmode != mode
5665 && TREE_CODE (exp) != INTEGER_CST
5666 && TREE_CODE (exp) != PARM_DECL
5667 && TREE_CODE (exp) != ARRAY_REF
5668 && TREE_CODE (exp) != COMPONENT_REF
5669 && TREE_CODE (exp) != BIT_FIELD_REF
5670 && TREE_CODE (exp) != INDIRECT_REF
5671 && TREE_CODE (exp) != VAR_DECL
5672 && TREE_CODE (exp) != CALL_EXPR
5673 && TREE_CODE (exp) != RTL_EXPR
5674 && GET_MODE_CLASS (tmode) == MODE_INT
5675 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5676 fatal ("unsupported wide integer operation");
5678 check_max_integer_computation_mode (exp);
5679 #endif
5681 /* If will do cse, generate all results into pseudo registers
5682 since 1) that allows cse to find more things
5683 and 2) otherwise cse could produce an insn the machine
5684 cannot support. */
5686 if (! cse_not_expected && mode != BLKmode && target
5687 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5688 target = subtarget;
5690 switch (code)
5692 case LABEL_DECL:
5694 tree function = decl_function_context (exp);
5695 /* Handle using a label in a containing function. */
5696 if (function != current_function_decl
5697 && function != inline_function_decl && function != 0)
5699 struct function *p = find_function_data (function);
5700 /* Allocate in the memory associated with the function
5701 that the label is in. */
5702 push_obstacks (p->function_obstack,
5703 p->function_maybepermanent_obstack);
5705 p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5706 label_rtx (exp),
5707 p->forced_labels);
5708 pop_obstacks ();
5710 else
5712 if (modifier == EXPAND_INITIALIZER)
5713 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5714 label_rtx (exp),
5715 forced_labels);
5717 temp = gen_rtx_MEM (FUNCTION_MODE,
5718 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5719 if (function != current_function_decl
5720 && function != inline_function_decl && function != 0)
5721 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5722 return temp;
5725 case PARM_DECL:
5726 if (DECL_RTL (exp) == 0)
5728 error_with_decl (exp, "prior parameter's size depends on `%s'");
5729 return CONST0_RTX (mode);
5732 /* ... fall through ... */
5734 case VAR_DECL:
5735 /* If a static var's type was incomplete when the decl was written,
5736 but the type is complete now, lay out the decl now. */
5737 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5738 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5740 push_obstacks_nochange ();
5741 end_temporary_allocation ();
5742 layout_decl (exp, 0);
5743 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5744 pop_obstacks ();
5747 /* Although static-storage variables start off initialized, according to
5748 ANSI C, a memcpy could overwrite them with uninitialized values. So
5749 we check them too. This also lets us check for read-only variables
5750 accessed via a non-const declaration, in case it won't be detected
5751 any other way (e.g., in an embedded system or OS kernel without
5752 memory protection).
5754 Aggregates are not checked here; they're handled elsewhere. */
5755 if (current_function_check_memory_usage && code == VAR_DECL
5756 && GET_CODE (DECL_RTL (exp)) == MEM
5757 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5759 enum memory_use_mode memory_usage;
5760 memory_usage = get_memory_usage_from_modifier (modifier);
5762 if (memory_usage != MEMORY_USE_DONT)
5763 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5764 XEXP (DECL_RTL (exp), 0), Pmode,
5765 GEN_INT (int_size_in_bytes (type)),
5766 TYPE_MODE (sizetype),
5767 GEN_INT (memory_usage),
5768 TYPE_MODE (integer_type_node));
5771 /* ... fall through ... */
5773 case FUNCTION_DECL:
5774 case RESULT_DECL:
5775 if (DECL_RTL (exp) == 0)
5776 abort ();
5778 /* Ensure the variable is marked as used even if it doesn't go through
5779 a parser. If it hasn't been used yet, write out an external
5780 definition. */
5781 if (! TREE_USED (exp))
5783 assemble_external (exp);
5784 TREE_USED (exp) = 1;
5787 /* Show we haven't gotten RTL for this yet. */
5788 temp = 0;
5790 /* Handle variables inherited from containing functions. */
5791 context = decl_function_context (exp);
5793 /* We treat inline_function_decl as an alias for the current function
5794 because that is the inline function whose vars, types, etc.
5795 are being merged into the current function.
5796 See expand_inline_function. */
5798 if (context != 0 && context != current_function_decl
5799 && context != inline_function_decl
5800 /* If var is static, we don't need a static chain to access it. */
5801 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5802 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5804 rtx addr;
5806 /* Mark as non-local and addressable. */
5807 DECL_NONLOCAL (exp) = 1;
5808 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5809 abort ();
5810 mark_addressable (exp);
5811 if (GET_CODE (DECL_RTL (exp)) != MEM)
5812 abort ();
5813 addr = XEXP (DECL_RTL (exp), 0);
5814 if (GET_CODE (addr) == MEM)
5815 addr = gen_rtx_MEM (Pmode,
5816 fix_lexical_addr (XEXP (addr, 0), exp));
5817 else
5818 addr = fix_lexical_addr (addr, exp);
5819 temp = change_address (DECL_RTL (exp), mode, addr);
5822 /* This is the case of an array whose size is to be determined
5823 from its initializer, while the initializer is still being parsed.
5824 See expand_decl. */
5826 else if (GET_CODE (DECL_RTL (exp)) == MEM
5827 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5828 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5829 XEXP (DECL_RTL (exp), 0));
5831 /* If DECL_RTL is memory, we are in the normal case: if either
5832 the address is not valid, or it is not a register and -fforce-addr
5833 is specified, get the address into a register. */
5835 else if (GET_CODE (DECL_RTL (exp)) == MEM
5836 && modifier != EXPAND_CONST_ADDRESS
5837 && modifier != EXPAND_SUM
5838 && modifier != EXPAND_INITIALIZER
5839 && (! memory_address_p (DECL_MODE (exp),
5840 XEXP (DECL_RTL (exp), 0))
5841 || (flag_force_addr
5842 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5843 temp = change_address (DECL_RTL (exp), VOIDmode,
5844 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5846 /* If we got something, return it. But first, set the alignment
5847 if the address is a register. */
5848 if (temp != 0)
5850 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5851 mark_reg_pointer (XEXP (temp, 0),
5852 DECL_ALIGN (exp) / BITS_PER_UNIT);
5854 return temp;
5857 /* If the mode of DECL_RTL does not match that of the decl, it
5858 must be a promoted value. We return a SUBREG of the wanted mode,
5859 but mark it so that we know that it was already extended. */
5861 if (GET_CODE (DECL_RTL (exp)) == REG
5862 && GET_MODE (DECL_RTL (exp)) != mode)
5864 /* Get the signedness used for this variable. Ensure we get the
5865 same mode we got when the variable was declared. */
5866 if (GET_MODE (DECL_RTL (exp))
5867 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5868 abort ();
5870 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5871 SUBREG_PROMOTED_VAR_P (temp) = 1;
5872 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5873 return temp;
5876 return DECL_RTL (exp);
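/* Example of the promoted-variable case above (hypothetical target):
   if PROMOTE_MODE widens QImode variables to SImode, a "signed char c"
   may live in (reg:SI n), and asking for its natural mode returns

       (subreg:QI (reg:SI n) 0)

   with SUBREG_PROMOTED_VAR_P set, so later code knows the extension
   has already been performed.  */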
5878 case INTEGER_CST:
5879 return immed_double_const (TREE_INT_CST_LOW (exp),
5880 TREE_INT_CST_HIGH (exp),
5881 mode);
5883 case CONST_DECL:
5884 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5885 EXPAND_MEMORY_USE_BAD);
5887 case REAL_CST:
5888 /* If optimized, generate immediate CONST_DOUBLE
5889 which will be turned into memory by reload if necessary.
5891 We used to force a register so that loop.c could see it. But
5892 this does not allow gen_* patterns to perform optimizations with
5893 the constants. It also produces two insns in cases like "x = 1.0;".
5894 On most machines, floating-point constants are not permitted in
5895 many insns, so we'd end up copying it to a register in any case.
5897 Now, we do the copying in expand_binop, if appropriate. */
5898 return immed_real_const (exp);
5900 case COMPLEX_CST:
5901 case STRING_CST:
5902 if (! TREE_CST_RTL (exp))
5903 output_constant_def (exp);
5905 /* TREE_CST_RTL probably contains a constant address.
5906 On RISC machines where a constant address isn't valid,
5907 make some insns to get that address into a register. */
5908 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5909 && modifier != EXPAND_CONST_ADDRESS
5910 && modifier != EXPAND_INITIALIZER
5911 && modifier != EXPAND_SUM
5912 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5913 || (flag_force_addr
5914 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5915 return change_address (TREE_CST_RTL (exp), VOIDmode,
5916 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5917 return TREE_CST_RTL (exp);
5919 case EXPR_WITH_FILE_LOCATION:
5921 rtx to_return;
5922 char *saved_input_filename = input_filename;
5923 int saved_lineno = lineno;
5924 input_filename = EXPR_WFL_FILENAME (exp);
5925 lineno = EXPR_WFL_LINENO (exp);
5926 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
5927 emit_line_note (input_filename, lineno);
5928 /* Possibly avoid switching back and forth here. */
5929 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
5930 input_filename = saved_input_filename;
5931 lineno = saved_lineno;
5932 return to_return;
5935 case SAVE_EXPR:
5936 context = decl_function_context (exp);
5938 /* If this SAVE_EXPR was at global context, assume we are in an
5939 initialization function and move it into our context. */
5940 if (context == 0)
5941 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5943 /* We treat inline_function_decl as an alias for the current function
5944 because that is the inline function whose vars, types, etc.
5945 are being merged into the current function.
5946 See expand_inline_function. */
5947 if (context == current_function_decl || context == inline_function_decl)
5948 context = 0;
5950 /* If this is non-local, handle it. */
5951 if (context)
5953 /* The following call just exists to abort if the context is
5954 not that of a containing function. */
5955 find_function_data (context);
5957 temp = SAVE_EXPR_RTL (exp);
5958 if (temp && GET_CODE (temp) == REG)
5960 put_var_into_stack (exp);
5961 temp = SAVE_EXPR_RTL (exp);
5963 if (temp == 0 || GET_CODE (temp) != MEM)
5964 abort ();
5965 return change_address (temp, mode,
5966 fix_lexical_addr (XEXP (temp, 0), exp));
5968 if (SAVE_EXPR_RTL (exp) == 0)
5970 if (mode == VOIDmode)
5971 temp = const0_rtx;
5972 else
5973 temp = assign_temp (type, 3, 0, 0);
5975 SAVE_EXPR_RTL (exp) = temp;
5976 if (!optimize && GET_CODE (temp) == REG)
5977 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
5978 save_expr_regs);
5980 /* If the mode of TEMP does not match that of the expression, it
5981 must be a promoted value. We pass store_expr a SUBREG of the
5982 wanted mode but mark it so that we know that it was already
5983 extended. Note that `unsignedp' was modified above in
5984 this case. */
5986 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5988 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5989 SUBREG_PROMOTED_VAR_P (temp) = 1;
5990 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5993 if (temp == const0_rtx)
5994 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5995 EXPAND_MEMORY_USE_BAD);
5996 else
5997 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5999 TREE_USED (exp) = 1;
6002 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6003 must be a promoted value. We return a SUBREG of the wanted mode,
6004 but mark it so that we know that it was already extended. */
6006 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6007 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6009 /* Compute the signedness and make the proper SUBREG. */
6010 promote_mode (type, mode, &unsignedp, 0);
6011 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6012 SUBREG_PROMOTED_VAR_P (temp) = 1;
6013 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6014 return temp;
6017 return SAVE_EXPR_RTL (exp);
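/* Illustrative note: SAVE_EXPR means "evaluate once, reuse the value".
   For instance, the size of a C variable-length array such as
   "char buf[n * 2];" is typically wrapped in a SAVE_EXPR; the code
   above computes n * 2 into TEMP on the first expansion, and every
   later expansion simply returns SAVE_EXPR_RTL (exp).  */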
6019 case UNSAVE_EXPR:
6021 rtx temp;
6022 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6023 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6024 return temp;
6027 case PLACEHOLDER_EXPR:
6029 tree placeholder_expr;
6031 /* If there is an object on the head of the placeholder list,
6032 see if some object in it is of type TYPE or a pointer to it. For
6033 further information, see tree.def. */
6034 for (placeholder_expr = placeholder_list;
6035 placeholder_expr != 0;
6036 placeholder_expr = TREE_CHAIN (placeholder_expr))
6038 tree need_type = TYPE_MAIN_VARIANT (type);
6039 tree object = 0;
6040 tree old_list = placeholder_list;
6041 tree elt;
6043 /* Find the outermost reference that is of the type we want.
6044 If none, see if any object has a type that is a pointer to
6045 the type we want. */
6046 for (elt = TREE_PURPOSE (placeholder_expr);
6047 elt != 0 && object == 0;
6048 elt
6049 = ((TREE_CODE (elt) == COMPOUND_EXPR
6050 || TREE_CODE (elt) == COND_EXPR)
6051 ? TREE_OPERAND (elt, 1)
6052 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6053 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6054 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6055 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6056 ? TREE_OPERAND (elt, 0) : 0))
6057 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6058 object = elt;
6060 for (elt = TREE_PURPOSE (placeholder_expr);
6061 elt != 0 && object == 0;
6062 elt
6063 = ((TREE_CODE (elt) == COMPOUND_EXPR
6064 || TREE_CODE (elt) == COND_EXPR)
6065 ? TREE_OPERAND (elt, 1)
6066 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6067 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6068 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6069 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6070 ? TREE_OPERAND (elt, 0) : 0))
6071 if (POINTER_TYPE_P (TREE_TYPE (elt))
6072 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6073 == need_type))
6074 object = build1 (INDIRECT_REF, need_type, elt);
6076 if (object != 0)
6078 /* Expand this object skipping the list entries before
6079 it was found in case it is also a PLACEHOLDER_EXPR.
6080 In that case, we want to translate it using subsequent
6081 entries. */
6082 placeholder_list = TREE_CHAIN (placeholder_expr);
6083 temp = expand_expr (object, original_target, tmode,
6084 ro_modifier);
6085 placeholder_list = old_list;
6086 return temp;
6091 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6092 abort ();
6094 case WITH_RECORD_EXPR:
6095 /* Put the object on the placeholder list, expand our first operand,
6096 and pop the list. */
6097 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6098 placeholder_list);
6099 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6100 tmode, ro_modifier);
6101 placeholder_list = TREE_CHAIN (placeholder_list);
6102 return target;
6104 case GOTO_EXPR:
6105 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6106 expand_goto (TREE_OPERAND (exp, 0));
6107 else
6108 expand_computed_goto (TREE_OPERAND (exp, 0));
6109 return const0_rtx;
6111 case EXIT_EXPR:
6112 expand_exit_loop_if_false (NULL_PTR,
6113 invert_truthvalue (TREE_OPERAND (exp, 0)));
6114 return const0_rtx;
6116 case LABELED_BLOCK_EXPR:
6117 if (LABELED_BLOCK_BODY (exp))
6118 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6119 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6120 return const0_rtx;
6122 case EXIT_BLOCK_EXPR:
6123 if (EXIT_BLOCK_RETURN (exp))
6124 sorry ("returned value in block_exit_expr");
6125 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6126 return const0_rtx;
6128 case LOOP_EXPR:
6129 push_temp_slots ();
6130 expand_start_loop (1);
6131 expand_expr_stmt (TREE_OPERAND (exp, 0));
6132 expand_end_loop ();
6133 pop_temp_slots ();
6135 return const0_rtx;
6137 case BIND_EXPR:
6139 tree vars = TREE_OPERAND (exp, 0);
6140 int vars_need_expansion = 0;
6142 /* Need to open a binding contour here because
6143 if there are any cleanups they must be contained here. */
6144 expand_start_bindings (0);
6146 /* Mark the corresponding BLOCK for output in its proper place. */
6147 if (TREE_OPERAND (exp, 2) != 0
6148 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6149 insert_block (TREE_OPERAND (exp, 2));
6151 /* If VARS have not yet been expanded, expand them now. */
6152 while (vars)
6154 if (DECL_RTL (vars) == 0)
6156 vars_need_expansion = 1;
6157 expand_decl (vars);
6159 expand_decl_init (vars);
6160 vars = TREE_CHAIN (vars);
6163 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6165 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6167 return temp;
6170 case RTL_EXPR:
6171 if (RTL_EXPR_SEQUENCE (exp))
6173 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6174 abort ();
6175 emit_insns (RTL_EXPR_SEQUENCE (exp));
6176 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6178 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6179 free_temps_for_rtl_expr (exp);
6180 return RTL_EXPR_RTL (exp);
6182 case CONSTRUCTOR:
6183 /* If we don't need the result, just ensure we evaluate any
6184 subexpressions. */
6185 if (ignore)
6187 tree elt;
6188 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6189 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6190 EXPAND_MEMORY_USE_BAD);
6191 return const0_rtx;
6194 /* All elts simple constants => refer to a constant in memory. But
6195 if this is a non-BLKmode mode, let it store a field at a time
6196 since that should make a CONST_INT or CONST_DOUBLE when we
6197 fold. Likewise, if we have a target we can use, it is best to
6198 store directly into the target unless the type is large enough
6199 that memcpy will be used. If we are making an initializer and
6200 all operands are constant, put it in memory as well. */
6201 else if ((TREE_STATIC (exp)
6202 && ((mode == BLKmode
6203 && ! (target != 0 && safe_from_p (target, exp, 1)))
6204 || TREE_ADDRESSABLE (exp)
6205 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
6206 && (!MOVE_BY_PIECES_P
6207 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
6208 TYPE_ALIGN (type) / BITS_PER_UNIT))
6209 && ! mostly_zeros_p (exp))))
6210 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6212 rtx constructor = output_constant_def (exp);
6213 if (modifier != EXPAND_CONST_ADDRESS
6214 && modifier != EXPAND_INITIALIZER
6215 && modifier != EXPAND_SUM
6216 && (! memory_address_p (GET_MODE (constructor),
6217 XEXP (constructor, 0))
6218 || (flag_force_addr
6219 && GET_CODE (XEXP (constructor, 0)) != REG)))
6220 constructor = change_address (constructor, VOIDmode,
6221 XEXP (constructor, 0));
6222 return constructor;
6225 else
6227 /* Handle calls that pass values in multiple non-contiguous
6228 locations. The Irix 6 ABI has examples of this. */
6229 if (target == 0 || ! safe_from_p (target, exp, 1)
6230 || GET_CODE (target) == PARALLEL)
6232 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6233 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6234 else
6235 target = assign_temp (type, 0, 1, 1);
6238 if (TREE_READONLY (exp))
6240 if (GET_CODE (target) == MEM)
6241 target = copy_rtx (target);
6243 RTX_UNCHANGING_P (target) = 1;
6246 store_constructor (exp, target, 0);
6247 return target;
6250 case INDIRECT_REF:
6252 tree exp1 = TREE_OPERAND (exp, 0);
6253 tree exp2;
6254 tree index;
6255 tree string = string_constant (exp1, &index);
6256 int i;
6258 /* Try to optimize reads from const strings. */
6259 if (string
6260 && TREE_CODE (string) == STRING_CST
6261 && TREE_CODE (index) == INTEGER_CST
6262 && !TREE_INT_CST_HIGH (index)
6263 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
6264 && GET_MODE_CLASS (mode) == MODE_INT
6265 && GET_MODE_SIZE (mode) == 1
6266 && modifier != EXPAND_MEMORY_USE_WO)
6267 return GEN_INT (TREE_STRING_POINTER (string)[i]);
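/* For example (illustrative), a read such as *("abc" + 1) passes the
   tests above and folds at compile time to GEN_INT ('b'); no memory
   reference is emitted for it.  */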
6269 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6270 op0 = memory_address (mode, op0);
6272 if (current_function_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6274 enum memory_use_mode memory_usage;
6275 memory_usage = get_memory_usage_from_modifier (modifier);
6277 if (memory_usage != MEMORY_USE_DONT)
6279 in_check_memory_usage = 1;
6280 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6281 op0, Pmode,
6282 GEN_INT (int_size_in_bytes (type)),
6283 TYPE_MODE (sizetype),
6284 GEN_INT (memory_usage),
6285 TYPE_MODE (integer_type_node));
6286 in_check_memory_usage = 0;
6290 temp = gen_rtx_MEM (mode, op0);
6291 /* If address was computed by addition,
6292 mark this as an element of an aggregate. */
6293 if (TREE_CODE (exp1) == PLUS_EXPR
6294 || (TREE_CODE (exp1) == SAVE_EXPR
6295 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6296 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6297 || (TREE_CODE (exp1) == ADDR_EXPR
6298 && (exp2 = TREE_OPERAND (exp1, 0))
6299 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6300 MEM_SET_IN_STRUCT_P (temp, 1);
6302 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6303 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6305 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6306 here, because, in C and C++, the fact that a location is accessed
6307 through a pointer to const does not mean that the value there can
6308 never change. Languages where it can never change should
6309 also set TREE_STATIC. */
6310 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6311 return temp;
6314 case ARRAY_REF:
6315 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6316 abort ();
6319 tree array = TREE_OPERAND (exp, 0);
6320 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6321 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6322 tree index = TREE_OPERAND (exp, 1);
6323 tree index_type = TREE_TYPE (index);
6324 HOST_WIDE_INT i;
6326 /* Optimize the special case of a zero lower bound.
6328 We convert the low_bound to sizetype to avoid some problems
6329 with constant folding. (E.g. suppose the lower bound is 1,
6330 and its mode is QI. Without the conversion, (ARRAY
6331 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6332 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
6334 But sizetype isn't quite right either (especially if
6335 the lowbound is negative). FIXME */
6337 if (! integer_zerop (low_bound))
6338 index = fold (build (MINUS_EXPR, index_type, index,
6339 convert (sizetype, low_bound)));
6341 /* Fold an expression like: "foo"[2].
6342 This is not done in fold so it won't happen inside &.
6343 Don't fold if this is for wide characters since it's too
6344 difficult to do correctly and this is a very rare case. */
6346 if (TREE_CODE (array) == STRING_CST
6347 && TREE_CODE (index) == INTEGER_CST
6348 && !TREE_INT_CST_HIGH (index)
6349 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
6350 && GET_MODE_CLASS (mode) == MODE_INT
6351 && GET_MODE_SIZE (mode) == 1)
6352 return GEN_INT (TREE_STRING_POINTER (array)[i]);
6354 /* If this is a constant index into a constant array,
6355 just get the value from the array. Handle both the cases when
6356 we have an explicit constructor and when our operand is a variable
6357 that was declared const. */
6359 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
6361 if (TREE_CODE (index) == INTEGER_CST
6362 && TREE_INT_CST_HIGH (index) == 0)
6364 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
6366 i = TREE_INT_CST_LOW (index);
6367 while (elem && i--)
6368 elem = TREE_CHAIN (elem);
6369 if (elem)
6370 return expand_expr (fold (TREE_VALUE (elem)), target,
6371 tmode, ro_modifier);
6375 else if (optimize >= 1
6376 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6377 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6378 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6380 if (TREE_CODE (index) == INTEGER_CST)
6382 tree init = DECL_INITIAL (array);
6384 i = TREE_INT_CST_LOW (index);
6385 if (TREE_CODE (init) == CONSTRUCTOR)
6387 tree elem = CONSTRUCTOR_ELTS (init);
6389 while (elem
6390 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
6391 elem = TREE_CHAIN (elem);
6392 if (elem)
6393 return expand_expr (fold (TREE_VALUE (elem)), target,
6394 tmode, ro_modifier);
6396 else if (TREE_CODE (init) == STRING_CST
6397 && TREE_INT_CST_HIGH (index) == 0
6398 && (TREE_INT_CST_LOW (index)
6399 < TREE_STRING_LENGTH (init)))
6400 return (GEN_INT
6401 (TREE_STRING_POINTER
6402 (init)[TREE_INT_CST_LOW (index)]));
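/* Worked example (illustrative): with -O and
   "static const int tbl[3] = { 10, 20, 30 };" the reference tbl[1]
   satisfies the conditions above (read-only, no side effects, constant
   index, constant DECL_INITIAL), so it can expand directly to the
   constant 20 without touching memory.  */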
6407 /* ... fall through ... */
6409 case COMPONENT_REF:
6410 case BIT_FIELD_REF:
6411 /* If the operand is a CONSTRUCTOR, we can just extract the
6412 appropriate field if it is present. Don't do this if we have
6413 already written the data since we want to refer to that copy
6414 and varasm.c assumes that's what we'll do. */
6415 if (code != ARRAY_REF
6416 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6417 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6419 tree elt;
6421 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6422 elt = TREE_CHAIN (elt))
6423 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6424 /* We can normally use the value of the field in the
6425 CONSTRUCTOR. However, if this is a bitfield in
6426 an integral mode that we can fit in a HOST_WIDE_INT,
6427 we must mask only the number of bits in the bitfield,
6428 since this is done implicitly by the constructor. If
6429 the bitfield does not meet either of those conditions,
6430 we can't do this optimization. */
6431 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6432 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6433 == MODE_INT)
6434 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6435 <= HOST_BITS_PER_WIDE_INT))))
6437 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6438 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6440 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
6442 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6444 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6445 op0 = expand_and (op0, op1, target);
6447 else
6449 enum machine_mode imode
6450 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6451 tree count
6452 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6453 0);
6455 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6456 target, 0);
6457 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6458 target, 0);
6462 return op0;
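/* Worked example of the bitfield handling above (illustrative):
   suppose the field is a signed 3-bit bitfield whose constructor value
   has low bits 101 (decimal 5).  With a 32-bit imode, count is
   32 - 3 = 29; shifting left 29 and then arithmetically right 29
   yields the sign-extended value -3.  For an unsigned field we would
   instead AND with (1 << 3) - 1 = 7.  */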
6467 enum machine_mode mode1;
6468 int bitsize;
6469 int bitpos;
6470 tree offset;
6471 int volatilep = 0;
6472 int alignment;
6473 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6474 &mode1, &unsignedp, &volatilep,
6475 &alignment);
6477 /* If we got back the original object, something is wrong. Perhaps
6478 we are evaluating an expression too early. In any event, don't
6479 infinitely recurse. */
6480 if (tem == exp)
6481 abort ();
6483 /* If TEM's type is a union of variable size, pass TARGET to the inner
6484 computation, since it will need a temporary, and TARGET will have
6485 to do. This occurs in unchecked conversion in Ada. */
6487 op0 = expand_expr (tem,
6488 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6489 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6490 != INTEGER_CST)
6491 ? target : NULL_RTX),
6492 VOIDmode,
6493 modifier == EXPAND_INITIALIZER
6494 ? modifier : EXPAND_NORMAL);
6496 /* If this is a constant, put it into a register if it is a
6497 legitimate constant and memory if it isn't. */
6498 if (CONSTANT_P (op0))
6500 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6501 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
6502 op0 = force_reg (mode, op0);
6503 else
6504 op0 = validize_mem (force_const_mem (mode, op0));
6507 if (offset != 0)
6509 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6511 if (GET_CODE (op0) != MEM)
6512 abort ();
6514 if (GET_MODE (offset_rtx) != ptr_mode)
6516 #ifdef POINTERS_EXTEND_UNSIGNED
6517 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6518 #else
6519 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6520 #endif
6523 /* A constant address in OP0 can have VOIDmode; we must not try
6524 to call force_reg in that case. Avoid that case. */
6525 if (GET_CODE (op0) == MEM
6526 && GET_MODE (op0) == BLKmode
6527 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6528 && bitsize
6529 && (bitpos % bitsize) == 0
6530 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6531 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6533 rtx temp = change_address (op0, mode1,
6534 plus_constant (XEXP (op0, 0),
6535 (bitpos /
6536 BITS_PER_UNIT)));
6537 if (GET_CODE (XEXP (temp, 0)) == REG)
6538 op0 = temp;
6539 else
6540 op0 = change_address (op0, mode1,
6541 force_reg (GET_MODE (XEXP (temp, 0)),
6542 XEXP (temp, 0)));
6543 bitpos = 0;
6547 op0 = change_address (op0, VOIDmode,
6548 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6549 force_reg (ptr_mode, offset_rtx)));
6552 /* Don't forget about volatility even if this is a bitfield. */
6553 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6555 op0 = copy_rtx (op0);
6556 MEM_VOLATILE_P (op0) = 1;
6559 /* Check the access. */
6560 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
6562 enum memory_use_mode memory_usage;
6563 memory_usage = get_memory_usage_from_modifier (modifier);
6565 if (memory_usage != MEMORY_USE_DONT)
6567 rtx to;
6568 int size;
6570 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6571 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6573 /* Check the access right of the pointer. */
6574 if (size > BITS_PER_UNIT)
6575 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6576 to, Pmode,
6577 GEN_INT (size / BITS_PER_UNIT),
6578 TYPE_MODE (sizetype),
6579 GEN_INT (memory_usage),
6580 TYPE_MODE (integer_type_node));
6584 /* In cases where an aligned union has an unaligned object
6585 as a field, we might be extracting a BLKmode value from
6586 an integer-mode (e.g., SImode) object. Handle this case
6587 by doing the extract into an object as wide as the field
6588 (which we know to be the width of a basic mode), then
6589 storing into memory, and changing the mode to BLKmode.
6590 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6591 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6592 if (mode1 == VOIDmode
6593 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6594 || (modifier != EXPAND_CONST_ADDRESS
6595 && modifier != EXPAND_INITIALIZER
6596 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6597 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6598 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6599 /* If the field isn't aligned enough to fetch as a memref,
6600 fetch it as a bit field. */
6601 || (SLOW_UNALIGNED_ACCESS
6602 && ((TYPE_ALIGN (TREE_TYPE (tem)) < (unsigned int) GET_MODE_ALIGNMENT (mode))
6603 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
6605 enum machine_mode ext_mode = mode;
6607 if (ext_mode == BLKmode)
6608 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6610 if (ext_mode == BLKmode)
6612 /* In this case, BITPOS must start at a byte boundary and
6613 TARGET, if specified, must be a MEM. */
6614 if (GET_CODE (op0) != MEM
6615 || (target != 0 && GET_CODE (target) != MEM)
6616 || bitpos % BITS_PER_UNIT != 0)
6617 abort ();
6619 op0 = change_address (op0, VOIDmode,
6620 plus_constant (XEXP (op0, 0),
6621 bitpos / BITS_PER_UNIT));
6622 if (target == 0)
6623 target = assign_temp (type, 0, 1, 1);
6625 emit_block_move (target, op0,
6626 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6627 / BITS_PER_UNIT),
6628 1);
6630 return target;
6633 op0 = validize_mem (op0);
6635 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6636 mark_reg_pointer (XEXP (op0, 0), alignment);
6638 op0 = extract_bit_field (op0, bitsize, bitpos,
6639 unsignedp, target, ext_mode, ext_mode,
6640 alignment,
6641 int_size_in_bytes (TREE_TYPE (tem)));
6643 /* If the result is a record type and BITSIZE is narrower than
6644 the mode of OP0, an integral mode, and this is a big endian
6645 machine, we must put the field into the high-order bits. */
6646 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6647 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6648 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6649 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6650 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6651 - bitsize),
6652 op0, 1);
6654 if (mode == BLKmode)
6656 rtx new = assign_stack_temp (ext_mode,
6657 bitsize / BITS_PER_UNIT, 0);
6659 emit_move_insn (new, op0);
6660 op0 = copy_rtx (new);
6661 PUT_MODE (op0, BLKmode);
6662 MEM_SET_IN_STRUCT_P (op0, 1);
6665 return op0;
6668 /* If the result is BLKmode, use that to access the object
6669 now as well. */
6670 if (mode == BLKmode)
6671 mode1 = BLKmode;
6673 /* Get a reference to just this component. */
6674 if (modifier == EXPAND_CONST_ADDRESS
6675 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6676 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6677 (bitpos / BITS_PER_UNIT)));
6678 else
6679 op0 = change_address (op0, mode1,
6680 plus_constant (XEXP (op0, 0),
6681 (bitpos / BITS_PER_UNIT)));
6683 if (GET_CODE (op0) == MEM)
6684 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6686 if (GET_CODE (XEXP (op0, 0)) == REG)
6687 mark_reg_pointer (XEXP (op0, 0), alignment);
6689 MEM_SET_IN_STRUCT_P (op0, 1);
6690 MEM_VOLATILE_P (op0) |= volatilep;
6691 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6692 || modifier == EXPAND_CONST_ADDRESS
6693 || modifier == EXPAND_INITIALIZER)
6694 return op0;
6695 else if (target == 0)
6696 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6698 convert_move (target, op0, unsignedp);
6699 return target;
6702 /* Intended for a reference to a buffer of a file-object in Pascal.
6703 But it's not certain that a special tree code will really be
6704 necessary for these. INDIRECT_REF might work for them. */
6705 case BUFFER_REF:
6706 abort ();
6708 case IN_EXPR:
6710 /* Pascal set IN expression.
6712 Algorithm (the emitted code works a byte at a time):
6713 rlo = set_low - (set_low%bits_per_unit);
6714 the_byte = set [ (index - rlo)/bits_per_unit ];
6715 bit_index = index % bits_per_unit;
6716 bitmask = 1 << bit_index;
6717 return !!(the_byte & bitmask); */
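/* A host-side C sketch of the same test (hypothetical helper, for
   illustration only; the code below emits RTL that performs this a
   byte at a time, and assumes INDEX is already known to be in range):

   static int
   in_set_p (const unsigned char *set, int set_low, int index)
   {
     int rlo = set_low - (set_low % BITS_PER_UNIT);
     unsigned char the_byte = set[(index - rlo) / BITS_PER_UNIT];
     return (the_byte >> (index % BITS_PER_UNIT)) & 1;
   }
*/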
6719 tree set = TREE_OPERAND (exp, 0);
6720 tree index = TREE_OPERAND (exp, 1);
6721 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6722 tree set_type = TREE_TYPE (set);
6723 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6724 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6725 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6726 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6727 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6728 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6729 rtx setaddr = XEXP (setval, 0);
6730 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6731 rtx rlow;
6732 rtx diff, quo, rem, addr, bit, result;
6734 preexpand_calls (exp);
6736 /* If domain is empty, answer is no. Likewise if index is constant
6737 and out of bounds. */
6738 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6739 && TREE_CODE (set_low_bound) == INTEGER_CST
6740 && tree_int_cst_lt (set_high_bound, set_low_bound))
6741 || (TREE_CODE (index) == INTEGER_CST
6742 && TREE_CODE (set_low_bound) == INTEGER_CST
6743 && tree_int_cst_lt (index, set_low_bound))
6744 || (TREE_CODE (set_high_bound) == INTEGER_CST
6745 && TREE_CODE (index) == INTEGER_CST
6746 && tree_int_cst_lt (set_high_bound, index))))
6747 return const0_rtx;
6749 if (target == 0)
6750 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6752 /* If we get here, we have to generate the code for both cases
6753 (in range and out of range). */
6755 op0 = gen_label_rtx ();
6756 op1 = gen_label_rtx ();
6758 if (! (GET_CODE (index_val) == CONST_INT
6759 && GET_CODE (lo_r) == CONST_INT))
6761 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6762 GET_MODE (index_val), iunsignedp, 0, op1);
6765 if (! (GET_CODE (index_val) == CONST_INT
6766 && GET_CODE (hi_r) == CONST_INT))
6768 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6769 GET_MODE (index_val), iunsignedp, 0, op1);
6772 /* Calculate the element number of bit zero in the first word
6773 of the set. */
6774 if (GET_CODE (lo_r) == CONST_INT)
6775 rlow = GEN_INT (INTVAL (lo_r)
6776 & ~ ((HOST_WIDE_INT) BITS_PER_UNIT - 1));
6777 else
6778 rlow = expand_binop (index_mode, and_optab, lo_r,
6779 GEN_INT (~((HOST_WIDE_INT) BITS_PER_UNIT - 1)),
6780 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6782 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6783 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6785 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6786 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6787 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6788 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6790 addr = memory_address (byte_mode,
6791 expand_binop (index_mode, add_optab, quo,
6792 setaddr, NULL_RTX, iunsignedp,
6793 OPTAB_LIB_WIDEN));
6795 /* Extract the bit we want to examine. */
6796 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6797 gen_rtx_MEM (byte_mode, addr),
6798 make_tree (TREE_TYPE (index), rem),
6799 NULL_RTX, 1);
6800 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6801 GET_MODE (target) == byte_mode ? target : 0,
6802 1, OPTAB_LIB_WIDEN);
6804 if (result != target)
6805 convert_move (target, result, 1);
6807 /* Output the code to handle the out-of-range case. */
6808 emit_jump (op0);
6809 emit_label (op1);
6810 emit_move_insn (target, const0_rtx);
6811 emit_label (op0);
6812 return target;
6815 case WITH_CLEANUP_EXPR:
6816 if (RTL_EXPR_RTL (exp) == 0)
6818 RTL_EXPR_RTL (exp)
6819 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6820 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6822 /* That's it for this cleanup. */
6823 TREE_OPERAND (exp, 2) = 0;
6825 return RTL_EXPR_RTL (exp);
6827 case CLEANUP_POINT_EXPR:
6829 /* Start a new binding layer that will keep track of all cleanup
6830 actions to be performed. */
6831 expand_start_bindings (0);
6833 target_temp_slot_level = temp_slot_level;
6835 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6836 /* If we're going to use this value, load it up now. */
6837 if (! ignore)
6838 op0 = force_not_mem (op0);
6839 preserve_temp_slots (op0);
6840 expand_end_bindings (NULL_TREE, 0, 0);
6842 return op0;
6844 case CALL_EXPR:
6845 /* Check for a built-in function. */
6846 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6847 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6848 == FUNCTION_DECL)
6849 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6850 return expand_builtin (exp, target, subtarget, tmode, ignore);
6852 /* If this call was expanded already by preexpand_calls,
6853 just return the result we got. */
6854 if (CALL_EXPR_RTL (exp) != 0)
6855 return CALL_EXPR_RTL (exp);
6857 return expand_call (exp, target, ignore);
6859 case NON_LVALUE_EXPR:
6860 case NOP_EXPR:
6861 case CONVERT_EXPR:
6862 case REFERENCE_EXPR:
6863 if (TREE_CODE (type) == UNION_TYPE)
6865 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6866 if (target == 0)
6868 if (mode != BLKmode)
6869 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6870 else
6871 target = assign_temp (type, 0, 1, 1);
6874 if (GET_CODE (target) == MEM)
6875 /* Store data into beginning of memory target. */
6876 store_expr (TREE_OPERAND (exp, 0),
6877 change_address (target, TYPE_MODE (valtype), 0), 0);
6879 else if (GET_CODE (target) == REG)
6880 /* Store this field into a union of the proper type. */
6881 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6882 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6883 VOIDmode, 0, 1,
6884 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))),
6885 0);
6886 else
6887 abort ();
6889 /* Return the entire union. */
6890 return target;
6893 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6895 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
6896 ro_modifier);
6898 /* If the signedness of the conversion differs and OP0 is
6899 a promoted SUBREG, clear that indication since we now
6900 have to do the proper extension. */
6901 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6902 && GET_CODE (op0) == SUBREG)
6903 SUBREG_PROMOTED_VAR_P (op0) = 0;
6905 return op0;
6908 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6909 if (GET_MODE (op0) == mode)
6910 return op0;
6912 /* If OP0 is a constant, just convert it into the proper mode. */
6913 if (CONSTANT_P (op0))
6914 return
6915 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6916 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6918 if (modifier == EXPAND_INITIALIZER)
6919 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
6921 if (target == 0)
6922 return
6923 convert_to_mode (mode, op0,
6924 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6925 else
6926 convert_move (target, op0,
6927 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6928 return target;
6930 case PLUS_EXPR:
6931 /* We come here from MINUS_EXPR when the second operand is a
6932 constant. */
6933 plus_expr:
6934 this_optab = add_optab;
6936 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6937 something else, make sure we add the register to the constant and
6938 then to the other thing. This case can occur during strength
6939 reduction and doing it this way will produce better code if the
6940 frame pointer or argument pointer is eliminated.
6942 fold-const.c will ensure that the constant is always in the inner
6943 PLUS_EXPR, so the only case we need to do anything about is if
6944 sp, ap, or fp is our second argument, in which case we must swap
6945 the innermost first argument and our second argument. */
6947 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6948 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6949 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6950 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6951 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6952 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6954 tree t = TREE_OPERAND (exp, 1);
6956 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6957 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6960 /* If the result is to be ptr_mode and we are adding an integer to
6961 something, we might be forming a constant. So try to use
6962 plus_constant. If it produces a sum and we can't accept it,
6963 use force_operand. This allows P = &ARR[const] to generate
6964 efficient code on machines where a SYMBOL_REF is not a valid
6965 address.
6967 If this is an EXPAND_SUM call, always return the sum. */
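/* Illustrative example: for "int *p = &arr[10];" (assuming 4-byte int)
   the address is arr + 40, and plus_constant can fold it to

       (const (plus (symbol_ref "arr") (const_int 40)))

   which is returned as-is under EXPAND_SUM, or passed through
   force_operand first when a PLUS is not an acceptable result.  */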
6968 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
6969 || mode == ptr_mode)
6971 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6972 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6973 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6975 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6976 EXPAND_SUM);
6977 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
6978 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6979 op1 = force_operand (op1, target);
6980 return op1;
6983 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6984 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6985 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6987 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6988 EXPAND_SUM);
6989 if (! CONSTANT_P (op0))
6991 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6992 VOIDmode, modifier);
6993 /* Don't go to both_summands if modifier
6994 says it's not right to return a PLUS. */
6995 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6996 goto binop2;
6997 goto both_summands;
6999 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
7000 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7001 op0 = force_operand (op0, target);
7002 return op0;
7006 /* No sense saving up arithmetic to be done
7007 if it's all in the wrong mode to form part of an address.
7008 And force_operand won't know whether to sign-extend or
7009 zero-extend. */
7010 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7011 || mode != ptr_mode)
7012 goto binop;
7014 preexpand_calls (exp);
7015 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7016 subtarget = 0;
7018 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7019 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7021 both_summands:
7022 /* Make sure any term that's a sum with a constant comes last. */
7023 if (GET_CODE (op0) == PLUS
7024 && CONSTANT_P (XEXP (op0, 1)))
7026 temp = op0;
7027 op0 = op1;
7028 op1 = temp;
7030 /* If adding to a sum including a constant,
7031 associate it to put the constant outside. */
7032 if (GET_CODE (op1) == PLUS
7033 && CONSTANT_P (XEXP (op1, 1)))
7035 rtx constant_term = const0_rtx;
7037 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7038 if (temp != 0)
7039 op0 = temp;
7040 /* Ensure that MULT comes first if there is one. */
7041 else if (GET_CODE (op0) == MULT)
7042 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7043 else
7044 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7046 /* Let's also eliminate constants from op0 if possible. */
7047 op0 = eliminate_constant_term (op0, &constant_term);
7049 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7050 their sum should be a constant. Form it into OP1, since the
7051 result we want will then be OP0 + OP1. */
7053 temp = simplify_binary_operation (PLUS, mode, constant_term,
7054 XEXP (op1, 1));
7055 if (temp != 0)
7056 op1 = temp;
7057 else
7058 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7061 /* Put a constant term last and put a multiplication first. */
7062 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7063 temp = op1, op1 = op0, op0 = temp;
7065 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7066 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7068 case MINUS_EXPR:
7069 /* For initializers, we are allowed to return a MINUS of two
7070 symbolic constants. Here we handle all cases when both operands
7071 are constant. */
7074 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7075 && really_constant_p (TREE_OPERAND (exp, 0))
7076 && really_constant_p (TREE_OPERAND (exp, 1)))
7078 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7079 VOIDmode, ro_modifier);
7080 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7081 VOIDmode, ro_modifier);
7083 /* If the last operand is a CONST_INT, use plus_constant of
7084 the negated constant. Else make the MINUS. */
7085 if (GET_CODE (op1) == CONST_INT)
7086 return plus_constant (op0, - INTVAL (op1));
7087 else
7088 return gen_rtx_MINUS (mode, op0, op1);
7090 /* Convert A - const to A + (-const). */
7091 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7093 tree negated = fold (build1 (NEGATE_EXPR, type,
7094 TREE_OPERAND (exp, 1)));
7096 /* Deal with the case where we can't negate the constant
7097 in TYPE. */
7098 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7100 tree newtype = signed_type (type);
7101 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
7102 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
7103 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
7105 if (! TREE_OVERFLOW (newneg))
7106 return expand_expr (convert (type,
7107 build (PLUS_EXPR, newtype,
7108 newop0, newneg)),
7109 target, tmode, ro_modifier);
7111 else
7113 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7114 goto plus_expr;
7117 this_optab = sub_optab;
7118 goto binop;
7120 case MULT_EXPR:
7121 preexpand_calls (exp);
7122 /* If first operand is constant, swap them.
7123 Thus the following special case checks need only
7124 check the second operand. */
7125 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7127 register tree t1 = TREE_OPERAND (exp, 0);
7128 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7129 TREE_OPERAND (exp, 1) = t1;
7132 /* Attempt to return something suitable for generating an
7133 indexed address, for machines that support that. */
7135 if (modifier == EXPAND_SUM && mode == ptr_mode
7136 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7137 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7139 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7140 EXPAND_SUM);
7142 /* Apply distributive law if OP0 is x+c. */
7143 if (GET_CODE (op0) == PLUS
7144 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7145 return gen_rtx_PLUS (mode,
7146 gen_rtx_MULT (mode, XEXP (op0, 0),
7147 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7148 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7149 * INTVAL (XEXP (op0, 1))));
7151 if (GET_CODE (op0) != REG)
7152 op0 = force_operand (op0, NULL_RTX);
7153 if (GET_CODE (op0) != REG)
7154 op0 = copy_to_mode_reg (mode, op0);
7156 return gen_rtx_MULT (mode, op0,
7157 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
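/* Illustrative example: under EXPAND_SUM, expanding i * 4 for use in
   an address can return (mult (reg i) (const_int 4)) unreduced, and by
   the distributive law above (i + 1) * 4 becomes

       (plus (mult (reg i) (const_int 4)) (const_int 4))

   both of which fit machine indexed-addressing forms directly.  */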
7160 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7161 subtarget = 0;
7163 /* Check for multiplying things that have been extended
7164 from a narrower type. If this machine supports multiplying
7165 in that narrower type with a result in the desired type,
7166 do it that way, and avoid the explicit type-conversion. */
7167 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7168 && TREE_CODE (type) == INTEGER_TYPE
7169 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7170 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7171 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7172 && int_fits_type_p (TREE_OPERAND (exp, 1),
7173 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7174 /* Don't use a widening multiply if a shift will do. */
7175 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7176 > HOST_BITS_PER_WIDE_INT)
7177 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7178 ||
7179 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7180 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7181 ==
7182 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7183 /* If both operands are extended, they must either both
7184 be zero-extended or both be sign-extended. */
7185 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7186 ==
7187 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7189 enum machine_mode innermode
7190 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7191 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7192 ? smul_widen_optab : umul_widen_optab);
7193 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7194 ? umul_widen_optab : smul_widen_optab);
7195 if (mode == GET_MODE_WIDER_MODE (innermode))
7197 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7199 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7200 NULL_RTX, VOIDmode, 0);
7201 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7202 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7203 VOIDmode, 0);
7204 else
7205 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7206 NULL_RTX, VOIDmode, 0);
7207 goto binop2;
7209 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7210 && innermode == word_mode)
7212 rtx htem;
7213 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7214 NULL_RTX, VOIDmode, 0);
7215 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7216 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7217 VOIDmode, 0);
7218 else
7219 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7220 NULL_RTX, VOIDmode, 0);
7221 temp = expand_binop (mode, other_optab, op0, op1, target,
7222 unsignedp, OPTAB_LIB_WIDEN);
7223 htem = expand_mult_highpart_adjust (innermode,
7224 gen_highpart (innermode, temp),
7225 op0, op1,
7226 gen_highpart (innermode, temp),
7227 unsignedp);
7228 emit_move_insn (gen_highpart (innermode, temp), htem);
7229 return temp;
7233 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7234 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7235 return expand_mult (mode, op0, op1, target, unsignedp);
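/* Illustrative example of the widening-multiply check above: for
   "(int) (short) a * (int) (short) b" on a 32-bit target that has a
   signed HImode-to-SImode multiply pattern (mulhisi3 on many ports),
   the NOP_EXPRs are peeled off and the narrow multiply is used
   directly, instead of extending both operands to SImode first.  */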
7237 case TRUNC_DIV_EXPR:
7238 case FLOOR_DIV_EXPR:
7239 case CEIL_DIV_EXPR:
7240 case ROUND_DIV_EXPR:
7241 case EXACT_DIV_EXPR:
7242 preexpand_calls (exp);
7243 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7244 subtarget = 0;
7245 /* Possible optimization: compute the dividend with EXPAND_SUM
7246 then, if the divisor is constant, we can optimize the case
7247 where some terms of the dividend have coefficients divisible by it. */
7248 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7249 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7250 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7252 case RDIV_EXPR:
7253 this_optab = flodiv_optab;
7254 goto binop;
7256 case TRUNC_MOD_EXPR:
7257 case FLOOR_MOD_EXPR:
7258 case CEIL_MOD_EXPR:
7259 case ROUND_MOD_EXPR:
7260 preexpand_calls (exp);
7261 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7262 subtarget = 0;
7263 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7264 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7265 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7267 case FIX_ROUND_EXPR:
7268 case FIX_FLOOR_EXPR:
7269 case FIX_CEIL_EXPR:
7270 abort (); /* Not used for C. */
7272 case FIX_TRUNC_EXPR:
7273 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7274 if (target == 0)
7275 target = gen_reg_rtx (mode);
7276 expand_fix (target, op0, unsignedp);
7277 return target;
7279 case FLOAT_EXPR:
7280 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7281 if (target == 0)
7282 target = gen_reg_rtx (mode);
7283 /* expand_float can't figure out what to do if FROM has VOIDmode.
7284 So give it the correct mode. With -O, cse will optimize this. */
7285 if (GET_MODE (op0) == VOIDmode)
7286 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7287 op0);
7288 expand_float (target, op0,
7289 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7290 return target;
7292 case NEGATE_EXPR:
7293 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7294 temp = expand_unop (mode, neg_optab, op0, target, 0);
7295 if (temp == 0)
7296 abort ();
7297 return temp;
7299 case ABS_EXPR:
7300 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7302 /* Handle complex values specially. */
7303 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7304 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7305 return expand_complex_abs (mode, op0, target, unsignedp);
7307 /* Unsigned abs is simply the operand. Testing here means we don't
7308 risk generating incorrect code below. */
7309 if (TREE_UNSIGNED (type))
7310 return op0;
7312 return expand_abs (mode, op0, target,
7313 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7315 case MAX_EXPR:
7316 case MIN_EXPR:
7317 target = original_target;
7318 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7319 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7320 || GET_MODE (target) != mode
7321 || (GET_CODE (target) == REG
7322 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7323 target = gen_reg_rtx (mode);
7324 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7325 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7327 /* First try to do it with a special MIN or MAX instruction.
7328 If that does not win, use a conditional jump to select the proper
7329 value. */
7330 this_optab = (TREE_UNSIGNED (type)
7331 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7332 : (code == MIN_EXPR ? smin_optab : smax_optab));
7334 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7335 OPTAB_WIDEN);
7336 if (temp != 0)
7337 return temp;
7339 /* At this point, a MEM target is no longer useful; we will get better
7340 code without it. */
7342 if (GET_CODE (target) == MEM)
7343 target = gen_reg_rtx (mode);
7345 if (target != op0)
7346 emit_move_insn (target, op0);
7348 op0 = gen_label_rtx ();
7350 /* If this mode is an integer too wide to compare properly,
7351 compare word by word. Rely on cse to optimize constant cases. */
7352 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
7354 if (code == MAX_EXPR)
7355 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7356 target, op1, NULL_RTX, op0);
7357 else
7358 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7359 op1, target, NULL_RTX, op0);
7360 emit_move_insn (target, op1);
7362 else
7364 if (code == MAX_EXPR)
7365 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
7366 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
7367 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
7368 else
7369 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
7370 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
7371 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
7372 if (temp == const0_rtx)
7373 emit_move_insn (target, op1);
7374 else if (temp != const_true_rtx)
7376 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
7377 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
7378 else
7379 abort ();
7380 emit_move_insn (target, op1);
7383 emit_label (op0);
7384 return target;
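/* Illustrative sketch, not part of the original source: when no
   min/max instruction matches, the jump-based fallback above computes
   MAX_EXPR roughly as

       r = a;
       if (!(r >= b))
         r = b;

   using a signed or unsigned comparison to match the type, and
   comparing word by word when the mode is too wide for
   can_compare_p.  */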
7386 case BIT_NOT_EXPR:
7387 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7388 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7389 if (temp == 0)
7390 abort ();
7391 return temp;
7393 case FFS_EXPR:
7394 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7395 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7396 if (temp == 0)
7397 abort ();
7398 return temp;
7400 /* ??? Can optimize bitwise operations with one arg constant.
7401 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7402 and (a bitwise1 b) bitwise2 b (etc)
7403 but that is probably not worthwhile. */
7405 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7406 boolean values when we want in all cases to compute both of them. In
7407 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7408 as actual zero-or-1 values and then bitwise anding. In cases where
7409 there cannot be any side effects, better code would be made by
7410 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7411 how to recognize those cases. */
7413 case TRUTH_AND_EXPR:
7414 case BIT_AND_EXPR:
7415 this_optab = and_optab;
7416 goto binop;
7418 case TRUTH_OR_EXPR:
7419 case BIT_IOR_EXPR:
7420 this_optab = ior_optab;
7421 goto binop;
7423 case TRUTH_XOR_EXPR:
7424 case BIT_XOR_EXPR:
7425 this_optab = xor_optab;
7426 goto binop;
7428 case LSHIFT_EXPR:
7429 case RSHIFT_EXPR:
7430 case LROTATE_EXPR:
7431 case RROTATE_EXPR:
7432 preexpand_calls (exp);
7433 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7434 subtarget = 0;
7435 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7436 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7437 unsignedp);
7439 /* Could determine the answer when only additive constants differ. Also,
7440 the addition of one can be handled by changing the condition. */
7441 case LT_EXPR:
7442 case LE_EXPR:
7443 case GT_EXPR:
7444 case GE_EXPR:
7445 case EQ_EXPR:
7446 case NE_EXPR:
7447 preexpand_calls (exp);
7448 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7449 if (temp != 0)
7450 return temp;
7452 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7453 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7454 && original_target
7455 && GET_CODE (original_target) == REG
7456 && (GET_MODE (original_target)
7457 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7459 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7460 VOIDmode, 0);
7462 if (temp != original_target)
7463 temp = copy_to_reg (temp);
7465 op1 = gen_label_rtx ();
7466 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7467 GET_MODE (temp), unsignedp, 0, op1);
7468 emit_move_insn (temp, const1_rtx);
7469 emit_label (op1);
7470 return temp;
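/* Illustrative sketch, not part of the original source: the special
   case above expands "r = (foo != 0)" into the equivalent of

       r = foo;
       if (r != 0)
         r = 1;

   i.e. a compare-and-skip around a move of 1 rather than a general
   store-flag sequence.  */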
7473 /* If no set-flag instruction, must generate a conditional
7474 store into a temporary variable. Drop through
7475 and handle this like && and ||. */
7477 case TRUTH_ANDIF_EXPR:
7478 case TRUTH_ORIF_EXPR:
7479 if (! ignore
7480 && (target == 0 || ! safe_from_p (target, exp, 1)
7481 /* Make sure we don't have a hard reg (such as function's return
7482 value) live across basic blocks, if not optimizing. */
7483 || (!optimize && GET_CODE (target) == REG
7484 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7485 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7487 if (target)
7488 emit_clr_insn (target);
7490 op1 = gen_label_rtx ();
7491 jumpifnot (exp, op1);
7493 if (target)
7494 emit_0_to_1_insn (target);
7496 emit_label (op1);
7497 return ignore ? const0_rtx : target;
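/* Illustrative sketch, not part of the original source: the expansion
   above computes a short-circuit boolean as

       r = 0;
       if (cond)
         r = 1;

   where the test is done by jumpifnot, so the second operand of
   TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR is never evaluated once the
   first operand decides the result.  */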
7499 case TRUTH_NOT_EXPR:
7500 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7501 /* The parser is careful to generate TRUTH_NOT_EXPR
7502 only with operands that are always zero or one. */
7503 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7504 target, 1, OPTAB_LIB_WIDEN);
7505 if (temp == 0)
7506 abort ();
7507 return temp;
7509 case COMPOUND_EXPR:
7510 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7511 emit_queue ();
7512 return expand_expr (TREE_OPERAND (exp, 1),
7513 (ignore ? const0_rtx : target),
7514 VOIDmode, 0);
7516 case COND_EXPR:
7517 /* If we would have a "singleton" (see below) were it not for a
7518 conversion in each arm, bring that conversion back out. */
7519 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7520 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7521 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7522 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7524 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7525 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7527 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7528 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7529 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7530 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7531 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7532 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7533 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7534 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7535 return expand_expr (build1 (NOP_EXPR, type,
7536 build (COND_EXPR, TREE_TYPE (true),
7537 TREE_OPERAND (exp, 0),
7538 true, false)),
7539 target, tmode, modifier);
7543 /* Note that COND_EXPRs whose type is a structure or union
7544 are required to be constructed to contain assignments of
7545 a temporary variable, so that we can evaluate them here
7546 for side effect only. If type is void, we must do likewise. */
7548 /* If an arm of the branch requires a cleanup,
7549 only that cleanup is performed. */
7551 tree singleton = 0;
7552 tree binary_op = 0, unary_op = 0;
7554 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7555 convert it to our mode, if necessary. */
7556 if (integer_onep (TREE_OPERAND (exp, 1))
7557 && integer_zerop (TREE_OPERAND (exp, 2))
7558 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7560 if (ignore)
7562 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7563 ro_modifier);
7564 return const0_rtx;
7567 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7568 if (GET_MODE (op0) == mode)
7569 return op0;
7571 if (target == 0)
7572 target = gen_reg_rtx (mode);
7573 convert_move (target, op0, unsignedp);
7574 return target;
7577 /* Check for X ? A + B : A. If we have this, we can copy A to the
7578 output and conditionally add B. Similarly for unary operations.
7579 Don't do this if X has side-effects because those side effects
7580 might affect A or B and the "?" operation is a sequence point in
7581 ANSI. (operand_equal_p tests for side effects.) */
7583 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7584 && operand_equal_p (TREE_OPERAND (exp, 2),
7585 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7586 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7587 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7588 && operand_equal_p (TREE_OPERAND (exp, 1),
7589 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7590 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7591 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7592 && operand_equal_p (TREE_OPERAND (exp, 2),
7593 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7594 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7595 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7596 && operand_equal_p (TREE_OPERAND (exp, 1),
7597 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7598 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7600 /* If we are not to produce a result, we have no target. Otherwise,
7601 if a target was specified use it; it will not be used as an
7602 intermediate target unless it is safe. If no target, use a
7603 temporary. */
7605 if (ignore)
7606 temp = 0;
7607 else if (original_target
7608 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7609 || (singleton && GET_CODE (original_target) == REG
7610 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7611 && original_target == var_rtx (singleton)))
7612 && GET_MODE (original_target) == mode
7613 #ifdef HAVE_conditional_move
7614 && (! can_conditionally_move_p (mode)
7615 || GET_CODE (original_target) == REG
7616 || TREE_ADDRESSABLE (type))
7617 #endif
7618 && ! (GET_CODE (original_target) == MEM
7619 && MEM_VOLATILE_P (original_target)))
7620 temp = original_target;
7621 else if (TREE_ADDRESSABLE (type))
7622 abort ();
7623 else
7624 temp = assign_temp (type, 0, 0, 1);
7626 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7627 do the test of X as a store-flag operation, do this as
7628 A + ((X != 0) << log C). Similarly for other simple binary
7629 operators. Only do for C == 1 if BRANCH_COST is low. */
7630 if (temp && singleton && binary_op
7631 && (TREE_CODE (binary_op) == PLUS_EXPR
7632 || TREE_CODE (binary_op) == MINUS_EXPR
7633 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7634 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7635 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7636 : integer_onep (TREE_OPERAND (binary_op, 1)))
7637 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7639 rtx result;
7640 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7641 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7642 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7643 : xor_optab);
7645 /* If we had X ? A : A + 1, do this as A + (X == 0).
7647 We have to invert the truth value here and then put it
7648 back later if do_store_flag fails. We cannot simply copy
7649 TREE_OPERAND (exp, 0) to another variable and modify that
7650 because invert_truthvalue can modify the tree pointed to
7651 by its argument. */
7652 if (singleton == TREE_OPERAND (exp, 1))
7653 TREE_OPERAND (exp, 0)
7654 = invert_truthvalue (TREE_OPERAND (exp, 0));
7656 result = do_store_flag (TREE_OPERAND (exp, 0),
7657 (safe_from_p (temp, singleton, 1)
7658 ? temp : NULL_RTX),
7659 mode, BRANCH_COST <= 1);
7661 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7662 result = expand_shift (LSHIFT_EXPR, mode, result,
7663 build_int_2 (tree_log2
7664 (TREE_OPERAND
7665 (binary_op, 1)),
7667 (safe_from_p (temp, singleton, 1)
7668 ? temp : NULL_RTX), 0);
7670 if (result)
7672 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7673 return expand_binop (mode, boptab, op1, result, temp,
7674 unsignedp, OPTAB_LIB_WIDEN);
7676 else if (singleton == TREE_OPERAND (exp, 1))
7677 TREE_OPERAND (exp, 0)
7678 = invert_truthvalue (TREE_OPERAND (exp, 0));
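/* Illustrative sketch, not part of the original source: when the
   store-flag path above succeeds on, say, "x ? a + 4 : a", the result
   is computed with no branch at all as

       r = a + ((x != 0) << 2);

   and "x ? a : a + 4" is first rewritten with invert_truthvalue so
   the same A + ((X != 0) << log C) shape applies.  */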
7681 do_pending_stack_adjust ();
7682 NO_DEFER_POP;
7683 op0 = gen_label_rtx ();
7685 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7687 if (temp != 0)
7689 /* If the target conflicts with the other operand of the
7690 binary op, we can't use it. Also, we can't use the target
7691 if it is a hard register, because evaluating the condition
7692 might clobber it. */
7693 if ((binary_op
7694 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7695 || (GET_CODE (temp) == REG
7696 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7697 temp = gen_reg_rtx (mode);
7698 store_expr (singleton, temp, 0);
7700 else
7701 expand_expr (singleton,
7702 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7703 if (singleton == TREE_OPERAND (exp, 1))
7704 jumpif (TREE_OPERAND (exp, 0), op0);
7705 else
7706 jumpifnot (TREE_OPERAND (exp, 0), op0);
7708 start_cleanup_deferral ();
7709 if (binary_op && temp == 0)
7710 /* Just touch the other operand. */
7711 expand_expr (TREE_OPERAND (binary_op, 1),
7712 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7713 else if (binary_op)
7714 store_expr (build (TREE_CODE (binary_op), type,
7715 make_tree (type, temp),
7716 TREE_OPERAND (binary_op, 1)),
7717 temp, 0);
7718 else
7719 store_expr (build1 (TREE_CODE (unary_op), type,
7720 make_tree (type, temp)),
7721 temp, 0);
7722 op1 = op0;
7724 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7725 comparison operator. If we have one of these cases, set the
7726 output to A, branch on A (cse will merge these two references),
7727 then set the output to FOO. */
7728 else if (temp
7729 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7730 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7731 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7732 TREE_OPERAND (exp, 1), 0)
7733 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7734 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7735 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7737 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7738 temp = gen_reg_rtx (mode);
7739 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7740 jumpif (TREE_OPERAND (exp, 0), op0);
7742 start_cleanup_deferral ();
7743 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7744 op1 = op0;
7746 else if (temp
7747 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7748 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7749 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7750 TREE_OPERAND (exp, 2), 0)
7751 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7752 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7753 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7755 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7756 temp = gen_reg_rtx (mode);
7757 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7758 jumpifnot (TREE_OPERAND (exp, 0), op0);
7760 start_cleanup_deferral ();
7761 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7762 op1 = op0;
7764 else
7766 op1 = gen_label_rtx ();
7767 jumpifnot (TREE_OPERAND (exp, 0), op0);
7769 start_cleanup_deferral ();
7770 if (temp != 0)
7771 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7772 else
7773 expand_expr (TREE_OPERAND (exp, 1),
7774 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7775 end_cleanup_deferral ();
7776 emit_queue ();
7777 emit_jump_insn (gen_jump (op1));
7778 emit_barrier ();
7779 emit_label (op0);
7780 start_cleanup_deferral ();
7781 if (temp != 0)
7782 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7783 else
7784 expand_expr (TREE_OPERAND (exp, 2),
7785 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7788 end_cleanup_deferral ();
7790 emit_queue ();
7791 emit_label (op1);
7792 OK_DEFER_POP;
7794 return temp;
7797 case TARGET_EXPR:
7799 /* Something needs to be initialized, but we didn't know
7800 where that thing was when building the tree. For example,
7801 it could be the return value of a function, or a parameter
7802 to a function which is laid down on the stack, or a temporary
7803 variable which must be passed by reference.
7805 We guarantee that the expression will either be constructed
7806 or copied into our original target. */
7808 tree slot = TREE_OPERAND (exp, 0);
7809 tree cleanups = NULL_TREE;
7810 tree exp1;
7812 if (TREE_CODE (slot) != VAR_DECL)
7813 abort ();
7815 if (! ignore)
7816 target = original_target;
7818 if (target == 0)
7820 if (DECL_RTL (slot) != 0)
7822 target = DECL_RTL (slot);
7823 /* If we have already expanded the slot, don't do
7824 it again. (mrs) */
7825 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7826 return target;
7828 else
7830 target = assign_temp (type, 2, 0, 1);
7831 /* All temp slots at this level must not conflict. */
7832 preserve_temp_slots (target);
7833 DECL_RTL (slot) = target;
7834 if (TREE_ADDRESSABLE (slot))
7836 TREE_ADDRESSABLE (slot) = 0;
7837 mark_addressable (slot);
7840 /* Since SLOT is not known to the called function
7841 to belong to its stack frame, we must build an explicit
7842 cleanup. This case occurs when we must build up a reference
7843 to pass the reference as an argument. In this case,
7844 it is very likely that such a reference need not be
7845 built here. */
7847 if (TREE_OPERAND (exp, 2) == 0)
7848 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
7849 cleanups = TREE_OPERAND (exp, 2);
7852 else
7854 /* This case does occur, when expanding a parameter which
7855 needs to be constructed on the stack. The target
7856 is the actual stack address that we want to initialize.
7857 The function we call will perform the cleanup in this case. */
7859 /* If we have already assigned it space, use that space,
7860 not the target that we were passed in, as our target
7861 parameter is only a hint. */
7862 if (DECL_RTL (slot) != 0)
7864 target = DECL_RTL (slot);
7865 /* If we have already expanded the slot, don't do
7866 it again. (mrs) */
7867 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7868 return target;
7870 else
7872 DECL_RTL (slot) = target;
7873 /* If we must have an addressable slot, then make sure that
7874 the RTL that we just stored in slot is OK. */
7875 if (TREE_ADDRESSABLE (slot))
7877 TREE_ADDRESSABLE (slot) = 0;
7878 mark_addressable (slot);
7883 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
7884 /* Mark it as expanded. */
7885 TREE_OPERAND (exp, 1) = NULL_TREE;
7887 TREE_USED (slot) = 1;
7888 store_expr (exp1, target, 0);
7890 expand_decl_cleanup (NULL_TREE, cleanups);
7892 return target;
7895 case INIT_EXPR:
7897 tree lhs = TREE_OPERAND (exp, 0);
7898 tree rhs = TREE_OPERAND (exp, 1);
7899 tree noncopied_parts = 0;
7900 tree lhs_type = TREE_TYPE (lhs);
7902 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7903 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7904 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7905 TYPE_NONCOPIED_PARTS (lhs_type));
7906 while (noncopied_parts != 0)
7908 expand_assignment (TREE_VALUE (noncopied_parts),
7909 TREE_PURPOSE (noncopied_parts), 0, 0);
7910 noncopied_parts = TREE_CHAIN (noncopied_parts);
7912 return temp;
7915 case MODIFY_EXPR:
7917 /* If lhs is complex, expand calls in rhs before computing it.
7918 That's so we don't compute a pointer and save it over a call.
7919 If lhs is simple, compute it first so we can give it as a
7920 target if the rhs is just a call. This avoids an extra temp and copy
7921 and that prevents a partial-subsumption which makes bad code.
7922 Actually we could treat component_ref's of vars like vars. */
7924 tree lhs = TREE_OPERAND (exp, 0);
7925 tree rhs = TREE_OPERAND (exp, 1);
7926 tree noncopied_parts = 0;
7927 tree lhs_type = TREE_TYPE (lhs);
7929 temp = 0;
7931 if (TREE_CODE (lhs) != VAR_DECL
7932 && TREE_CODE (lhs) != RESULT_DECL
7933 && TREE_CODE (lhs) != PARM_DECL
7934 && ! (TREE_CODE (lhs) == INDIRECT_REF
7935 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7936 preexpand_calls (exp);
7938 /* Check for |= or &= of a bitfield of size one into another bitfield
7939 of size 1. In this case, (unless we need the result of the
7940 assignment) we can do this more efficiently with a
7941 test followed by an assignment, if necessary.
7943 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7944 things change so that we do, this code should be enhanced to
7945 support it. */
7946 if (ignore
7947 && TREE_CODE (lhs) == COMPONENT_REF
7948 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7949 || TREE_CODE (rhs) == BIT_AND_EXPR)
7950 && TREE_OPERAND (rhs, 0) == lhs
7951 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7952 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7953 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7955 rtx label = gen_label_rtx ();
7957 do_jump (TREE_OPERAND (rhs, 1),
7958 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7959 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7960 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7961 (TREE_CODE (rhs) == BIT_IOR_EXPR
7962 ? integer_one_node
7963 : integer_zero_node)),
7964 0, 0);
7965 do_pending_stack_adjust ();
7966 emit_label (label);
7967 return const0_rtx;
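/* Illustrative sketch, not part of the original source: for two
   one-bit fields, the test-and-assign expansion above turns

       s.a |= t.b;    into    if (t.b) s.a = 1;
       s.a &= t.b;    into    if (!t.b) s.a = 0;

   avoiding a read-modify-write of the destination bitfield when the
   value of the assignment is not needed.  */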
7970 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7971 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7972 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7973 TYPE_NONCOPIED_PARTS (lhs_type));
7975 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7976 while (noncopied_parts != 0)
7978 expand_assignment (TREE_PURPOSE (noncopied_parts),
7979 TREE_VALUE (noncopied_parts), 0, 0);
7980 noncopied_parts = TREE_CHAIN (noncopied_parts);
7982 return temp;
7985 case RETURN_EXPR:
7986 if (!TREE_OPERAND (exp, 0))
7987 expand_null_return ();
7988 else
7989 expand_return (TREE_OPERAND (exp, 0));
7990 return const0_rtx;
7992 case PREINCREMENT_EXPR:
7993 case PREDECREMENT_EXPR:
7994 return expand_increment (exp, 0, ignore);
7996 case POSTINCREMENT_EXPR:
7997 case POSTDECREMENT_EXPR:
7998 /* Faster to treat as pre-increment if result is not used. */
7999 return expand_increment (exp, ! ignore, ignore);
8001 case ADDR_EXPR:
8002 /* If nonzero, TEMP will be set to the address of something that might
8003 be a MEM corresponding to a stack slot. */
8004 temp = 0;
8006 /* Are we taking the address of a nested function? */
8007 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8008 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8009 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8010 && ! TREE_STATIC (exp))
8012 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8013 op0 = force_operand (op0, target);
8015 /* If we are taking the address of something erroneous, just
8016 return a zero. */
8017 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8018 return const0_rtx;
8019 else
8021 /* We make sure to pass const0_rtx down if we came in with
8022 ignore set, to avoid doing the cleanups twice for something. */
8023 op0 = expand_expr (TREE_OPERAND (exp, 0),
8024 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8025 (modifier == EXPAND_INITIALIZER
8026 ? modifier : EXPAND_CONST_ADDRESS));
8028 /* If we are going to ignore the result, OP0 will have been set
8029 to const0_rtx, so just return it. Don't get confused and
8030 think we are taking the address of the constant. */
8031 if (ignore)
8032 return op0;
8034 op0 = protect_from_queue (op0, 0);
8036 /* We would like the object in memory. If it is a constant,
8037 we can have it be statically allocated into memory. For
8038 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
8039 memory and store the value into it. */
8041 if (CONSTANT_P (op0))
8042 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8043 op0);
8044 else if (GET_CODE (op0) == MEM)
8046 mark_temp_addr_taken (op0);
8047 temp = XEXP (op0, 0);
8050 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8051 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8053 /* If this object is in a register, it must not
8054 be BLKmode. */
8055 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8056 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8058 mark_temp_addr_taken (memloc);
8059 emit_move_insn (memloc, op0);
8060 op0 = memloc;
8063 if (GET_CODE (op0) != MEM)
8064 abort ();
8066 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8068 temp = XEXP (op0, 0);
8069 #ifdef POINTERS_EXTEND_UNSIGNED
8070 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8071 && mode == ptr_mode)
8072 temp = convert_memory_address (ptr_mode, temp);
8073 #endif
8074 return temp;
8077 op0 = force_operand (XEXP (op0, 0), target);
8080 if (flag_force_addr && GET_CODE (op0) != REG)
8081 op0 = force_reg (Pmode, op0);
8083 if (GET_CODE (op0) == REG
8084 && ! REG_USERVAR_P (op0))
8085 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
8087 /* If we might have had a temp slot, add an equivalent address
8088 for it. */
8089 if (temp != 0)
8090 update_temp_slot_address (temp, op0);
8092 #ifdef POINTERS_EXTEND_UNSIGNED
8093 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8094 && mode == ptr_mode)
8095 op0 = convert_memory_address (ptr_mode, op0);
8096 #endif
8098 return op0;
8100 case ENTRY_VALUE_EXPR:
8101 abort ();
8103 /* COMPLEX type for Extended Pascal & Fortran */
8104 case COMPLEX_EXPR:
8106 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8107 rtx insns;
8109 /* Get the rtx code of the operands. */
8110 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8111 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8113 if (! target)
8114 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8116 start_sequence ();
8118 /* Move the real (op0) and imaginary (op1) parts to their location. */
8119 emit_move_insn (gen_realpart (mode, target), op0);
8120 emit_move_insn (gen_imagpart (mode, target), op1);
8122 insns = get_insns ();
8123 end_sequence ();
8125 /* Complex construction should appear as a single unit. */
8126 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8127 each with a separate pseudo as destination.
8128 It's not correct for flow to treat them as a unit. */
8129 if (GET_CODE (target) != CONCAT)
8130 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8131 else
8132 emit_insns (insns);
8134 return target;
8137 case REALPART_EXPR:
8138 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8139 return gen_realpart (mode, op0);
8141 case IMAGPART_EXPR:
8142 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8143 return gen_imagpart (mode, op0);
8145 case CONJ_EXPR:
8147 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8148 rtx imag_t;
8149 rtx insns;
8151 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8153 if (! target)
8154 target = gen_reg_rtx (mode);
8156 start_sequence ();
8158 /* Store the realpart and the negated imagpart to target. */
8159 emit_move_insn (gen_realpart (partmode, target),
8160 gen_realpart (partmode, op0));
8162 imag_t = gen_imagpart (partmode, target);
8163 temp = expand_unop (partmode, neg_optab,
8164 gen_imagpart (partmode, op0), imag_t, 0);
8165 if (temp != imag_t)
8166 emit_move_insn (imag_t, temp);
8168 insns = get_insns ();
8169 end_sequence ();
8171 /* Conjugate should appear as a single unit.
8172 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8173 each with a separate pseudo as destination.
8174 It's not correct for flow to treat them as a unit. */
8175 if (GET_CODE (target) != CONCAT)
8176 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8177 else
8178 emit_insns (insns);
8180 return target;
8183 case TRY_CATCH_EXPR:
8185 tree handler = TREE_OPERAND (exp, 1);
8187 expand_eh_region_start ();
8189 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8191 expand_eh_region_end (handler);
8193 return op0;
8196 case TRY_FINALLY_EXPR:
8198 tree try_block = TREE_OPERAND (exp, 0);
8199 tree finally_block = TREE_OPERAND (exp, 1);
8200 rtx finally_label = gen_label_rtx ();
8201 rtx done_label = gen_label_rtx ();
8202 rtx return_link = gen_reg_rtx (Pmode);
8203 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8204 (tree) finally_label, (tree) return_link);
8205 TREE_SIDE_EFFECTS (cleanup) = 1;
8207 /* Start a new binding layer that will keep track of all cleanup
8208 actions to be performed. */
8209 expand_start_bindings (0);
8211 target_temp_slot_level = temp_slot_level;
8213 expand_decl_cleanup (NULL_TREE, cleanup);
8214 op0 = expand_expr (try_block, target, tmode, modifier);
8216 preserve_temp_slots (op0);
8217 expand_end_bindings (NULL_TREE, 0, 0);
8218 emit_jump (done_label);
8219 emit_label (finally_label);
8220 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8221 emit_indirect_jump (return_link);
8222 emit_label (done_label);
8223 return op0;
8226 case GOTO_SUBROUTINE_EXPR:
8228 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8229 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8230 rtx return_address = gen_label_rtx ();
8231 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8232 emit_jump (subr);
8233 emit_label (return_address);
8234 return const0_rtx;
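/* Illustrative sketch, not part of the original source: the insns
   emitted for TRY_FINALLY_EXPR and GOTO_SUBROUTINE_EXPR above behave
   like this GNU C fragment using labels as values:

       void *return_link;

       ... try block ...
       return_link = &&resume;
       goto finally;
     resume:
       goto done;
     finally:
       ... finally block ...
       goto *return_link;
     done:
       ;

   so a single copy of the finally block serves both the normal path
   and any cleanup-driven entry.  */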
8237 case POPDCC_EXPR:
8239 rtx dcc = get_dynamic_cleanup_chain ();
8240 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8241 return const0_rtx;
8244 case POPDHC_EXPR:
8246 rtx dhc = get_dynamic_handler_chain ();
8247 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8248 return const0_rtx;
8251 default:
8252 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8255 /* Here to do an ordinary binary operator, generating an instruction
8256 from the optab already placed in `this_optab'. */
8257 binop:
8258 preexpand_calls (exp);
8259 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8260 subtarget = 0;
8261 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8262 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8263 binop2:
8264 temp = expand_binop (mode, this_optab, op0, op1, target,
8265 unsignedp, OPTAB_LIB_WIDEN);
8266 if (temp == 0)
8267 abort ();
8268 return temp;
8273 /* Return the alignment in bits of EXP, a pointer valued expression.
8274 But don't return more than MAX_ALIGN no matter what.
8275 The alignment returned is, by default, the alignment of the thing that
8276 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
8278 Otherwise, look at the expression to see if we can do better, i.e., if the
8279 expression is actually pointing at an object whose alignment is tighter. */
8281 static int
8282 get_pointer_alignment (exp, max_align)
8283 tree exp;
8284 unsigned max_align;
8286 unsigned align, inner;
8288 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
8289 return 0;
8291 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8292 align = MIN (align, max_align);
8294 while (1)
8296 switch (TREE_CODE (exp))
8298 case NOP_EXPR:
8299 case CONVERT_EXPR:
8300 case NON_LVALUE_EXPR:
8301 exp = TREE_OPERAND (exp, 0);
8302 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
8303 return align;
8304 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8305 align = MIN (inner, max_align);
8306 break;
8308 case PLUS_EXPR:
8309 /* If sum of pointer + int, restrict our maximum alignment to that
8310 imposed by the integer. If not, we can't do any better than
8311 ALIGN. */
8312 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
8313 return align;
8315 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
8316 & (max_align - 1))
8317 != 0)
8318 max_align >>= 1;
8320 exp = TREE_OPERAND (exp, 0);
8321 break;
8323 case ADDR_EXPR:
8324 /* See what we are pointing at and look at its alignment. */
8325 exp = TREE_OPERAND (exp, 0);
8326 if (TREE_CODE (exp) == FUNCTION_DECL)
8327 align = FUNCTION_BOUNDARY;
8328 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
8329 align = DECL_ALIGN (exp);
8330 #ifdef CONSTANT_ALIGNMENT
8331 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
8332 align = CONSTANT_ALIGNMENT (exp, align);
8333 #endif
8334 return MIN (align, max_align);
8336 default:
8337 return align;
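/* Illustrative sketch, not part of the original source: given an
   8-byte-aligned object

       double x;
       char *p = (char *) &x + 2;

   the PLUS_EXPR case above shrinks max_align until the 2-byte
   (16-bit) offset is a multiple of it, so p is reported as 16-bit
   aligned even though x itself is aligned to 64 bits.  */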
8342 /* Return the tree node and offset if a given argument corresponds to
8343 a string constant. */
8345 static tree
8346 string_constant (arg, ptr_offset)
8347 tree arg;
8348 tree *ptr_offset;
8350 STRIP_NOPS (arg);
8352 if (TREE_CODE (arg) == ADDR_EXPR
8353 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8355 *ptr_offset = integer_zero_node;
8356 return TREE_OPERAND (arg, 0);
8358 else if (TREE_CODE (arg) == PLUS_EXPR)
8360 tree arg0 = TREE_OPERAND (arg, 0);
8361 tree arg1 = TREE_OPERAND (arg, 1);
8363 STRIP_NOPS (arg0);
8364 STRIP_NOPS (arg1);
8366 if (TREE_CODE (arg0) == ADDR_EXPR
8367 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8369 *ptr_offset = arg1;
8370 return TREE_OPERAND (arg0, 0);
8372 else if (TREE_CODE (arg1) == ADDR_EXPR
8373 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8375 *ptr_offset = arg0;
8376 return TREE_OPERAND (arg1, 0);
8380 return 0;
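/* Illustrative sketch, not part of the original source: for an
   argument such as

       "hello" + 2

   string_constant returns the STRING_CST for "hello" and sets
   *ptr_offset to the tree for 2; a bare "hello" yields an offset of
   integer_zero_node.  Anything that is not the (possibly offset)
   address of a STRING_CST yields 0.  */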
8383 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
8384 way, because the array could contain a zero byte in the middle.
8385 TREE_STRING_LENGTH is the size of the character array, not the string.
8387 Unfortunately, string_constant can't access the values of const char
8388 arrays with initializers, so neither can we here. */
8390 static tree
8391 c_strlen (src)
8392 tree src;
8394 tree offset_node;
8395 int offset, max;
8396 char *ptr;
8398 src = string_constant (src, &offset_node);
8399 if (src == 0)
8400 return 0;
8401 max = TREE_STRING_LENGTH (src);
8402 ptr = TREE_STRING_POINTER (src);
8403 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
8405 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
8406 compute the offset to the following null if we don't know where to
8407 start searching for it. */
8408 int i;
8409 for (i = 0; i < max; i++)
8410 if (ptr[i] == 0)
8411 return 0;
8412 /* We don't know the starting offset, but we do know that the string
8413 has no internal zero bytes. We can assume that the offset falls
8414 within the bounds of the string; otherwise, the programmer deserves
8415 what he gets. Subtract the offset from the length of the string,
8416 and return that. */
8417 /* This would perhaps not be valid if we were dealing with named
8418 arrays in addition to literal string constants. */
8419 return size_binop (MINUS_EXPR, size_int (max), offset_node);
8422 /* We have a known offset into the string. Start searching there for
8423 a null character. */
8424 if (offset_node == 0)
8425 offset = 0;
8426 else
8428 /* Did we get a long long offset? If so, punt. */
8429 if (TREE_INT_CST_HIGH (offset_node) != 0)
8430 return 0;
8431 offset = TREE_INT_CST_LOW (offset_node);
8433 /* If the offset is known to be out of bounds, warn, and call strlen at
8434 runtime. */
8435 if (offset < 0 || offset > max)
8437 warning ("offset outside bounds of constant string");
8438 return 0;
8440 /* Use strlen to search for the first zero byte. Since any strings
8441 constructed with build_string will have nulls appended, we win even
8442 if we get handed something like (char[4])"abcd".
8444 Since OFFSET is our starting index into the string, no further
8445 calculation is needed. */
8446 return size_int (strlen (ptr + offset));
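/* Illustrative sketch, not part of the original source: with the
   routines above, a call like

       strlen ("hello" + 2)

   folds to the constant 3, while strlen applied to "foo\0bar" at a
   non-constant offset cannot be folded, because the embedded zero
   byte defeats the length-minus-offset trick, and is left for the
   library.  */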
8449 rtx
8450 expand_builtin_return_addr (fndecl_code, count, tem)
8451 enum built_in_function fndecl_code;
8452 int count;
8453 rtx tem;
8455 int i;
8457 /* Some machines need special handling before we can access
8458 arbitrary frames. For example, on the sparc, we must first flush
8459 all register windows to the stack. */
8460 #ifdef SETUP_FRAME_ADDRESSES
8461 if (count > 0)
8462 SETUP_FRAME_ADDRESSES ();
8463 #endif
8465 /* On the sparc, the return address is not in the frame, it is in a
8466 register. There is no way to access it off of the current frame
8467 pointer, but it can be accessed off the previous frame pointer by
8468 reading the value from the register window save area. */
8469 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
8470 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
8471 count--;
8472 #endif
8474 /* Scan back COUNT frames to the specified frame. */
8475 for (i = 0; i < count; i++)
8477 /* Assume the dynamic chain pointer is in the word that the
8478 frame address points to, unless otherwise specified. */
8479 #ifdef DYNAMIC_CHAIN_ADDRESS
8480 tem = DYNAMIC_CHAIN_ADDRESS (tem);
8481 #endif
8482 tem = memory_address (Pmode, tem);
8483 tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
8486 /* For __builtin_frame_address, return what we've got. */
8487 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
8488 return tem;
8490 /* For __builtin_return_address, get the return address from that
8491 frame. */
8492 #ifdef RETURN_ADDR_RTX
8493 tem = RETURN_ADDR_RTX (count, tem);
8494 #else
8495 tem = memory_address (Pmode,
8496 plus_constant (tem, GET_MODE_SIZE (Pmode)));
8497 tem = gen_rtx_MEM (Pmode, tem);
8498 #endif
8499 return tem;
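/* Illustrative sketch, not part of the original source: a call such
   as

       void *caller2 = __builtin_return_address (2);

   reaches this routine with COUNT == 2 and TEM holding the current
   frame address; the loop above then follows the dynamic chain two
   frames back before RETURN_ADDR_RTX (or, by default, the Pmode word
   just past the frame address) extracts the saved return address.  */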
8502 /* __builtin_setjmp is passed a pointer to an array of five words (not
8503 all will be used on all machines). It operates similarly to the C
8504 library function of the same name, but is more efficient. Much of
8505 the code below (and for longjmp) is copied from the handling of
8506 non-local gotos.
8508 NOTE: This is intended for use by GNAT and the exception handling
8509 scheme in the compiler and will only work in the method used by
8510 them. */
8512 rtx
8513 expand_builtin_setjmp (buf_addr, target, first_label, next_label)
8514 rtx buf_addr;
8515 rtx target;
8516 rtx first_label, next_label;
8518 rtx lab1 = gen_label_rtx ();
8519 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
8520 enum machine_mode value_mode;
8521 rtx stack_save;
8523 value_mode = TYPE_MODE (integer_type_node);
8525 #ifdef POINTERS_EXTEND_UNSIGNED
8526 buf_addr = convert_memory_address (Pmode, buf_addr);
8527 #endif
8529 buf_addr = force_reg (Pmode, buf_addr);
8531 if (target == 0 || GET_CODE (target) != REG
8532 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8533 target = gen_reg_rtx (value_mode);
8535 emit_queue ();
8537 /* We store the frame pointer and the address of lab1 in the buffer
8538 and use the rest of it for the stack save area, which is
8539 machine-dependent. */
8541 #ifndef BUILTIN_SETJMP_FRAME_VALUE
8542 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
8543 #endif
8545 emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
8546 BUILTIN_SETJMP_FRAME_VALUE);
8547 emit_move_insn (validize_mem
8548 (gen_rtx_MEM (Pmode,
8549 plus_constant (buf_addr,
8550 GET_MODE_SIZE (Pmode)))),
8551 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, lab1)));
8553 stack_save = gen_rtx_MEM (sa_mode,
8554 plus_constant (buf_addr,
8555 2 * GET_MODE_SIZE (Pmode)));
8556 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8558 /* If there is further processing to do, do it. */
8559 #ifdef HAVE_builtin_setjmp_setup
8560 if (HAVE_builtin_setjmp_setup)
8561 emit_insn (gen_builtin_setjmp_setup (buf_addr));
8562 #endif
8564 /* Set TARGET to zero and branch to the first-time-through label. */
8565 emit_move_insn (target, const0_rtx);
8566 emit_jump_insn (gen_jump (first_label));
8567 emit_barrier ();
8568 emit_label (lab1);
8570 /* Tell flow about the strange goings on. Putting `lab1' on
8571 `nonlocal_goto_handler_labels' indicates that function
8572 calls may traverse the arc back to this label. */
8574 current_function_has_nonlocal_label = 1;
8575 nonlocal_goto_handler_labels =
8576 gen_rtx_EXPR_LIST (VOIDmode, lab1, nonlocal_goto_handler_labels);
8578 /* Clobber the FP when we get here, so we have to make sure it's
8579 marked as used by this function. */
8580 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8582 /* Mark the static chain as clobbered here so life information
8583 doesn't get messed up for it. */
8584 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
8586 /* Now put in the code to restore the frame pointer, and argument
8587 pointer, if needed. The code below is from expand_end_bindings
8588 in stmt.c; see detailed documentation there. */
8589 #ifdef HAVE_nonlocal_goto
8590 if (! HAVE_nonlocal_goto)
8591 #endif
8592 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8594 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8595 if (fixed_regs[ARG_POINTER_REGNUM])
8597 #ifdef ELIMINABLE_REGS
8598 size_t i;
8599 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8601 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8602 if (elim_regs[i].from == ARG_POINTER_REGNUM
8603 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8604 break;
8606 if (i == sizeof elim_regs / sizeof elim_regs [0])
8607 #endif
8609 /* Now restore our arg pointer from the address at which it
8610 was saved in our stack frame.
8611 If there hasn't been space allocated for it yet, make
8612 some now. */
8613 if (arg_pointer_save_area == 0)
8614 arg_pointer_save_area
8615 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8616 emit_move_insn (virtual_incoming_args_rtx,
8617 copy_to_reg (arg_pointer_save_area));
8620 #endif
8622 #ifdef HAVE_builtin_setjmp_receiver
8623 if (HAVE_builtin_setjmp_receiver)
8624 emit_insn (gen_builtin_setjmp_receiver (lab1));
8625 else
8626 #endif
8627 #ifdef HAVE_nonlocal_goto_receiver
8628 if (HAVE_nonlocal_goto_receiver)
8629 emit_insn (gen_nonlocal_goto_receiver ());
8630 else
8631 #endif
8633 ; /* Nothing */
8636 /* Set TARGET, and branch to the next-time-through label. */
8637 emit_move_insn (target, const1_rtx);
8638 emit_jump_insn (gen_jump (next_label));
8639 emit_barrier ();
8641 return target;
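/* Illustrative sketch, not part of the original source: the buffer
   filled in above is laid out roughly as

       buf[0]           frame pointer (BUILTIN_SETJMP_FRAME_VALUE)
       buf[1]           address of the receiver label lab1
       buf[2] onward    machine-dependent stack save area

   so that __builtin_longjmp below can restore the frame and stack
   pointers and jump straight to lab1, where TARGET is set to 1.  */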
8644 void
8645 expand_builtin_longjmp (buf_addr, value)
8646 rtx buf_addr, value;
8648 rtx fp, lab, stack;
8649 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
8651 #ifdef POINTERS_EXTEND_UNSIGNED
8652 buf_addr = convert_memory_address (Pmode, buf_addr);
8653 #endif
8654 buf_addr = force_reg (Pmode, buf_addr);
8656 /* We used to store value in static_chain_rtx, but that fails if pointers
8657 are smaller than integers. We instead require that the user must pass
8658 a second argument of 1, because that is what builtin_setjmp will
8659 return. This also makes EH slightly more efficient, since we are no
8660 longer copying around a value that we don't care about. */
8661 if (value != const1_rtx)
8662 abort ();
8664 #ifdef HAVE_builtin_longjmp
8665 if (HAVE_builtin_longjmp)
8666 emit_insn (gen_builtin_longjmp (buf_addr));
8667 else
8668 #endif
8670 fp = gen_rtx_MEM (Pmode, buf_addr);
8671 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
8672 GET_MODE_SIZE (Pmode)));
8674 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
8675 2 * GET_MODE_SIZE (Pmode)));
8677 /* Pick up FP, label, and SP from the block and jump. This code is
8678 from expand_goto in stmt.c; see there for detailed comments. */
8679 #if HAVE_nonlocal_goto
8680 if (HAVE_nonlocal_goto)
8681 /* We have to pass a value to the nonlocal_goto pattern that will
8682 get copied into the static_chain pointer, but it does not matter
8683 what that value is, because builtin_setjmp does not use it. */
8684 emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
8685 else
8686 #endif
8688 lab = copy_to_reg (lab);
8690 emit_move_insn (hard_frame_pointer_rtx, fp);
8691 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
8693 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8694 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
8695 emit_indirect_jump (lab);
8700 static rtx
8701 get_memory_rtx (exp)
8702 tree exp;
8704 rtx mem;
8705 int is_aggregate;
8707 mem = gen_rtx_MEM (BLKmode,
8708 memory_address (BLKmode,
8709 expand_expr (exp, NULL_RTX,
8710 ptr_mode, EXPAND_SUM)));
8712 RTX_UNCHANGING_P (mem) = TREE_READONLY (exp);
8714 /* Figure out the type of the object pointed to. Set MEM_IN_STRUCT_P
8715 if the value is the address of a structure or if the expression is
8716 cast to a pointer to structure type. */
8717 is_aggregate = 0;
8719 while (TREE_CODE (exp) == NOP_EXPR)
8721 tree cast_type = TREE_TYPE (exp);
8722 if (TREE_CODE (cast_type) == POINTER_TYPE
8723 && AGGREGATE_TYPE_P (TREE_TYPE (cast_type)))
8725 is_aggregate = 1;
8726 break;
8728 exp = TREE_OPERAND (exp, 0);
8731 if (is_aggregate == 0)
8733 tree type;
8735 if (TREE_CODE (exp) == ADDR_EXPR)
8736 /* If this is the address of an object, check whether the
8737 object is an array. */
8738 type = TREE_TYPE (TREE_OPERAND (exp, 0));
8739 else
8740 type = TREE_TYPE (TREE_TYPE (exp));
8741 is_aggregate = AGGREGATE_TYPE_P (type);
8744 MEM_SET_IN_STRUCT_P (mem, is_aggregate);
8745 return mem;
8749 /* Expand an expression EXP that calls a built-in function,
8750 with result going to TARGET if that's convenient
8751 (and in mode MODE if that's convenient).
8752 SUBTARGET may be used as the target for computing one of EXP's operands.
8753 IGNORE is nonzero if the value is to be ignored. */
8755 #define CALLED_AS_BUILT_IN(NODE) \
8756 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
8758 static rtx
8759 expand_builtin (exp, target, subtarget, mode, ignore)
8760 tree exp;
8761 rtx target;
8762 rtx subtarget;
8763 enum machine_mode mode;
8764 int ignore;
8766 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8767 tree arglist = TREE_OPERAND (exp, 1);
8768 rtx op0;
8769 rtx lab1, insns;
8770 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
8771 optab builtin_optab;
8773 switch (DECL_FUNCTION_CODE (fndecl))
8775 case BUILT_IN_ABS:
8776 case BUILT_IN_LABS:
8777 case BUILT_IN_FABS:
8778 /* build_function_call changes these into ABS_EXPR. */
8779 abort ();
8781 case BUILT_IN_SIN:
8782 case BUILT_IN_COS:
8783 /* Treat these like sqrt, but only if the user asks for them. */
8784 if (! flag_fast_math)
8785 break;
8786 case BUILT_IN_FSQRT:
8787 /* If not optimizing, call the library function. */
8788 if (! optimize)
8789 break;
8791 if (arglist == 0
8792 /* Arg could be wrong type if user redeclared this fcn wrong. */
8793 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
8794 break;
8796 /* Stabilize and compute the argument. */
8797 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8798 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8800 exp = copy_node (exp);
8801 arglist = copy_node (arglist);
8802 TREE_OPERAND (exp, 1) = arglist;
8803 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8805 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8807 /* Make a suitable register to place result in. */
8808 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8810 emit_queue ();
8811 start_sequence ();
8813 switch (DECL_FUNCTION_CODE (fndecl))
8815 case BUILT_IN_SIN:
8816 builtin_optab = sin_optab; break;
8817 case BUILT_IN_COS:
8818 builtin_optab = cos_optab; break;
8819 case BUILT_IN_FSQRT:
8820 builtin_optab = sqrt_optab; break;
8821 default:
8822 abort ();
8825 /* Compute into TARGET.
8826 Set TARGET to wherever the result comes back. */
8827 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8828 builtin_optab, op0, target, 0);
8830 /* If we were unable to expand via the builtin, stop the
8831 sequence (without outputting the insns) and break, causing
8832 a call to the library function. */
8833 if (target == 0)
8835 end_sequence ();
8836 break;
8839 /* Check the results by default. But if flag_fast_math is turned on,
8840 then assume sqrt will always be called with valid arguments. */
8842 if (flag_errno_math && ! flag_fast_math)
8844 /* Don't define the builtin FP instructions
8845 if your machine is not IEEE. */
8846 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8847 abort ();
8849 lab1 = gen_label_rtx ();
8851 /* Test the result; if it is NaN, set errno=EDOM because
8852 the argument was not in the domain. */
8853 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
8854 0, 0, lab1);
8856 #ifdef TARGET_EDOM
8858 #ifdef GEN_ERRNO_RTX
8859 rtx errno_rtx = GEN_ERRNO_RTX;
8860 #else
8861 rtx errno_rtx
8862 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
8863 #endif
8865 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8867 #else
8868 /* We can't set errno=EDOM directly; let the library call do it.
8869 Pop the arguments right away in case the call gets deleted. */
8870 NO_DEFER_POP;
8871 expand_call (exp, target, 0);
8872 OK_DEFER_POP;
8873 #endif
8875 emit_label (lab1);
8878 /* Output the entire sequence. */
8879 insns = get_insns ();
8880 end_sequence ();
8881 emit_insns (insns);
8883 return target;
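/* Illustrative sketch, not part of the original source: the
   self-comparison emitted above is the usual NaN test; the sequence
   behaves like

       r = sqrt_insn (x);
       if (r != r)
         errno = EDOM;

   where sqrt_insn stands for the machine's sqrt instruction and only
   a NaN compares unequal to itself.  When TARGET_EDOM is not defined,
   the errno store is replaced by a call to the library function.  */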
8885 case BUILT_IN_FMOD:
8886 break;
8888 /* __builtin_apply_args returns block of memory allocated on
8889 the stack into which is stored the arg pointer, structure
8890 value address, static chain, and all the registers that might
8891 possibly be used in performing a function call. The code is
8892 moved to the start of the function so the incoming values are
8893 saved. */
8894 case BUILT_IN_APPLY_ARGS:
8895 /* Don't do __builtin_apply_args more than once in a function.
8896 Save the result of the first call and reuse it. */
8897 if (apply_args_value != 0)
8898 return apply_args_value;
8900 /* When this function is called, it means that registers must be
8901 saved on entry to this function. So we migrate the
8902 call to the first insn of this function. */
8903 rtx temp;
8904 rtx seq;
8906 start_sequence ();
8907 temp = expand_builtin_apply_args ();
8908 seq = get_insns ();
8909 end_sequence ();
8911 apply_args_value = temp;
8913 /* Put the sequence after the NOTE that starts the function.
8914 If this is inside a SEQUENCE, make the outer-level insn
8915 chain current, so the code is placed at the start of the
8916 function. */
8917 push_topmost_sequence ();
8918 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8919 pop_topmost_sequence ();
8920 return temp;
8923 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8924 FUNCTION with a copy of the parameters described by
8925 ARGUMENTS, and ARGSIZE. It returns a block of memory
8926 allocated on the stack into which is stored all the registers
8927 that might possibly be used for returning the result of a
8928 function. ARGUMENTS is the value returned by
8929 __builtin_apply_args. ARGSIZE is the number of bytes of
8930 arguments that must be copied. ??? How should this value be
8931 computed? We'll also need a safe worst case value for varargs
8932 functions. */
8933 case BUILT_IN_APPLY:
8934 if (arglist == 0
8935 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8936 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
8937 || TREE_CHAIN (arglist) == 0
8938 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8939 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8940 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8941 return const0_rtx;
8942 else
8944 int i;
8945 tree t;
8946 rtx ops[3];
8948 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8949 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8951 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8954 /* __builtin_return (RESULT) causes the function to return the
8955 value described by RESULT. RESULT is the address of the block of
8956 memory returned by __builtin_apply. */
8957 case BUILT_IN_RETURN:
8958 if (arglist
8959 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8960 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8961 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8962 NULL_RTX, VOIDmode, 0));
8963 return const0_rtx;
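/* Illustrative sketch, not part of the original source: the three
   builtins handled above combine into the classic forwarding wrapper

       void *
       forward (void)
       {
         void *args = __builtin_apply_args ();
         void *result = __builtin_apply ((void (*) ()) target_fn,
                                         args, 64);
         __builtin_return (result);
       }

   where target_fn is a stand-in for the real callee and 64 is a
   caller-chosen upper bound on the argument bytes to copy.  */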
8965 case BUILT_IN_SAVEREGS:
8966 /* Don't do __builtin_saveregs more than once in a function.
8967 Save the result of the first call and reuse it. */
8968 if (saveregs_value != 0)
8969 return saveregs_value;
8971 /* When this function is called, it means that registers must be
8972 saved on entry to this function. So we migrate the
8973 call to the first insn of this function. */
8974 rtx temp;
8975 rtx seq;
8977 /* Now really call the function. `expand_call' does not call
8978 expand_builtin, so there is no danger of infinite recursion here. */
8979 start_sequence ();
8981 #ifdef EXPAND_BUILTIN_SAVEREGS
8982 /* Do whatever the machine needs done in this case. */
8983 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8984 #else
8985 /* The register where the function returns its value
8986 is likely to have something else in it, such as an argument.
8987 So preserve that register around the call. */
8989 if (value_mode != VOIDmode)
8991 rtx valreg = hard_libcall_value (value_mode);
8992 rtx saved_valreg = gen_reg_rtx (value_mode);
8994 emit_move_insn (saved_valreg, valreg);
8995 temp = expand_call (exp, target, ignore);
8996 emit_move_insn (valreg, saved_valreg);
8998 else
8999 /* Generate the call, putting the value in a pseudo. */
9000 temp = expand_call (exp, target, ignore);
9001 #endif
9003 seq = get_insns ();
9004 end_sequence ();
9006 saveregs_value = temp;
9008 /* Put the sequence after the NOTE that starts the function.
9009 If this is inside a SEQUENCE, make the outer-level insn
9010 chain current, so the code is placed at the start of the
9011 function. */
9012 push_topmost_sequence ();
9013 emit_insns_before (seq, NEXT_INSN (get_insns ()));
9014 pop_topmost_sequence ();
9015 return temp;
9018 /* __builtin_args_info (N) returns word N of the arg space info
9019 for the current function. The number and meanings of words
9020 are controlled by the definition of CUMULATIVE_ARGS. */
9021 case BUILT_IN_ARGS_INFO:
9023 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
9024 int *word_ptr = (int *) &current_function_args_info;
9025 #if 0
9026 /* These are used by the code below that is if 0'ed away */
9027 int i;
9028 tree type, elts, result;
9029 #endif
9031 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
9032 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
9033 __FILE__, __LINE__);
9035 if (arglist != 0)
9037 tree arg = TREE_VALUE (arglist);
9038 if (TREE_CODE (arg) != INTEGER_CST)
9039 error ("argument of `__builtin_args_info' must be constant");
9040 else
9042 int wordnum = TREE_INT_CST_LOW (arg);
9044 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
9045 error ("argument of `__builtin_args_info' out of range");
9046 else
9047 return GEN_INT (word_ptr[wordnum]);
9050 else
9051 error ("missing argument in `__builtin_args_info'");
9053 return const0_rtx;
9055 #if 0
9056 for (i = 0; i < nwords; i++)
9057 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
9059 type = build_array_type (integer_type_node,
9060 build_index_type (build_int_2 (nwords, 0)));
9061 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
9062 TREE_CONSTANT (result) = 1;
9063 TREE_STATIC (result) = 1;
9064 result = build (INDIRECT_REF, build_pointer_type (type), result);
9065 TREE_CONSTANT (result) = 1;
9066 return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
9067 #endif
9070 /* Return the address of the first anonymous stack arg. */
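   /* `va_start' is implemented in terms of this builtin, so,
      illustratively, expanding the `va_start (ap, count)' in

	 int sum (int count, ...)
	 { va_list ap; va_start (ap, count); ... }

      arrives here to compute the address of the first anonymous
      argument.  */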
9071 case BUILT_IN_NEXT_ARG:
9073 tree fntype = TREE_TYPE (current_function_decl);
9075 if ((TYPE_ARG_TYPES (fntype) == 0
9076 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
9077 == void_type_node))
9078 && ! current_function_varargs)
9080 error ("`va_start' used in function with fixed args");
9081 return const0_rtx;
9084 if (arglist)
9086 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9087 tree arg = TREE_VALUE (arglist);
9089 /* Strip off all nops for the sake of the comparison. This
9090 is not quite the same as STRIP_NOPS. It does more.
9091 We must also strip off INDIRECT_REF for C++ reference
9092 parameters. */
9093 while (TREE_CODE (arg) == NOP_EXPR
9094 || TREE_CODE (arg) == CONVERT_EXPR
9095 || TREE_CODE (arg) == NON_LVALUE_EXPR
9096 || TREE_CODE (arg) == INDIRECT_REF)
9097 arg = TREE_OPERAND (arg, 0);
9098 if (arg != last_parm)
9099 warning ("second parameter of `va_start' not last named argument");
9101 else if (! current_function_varargs)
9102 /* Evidently an out-of-date version of <stdarg.h>; we can't validate
9103 va_start's second argument, but it can still work as intended. */
9104 warning ("`__builtin_next_arg' called without an argument");
9107 return expand_binop (Pmode, add_optab,
9108 current_function_internal_arg_pointer,
9109 current_function_arg_offset_rtx,
9110 NULL_RTX, 0, OPTAB_LIB_WIDEN);
9112 case BUILT_IN_CLASSIFY_TYPE:
9113 if (arglist != 0)
9115 tree type = TREE_TYPE (TREE_VALUE (arglist));
9116 enum tree_code code = TREE_CODE (type);
9117 if (code == VOID_TYPE)
9118 return GEN_INT (void_type_class);
9119 if (code == INTEGER_TYPE)
9120 return GEN_INT (integer_type_class);
9121 if (code == CHAR_TYPE)
9122 return GEN_INT (char_type_class);
9123 if (code == ENUMERAL_TYPE)
9124 return GEN_INT (enumeral_type_class);
9125 if (code == BOOLEAN_TYPE)
9126 return GEN_INT (boolean_type_class);
9127 if (code == POINTER_TYPE)
9128 return GEN_INT (pointer_type_class);
9129 if (code == REFERENCE_TYPE)
9130 return GEN_INT (reference_type_class);
9131 if (code == OFFSET_TYPE)
9132 return GEN_INT (offset_type_class);
9133 if (code == REAL_TYPE)
9134 return GEN_INT (real_type_class);
9135 if (code == COMPLEX_TYPE)
9136 return GEN_INT (complex_type_class);
9137 if (code == FUNCTION_TYPE)
9138 return GEN_INT (function_type_class);
9139 if (code == METHOD_TYPE)
9140 return GEN_INT (method_type_class);
9141 if (code == RECORD_TYPE)
9142 return GEN_INT (record_type_class);
9143 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
9144 return GEN_INT (union_type_class);
9145 if (code == ARRAY_TYPE)
9147 if (TYPE_STRING_FLAG (type))
9148 return GEN_INT (string_type_class);
9149 else
9150 return GEN_INT (array_type_class);
9152 if (code == SET_TYPE)
9153 return GEN_INT (set_type_class);
9154 if (code == FILE_TYPE)
9155 return GEN_INT (file_type_class);
9156 if (code == LANG_TYPE)
9157 return GEN_INT (lang_type_class);
9159 return GEN_INT (no_type_class);
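   /* Illustrative examples of the mapping above:
      `__builtin_classify_type (0)' yields integer_type_class,
      `__builtin_classify_type (0.0)' yields real_type_class, and a
      string-literal argument yields pointer_type_class once the array
      has decayed to a pointer.  */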
9161 case BUILT_IN_CONSTANT_P:
9162 if (arglist == 0)
9163 return const0_rtx;
9164 else
9166 tree arg = TREE_VALUE (arglist);
9167 rtx tmp;
9169 /* We return 1 for a numeric type that's known to be a constant
9170 value at compile-time or for an aggregate type that's a
9171 literal constant. */
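	  /* For example (illustrative only), `__builtin_constant_p (3 + 4)'
	     returns 1 immediately below; a pointer-typed argument that is
	     not a literal returns 0; and an int variable is left for CSE
	     to decide via the CONSTANT_P_RTX path at the end.  */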
9172 STRIP_NOPS (arg);
9174 /* If we already know this is a constant, return the constant 1. */
9175 if (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
9176 || (TREE_CODE (arg) == CONSTRUCTOR
9177 && TREE_CONSTANT (arg))
9178 || (TREE_CODE (arg) == ADDR_EXPR
9179 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST))
9180 return const1_rtx;
9182 /* If we aren't going to be running CSE, or if this expression
9183 has side effects, show that we don't know it to be a constant.
9184 Likewise if it has a pointer or aggregate type, since in those
9185 cases we only want literals; those are only optimized
9186 when generating RTL, not later. */
9187 if (TREE_SIDE_EFFECTS (arg) || cse_not_expected
9188 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
9189 || POINTER_TYPE_P (TREE_TYPE (arg)))
9190 return const0_rtx;
9192 /* Otherwise, emit (constant_p_rtx (ARG)) and let CSE get a
9193 chance to see if it can deduce whether ARG is constant. */
9195 tmp = expand_expr (arg, NULL_RTX, VOIDmode, 0);
9196 tmp = gen_rtx_CONSTANT_P_RTX (value_mode, tmp);
9197 return tmp;
9200 case BUILT_IN_FRAME_ADDRESS:
9201 /* The argument must be a nonnegative integer constant.
9202 It counts the number of frames to scan up the stack.
9203 The value is the address of that frame. */
9204 case BUILT_IN_RETURN_ADDRESS:
9205 /* The argument must be a nonnegative integer constant.
9206 It counts the number of frames to scan up the stack.
9207 The value is the return address saved in that frame. */
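   /* Illustratively, `__builtin_return_address (0)' yields the return
      address of the current function and a count of 1 asks for the
      caller's; ports that cannot walk arbitrary frames may reject
      nonzero counts, as handled below.  */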
9208 if (arglist == 0)
9209 /* Warning about missing arg was already issued. */
9210 return const0_rtx;
9211 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
9212 || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
9214 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9215 error ("invalid arg to `__builtin_frame_address'");
9216 else
9217 error ("invalid arg to `__builtin_return_address'");
9218 return const0_rtx;
9220 else
9222 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
9223 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
9224 hard_frame_pointer_rtx);
9226 /* Some ports cannot access arbitrary stack frames. */
9227 if (tem == NULL)
9229 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9230 warning ("unsupported arg to `__builtin_frame_address'");
9231 else
9232 warning ("unsupported arg to `__builtin_return_address'");
9233 return const0_rtx;
9236 /* For __builtin_frame_address, return what we've got. */
9237 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9238 return tem;
9240 if (GET_CODE (tem) != REG
9241 && ! CONSTANT_P (tem))
9242 tem = copy_to_mode_reg (Pmode, tem);
9243 return tem;
9246 /* Return the address of the area where the structure value is
9247 returned, or 0 if there is no such area. */
9248 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
9249 if (arglist != 0
9250 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
9251 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
9252 return const0_rtx;
9253 else
9254 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
9256 case BUILT_IN_ALLOCA:
9257 if (arglist == 0
9258 /* Arg could be non-integer if user redeclared this fcn wrong. */
9259 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
9260 break;
9262 /* Compute the argument. */
9263 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
9265 /* Allocate the desired space. */
9266 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
9268 case BUILT_IN_FFS:
9269 /* If not optimizing, call the library function. */
9270 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9271 break;
9273 if (arglist == 0
9274 /* Arg could be non-integer if user redeclared this fcn wrong. */
9275 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
9276 break;
9278 /* Compute the argument. */
9279 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
9280 /* Compute ffs, into TARGET if possible.
9281 Set TARGET to wherever the result comes back. */
9282 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
9283 ffs_optab, op0, target, 1);
9284 if (target == 0)
9285 abort ();
9286 return target;
9288 case BUILT_IN_STRLEN:
9289 /* If not optimizing, call the library function. */
9290 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9291 break;
9293 if (arglist == 0
9294 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9295 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9296 break;
9297 else
9299 tree src = TREE_VALUE (arglist);
9300 tree len = c_strlen (src);
9302 int align
9303 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9305 rtx result, src_rtx, char_rtx;
9306 enum machine_mode insn_mode = value_mode, char_mode;
9307 enum insn_code icode;
9309 /* If the length is known, just return it. */
9310 if (len != 0)
9311 return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
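	  /* E.g., for `strlen ("hello")' c_strlen computes 5, so the
	     call above expands to the constant 5 (an illustration, not
	     a full description of c_strlen).  */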
9313 /* If SRC is not a pointer type, don't do this operation inline. */
9314 if (align == 0)
9315 break;
9317 /* Call a function if we can't compute strlen in the right mode. */
9319 while (insn_mode != VOIDmode)
9321 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
9322 if (icode != CODE_FOR_nothing)
9323 break;
9325 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
9327 if (insn_mode == VOIDmode)
9328 break;
9330 /* Make a place to write the result of the instruction. */
9331 result = target;
9332 if (! (result != 0
9333 && GET_CODE (result) == REG
9334 && GET_MODE (result) == insn_mode
9335 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9336 result = gen_reg_rtx (insn_mode);
9338 /* Make sure the operands are acceptable to the predicates. */
9340 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
9341 result = gen_reg_rtx (insn_mode);
9342 src_rtx = memory_address (BLKmode,
9343 expand_expr (src, NULL_RTX, ptr_mode,
9344 EXPAND_NORMAL));
9346 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
9347 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
9349 /* Check that the string is readable and has an end. */
9350 if (current_function_check_memory_usage)
9351 emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
9352 src_rtx, Pmode,
9353 GEN_INT (MEMORY_USE_RO),
9354 TYPE_MODE (integer_type_node));
9356 char_rtx = const0_rtx;
9357 char_mode = insn_operand_mode[(int)icode][2];
9358 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
9359 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
9361 emit_insn (GEN_FCN (icode) (result,
9362 gen_rtx_MEM (BLKmode, src_rtx),
9363 char_rtx, GEN_INT (align)));
9365 /* Return the value in the proper mode for this function. */
9366 if (GET_MODE (result) == value_mode)
9367 return result;
9368 else if (target != 0)
9370 convert_move (target, result, 0);
9371 return target;
9373 else
9374 return convert_to_mode (value_mode, result, 0);
9377 case BUILT_IN_STRCPY:
9378 /* If not optimizing, call the library function. */
9379 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9380 break;
9382 if (arglist == 0
9383 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9384 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9385 || TREE_CHAIN (arglist) == 0
9386 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9387 break;
9388 else
9390 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
9392 if (len == 0)
9393 break;
9395 len = size_binop (PLUS_EXPR, len, integer_one_node);
9397 chainon (arglist, build_tree_list (NULL_TREE, len));
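	  /* Illustratively, `strcpy (buf, "abc")' picks up a length of
	     3 + 1 here and is then handled by the BUILT_IN_MEMCPY code
	     below exactly as `memcpy (buf, "abc", 4)' would be.  */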
9400 /* Falls through into the BUILT_IN_MEMCPY case below. */
9401 case BUILT_IN_MEMCPY:
9402 /* If not optimizing, call the library function. */
9403 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9404 break;
9406 if (arglist == 0
9407 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9408 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9409 || TREE_CHAIN (arglist) == 0
9410 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9411 != POINTER_TYPE)
9412 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9413 || (TREE_CODE (TREE_TYPE (TREE_VALUE
9414 (TREE_CHAIN (TREE_CHAIN (arglist)))))
9415 != INTEGER_TYPE))
9416 break;
9417 else
9419 tree dest = TREE_VALUE (arglist);
9420 tree src = TREE_VALUE (TREE_CHAIN (arglist));
9421 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9423 int src_align
9424 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9425 int dest_align
9426 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9427 rtx dest_mem, src_mem, dest_addr, len_rtx;
9429 /* If either SRC or DEST is not a pointer type, don't do
9430 this operation in-line. */
9431 if (src_align == 0 || dest_align == 0)
9433 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
9434 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9435 break;
9438 dest_mem = get_memory_rtx (dest);
9439 src_mem = get_memory_rtx (src);
9440 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9442 /* Just copy the access rights of SRC to DEST. */
9443 if (current_function_check_memory_usage)
9444 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
9445 XEXP (dest_mem, 0), Pmode,
9446 XEXP (src_mem, 0), Pmode,
9447 len_rtx, TYPE_MODE (sizetype));
9449 /* Copy the block in the most expedient way possible. */
9450 dest_addr
9451 = emit_block_move (dest_mem, src_mem, len_rtx,
9452 MIN (src_align, dest_align));
9454 if (dest_addr == 0)
9455 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
9457 return dest_addr;
9460 case BUILT_IN_MEMSET:
9461 /* If not optimizing, call the library function. */
9462 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9463 break;
9465 if (arglist == 0
9466 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9467 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9468 || TREE_CHAIN (arglist) == 0
9469 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9470 != INTEGER_TYPE)
9471 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9472 || (INTEGER_TYPE
9473 != (TREE_CODE (TREE_TYPE
9474 (TREE_VALUE
9475 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
9476 break;
9477 else
9479 tree dest = TREE_VALUE (arglist);
9480 tree val = TREE_VALUE (TREE_CHAIN (arglist));
9481 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9483 int dest_align
9484 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9485 rtx dest_mem, dest_addr, len_rtx;
9487 /* If DEST is not a pointer type, don't do this
9488 operation in-line. */
9489 if (dest_align == 0)
9490 break;
9492 /* If the arguments have side-effects, then we can only evaluate
9493 them at most once. The following code evaluates them twice if
9494 they are not constants because we break out to expand_call
9495 in that case. They can't be constants if they have side-effects
9496 so we can check for that first. Alternatively, we could call
9497 save_expr to make multiple evaluation safe. */
9498 if (TREE_SIDE_EFFECTS (val) || TREE_SIDE_EFFECTS (len))
9499 break;
9501 /* If VAL is not 0, don't do this operation in-line. */
9502 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
9503 break;
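	  /* So, illustratively, `memset (p, 0, 32)' is expanded inline
	     via clear_storage below, while `memset (p, 1, 32)' takes
	     the library-call path instead.  */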
9505 /* If LEN does not expand to a constant, don't do this
9506 operation in-line. */
9507 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9508 if (GET_CODE (len_rtx) != CONST_INT)
9509 break;
9511 dest_mem = get_memory_rtx (dest);
9513 /* Just check that DEST is writable and mark it as readable. */
9514 if (current_function_check_memory_usage)
9515 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
9516 XEXP (dest_mem, 0), Pmode,
9517 len_rtx, TYPE_MODE (sizetype),
9518 GEN_INT (MEMORY_USE_WO),
9519 TYPE_MODE (integer_type_node));
9522 dest_addr = clear_storage (dest_mem, len_rtx, dest_align);
9524 if (dest_addr == 0)
9525 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
9527 return dest_addr;
9530 /* These comparison functions need an instruction that returns an actual
9531 index. An ordinary compare that just sets the condition codes
9532 is not enough. */
9533 #ifdef HAVE_cmpstrsi
9534 case BUILT_IN_STRCMP:
9535 /* If not optimizing, call the library function. */
9536 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9537 break;
9539 /* If we need to check memory accesses, call the library function. */
9540 if (current_function_check_memory_usage)
9541 break;
9543 if (arglist == 0
9544 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9545 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9546 || TREE_CHAIN (arglist) == 0
9547 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9548 break;
9549 else if (!HAVE_cmpstrsi)
9550 break;
9552 tree arg1 = TREE_VALUE (arglist);
9553 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9554 tree len, len2;
9556 len = c_strlen (arg1);
9557 if (len)
9558 len = size_binop (PLUS_EXPR, integer_one_node, len);
9559 len2 = c_strlen (arg2);
9560 if (len2)
9561 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
9563 /* If we don't have a constant length for the first, use the length
9564 of the second, if we know it. We don't require a constant for
9565 this case; some cost analysis could be done if both are available
9566 but neither is constant. For now, assume they're equally cheap.
9568 If both strings have constant lengths, use the smaller. This
9569 could arise if optimization results in strcmp being called with
9570 two fixed strings, or if the code was machine-generated. We should
9571 add some code to the `memcmp' handler below to deal with such
9572 situations, someday. */
9573 if (!len || TREE_CODE (len) != INTEGER_CST)
9575 if (len2)
9576 len = len2;
9577 else if (len == 0)
9578 break;
9580 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
9582 if (tree_int_cst_lt (len2, len))
9583 len = len2;
9586 chainon (arglist, build_tree_list (NULL_TREE, len));
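	  /* As an illustration, for `strcmp (s, "abc")' with S unknown,
	     LEN comes out as 3 + 1, so the comparison examines at most
	     4 bytes; that is safe because "abc" terminates there.  */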
9589 /* Falls through into the BUILT_IN_MEMCMP case below. */
9590 case BUILT_IN_MEMCMP:
9591 /* If not optimizing, call the library function. */
9592 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9593 break;
9595 /* If we need to check memory accesses, call the library function. */
9596 if (current_function_check_memory_usage)
9597 break;
9599 if (arglist == 0
9600 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9601 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9602 || TREE_CHAIN (arglist) == 0
9603 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
9604 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9605 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
9606 break;
9607 else if (!HAVE_cmpstrsi)
9608 break;
9610 tree arg1 = TREE_VALUE (arglist);
9611 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9612 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9613 rtx result;
9615 int arg1_align
9616 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9617 int arg2_align
9618 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9619 enum machine_mode insn_mode
9620 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
9622 /* If either argument is not a pointer, call the library function. */
9623 if (arg1_align == 0 || arg2_align == 0)
9625 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
9626 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9627 break;
9630 /* Make a place to write the result of the instruction. */
9631 result = target;
9632 if (! (result != 0
9633 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
9634 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9635 result = gen_reg_rtx (insn_mode);
9637 emit_insn (gen_cmpstrsi (result, get_memory_rtx (arg1),
9638 get_memory_rtx (arg2),
9639 expand_expr (len, NULL_RTX, VOIDmode, 0),
9640 GEN_INT (MIN (arg1_align, arg2_align))));
9642 /* Return the value in the proper mode for this function. */
9643 mode = TYPE_MODE (TREE_TYPE (exp));
9644 if (GET_MODE (result) == mode)
9645 return result;
9646 else if (target != 0)
9648 convert_move (target, result, 0);
9649 return target;
9651 else
9652 return convert_to_mode (mode, result, 0);
9654 #else
9655 case BUILT_IN_STRCMP:
9656 case BUILT_IN_MEMCMP:
9657 break;
9658 #endif
9660 case BUILT_IN_SETJMP:
9661 if (arglist == 0
9662 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9663 break;
9664 else
9666 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9667 VOIDmode, 0);
9668 rtx lab = gen_label_rtx ();
9669 rtx ret = expand_builtin_setjmp (buf_addr, target, lab, lab);
9670 emit_label (lab);
9671 return ret;
9674 /* __builtin_longjmp is passed a pointer to an array of five words.
9675 It's similar to the C library longjmp function but works with
9676 __builtin_setjmp above. */
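   /* A minimal illustrative pairing of the two builtins, as user code:

	 void *buf[5];
	 if (__builtin_setjmp (buf) == 0)
	   __builtin_longjmp (buf, 1);

      The second argument of __builtin_longjmp must be the literal 1,
      as enforced below.  */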
9677 case BUILT_IN_LONGJMP:
9678 if (arglist == 0 || TREE_CHAIN (arglist) == 0
9679 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9680 break;
9681 else
9683 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9684 VOIDmode, 0);
9685 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
9686 NULL_RTX, VOIDmode, 0);
9688 if (value != const1_rtx)
9690 error ("__builtin_longjmp second argument must be 1");
9691 return const0_rtx;
9694 expand_builtin_longjmp (buf_addr, value);
9695 return const0_rtx;
9698 case BUILT_IN_TRAP:
9699 #ifdef HAVE_trap
9700 if (HAVE_trap)
9701 emit_insn (gen_trap ());
9702 else
9703 #endif
9704 error ("__builtin_trap not supported by this target");
9705 emit_barrier ();
9706 return const0_rtx;
9708 /* Various hooks for the DWARF 2 __throw routine. */
9709 case BUILT_IN_UNWIND_INIT:
9710 expand_builtin_unwind_init ();
9711 return const0_rtx;
9712 case BUILT_IN_DWARF_CFA:
9713 return virtual_cfa_rtx;
9714 #ifdef DWARF2_UNWIND_INFO
9715 case BUILT_IN_DWARF_FP_REGNUM:
9716 return expand_builtin_dwarf_fp_regnum ();
9717 case BUILT_IN_DWARF_REG_SIZE:
9718 return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
9719 #endif
9720 case BUILT_IN_FROB_RETURN_ADDR:
9721 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
9722 case BUILT_IN_EXTRACT_RETURN_ADDR:
9723 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
9724 case BUILT_IN_EH_RETURN:
9725 expand_builtin_eh_return (TREE_VALUE (arglist),
9726 TREE_VALUE (TREE_CHAIN (arglist)),
9727 TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))));
9728 return const0_rtx;
9730 default: /* Just do a library call for any unknown builtin. */
9731 error ("built-in function `%s' not currently supported",
9732 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9735 /* The switch statement above can drop through to cause the function
9736 to be called normally. */
9738 return expand_call (exp, target, ignore);
9741 /* Built-in functions to perform an untyped call and return. */
9743 /* For each register that may be used for calling a function, this
9744 gives a mode used to copy the register's value. VOIDmode indicates
9745 the register is not used for calling a function. If the machine
9746 has register windows, this gives only the outbound registers.
9747 INCOMING_REGNO gives the corresponding inbound register. */
9748 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
9750 /* For each register that may be used for returning values, this gives
9751 a mode used to copy the register's value. VOIDmode indicates the
9752 register is not used for returning values. If the machine has
9753 register windows, this gives only the outbound registers.
9754 INCOMING_REGNO gives the corresponding inbound register. */
9755 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
9757 /* For each register that may be used for calling a function, this
9758 gives the offset of that register into the block returned by
9759 __builtin_apply_args. 0 indicates that the register is not
9760 used for calling a function. */
9761 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9763 /* Return the offset of register REGNO into the block returned by
9764 __builtin_apply_args. This is not declared static, since it is
9765 needed in objc-act.c. */
9767 int
9768 apply_args_register_offset (regno)
9769 int regno;
9771 apply_args_size ();
9773 /* Arguments are always put in outgoing registers (in the argument
9774 block) when that makes sense. */
9775 #ifdef OUTGOING_REGNO
9776 regno = OUTGOING_REGNO(regno);
9777 #endif
9778 return apply_args_reg_offset[regno];
9781 /* Return the size required for the block returned by __builtin_apply_args,
9782 and initialize apply_args_mode. */
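/* Illustratively, on a hypothetical target with a 4-byte Pmode, a
   struct_value_rtx, and two 4-byte argument registers, the block is
   laid out as: offset 0, the arg pointer; offset 4, the structure
   value address; offsets 8 and 12, the two registers; for a total
   size of 16.  */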
9784 static int
9785 apply_args_size ()
9787 static int size = -1;
9788 int align, regno;
9789 enum machine_mode mode;
9791 /* The values computed by this function never change. */
9792 if (size < 0)
9794 /* The first value is the incoming arg-pointer. */
9795 size = GET_MODE_SIZE (Pmode);
9797 /* The second value is the structure value address unless this is
9798 passed as an "invisible" first argument. */
9799 if (struct_value_rtx)
9800 size += GET_MODE_SIZE (Pmode);
9802 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9803 if (FUNCTION_ARG_REGNO_P (regno))
9805 /* Search for the proper mode for copying this register's
9806 value. I'm not sure this is right, but it works so far. */
9807 enum machine_mode best_mode = VOIDmode;
9809 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9810 mode != VOIDmode;
9811 mode = GET_MODE_WIDER_MODE (mode))
9812 if (HARD_REGNO_MODE_OK (regno, mode)
9813 && HARD_REGNO_NREGS (regno, mode) == 1)
9814 best_mode = mode;
9816 if (best_mode == VOIDmode)
9817 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9818 mode != VOIDmode;
9819 mode = GET_MODE_WIDER_MODE (mode))
9820 if (HARD_REGNO_MODE_OK (regno, mode)
9821 && (mov_optab->handlers[(int) mode].insn_code
9822 != CODE_FOR_nothing))
9823 best_mode = mode;
9825 mode = best_mode;
9826 if (mode == VOIDmode)
9827 abort ();
9829 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9830 if (size % align != 0)
9831 size = CEIL (size, align) * align;
9832 apply_args_reg_offset[regno] = size;
9833 size += GET_MODE_SIZE (mode);
9834 apply_args_mode[regno] = mode;
9836 else
9838 apply_args_mode[regno] = VOIDmode;
9839 apply_args_reg_offset[regno] = 0;
9842 return size;
9845 /* Return the size required for the block returned by __builtin_apply,
9846 and initialize apply_result_mode. */
9848 static int
9849 apply_result_size ()
9851 static int size = -1;
9852 int align, regno;
9853 enum machine_mode mode;
9855 /* The values computed by this function never change. */
9856 if (size < 0)
9858 size = 0;
9860 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9861 if (FUNCTION_VALUE_REGNO_P (regno))
9863 /* Search for the proper mode for copying this register's
9864 value. I'm not sure this is right, but it works so far. */
9865 enum machine_mode best_mode = VOIDmode;
9867 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9868 mode != TImode;
9869 mode = GET_MODE_WIDER_MODE (mode))
9870 if (HARD_REGNO_MODE_OK (regno, mode))
9871 best_mode = mode;
9873 if (best_mode == VOIDmode)
9874 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9875 mode != VOIDmode;
9876 mode = GET_MODE_WIDER_MODE (mode))
9877 if (HARD_REGNO_MODE_OK (regno, mode)
9878 && (mov_optab->handlers[(int) mode].insn_code
9879 != CODE_FOR_nothing))
9880 best_mode = mode;
9882 mode = best_mode;
9883 if (mode == VOIDmode)
9884 abort ();
9886 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9887 if (size % align != 0)
9888 size = CEIL (size, align) * align;
9889 size += GET_MODE_SIZE (mode);
9890 apply_result_mode[regno] = mode;
9892 else
9893 apply_result_mode[regno] = VOIDmode;
9895 /* Allow targets that use untyped_call and untyped_return to override
9896 the size so that machine-specific information can be stored here. */
9897 #ifdef APPLY_RESULT_SIZE
9898 size = APPLY_RESULT_SIZE;
9899 #endif
9901 return size;
9904 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9905 /* Create a vector describing the result block RESULT. If SAVEP is true,
9906 the result block is used to save the values; otherwise it is used to
9907 restore the values. */
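/* For instance, with two result registers r0 and f0, the vector built
   when SAVEP is true would look like

      (parallel [(set (mem:SI ...) (reg:SI r0))
		 (set (mem:DF ...) (reg:DF f0))])

   with each SET reversed when restoring; the register names and modes
   here are purely illustrative.  */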
9909 static rtx
9910 result_vector (savep, result)
9911 int savep;
9912 rtx result;
9914 int regno, size, align, nelts;
9915 enum machine_mode mode;
9916 rtx reg, mem;
9917 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9919 size = nelts = 0;
9920 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9921 if ((mode = apply_result_mode[regno]) != VOIDmode)
9923 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9924 if (size % align != 0)
9925 size = CEIL (size, align) * align;
9926 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
9927 mem = change_address (result, mode,
9928 plus_constant (XEXP (result, 0), size));
9929 savevec[nelts++] = (savep
9930 ? gen_rtx_SET (VOIDmode, mem, reg)
9931 : gen_rtx_SET (VOIDmode, reg, mem));
9932 size += GET_MODE_SIZE (mode);
9934 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
9936 #endif /* HAVE_untyped_call or HAVE_untyped_return */
9938 /* Save the state required to perform an untyped call with the same
9939 arguments as were passed to the current function. */
9941 static rtx
9942 expand_builtin_apply_args ()
9944 rtx registers;
9945 int size, align, regno;
9946 enum machine_mode mode;
9948 /* Create a block where the arg-pointer, structure value address,
9949 and argument registers can be saved. */
9950 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9952 /* Walk past the arg-pointer and structure value address. */
9953 size = GET_MODE_SIZE (Pmode);
9954 if (struct_value_rtx)
9955 size += GET_MODE_SIZE (Pmode);
9957 /* Save each register used in calling a function to the block. */
9958 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9959 if ((mode = apply_args_mode[regno]) != VOIDmode)
9961 rtx tem;
9963 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9964 if (size % align != 0)
9965 size = CEIL (size, align) * align;
9967 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
9969 #ifdef STACK_REGS
9970 /* For the sake of reg-stack.c's stack register housekeeping.
9971 Compare with a similar piece of code in function.c. */
9973 emit_insn (gen_rtx_USE (mode, tem));
9974 #endif
9976 emit_move_insn (change_address (registers, mode,
9977 plus_constant (XEXP (registers, 0),
9978 size)),
9979 tem);
9980 size += GET_MODE_SIZE (mode);
9983 /* Save the arg pointer to the block. */
9984 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9985 copy_to_reg (virtual_incoming_args_rtx));
9986 size = GET_MODE_SIZE (Pmode);
9988 /* Save the structure value address unless this is passed as an
9989 "invisible" first argument. */
9990 if (struct_value_incoming_rtx)
9992 emit_move_insn (change_address (registers, Pmode,
9993 plus_constant (XEXP (registers, 0),
9994 size)),
9995 copy_to_reg (struct_value_incoming_rtx));
9996 size += GET_MODE_SIZE (Pmode);
9999 /* Return the address of the block. */
10000 return copy_addr_to_reg (XEXP (registers, 0));
10003 /* Perform an untyped call and save the state required to perform an
10004 untyped return of whatever value was returned by the given function. */
10006 static rtx
10007 expand_builtin_apply (function, arguments, argsize)
10008 rtx function, arguments, argsize;
10010 int size, align, regno;
10011 enum machine_mode mode;
10012 rtx incoming_args, result, reg, dest, call_insn;
10013 rtx old_stack_level = 0;
10014 rtx call_fusage = 0;
10016 /* Create a block where the return registers can be saved. */
10017 result = assign_stack_local (BLKmode, apply_result_size (), -1);
10019 /* ??? The argsize value should be adjusted here. */
10021 /* Fetch the arg pointer from the ARGUMENTS block. */
10022 incoming_args = gen_reg_rtx (Pmode);
10023 emit_move_insn (incoming_args,
10024 gen_rtx_MEM (Pmode, arguments));
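  /* When the stack grows upward, the saved pointer apparently lies
     beyond the arguments, so step back over ARGSIZE bytes to reach
     their start; this reading is an interpretation, in keeping with
     the nearby ??? comments.  */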
10025 #ifndef STACK_GROWS_DOWNWARD
10026 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
10027 incoming_args, 0, OPTAB_LIB_WIDEN);
10028 #endif
10030 /* Perform postincrements before actually calling the function. */
10031 emit_queue ();
10033 /* Push a new argument block and copy the arguments. */
10034 do_pending_stack_adjust ();
10036 /* Save the stack pointer, using the nonlocal variant if available. */
10037 #ifdef HAVE_save_stack_nonlocal
10038 if (HAVE_save_stack_nonlocal)
10039 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
10040 else
10041 #endif
10042 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
10044 /* Push a block of memory onto the stack to store the memory arguments.
10045 Save the address in a register, and copy the memory arguments. ??? I
10046 haven't figured out how the calling convention macros affect this,
10047 but it's likely that the source and/or destination addresses in
10048 the block copy will need updating in machine specific ways. */
10049 dest = allocate_dynamic_stack_space (argsize, 0, 0);
10050 emit_block_move (gen_rtx_MEM (BLKmode, dest),
10051 gen_rtx_MEM (BLKmode, incoming_args),
10052 argsize,
10053 PARM_BOUNDARY / BITS_PER_UNIT);
10055 /* Refer to the argument block. */
10056 apply_args_size ();
10057 arguments = gen_rtx_MEM (BLKmode, arguments);
10059 /* Walk past the arg-pointer and structure value address. */
10060 size = GET_MODE_SIZE (Pmode);
10061 if (struct_value_rtx)
10062 size += GET_MODE_SIZE (Pmode);
10064 /* Restore each of the registers previously saved. Make USE insns
10065 for each of these registers for use in making the call. */
10066 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10067 if ((mode = apply_args_mode[regno]) != VOIDmode)
10069 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
10070 if (size % align != 0)
10071 size = CEIL (size, align) * align;
10072 reg = gen_rtx_REG (mode, regno);
10073 emit_move_insn (reg,
10074 change_address (arguments, mode,
10075 plus_constant (XEXP (arguments, 0),
10076 size)));
10078 use_reg (&call_fusage, reg);
10079 size += GET_MODE_SIZE (mode);
10082 /* Restore the structure value address unless this is passed as an
10083 "invisible" first argument. */
10084 size = GET_MODE_SIZE (Pmode);
10085 if (struct_value_rtx)
10087 rtx value = gen_reg_rtx (Pmode);
10088 emit_move_insn (value,
10089 change_address (arguments, Pmode,
10090 plus_constant (XEXP (arguments, 0),
10091 size)));
10092 emit_move_insn (struct_value_rtx, value);
10093 if (GET_CODE (struct_value_rtx) == REG)
10094 use_reg (&call_fusage, struct_value_rtx);
10095 size += GET_MODE_SIZE (Pmode);
10098 /* All arguments and registers used for the call are set up by now! */
10099 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
10101 /* Ensure the address is valid. A SYMBOL_REF is already valid, so
10102 nothing needs doing; we don't load it into a register here because
10103 prepare_call_address already did that if it should be done. */
10104 if (GET_CODE (function) != SYMBOL_REF)
10105 function = memory_address (FUNCTION_MODE, function);
10107 /* Generate the actual call instruction and save the return value. */
10108 #ifdef HAVE_untyped_call
10109 if (HAVE_untyped_call)
10110 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
10111 result, result_vector (1, result)));
10112 else
10113 #endif
10114 #ifdef HAVE_call_value
10115 if (HAVE_call_value)
10117 rtx valreg = 0;
10119 /* Locate the unique return register. It is not possible to
10120 express a call that sets more than one return register using
10121 call_value; use untyped_call for that. In fact, untyped_call
10122 only needs to save the return registers in the given block. */
10123 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10124 if ((mode = apply_result_mode[regno]) != VOIDmode)
10126 if (valreg)
10127 abort (); /* HAVE_untyped_call required. */
10128 valreg = gen_rtx_REG (mode, regno);
10131 emit_call_insn (gen_call_value (valreg,
10132 gen_rtx_MEM (FUNCTION_MODE, function),
10133 const0_rtx, NULL_RTX, const0_rtx));
10135 emit_move_insn (change_address (result, GET_MODE (valreg),
10136 XEXP (result, 0)),
10137 valreg);
10139 else
10140 #endif
10141 abort ();
10143 /* Find the CALL insn we just emitted. */
10144 for (call_insn = get_last_insn ();
10145 call_insn && GET_CODE (call_insn) != CALL_INSN;
10146 call_insn = PREV_INSN (call_insn))
10149 if (! call_insn)
10150 abort ();
10152 /* Put the register usage information on the CALL. If there is already
10153 some usage information, put ours at the end. */
10154 if (CALL_INSN_FUNCTION_USAGE (call_insn))
10156 rtx link;
10158 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
10159 link = XEXP (link, 1))
10162 XEXP (link, 1) = call_fusage;
10164 else
10165 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
10167 /* Restore the stack. */
10168 #ifdef HAVE_save_stack_nonlocal
10169 if (HAVE_save_stack_nonlocal)
10170 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
10171 else
10172 #endif
10173 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
10175 /* Return the address of the result block. */
10176 return copy_addr_to_reg (XEXP (result, 0));
10179 /* Perform an untyped return. */
10181 static void
10182 expand_builtin_return (result)
10183 rtx result;
10185 int size, align, regno;
10186 enum machine_mode mode;
10187 rtx reg;
10188 rtx call_fusage = 0;
10190 apply_result_size ();
10191 result = gen_rtx_MEM (BLKmode, result);
10193 #ifdef HAVE_untyped_return
10194 if (HAVE_untyped_return)
10196 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
10197 emit_barrier ();
10198 return;
10200 #endif
10202 /* Restore the return value and note that each value is used. */
10203 size = 0;
10204 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10205 if ((mode = apply_result_mode[regno]) != VOIDmode)
10207 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
10208 if (size % align != 0)
10209 size = CEIL (size, align) * align;
10210 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
10211 emit_move_insn (reg,
10212 change_address (result, mode,
10213 plus_constant (XEXP (result, 0),
10214 size)));
10216 push_to_sequence (call_fusage);
10217 emit_insn (gen_rtx_USE (VOIDmode, reg));
10218 call_fusage = get_insns ();
10219 end_sequence ();
10220 size += GET_MODE_SIZE (mode);
10223 /* Put the USE insns before the return. */
10224 emit_insns (call_fusage);
10226 /* Return whatever value was restored by jumping directly to the end
10227 of the function. */
10228 expand_null_return ();
10231 /* Expand code for a post- or pre- increment or decrement
10232 and return the RTX for the result.
10233 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
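/* Illustratively, when expanding `y = x++' this function returns an
   rtx holding the old value of X (POST is 1), while for `y = ++x' it
   returns the incremented value itself.  */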
10235 static rtx
10236 expand_increment (exp, post, ignore)
10237 register tree exp;
10238 int post, ignore;
10240 register rtx op0, op1;
10241 register rtx temp, value;
10242 register tree incremented = TREE_OPERAND (exp, 0);
10243 optab this_optab = add_optab;
10244 int icode;
10245 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
10246 int op0_is_copy = 0;
10247 int single_insn = 0;
10248 /* 1 means we can't store into OP0 directly,
10249 because it is a subreg narrower than a word,
10250 and we don't dare clobber the rest of the word. */
10251 int bad_subreg = 0;
10253 /* Stabilize any component ref that might need to be
10254 evaluated more than once below. */
10255 if (!post
10256 || TREE_CODE (incremented) == BIT_FIELD_REF
10257 || (TREE_CODE (incremented) == COMPONENT_REF
10258 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
10259 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
10260 incremented = stabilize_reference (incremented);
10261 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
10262 ones into save exprs so that they don't accidentally get evaluated
10263 more than once by the code below. */
10264 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
10265 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
10266 incremented = save_expr (incremented);
10268 /* Compute the operands as RTX.
10269 Note whether OP0 is the actual lvalue or a copy of it:
10270 I believe it is a copy iff it is a register or subreg
10271 and insns were generated in computing it. */
10273 temp = get_last_insn ();
10274 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
10276 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
10277 in place but instead must do sign- or zero-extension during assignment,
10278 so we copy it into a new register and let the code below use it as
10279 a copy.
10281 Note that we can safely modify this SUBREG since it is known not to be
10282 shared (it was made by the expand_expr call above). */
10284 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
10286 if (post)
10287 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
10288 else
10289 bad_subreg = 1;
10291 else if (GET_CODE (op0) == SUBREG
10292 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
10294 /* We cannot increment this SUBREG in place. If we are
10295 post-incrementing, get a copy of the old value. Otherwise,
10296 just mark that we cannot increment in place. */
10297 if (post)
10298 op0 = copy_to_reg (op0);
10299 else
10300 bad_subreg = 1;
10303 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
10304 && temp != get_last_insn ());
10305 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
10306 EXPAND_MEMORY_USE_BAD);
10308 /* Decide whether incrementing or decrementing. */
10309 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
10310 || TREE_CODE (exp) == PREDECREMENT_EXPR)
10311 this_optab = sub_optab;
10313 /* Convert decrement by a constant into a negative increment. */
10314 if (this_optab == sub_optab
10315 && GET_CODE (op1) == CONST_INT)
10317 op1 = GEN_INT (- INTVAL (op1));
10318 this_optab = add_optab;
10321 /* For a preincrement, see if we can do this with a single instruction. */
10322 if (!post)
10324 icode = (int) this_optab->handlers[(int) mode].insn_code;
10325 if (icode != (int) CODE_FOR_nothing
10326 /* Make sure that OP0 is valid for operands 0 and 1
10327 of the insn we want to queue. */
10328 && (*insn_operand_predicate[icode][0]) (op0, mode)
10329 && (*insn_operand_predicate[icode][1]) (op0, mode)
10330 && (*insn_operand_predicate[icode][2]) (op1, mode))
10331 single_insn = 1;
10334 /* If OP0 is not the actual lvalue, but rather a copy in a register,
10335 then we cannot just increment OP0. We must therefore contrive to
10336 increment the original value. Then, for postincrement, we can return
10337 OP0 since it is a copy of the old value. For preincrement, expand here
10338 unless we can do it with a single insn.
10340 Likewise if storing directly into OP0 would clobber high bits
10341 we need to preserve (bad_subreg). */
10342 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
10344 /* This is the easiest way to increment the value wherever it is.
10345 Problems with multiple evaluation of INCREMENTED are prevented
10346 because either (1) it is a component_ref or preincrement,
10347 in which case it was stabilized above, or (2) it is an array_ref
10348 with constant index in an array in a register, which is
10349 safe to reevaluate. */
10350 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
10351 || TREE_CODE (exp) == PREDECREMENT_EXPR)
10352 ? MINUS_EXPR : PLUS_EXPR),
10353 TREE_TYPE (exp),
10354 incremented,
10355 TREE_OPERAND (exp, 1));
10357 while (TREE_CODE (incremented) == NOP_EXPR
10358 || TREE_CODE (incremented) == CONVERT_EXPR)
10360 newexp = convert (TREE_TYPE (incremented), newexp);
10361 incremented = TREE_OPERAND (incremented, 0);
10364 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
10365 return post ? op0 : temp;
10368 if (post)
10370 /* We have a true reference to the value in OP0.
10371 If there is an insn to add or subtract in this mode, queue it.
10372 Queueing the increment insn avoids the register shuffling
10373 that often results if we must increment now and first save
10374 the old value for subsequent use. */
10376 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
10377 op0 = stabilize (op0);
10378 #endif
10380 icode = (int) this_optab->handlers[(int) mode].insn_code;
10381 if (icode != (int) CODE_FOR_nothing
10382 /* Make sure that OP0 is valid for operands 0 and 1
10383 of the insn we want to queue. */
10384 && (*insn_operand_predicate[icode][0]) (op0, mode)
10385 && (*insn_operand_predicate[icode][1]) (op0, mode))
10387 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10388 op1 = force_reg (mode, op1);
10390 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
10392 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
10394 rtx addr = (general_operand (XEXP (op0, 0), mode)
10395 ? force_reg (Pmode, XEXP (op0, 0))
10396 : copy_to_reg (XEXP (op0, 0)));
10397 rtx temp, result;
10399 op0 = change_address (op0, VOIDmode, addr);
10400 temp = force_reg (GET_MODE (op0), op0);
10401 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10402 op1 = force_reg (mode, op1);
10404 /* The increment queue is LIFO, thus we have to `queue'
10405 the instructions in reverse order. */
10406 enqueue_insn (op0, gen_move_insn (op0, temp));
10407 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
10408 return result;
10412 /* Preincrement, or we can't increment with one simple insn. */
10413 if (post)
10414 /* Save a copy of the value before inc or dec, to return it later. */
10415 temp = value = copy_to_reg (op0);
10416 else
10417 /* Arrange to return the incremented value. */
10418 /* Copy the rtx because expand_binop will protect from the queue,
10419 and the results of that would be invalid for us to return
10420 if our caller does emit_queue before using our result. */
10421 temp = copy_rtx (value = op0);
10423 /* Increment however we can. */
10424 op1 = expand_binop (mode, this_optab, value, op1,
10425 current_function_check_memory_usage ? NULL_RTX : op0,
10426 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
10427 /* Make sure the value is stored into OP0. */
10428 if (op1 != op0)
10429 emit_move_insn (op0, op1);
10431 return temp;
10434 /* Expand all function calls contained within EXP, innermost ones first.
10435 But don't look within expressions that have sequence points.
10436 For each CALL_EXPR, record the rtx for its value
10437 in the CALL_EXPR_RTL field. */
10439 static void
10440 preexpand_calls (exp)
10441 tree exp;
10443 register int nops, i;
10444 int type = TREE_CODE_CLASS (TREE_CODE (exp));
10446 if (! do_preexpand_calls)
10447 return;
10449 /* Only expressions and references can contain calls. */
10451 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
10452 return;
10454 switch (TREE_CODE (exp))
10456 case CALL_EXPR:
10457 /* Do nothing if already expanded. */
10458 if (CALL_EXPR_RTL (exp) != 0
10459 /* Do nothing if the call returns a variable-sized object. */
10460 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
10461 /* Do nothing to built-in functions. */
10462 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
10463 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
10464 == FUNCTION_DECL)
10465 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
10466 return;
10468 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
10469 return;
10471 case COMPOUND_EXPR:
10472 case COND_EXPR:
10473 case TRUTH_ANDIF_EXPR:
10474 case TRUTH_ORIF_EXPR:
10475 /* If we find one of these, then we can be sure
10476 the pending stack adjust will be done for it (since it makes jumps).
10477 Do it now, so that if this is inside an argument
10478 of a function, we don't get the stack adjustment
10479 after some other args have already been pushed. */
10480 do_pending_stack_adjust ();
10481 return;
10483 case BLOCK:
10484 case RTL_EXPR:
10485 case WITH_CLEANUP_EXPR:
10486 case CLEANUP_POINT_EXPR:
10487 case TRY_CATCH_EXPR:
10488 return;
10490 case SAVE_EXPR:
10491 if (SAVE_EXPR_RTL (exp) != 0)
10492 return;
10494 default:
10495 break;
10498 nops = tree_code_length[(int) TREE_CODE (exp)];
10499 for (i = 0; i < nops; i++)
10500 if (TREE_OPERAND (exp, i) != 0)
10502 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
10503 if (type == 'e' || type == '<' || type == '1' || type == '2'
10504 || type == 'r')
10505 preexpand_calls (TREE_OPERAND (exp, i));
10509 /* At the start of a function, record that we have no previously-pushed
10510 arguments waiting to be popped. */
10512 void
10513 init_pending_stack_adjust ()
10515 pending_stack_adjust = 0;
10518 /* When exiting from function, if safe, clear out any pending stack adjust
10519 so the adjustment won't get done.
10521 Note, if the current function calls alloca, then it must have a
10522 frame pointer regardless of the value of flag_omit_frame_pointer. */
10524 void
10525 clear_pending_stack_adjust ()
10527 #ifdef EXIT_IGNORE_STACK
10528 if (optimize > 0
10529 && (! flag_omit_frame_pointer || current_function_calls_alloca)
10530 && EXIT_IGNORE_STACK
10531 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
10532 && ! flag_inline_functions)
10533 pending_stack_adjust = 0;
10534 #endif
10537 /* Pop any previously-pushed arguments that have not been popped yet. */
10539 void
10540 do_pending_stack_adjust ()
10542 if (inhibit_defer_pop == 0)
10544 if (pending_stack_adjust != 0)
10545 adjust_stack (GEN_INT (pending_stack_adjust));
10546 pending_stack_adjust = 0;
10550 /* Expand conditional expressions. */
10552 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
10553 LABEL is an rtx of code CODE_LABEL, in this function and all the
10554 functions here. */
10556 void
10557 jumpifnot (exp, label)
10558 tree exp;
10559 rtx label;
10561 do_jump (exp, label, NULL_RTX);
10564 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
10566 void
10567 jumpif (exp, label)
10568 tree exp;
10569 rtx label;
10571 do_jump (exp, NULL_RTX, label);
10574 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
10575 the result is zero, or IF_TRUE_LABEL if the result is one.
10576 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
10577 meaning fall through in that case.
10579 do_jump always does any pending stack adjust except when it does not
10580 actually perform a jump. An example where there is no jump
10581 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
10583 This function is responsible for optimizing cases such as
10584 &&, || and comparison operators in EXP. */
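/* For example, `do_jump (a && b, false_label, 0)' emits a
   short-circuit test: jump to FALSE_LABEL if A is zero, jump again if
   B is zero, and otherwise fall through; that is the
   TRUTH_ANDIF_EXPR case below.  */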
10586 void
10587 do_jump (exp, if_false_label, if_true_label)
10588 tree exp;
10589 rtx if_false_label, if_true_label;
10591 register enum tree_code code = TREE_CODE (exp);
10592 /* Some cases need to create a label to jump to
10593 in order to properly fall through.
10594 These cases set DROP_THROUGH_LABEL nonzero. */
10595 rtx drop_through_label = 0;
10596 rtx temp;
10597 rtx comparison = 0;
10598 int i;
10599 tree type;
10600 enum machine_mode mode;
10602 #ifdef MAX_INTEGER_COMPUTATION_MODE
10603 check_max_integer_computation_mode (exp);
10604 #endif
10606 emit_queue ();
10608 switch (code)
10610 case ERROR_MARK:
10611 break;
10613 case INTEGER_CST:
10614 temp = integer_zerop (exp) ? if_false_label : if_true_label;
10615 if (temp)
10616 emit_jump (temp);
10617 break;
10619 #if 0
10620 /* This is not true with #pragma weak */
10621 case ADDR_EXPR:
10622 /* The address of something can never be zero. */
10623 if (if_true_label)
10624 emit_jump (if_true_label);
10625 break;
10626 #endif
10628 case NOP_EXPR:
10629 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
10630 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
10631 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
10632 goto normal;
10633 case CONVERT_EXPR:
10634 /* If we are narrowing the operand, we have to do the compare in the
10635 narrower mode. */
10636 if ((TYPE_PRECISION (TREE_TYPE (exp))
10637 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10638 goto normal;
10639 case NON_LVALUE_EXPR:
10640 case REFERENCE_EXPR:
10641 case ABS_EXPR:
10642 case NEGATE_EXPR:
10643 case LROTATE_EXPR:
10644 case RROTATE_EXPR:
10645 /* These cannot change zero->non-zero or vice versa. */
10646 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10647 break;
10649 #if 0
10650 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
10651 a test, and can be longer if the test is eliminated. */
10652 case PLUS_EXPR:
10653 /* Reduce to minus. */
10654 exp = build (MINUS_EXPR, TREE_TYPE (exp),
10655 TREE_OPERAND (exp, 0),
10656 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
10657 TREE_OPERAND (exp, 1))));
10658 /* Process as MINUS. */
10659 #endif
10661 case MINUS_EXPR:
10662 /* Non-zero iff operands of minus differ. */
10663 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
10664 TREE_OPERAND (exp, 0),
10665 TREE_OPERAND (exp, 1)),
10666 NE, NE);
10667 break;
10669 case BIT_AND_EXPR:
10670 /* If we are AND'ing with a small constant, do this comparison in the
10671 smallest type that fits. If the machine doesn't have comparisons
10672 that small, it will be converted back to the wider comparison.
10673 This helps if we are testing the sign bit of a narrower object.
10674 combine can't do this for us because it can't know whether a
10675 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
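      /* E.g., testing `c & 0x40' for a char C can then use a QImode
	 compare instead of first widening C to full int width, when
	 the target provides one (an illustrative example).  */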
10677 if (! SLOW_BYTE_ACCESS
10678 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10679 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10680 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
10681 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10682 && (type = type_for_mode (mode, 1)) != 0
10683 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10684 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10685 != CODE_FOR_nothing))
10687 do_jump (convert (type, exp), if_false_label, if_true_label);
10688 break;
10690 goto normal;
10692 case TRUTH_NOT_EXPR:
10693 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10694 break;
10696 case TRUTH_ANDIF_EXPR:
10697 if (if_false_label == 0)
10698 if_false_label = drop_through_label = gen_label_rtx ();
10699 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10700 start_cleanup_deferral ();
10701 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10702 end_cleanup_deferral ();
10703 break;
10705 case TRUTH_ORIF_EXPR:
10706 if (if_true_label == 0)
10707 if_true_label = drop_through_label = gen_label_rtx ();
10708 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10709 start_cleanup_deferral ();
10710 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10711 end_cleanup_deferral ();
10712 break;
10714 case COMPOUND_EXPR:
10715 push_temp_slots ();
10716 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10717 preserve_temp_slots (NULL_RTX);
10718 free_temp_slots ();
10719 pop_temp_slots ();
10720 emit_queue ();
10721 do_pending_stack_adjust ();
10722 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10723 break;
10725 case COMPONENT_REF:
10726 case BIT_FIELD_REF:
10727 case ARRAY_REF:
10729 int bitsize, bitpos, unsignedp;
10730 enum machine_mode mode;
10731 tree type;
10732 tree offset;
10733 int volatilep = 0;
10734 int alignment;
10736 /* Get description of this reference. We don't actually care
10737 about the underlying object here. */
10738 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10739 &mode, &unsignedp, &volatilep,
10740 &alignment);
10742 type = type_for_size (bitsize, unsignedp);
10743 if (! SLOW_BYTE_ACCESS
10744 && type != 0 && bitsize >= 0
10745 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10746 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10747 != CODE_FOR_nothing))
10749 do_jump (convert (type, exp), if_false_label, if_true_label);
10750 break;
10752 goto normal;
    case COND_EXPR:
      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
      if (integer_onep (TREE_OPERAND (exp, 1))
          && integer_zerop (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

      else if (integer_zerop (TREE_OPERAND (exp, 1))
               && integer_onep (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

      else
        {
          register rtx label1 = gen_label_rtx ();
          drop_through_label = gen_label_rtx ();

          do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);

          start_cleanup_deferral ();
          /* Now the THEN-expression.  */
          do_jump (TREE_OPERAND (exp, 1),
                   if_false_label ? if_false_label : drop_through_label,
                   if_true_label ? if_true_label : drop_through_label);
          /* In case the do_jump just above never jumps.  */
          do_pending_stack_adjust ();
          emit_label (label1);

          /* Now the ELSE-expression.  */
          do_jump (TREE_OPERAND (exp, 2),
                   if_false_label ? if_false_label : drop_through_label,
                   if_true_label ? if_true_label : drop_through_label);
          end_cleanup_deferral ();
        }
      break;
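      /* Illustrative examples of the special cases above: `(a ? 1 : 0)'
         jumps exactly where `a' itself would, so it becomes a plain
         do_jump on `a'; `(a ? 0 : 1)' is the same jump with the two
         labels exchanged.  Only the general case has to materialize
         LABEL1 for the ELSE arm.  */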
    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
            || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
          {
            tree exp0 = save_expr (TREE_OPERAND (exp, 0));
            tree exp1 = save_expr (TREE_OPERAND (exp, 1));
            do_jump
              (fold
               (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
                       fold (build (EQ_EXPR, TREE_TYPE (exp),
                                    fold (build1 (REALPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp0)),
                                    fold (build1 (REALPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp1)))),
                       fold (build (EQ_EXPR, TREE_TYPE (exp),
                                    fold (build1 (IMAGPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp0)),
                                    fold (build1 (IMAGPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp1)))))),
               if_false_label, if_true_label);
          }

        else if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (TYPE_MODE (inner_type)))
          do_jump_by_parts_equality (exp, if_false_label, if_true_label);
        else
          comparison = compare (exp, EQ, EQ);
        break;
      }
    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
            || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
          {
            tree exp0 = save_expr (TREE_OPERAND (exp, 0));
            tree exp1 = save_expr (TREE_OPERAND (exp, 1));
            do_jump
              (fold
               (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
                       fold (build (NE_EXPR, TREE_TYPE (exp),
                                    fold (build1 (REALPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp0)),
                                    fold (build1 (REALPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp1)))),
                       fold (build (NE_EXPR, TREE_TYPE (exp),
                                    fold (build1 (IMAGPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp0)),
                                    fold (build1 (IMAGPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp1)))))),
               if_false_label, if_true_label);
          }

        else if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (TYPE_MODE (inner_type)))
          do_jump_by_parts_equality (exp, if_true_label, if_false_label);
        else
          comparison = compare (exp, NE, NE);
        break;
      }
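      /* Illustrative sketch: for `_Complex double z1, z2' (a
         MODE_COMPLEX_FLOAT inner type), `z1 == z2' is rebuilt above as

             __real__ z1 == __real__ z2 && __imag__ z1 == __imag__ z2

         and `z1 != z2' as the TRUTH_ORIF_EXPR of the two part-wise NE
         tests, then handed back to do_jump.  The save_exprs ensure each
         operand is evaluated only once even though both of its parts are
         extracted.  */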
    case LT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        comparison = compare (exp, LT, LTU);
      break;

    case LE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        comparison = compare (exp, LE, LEU);
      break;

    case GT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        comparison = compare (exp, GT, GTU);
      break;

    case GE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        comparison = compare (exp, GE, GEU);
      break;
    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
         comparisons and tests from non-SI objects to have different code
         sequences.  */
      /* Copy to register to avoid generating bad insns by cse
         from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
        temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      if (GET_CODE (temp) == CONST_INT)
        comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
      else if (GET_CODE (temp) == LABEL_REF)
        comparison = const_true_rtx;
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
               && !can_compare_p (GET_MODE (temp)))
        /* Note swapping the labels gives us not-equal.  */
        do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
        comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
                                       NE, TREE_UNSIGNED (TREE_TYPE (exp)),
                                       GET_MODE (temp), NULL_RTX, 0);
      else
        abort ();
    }

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  /* If COMPARISON is nonzero here, it is an rtx that can be substituted
     straight into a conditional jump instruction as the jump condition.
     Otherwise, all the work has been done already.  */

  if (comparison == const_true_rtx)
    {
      if (if_true_label)
        emit_jump (if_true_label);
    }
  else if (comparison == const0_rtx)
    {
      if (if_false_label)
        emit_jump (if_false_label);
    }
  else if (comparison)
    do_jump_for_compare (comparison, if_false_label, if_true_label);

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
         do any stack adjusts from that code, before the place
         where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
                               (unsignedp || i > 0) ? GTU : GT,
                               unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_true_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
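/* Illustrative sketch of the word loop above (with schematic names
   hi0/hi1 and lo0/lo1 for the subwords): on a 32-bit target comparing two
   signed DImode values (nwords == 2), the emitted tests amount to

       if (hi0 > hi1) goto if_true_label;    (signed compare)
       if (hi0 != hi1) goto if_false_label;
       if (lo0 > lo1) goto if_true_label;    (unsigned compare)
       goto if_false_label;

   Only the most significant word is compared signed; every lower word is
   compared with GTU.  */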
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
                               (unsignedp || i > 0) ? GTU : GT,
                               unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_true_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
                                   operand_subword_force (op1, i, mode),
                                   EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
                                   word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, this may be slower, but
     that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, GET_MODE (op0)),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
                                   NULL_RTX, 0);

      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp == const0_rtx)
        emit_jump (if_true_label);
      else
        do_jump_for_compare (comp, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
                                                          GET_MODE (op0)),
                                   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
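/* Illustrative sketch of the IOR strategy above (with schematic names
   lo/hi for the two subwords): to test a DImode value against zero on a
   32-bit target, the emitted code reduces the whole test to

       part = lo | hi;
       ... one word_mode comparison of part against zero ...

   so a single IOR plus a single word-sized compare replaces a multi-word
   comparison, however wide the value is.  */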
/* Given a comparison expression in rtl form, output conditional branches to
   IF_TRUE_LABEL, IF_FALSE_LABEL, or both.  */

static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])
                        (if_true_label));
      else
        abort ();

      if (if_false_label)
        emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx first = get_last_insn (), insn, branch;
      int br_count;
      /* Output the branch with the opposite condition.  Then try to invert
         what is generated.  If more than one insn is a branch, if the
         branch is not the last insn written, or if the branch cannot be
         inverted in place, make a new true label, redirect the conditional
         jumps to it, emit an unconditional jump to the false label, and
         define the true label there.  */
      /* ??? Note that we wouldn't have to do any of this nonsense if
         we passed both labels into a combined compare-and-branch.
         Ah well, jump threading does a good job of repairing the damage.  */
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])
                        (if_false_label));
      else
        abort ();

      /* Here we get the first insn that was just emitted.  It used to be the
         case that, on some machines, emitting the branch would discard
         the previous compare insn and emit a replacement.  This isn't
         done anymore, but abort if we see that FIRST is deleted.  */

      if (first == 0)
        first = get_insns ();
      else if (INSN_DELETED_P (first))
        abort ();
      else
        first = NEXT_INSN (first);

      /* Look for multiple branches in this sequence, as might be generated
         for a multi-word integer comparison.  */

      br_count = 0;
      branch = NULL_RTX;
      for (insn = first; insn ; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == JUMP_INSN)
          {
            branch = insn;
            br_count += 1;
          }

      /* If we've got one branch at the end of the sequence,
         we can try to reverse it.  */

      if (br_count == 1 && NEXT_INSN (branch) == NULL_RTX)
        {
          rtx insn_label;
          insn_label = XEXP (condjump_label (branch), 0);
          JUMP_LABEL (branch) = insn_label;

          if (insn_label != if_false_label)
            abort ();

          if (invert_jump (branch, if_false_label))
            return;
        }

      /* Multiple branches, or reversion failed.  Convert to branches
         around an unconditional jump.  */

      if_true_label = gen_label_rtx ();
      for (insn = first; insn; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == JUMP_INSN)
          {
            rtx insn_label;
            insn_label = XEXP (condjump_label (insn), 0);
            JUMP_LABEL (insn) = insn_label;

            if (insn_label == if_false_label)
              redirect_jump (insn, if_true_label);
          }
      emit_jump (if_false_label);
      emit_label (if_true_label);
    }
}
/* Generate code for a comparison expression EXP
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static rtx
compare (exp, signed_code, unsigned_code)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
{
  register rtx op0, op1;
  register tree type;
  register enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return op0;

  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  return compare_from_rtx (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}
/* Like compare but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
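/* Illustrative sketch (with schematic source-level names): for an
   expression such as

       flag = (x < y);

   this routine tries to emit one store-flag (scc) instruction that
   deposits 0 or 1 directly into FLAG's register.  When that fails, the
   code at the end of the function falls back on the set/jump/set
   sequence, roughly `flag = 1; if (x < y) goto done; flag = 0; done:;'.  */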
static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }
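  /* Illustrative examples of the rewrites above, for signed `int x' and
     unsigned `unsigned u': `x < 1' becomes `x <= 0', `x > -1' becomes
     `x >= 0', and `u >= 1' becomes `u > 0'.  Comparisons against zero are
     what the sign-bit and single-bit shortcuts below look for.  */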
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
              < TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (subtarget == 0 || GET_CODE (subtarget) != REG
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }
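  /* Illustrative sketch of the single-bit path above (schematic name t for
     the intermediate value): for `((x & 8) != 0)' the emitted operations
     amount to

         t = x >> 3;    (move bit 3 to the low-order position)
         t = t & 1;     (mask; omitted when bit 3 is the sign bit)

     and for the EQ form `((x & 8) == 0)' a `t ^= 1' is inserted before
     the final AND, all without needing an scc instruction.  */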
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);
  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if emit_store_flag does anything at all, it will succeed, and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
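  /* Illustrative example: for `switch (i)' with cases 3 through 10, the
     caller has already computed INDEX = i - 3 and RANGE = 7, so the single
     unsigned test below amounts to

         if ((unsigned) (i - 3) > 7) goto default_label;

     which rejects both i < 3 (the subtraction wraps around to a huge
     unsigned value) and i > 10 in one comparison.  */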
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           0, default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

#endif /* HAVE_tablejump */