1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-flags.h"
34 #include "insn-codes.h"
35 #include "insn-config.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "recog.h"
39 #include "reload.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "toplev.h"
43 #include "ggc.h"
44 #include "intl.h"
45 #include "tm_p.h"
47 #ifndef ACCUMULATE_OUTGOING_ARGS
48 #define ACCUMULATE_OUTGOING_ARGS 0
49 #endif
51 /* Supply a default definition for PUSH_ARGS. */
52 #ifndef PUSH_ARGS
53 #ifdef PUSH_ROUNDING
54 #define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
55 #else
56 #define PUSH_ARGS 0
57 #endif
58 #endif
60 /* Decide whether a function's arguments should be processed
61 from first to last or from last to first.
63 They should if the stack and args grow in opposite directions, but
64 only if we have push insns. */
66 #ifdef PUSH_ROUNDING
68 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
69 #define PUSH_ARGS_REVERSED /* If it's last to first. */
70 #endif
72 #endif
74 #ifndef STACK_PUSH_CODE
75 #ifdef STACK_GROWS_DOWNWARD
76 #define STACK_PUSH_CODE PRE_DEC
77 #else
78 #define STACK_PUSH_CODE PRE_INC
79 #endif
80 #endif
82 /* Assume that case vectors are not pc-relative. */
83 #ifndef CASE_VECTOR_PC_RELATIVE
84 #define CASE_VECTOR_PC_RELATIVE 0
85 #endif
87 /* Hook called by safe_from_p for language-specific tree codes. It is
88 up to the language front-end to install a hook if it has any such
89 codes that safe_from_p needs to know about. Since safe_from_p will
90 recursively explore the TREE_OPERANDs of an expression, this hook
91 should not reexamine those pieces. This routine may recursively
92 call safe_from_p; it should always pass `0' as the TOP_P
93 parameter. */
94 int (*lang_safe_from_p) PARAMS ((rtx, tree));
96 /* If this is nonzero, we do not bother generating VOLATILE
97 around volatile memory references, and we are willing to
98 output indirect addresses. If cse is to follow, we reject
99 indirect addresses so a useful potential cse is generated;
100 if it is used only once, instruction combination will produce
101 the same indirect address eventually. */
102 int cse_not_expected;
104 /* Don't check memory usage, since code is being emitted to check memory
105 usage. Used when current_function_check_memory_usage is true, to avoid
106 infinite recursion. */
107 static int in_check_memory_usage;
109 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
110 static tree placeholder_list = 0;
112 /* This structure is used by move_by_pieces to describe the move to
113 be performed. */
114 struct move_by_pieces
115 {
116 rtx to;
117 rtx to_addr;
118 int autinc_to;
119 int explicit_inc_to;
120 rtx from;
121 rtx from_addr;
122 int autinc_from;
123 int explicit_inc_from;
124 unsigned HOST_WIDE_INT len;
125 HOST_WIDE_INT offset;
126 int reverse;
127 };
129 /* This structure is used by store_by_pieces to describe the clear to
130 be performed. */
132 struct store_by_pieces
133 {
134 rtx to;
135 rtx to_addr;
136 int autinc_to;
137 int explicit_inc_to;
138 unsigned HOST_WIDE_INT len;
139 HOST_WIDE_INT offset;
140 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
141 PTR constfundata;
142 int reverse;
143 };
145 extern struct obstack permanent_obstack;
147 static rtx get_push_address PARAMS ((int));
149 static rtx enqueue_insn PARAMS ((rtx, rtx));
150 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
151 PARAMS ((unsigned HOST_WIDE_INT,
152 unsigned int));
153 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
154 struct move_by_pieces *));
155 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
156 enum machine_mode));
157 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
158 unsigned int));
159 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
160 unsigned int));
161 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
162 enum machine_mode,
163 struct store_by_pieces *));
164 static rtx get_subtarget PARAMS ((rtx));
165 static int is_zeros_p PARAMS ((tree));
166 static int mostly_zeros_p PARAMS ((tree));
167 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
168 HOST_WIDE_INT, enum machine_mode,
169 tree, tree, unsigned int, int,
170 int));
171 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
172 HOST_WIDE_INT));
173 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
174 HOST_WIDE_INT, enum machine_mode,
175 tree, enum machine_mode, int,
176 unsigned int, HOST_WIDE_INT, int));
177 static enum memory_use_mode
178 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
179 static tree save_noncopied_parts PARAMS ((tree, tree));
180 static tree init_noncopied_parts PARAMS ((tree, tree));
181 static int fixed_type_p PARAMS ((tree));
182 static rtx var_rtx PARAMS ((tree));
183 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
184 static rtx expand_increment PARAMS ((tree, int, int));
185 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
186 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
187 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
188 rtx, rtx));
189 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
191 /* Record for each mode whether we can move a register directly to or
192 from an object of that mode in memory. If we can't, we won't try
193 to use that mode directly when accessing a field of that mode. */
195 static char direct_load[NUM_MACHINE_MODES];
196 static char direct_store[NUM_MACHINE_MODES];
198 /* If a memory-to-memory move would take MOVE_RATIO or more simple
199 move-instruction sequences, we will do a movstr or libcall instead. */
201 #ifndef MOVE_RATIO
202 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
203 #define MOVE_RATIO 2
204 #else
205 /* If we are optimizing for space (-Os), cut down the default move ratio. */
206 #define MOVE_RATIO (optimize_size ? 3 : 15)
207 #endif
208 #endif
210 /* This macro is used to determine whether move_by_pieces should be called
211 to perform a structure copy. */
212 #ifndef MOVE_BY_PIECES_P
213 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
214 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
215 #endif
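/* For illustration: with the default MOVE_RATIO of 15 (used when no
   movstrM patterns are defined and we are not optimizing for size),
   copying a well-aligned 16-byte block on a target whose widest piece
   is 4 bytes takes four move insns, so MOVE_BY_PIECES_P is true and the
   copy is expanded inline rather than through a movstr pattern or a
   library call.  */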
217 /* This array records the insn_code of insns to perform block moves. */
218 enum insn_code movstr_optab[NUM_MACHINE_MODES];
220 /* This array records the insn_code of insns to perform block clears. */
221 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
223 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
225 #ifndef SLOW_UNALIGNED_ACCESS
226 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
227 #endif
229 /* This is run once per compilation to set up which modes can be used
230 directly in memory and to initialize the block move optab. */
232 void
233 init_expr_once ()
235 rtx insn, pat;
236 enum machine_mode mode;
237 int num_clobbers;
238 rtx mem, mem1;
240 start_sequence ();
242 /* Try indexing by frame ptr and try by stack ptr.
243 It is known that on the Convex the stack ptr isn't a valid index.
244 With luck, one or the other is valid on any machine. */
245 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
246 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
248 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
249 pat = PATTERN (insn);
251 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
252 mode = (enum machine_mode) ((int) mode + 1))
254 int regno;
255 rtx reg;
257 direct_load[(int) mode] = direct_store[(int) mode] = 0;
258 PUT_MODE (mem, mode);
259 PUT_MODE (mem1, mode);
261 /* See if there is some register that can be used in this mode and
262 directly loaded or stored from memory. */
264 if (mode != VOIDmode && mode != BLKmode)
265 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
266 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
267 regno++)
269 if (! HARD_REGNO_MODE_OK (regno, mode))
270 continue;
272 reg = gen_rtx_REG (mode, regno);
274 SET_SRC (pat) = mem;
275 SET_DEST (pat) = reg;
276 if (recog (pat, insn, &num_clobbers) >= 0)
277 direct_load[(int) mode] = 1;
279 SET_SRC (pat) = mem1;
280 SET_DEST (pat) = reg;
281 if (recog (pat, insn, &num_clobbers) >= 0)
282 direct_load[(int) mode] = 1;
284 SET_SRC (pat) = reg;
285 SET_DEST (pat) = mem;
286 if (recog (pat, insn, &num_clobbers) >= 0)
287 direct_store[(int) mode] = 1;
289 SET_SRC (pat) = reg;
290 SET_DEST (pat) = mem1;
291 if (recog (pat, insn, &num_clobbers) >= 0)
292 direct_store[(int) mode] = 1;
296 end_sequence ();
299 /* This is run at the start of compiling a function. */
301 void
302 init_expr ()
304 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
306 pending_chain = 0;
307 pending_stack_adjust = 0;
308 stack_pointer_delta = 0;
309 inhibit_defer_pop = 0;
310 saveregs_value = 0;
311 apply_args_value = 0;
312 forced_labels = 0;
315 void
316 mark_expr_status (p)
317 struct expr_status *p;
319 if (p == NULL)
320 return;
322 ggc_mark_rtx (p->x_saveregs_value);
323 ggc_mark_rtx (p->x_apply_args_value);
324 ggc_mark_rtx (p->x_forced_labels);
327 void
328 free_expr_status (f)
329 struct function *f;
331 free (f->expr);
332 f->expr = NULL;
335 /* Small sanity check that the queue is empty at the end of a function. */
337 void
338 finish_expr_for_function ()
340 if (pending_chain)
341 abort ();
344 /* Manage the queue of increment instructions to be output
345 for POSTINCREMENT_EXPR expressions, etc. */
347 /* Queue up to increment (or change) VAR later. BODY says how:
348 BODY should be the same thing you would pass to emit_insn
349 to increment right away. It will go to emit_insn later on.
351 The value is a QUEUED expression to be used in place of VAR
352 where you want to guarantee the pre-incrementation value of VAR. */
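/* For example, when expanding a POSTINCREMENT_EXPR such as `i++' used as
   an rvalue, the increment rtl is queued with enqueue_insn and the QUEUED
   rtx it returns stands for the old value of `i'; emit_queue later emits
   the queued increment at the proper point.  */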
354 static rtx
355 enqueue_insn (var, body)
356 rtx var, body;
358 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
359 body, pending_chain);
360 return pending_chain;
363 /* Use protect_from_queue to convert a QUEUED expression
364 into something that you can put immediately into an instruction.
365 If the queued incrementation has not happened yet,
366 protect_from_queue returns the variable itself.
367 If the incrementation has happened, protect_from_queue returns a temp
368 that contains a copy of the old value of the variable.
370 Any time an rtx which might possibly be a QUEUED is to be put
371 into an instruction, it must be passed through protect_from_queue first.
372 QUEUED expressions are not meaningful in instructions.
374 Do not pass a value through protect_from_queue and then hold
375 on to it for a while before putting it in an instruction!
376 If the queue is flushed in between, incorrect code will result. */
378 rtx
379 protect_from_queue (x, modify)
380 register rtx x;
381 int modify;
383 register RTX_CODE code = GET_CODE (x);
385 #if 0 /* A QUEUED can hang around after the queue is forced out. */
386 /* Shortcut for most common case. */
387 if (pending_chain == 0)
388 return x;
389 #endif
391 if (code != QUEUED)
393 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
394 use of autoincrement. Make a copy of the contents of the memory
395 location rather than a copy of the address, but not if the value is
396 of mode BLKmode. Don't modify X in place since it might be
397 shared. */
398 if (code == MEM && GET_MODE (x) != BLKmode
399 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
401 register rtx y = XEXP (x, 0);
402 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
404 MEM_COPY_ATTRIBUTES (new, x);
406 if (QUEUED_INSN (y))
408 register rtx temp = gen_reg_rtx (GET_MODE (new));
409 emit_insn_before (gen_move_insn (temp, new),
410 QUEUED_INSN (y));
411 return temp;
413 return new;
415 /* Otherwise, recursively protect the subexpressions of all
416 the kinds of rtx's that can contain a QUEUED. */
417 if (code == MEM)
419 rtx tem = protect_from_queue (XEXP (x, 0), 0);
420 if (tem != XEXP (x, 0))
422 x = copy_rtx (x);
423 XEXP (x, 0) = tem;
426 else if (code == PLUS || code == MULT)
428 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
429 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
430 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
432 x = copy_rtx (x);
433 XEXP (x, 0) = new0;
434 XEXP (x, 1) = new1;
437 return x;
439 /* If the increment has not happened, use the variable itself. */
440 if (QUEUED_INSN (x) == 0)
441 return QUEUED_VAR (x);
442 /* If the increment has happened and a pre-increment copy exists,
443 use that copy. */
444 if (QUEUED_COPY (x) != 0)
445 return QUEUED_COPY (x);
446 /* The increment has happened but we haven't set up a pre-increment copy.
447 Set one up now, and use it. */
448 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
449 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
450 QUEUED_INSN (x));
451 return QUEUED_COPY (x);
454 /* Return nonzero if X contains a QUEUED expression:
455 if it contains anything that will be altered by a queued increment.
456 We handle only combinations of MEM, PLUS, MINUS and MULT operators
457 since memory addresses generally contain only those. */
459 int
460 queued_subexp_p (x)
461 rtx x;
463 register enum rtx_code code = GET_CODE (x);
464 switch (code)
466 case QUEUED:
467 return 1;
468 case MEM:
469 return queued_subexp_p (XEXP (x, 0));
470 case MULT:
471 case PLUS:
472 case MINUS:
473 return (queued_subexp_p (XEXP (x, 0))
474 || queued_subexp_p (XEXP (x, 1)));
475 default:
476 return 0;
480 /* Perform all the pending incrementations. */
482 void
483 emit_queue ()
485 register rtx p;
486 while ((p = pending_chain))
488 rtx body = QUEUED_BODY (p);
490 if (GET_CODE (body) == SEQUENCE)
492 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
493 emit_insn (QUEUED_BODY (p));
495 else
496 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
497 pending_chain = QUEUED_NEXT (p);
501 /* Copy data from FROM to TO, where the machine modes are not the same.
502 Both modes may be integer, or both may be floating.
503 UNSIGNEDP should be nonzero if FROM is an unsigned type.
504 This causes zero-extension instead of sign-extension. */
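/* For illustration: widening a QImode source into an SImode destination
   with UNSIGNEDP nonzero produces a zero extension, done with a direct
   extension insn when can_extend_p finds one, through an intermediate
   mode otherwise, or with an explicit pair of shifts as a last resort.  */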
506 void
507 convert_move (to, from, unsignedp)
508 register rtx to, from;
509 int unsignedp;
511 enum machine_mode to_mode = GET_MODE (to);
512 enum machine_mode from_mode = GET_MODE (from);
513 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
514 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
515 enum insn_code code;
516 rtx libcall;
518 /* rtx code for making an equivalent value. */
519 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
521 to = protect_from_queue (to, 1);
522 from = protect_from_queue (from, 0);
524 if (to_real != from_real)
525 abort ();
527 /* If FROM is a SUBREG that indicates that we have already done at least
528 the required extension, strip it. We don't handle such SUBREGs as
529 TO here. */
531 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
532 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
533 >= GET_MODE_SIZE (to_mode))
534 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
535 from = gen_lowpart (to_mode, from), from_mode = to_mode;
537 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
538 abort ();
540 if (to_mode == from_mode
541 || (from_mode == VOIDmode && CONSTANT_P (from)))
543 emit_move_insn (to, from);
544 return;
547 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
549 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
550 abort ();
552 if (VECTOR_MODE_P (to_mode))
553 from = gen_rtx_SUBREG (to_mode, from, 0);
554 else
555 to = gen_rtx_SUBREG (from_mode, to, 0);
557 emit_move_insn (to, from);
558 return;
561 if (to_real != from_real)
562 abort ();
564 if (to_real)
566 rtx value, insns;
568 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
570 /* Try converting directly if the insn is supported. */
571 if ((code = can_extend_p (to_mode, from_mode, 0))
572 != CODE_FOR_nothing)
574 emit_unop_insn (code, to, from, UNKNOWN);
575 return;
579 #ifdef HAVE_trunchfqf2
580 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
582 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
583 return;
585 #endif
586 #ifdef HAVE_trunctqfqf2
587 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
589 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
590 return;
592 #endif
593 #ifdef HAVE_truncsfqf2
594 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
596 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
597 return;
599 #endif
600 #ifdef HAVE_truncdfqf2
601 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
603 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
604 return;
606 #endif
607 #ifdef HAVE_truncxfqf2
608 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
610 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
611 return;
613 #endif
614 #ifdef HAVE_trunctfqf2
615 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
617 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
618 return;
620 #endif
622 #ifdef HAVE_trunctqfhf2
623 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
625 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
626 return;
628 #endif
629 #ifdef HAVE_truncsfhf2
630 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
632 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
633 return;
635 #endif
636 #ifdef HAVE_truncdfhf2
637 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
639 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
640 return;
642 #endif
643 #ifdef HAVE_truncxfhf2
644 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
646 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
647 return;
649 #endif
650 #ifdef HAVE_trunctfhf2
651 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
653 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
654 return;
656 #endif
658 #ifdef HAVE_truncsftqf2
659 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
661 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
662 return;
664 #endif
665 #ifdef HAVE_truncdftqf2
666 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
668 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
669 return;
671 #endif
672 #ifdef HAVE_truncxftqf2
673 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
675 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
676 return;
678 #endif
679 #ifdef HAVE_trunctftqf2
680 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
682 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
683 return;
685 #endif
687 #ifdef HAVE_truncdfsf2
688 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
690 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
691 return;
693 #endif
694 #ifdef HAVE_truncxfsf2
695 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
697 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
698 return;
700 #endif
701 #ifdef HAVE_trunctfsf2
702 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
704 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
705 return;
707 #endif
708 #ifdef HAVE_truncxfdf2
709 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
711 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
712 return;
714 #endif
715 #ifdef HAVE_trunctfdf2
716 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
718 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
719 return;
721 #endif
723 libcall = (rtx) 0;
724 switch (from_mode)
726 case SFmode:
727 switch (to_mode)
729 case DFmode:
730 libcall = extendsfdf2_libfunc;
731 break;
733 case XFmode:
734 libcall = extendsfxf2_libfunc;
735 break;
737 case TFmode:
738 libcall = extendsftf2_libfunc;
739 break;
741 default:
742 break;
744 break;
746 case DFmode:
747 switch (to_mode)
749 case SFmode:
750 libcall = truncdfsf2_libfunc;
751 break;
753 case XFmode:
754 libcall = extenddfxf2_libfunc;
755 break;
757 case TFmode:
758 libcall = extenddftf2_libfunc;
759 break;
761 default:
762 break;
764 break;
766 case XFmode:
767 switch (to_mode)
769 case SFmode:
770 libcall = truncxfsf2_libfunc;
771 break;
773 case DFmode:
774 libcall = truncxfdf2_libfunc;
775 break;
777 default:
778 break;
780 break;
782 case TFmode:
783 switch (to_mode)
785 case SFmode:
786 libcall = trunctfsf2_libfunc;
787 break;
789 case DFmode:
790 libcall = trunctfdf2_libfunc;
791 break;
793 default:
794 break;
796 break;
798 default:
799 break;
802 if (libcall == (rtx) 0)
803 /* This conversion is not implemented yet. */
804 abort ();
806 start_sequence ();
807 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
808 1, from, from_mode);
809 insns = get_insns ();
810 end_sequence ();
811 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
812 from));
813 return;
816 /* Now both modes are integers. */
818 /* Handle expanding beyond a word. */
819 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
820 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
822 rtx insns;
823 rtx lowpart;
824 rtx fill_value;
825 rtx lowfrom;
826 int i;
827 enum machine_mode lowpart_mode;
828 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
830 /* Try converting directly if the insn is supported. */
831 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
832 != CODE_FOR_nothing)
834 /* If FROM is a SUBREG, put it into a register. Do this
835 so that we always generate the same set of insns for
836 better cse'ing; if an intermediate assignment occurred,
837 we won't be doing the operation directly on the SUBREG. */
838 if (optimize > 0 && GET_CODE (from) == SUBREG)
839 from = force_reg (from_mode, from);
840 emit_unop_insn (code, to, from, equiv_code);
841 return;
843 /* Next, try converting via full word. */
844 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
845 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
846 != CODE_FOR_nothing))
848 if (GET_CODE (to) == REG)
849 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
850 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
851 emit_unop_insn (code, to,
852 gen_lowpart (word_mode, to), equiv_code);
853 return;
856 /* No special multiword conversion insn; do it by hand. */
857 start_sequence ();
859 /* Since we will turn this into a no conflict block, we must ensure
860 that the source does not overlap the target. */
862 if (reg_overlap_mentioned_p (to, from))
863 from = force_reg (from_mode, from);
865 /* Get a copy of FROM widened to a word, if necessary. */
866 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
867 lowpart_mode = word_mode;
868 else
869 lowpart_mode = from_mode;
871 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
873 lowpart = gen_lowpart (lowpart_mode, to);
874 emit_move_insn (lowpart, lowfrom);
876 /* Compute the value to put in each remaining word. */
877 if (unsignedp)
878 fill_value = const0_rtx;
879 else
881 #ifdef HAVE_slt
882 if (HAVE_slt
883 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
884 && STORE_FLAG_VALUE == -1)
886 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
887 lowpart_mode, 0, 0);
888 fill_value = gen_reg_rtx (word_mode);
889 emit_insn (gen_slt (fill_value));
891 else
892 #endif
894 fill_value
895 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
896 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
897 NULL_RTX, 0);
898 fill_value = convert_to_mode (word_mode, fill_value, 1);
902 /* Fill the remaining words. */
903 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
905 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
906 rtx subword = operand_subword (to, index, 1, to_mode);
908 if (subword == 0)
909 abort ();
911 if (fill_value != subword)
912 emit_move_insn (subword, fill_value);
915 insns = get_insns ();
916 end_sequence ();
918 emit_no_conflict_block (insns, to, from, NULL_RTX,
919 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
920 return;
923 /* Truncating multi-word to a word or less. */
924 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
925 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
927 if (!((GET_CODE (from) == MEM
928 && ! MEM_VOLATILE_P (from)
929 && direct_load[(int) to_mode]
930 && ! mode_dependent_address_p (XEXP (from, 0)))
931 || GET_CODE (from) == REG
932 || GET_CODE (from) == SUBREG))
933 from = force_reg (from_mode, from);
934 convert_move (to, gen_lowpart (word_mode, from), 0);
935 return;
938 /* Handle pointer conversion. */ /* SPEE 900220. */
939 if (to_mode == PQImode)
941 if (from_mode != QImode)
942 from = convert_to_mode (QImode, from, unsignedp);
944 #ifdef HAVE_truncqipqi2
945 if (HAVE_truncqipqi2)
947 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
948 return;
950 #endif /* HAVE_truncqipqi2 */
951 abort ();
954 if (from_mode == PQImode)
956 if (to_mode != QImode)
958 from = convert_to_mode (QImode, from, unsignedp);
959 from_mode = QImode;
961 else
963 #ifdef HAVE_extendpqiqi2
964 if (HAVE_extendpqiqi2)
966 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
967 return;
969 #endif /* HAVE_extendpqiqi2 */
970 abort ();
974 if (to_mode == PSImode)
976 if (from_mode != SImode)
977 from = convert_to_mode (SImode, from, unsignedp);
979 #ifdef HAVE_truncsipsi2
980 if (HAVE_truncsipsi2)
982 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
983 return;
985 #endif /* HAVE_truncsipsi2 */
986 abort ();
989 if (from_mode == PSImode)
991 if (to_mode != SImode)
993 from = convert_to_mode (SImode, from, unsignedp);
994 from_mode = SImode;
996 else
998 #ifdef HAVE_extendpsisi2
999 if (! unsignedp && HAVE_extendpsisi2)
1001 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1002 return;
1004 #endif /* HAVE_extendpsisi2 */
1005 #ifdef HAVE_zero_extendpsisi2
1006 if (unsignedp && HAVE_zero_extendpsisi2)
1008 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1009 return;
1011 #endif /* HAVE_zero_extendpsisi2 */
1012 abort ();
1016 if (to_mode == PDImode)
1018 if (from_mode != DImode)
1019 from = convert_to_mode (DImode, from, unsignedp);
1021 #ifdef HAVE_truncdipdi2
1022 if (HAVE_truncdipdi2)
1024 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1025 return;
1027 #endif /* HAVE_truncdipdi2 */
1028 abort ();
1031 if (from_mode == PDImode)
1033 if (to_mode != DImode)
1035 from = convert_to_mode (DImode, from, unsignedp);
1036 from_mode = DImode;
1038 else
1040 #ifdef HAVE_extendpdidi2
1041 if (HAVE_extendpdidi2)
1043 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1044 return;
1046 #endif /* HAVE_extendpdidi2 */
1047 abort ();
1051 /* Now follow all the conversions between integers
1052 no more than a word long. */
1054 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1055 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1056 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1057 GET_MODE_BITSIZE (from_mode)))
1059 if (!((GET_CODE (from) == MEM
1060 && ! MEM_VOLATILE_P (from)
1061 && direct_load[(int) to_mode]
1062 && ! mode_dependent_address_p (XEXP (from, 0)))
1063 || GET_CODE (from) == REG
1064 || GET_CODE (from) == SUBREG))
1065 from = force_reg (from_mode, from);
1066 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1067 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1068 from = copy_to_reg (from);
1069 emit_move_insn (to, gen_lowpart (to_mode, from));
1070 return;
1073 /* Handle extension. */
1074 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1076 /* Convert directly if that works. */
1077 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1078 != CODE_FOR_nothing)
1080 emit_unop_insn (code, to, from, equiv_code);
1081 return;
1083 else
1085 enum machine_mode intermediate;
1086 rtx tmp;
1087 tree shift_amount;
1089 /* Search for a mode to convert via. */
1090 for (intermediate = from_mode; intermediate != VOIDmode;
1091 intermediate = GET_MODE_WIDER_MODE (intermediate))
1092 if (((can_extend_p (to_mode, intermediate, unsignedp)
1093 != CODE_FOR_nothing)
1094 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1095 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1096 GET_MODE_BITSIZE (intermediate))))
1097 && (can_extend_p (intermediate, from_mode, unsignedp)
1098 != CODE_FOR_nothing))
1100 convert_move (to, convert_to_mode (intermediate, from,
1101 unsignedp), unsignedp);
1102 return;
1105 /* No suitable intermediate mode.
1106 Generate what we need with shifts. */
1107 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1108 - GET_MODE_BITSIZE (from_mode), 0);
1109 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1110 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1111 to, unsignedp);
1112 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1113 to, unsignedp);
1114 if (tmp != to)
1115 emit_move_insn (to, tmp);
1116 return;
1120 /* Support special truncate insns for certain modes. */
1122 if (from_mode == DImode && to_mode == SImode)
1124 #ifdef HAVE_truncdisi2
1125 if (HAVE_truncdisi2)
1127 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1128 return;
1130 #endif
1131 convert_move (to, force_reg (from_mode, from), unsignedp);
1132 return;
1135 if (from_mode == DImode && to_mode == HImode)
1137 #ifdef HAVE_truncdihi2
1138 if (HAVE_truncdihi2)
1140 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1141 return;
1143 #endif
1144 convert_move (to, force_reg (from_mode, from), unsignedp);
1145 return;
1148 if (from_mode == DImode && to_mode == QImode)
1150 #ifdef HAVE_truncdiqi2
1151 if (HAVE_truncdiqi2)
1153 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1154 return;
1156 #endif
1157 convert_move (to, force_reg (from_mode, from), unsignedp);
1158 return;
1161 if (from_mode == SImode && to_mode == HImode)
1163 #ifdef HAVE_truncsihi2
1164 if (HAVE_truncsihi2)
1166 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1167 return;
1169 #endif
1170 convert_move (to, force_reg (from_mode, from), unsignedp);
1171 return;
1174 if (from_mode == SImode && to_mode == QImode)
1176 #ifdef HAVE_truncsiqi2
1177 if (HAVE_truncsiqi2)
1179 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1180 return;
1182 #endif
1183 convert_move (to, force_reg (from_mode, from), unsignedp);
1184 return;
1187 if (from_mode == HImode && to_mode == QImode)
1189 #ifdef HAVE_trunchiqi2
1190 if (HAVE_trunchiqi2)
1192 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1193 return;
1195 #endif
1196 convert_move (to, force_reg (from_mode, from), unsignedp);
1197 return;
1200 if (from_mode == TImode && to_mode == DImode)
1202 #ifdef HAVE_trunctidi2
1203 if (HAVE_trunctidi2)
1205 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1206 return;
1208 #endif
1209 convert_move (to, force_reg (from_mode, from), unsignedp);
1210 return;
1213 if (from_mode == TImode && to_mode == SImode)
1215 #ifdef HAVE_trunctisi2
1216 if (HAVE_trunctisi2)
1218 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1219 return;
1221 #endif
1222 convert_move (to, force_reg (from_mode, from), unsignedp);
1223 return;
1226 if (from_mode == TImode && to_mode == HImode)
1228 #ifdef HAVE_trunctihi2
1229 if (HAVE_trunctihi2)
1231 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1232 return;
1234 #endif
1235 convert_move (to, force_reg (from_mode, from), unsignedp);
1236 return;
1239 if (from_mode == TImode && to_mode == QImode)
1241 #ifdef HAVE_trunctiqi2
1242 if (HAVE_trunctiqi2)
1244 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1245 return;
1247 #endif
1248 convert_move (to, force_reg (from_mode, from), unsignedp);
1249 return;
1252 /* Handle truncation of volatile memrefs, and so on;
1253 the things that couldn't be truncated directly,
1254 and for which there was no special instruction. */
1255 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1257 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1258 emit_move_insn (to, temp);
1259 return;
1262 /* Mode combination is not recognized. */
1263 abort ();
1266 /* Return an rtx for a value that would result
1267 from converting X to mode MODE.
1268 Both X and MODE may be floating, or both integer.
1269 UNSIGNEDP is nonzero if X is an unsigned value.
1270 This can be done by referring to a part of X in place
1271 or by copying to a new temporary with conversion.
1273 This function *must not* call protect_from_queue
1274 except when putting X into an insn (in which case convert_move does it). */
1276 rtx
1277 convert_to_mode (mode, x, unsignedp)
1278 enum machine_mode mode;
1279 rtx x;
1280 int unsignedp;
1282 return convert_modes (mode, VOIDmode, x, unsignedp);
1285 /* Return an rtx for a value that would result
1286 from converting X from mode OLDMODE to mode MODE.
1287 Both modes may be floating, or both integer.
1288 UNSIGNEDP is nonzero if X is an unsigned value.
1290 This can be done by referring to a part of X in place
1291 or by copying to a new temporary with conversion.
1293 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1295 This function *must not* call protect_from_queue
1296 except when putting X into an insn (in which case convert_move does it). */
1298 rtx
1299 convert_modes (mode, oldmode, x, unsignedp)
1300 enum machine_mode mode, oldmode;
1301 rtx x;
1302 int unsignedp;
1304 register rtx temp;
1306 /* If FROM is a SUBREG that indicates that we have already done at least
1307 the required extension, strip it. */
1309 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1310 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1311 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1312 x = gen_lowpart (mode, x);
1314 if (GET_MODE (x) != VOIDmode)
1315 oldmode = GET_MODE (x);
1317 if (mode == oldmode)
1318 return x;
1320 /* There is one case that we must handle specially: If we are converting
1321 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1322 we are to interpret the constant as unsigned, gen_lowpart will do
1323 the wrong if the constant appears negative. What we want to do is
1324 make the high-order word of the constant zero, not all ones. */
1326 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1327 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1328 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1330 HOST_WIDE_INT val = INTVAL (x);
1332 if (oldmode != VOIDmode
1333 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1335 int width = GET_MODE_BITSIZE (oldmode);
1337 /* We need to zero extend VAL. */
1338 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1341 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1344 /* We can do this with a gen_lowpart if both desired and current modes
1345 are integer, and this is either a constant integer, a register, or a
1346 non-volatile MEM. Except for the constant case where MODE is no
1347 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1349 if ((GET_CODE (x) == CONST_INT
1350 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1351 || (GET_MODE_CLASS (mode) == MODE_INT
1352 && GET_MODE_CLASS (oldmode) == MODE_INT
1353 && (GET_CODE (x) == CONST_DOUBLE
1354 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1355 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1356 && direct_load[(int) mode])
1357 || (GET_CODE (x) == REG
1358 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1359 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1361 /* ?? If we don't know OLDMODE, we have to assume here that
1362 X does not need sign- or zero-extension. This may not be
1363 the case, but it's the best we can do. */
1364 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1365 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1367 HOST_WIDE_INT val = INTVAL (x);
1368 int width = GET_MODE_BITSIZE (oldmode);
1370 /* We must sign or zero-extend in this case. Start by
1371 zero-extending, then sign extend if we need to. */
1372 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1373 if (! unsignedp
1374 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1375 val |= (HOST_WIDE_INT) (-1) << width;
1377 return GEN_INT (val);
1380 return gen_lowpart (mode, x);
1383 temp = gen_reg_rtx (mode);
1384 convert_move (temp, x, unsignedp);
1385 return temp;
1388 /* This macro is used to determine what the largest unit size that
1389 move_by_pieces can use is. */
1391 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1392 move efficiently, as opposed to MOVE_MAX which is the maximum
1393 number of bytes we can move with a single instruction. */
1395 #ifndef MOVE_MAX_PIECES
1396 #define MOVE_MAX_PIECES MOVE_MAX
1397 #endif
1399 /* Generate several move instructions to copy LEN bytes
1400 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1401 The caller must pass FROM and TO
1402 through protect_from_queue before calling.
1403 ALIGN is maximum alignment we can assume. */
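/* For illustration: emit_block_move below calls this routine when the
   block length is a compile-time constant that MOVE_BY_PIECES_P accepts,
   so a small, well-aligned copy becomes a handful of word-sized loads and
   stores instead of a call to memcpy.  */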
1405 void
1406 move_by_pieces (to, from, len, align)
1407 rtx to, from;
1408 unsigned HOST_WIDE_INT len;
1409 unsigned int align;
1411 struct move_by_pieces data;
1412 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1413 unsigned int max_size = MOVE_MAX_PIECES + 1;
1414 enum machine_mode mode = VOIDmode, tmode;
1415 enum insn_code icode;
1417 data.offset = 0;
1418 data.to_addr = to_addr;
1419 data.from_addr = from_addr;
1420 data.to = to;
1421 data.from = from;
1422 data.autinc_to
1423 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1424 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1425 data.autinc_from
1426 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1427 || GET_CODE (from_addr) == POST_INC
1428 || GET_CODE (from_addr) == POST_DEC);
1430 data.explicit_inc_from = 0;
1431 data.explicit_inc_to = 0;
1432 data.reverse
1433 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1434 if (data.reverse) data.offset = len;
1435 data.len = len;
1437 /* If copying requires more than two move insns,
1438 copy addresses to registers (to make displacements shorter)
1439 and use post-increment if available. */
1440 if (!(data.autinc_from && data.autinc_to)
1441 && move_by_pieces_ninsns (len, align) > 2)
1443 /* Find the mode of the largest move... */
1444 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1445 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1446 if (GET_MODE_SIZE (tmode) < max_size)
1447 mode = tmode;
1449 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1451 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1452 data.autinc_from = 1;
1453 data.explicit_inc_from = -1;
1455 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1457 data.from_addr = copy_addr_to_reg (from_addr);
1458 data.autinc_from = 1;
1459 data.explicit_inc_from = 1;
1461 if (!data.autinc_from && CONSTANT_P (from_addr))
1462 data.from_addr = copy_addr_to_reg (from_addr);
1463 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1465 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1466 data.autinc_to = 1;
1467 data.explicit_inc_to = -1;
1469 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1471 data.to_addr = copy_addr_to_reg (to_addr);
1472 data.autinc_to = 1;
1473 data.explicit_inc_to = 1;
1475 if (!data.autinc_to && CONSTANT_P (to_addr))
1476 data.to_addr = copy_addr_to_reg (to_addr);
1479 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1480 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1481 align = MOVE_MAX * BITS_PER_UNIT;
1483 /* First move what we can in the largest integer mode, then go to
1484 successively smaller modes. */
1486 while (max_size > 1)
1488 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1489 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1490 if (GET_MODE_SIZE (tmode) < max_size)
1491 mode = tmode;
1493 if (mode == VOIDmode)
1494 break;
1496 icode = mov_optab->handlers[(int) mode].insn_code;
1497 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1498 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1500 max_size = GET_MODE_SIZE (mode);
1503 /* The code above should have handled everything. */
1504 if (data.len > 0)
1505 abort ();
1508 /* Return number of insns required to move L bytes by pieces.
1509 ALIGN (in bytes) is maximum alignment we can assume. */
1511 static unsigned HOST_WIDE_INT
1512 move_by_pieces_ninsns (l, align)
1513 unsigned HOST_WIDE_INT l;
1514 unsigned int align;
1516 unsigned HOST_WIDE_INT n_insns = 0;
1517 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1519 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1520 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1521 align = MOVE_MAX * BITS_PER_UNIT;
1523 while (max_size > 1)
1525 enum machine_mode mode = VOIDmode, tmode;
1526 enum insn_code icode;
1528 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1529 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1530 if (GET_MODE_SIZE (tmode) < max_size)
1531 mode = tmode;
1533 if (mode == VOIDmode)
1534 break;
1536 icode = mov_optab->handlers[(int) mode].insn_code;
1537 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1538 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1540 max_size = GET_MODE_SIZE (mode);
1543 if (l)
1544 abort ();
1545 return n_insns;
1548 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1549 with move instructions for mode MODE. GENFUN is the gen_... function
1550 to make a move insn for that mode. DATA has all the other info. */
1552 static void
1553 move_by_pieces_1 (genfun, mode, data)
1554 rtx (*genfun) PARAMS ((rtx, ...));
1555 enum machine_mode mode;
1556 struct move_by_pieces *data;
1558 unsigned int size = GET_MODE_SIZE (mode);
1559 rtx to1, from1;
1561 while (data->len >= size)
1563 if (data->reverse)
1564 data->offset -= size;
1566 if (data->autinc_to)
1568 to1 = gen_rtx_MEM (mode, data->to_addr);
1569 MEM_COPY_ATTRIBUTES (to1, data->to);
1571 else
1572 to1 = change_address (data->to, mode,
1573 plus_constant (data->to_addr, data->offset));
1575 if (data->autinc_from)
1577 from1 = gen_rtx_MEM (mode, data->from_addr);
1578 MEM_COPY_ATTRIBUTES (from1, data->from);
1580 else
1581 from1 = change_address (data->from, mode,
1582 plus_constant (data->from_addr, data->offset));
1584 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1585 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1586 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1587 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1589 emit_insn ((*genfun) (to1, from1));
1591 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1592 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1593 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1594 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1596 if (! data->reverse)
1597 data->offset += size;
1599 data->len -= size;
1603 /* Emit code to move a block Y to a block X.
1604 This may be done with string-move instructions,
1605 with multiple scalar move instructions, or with a library call.
1607 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1608 with mode BLKmode.
1609 SIZE is an rtx that says how long they are.
1610 ALIGN is the maximum alignment we can assume they have.
1612 Return the address of the new block, if memcpy is called and returns it,
1613 0 otherwise. */
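/* For illustration, the strategy below is tried in this order: expand the
   copy inline with move_by_pieces when SIZE is a constant acceptable to
   MOVE_BY_PIECES_P, otherwise try each movstrM pattern from the narrowest
   integer mode upward, and finally fall back to a call to memcpy (or to
   bcopy when TARGET_MEM_FUNCTIONS is not defined).  */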
1615 rtx
1616 emit_block_move (x, y, size, align)
1617 rtx x, y;
1618 rtx size;
1619 unsigned int align;
1621 rtx retval = 0;
1622 #ifdef TARGET_MEM_FUNCTIONS
1623 static tree fn;
1624 tree call_expr, arg_list;
1625 #endif
1627 if (GET_MODE (x) != BLKmode)
1628 abort ();
1630 if (GET_MODE (y) != BLKmode)
1631 abort ();
1633 x = protect_from_queue (x, 1);
1634 y = protect_from_queue (y, 0);
1635 size = protect_from_queue (size, 0);
1637 if (GET_CODE (x) != MEM)
1638 abort ();
1639 if (GET_CODE (y) != MEM)
1640 abort ();
1641 if (size == 0)
1642 abort ();
1644 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1645 move_by_pieces (x, y, INTVAL (size), align);
1646 else
1648 /* Try the most limited insn first, because there's no point
1649 including more than one in the machine description unless
1650 the more limited one has some advantage. */
1652 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1653 enum machine_mode mode;
1655 /* Since this is a move insn, we don't care about volatility. */
1656 volatile_ok = 1;
1658 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1659 mode = GET_MODE_WIDER_MODE (mode))
1661 enum insn_code code = movstr_optab[(int) mode];
1662 insn_operand_predicate_fn pred;
1664 if (code != CODE_FOR_nothing
1665 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1666 here because if SIZE is less than the mode mask, as it is
1667 returned by the macro, it will definitely be less than the
1668 actual mode mask. */
1669 && ((GET_CODE (size) == CONST_INT
1670 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1671 <= (GET_MODE_MASK (mode) >> 1)))
1672 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1673 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1674 || (*pred) (x, BLKmode))
1675 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1676 || (*pred) (y, BLKmode))
1677 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1678 || (*pred) (opalign, VOIDmode)))
1680 rtx op2;
1681 rtx last = get_last_insn ();
1682 rtx pat;
1684 op2 = convert_to_mode (mode, size, 1);
1685 pred = insn_data[(int) code].operand[2].predicate;
1686 if (pred != 0 && ! (*pred) (op2, mode))
1687 op2 = copy_to_mode_reg (mode, op2);
1689 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1690 if (pat)
1692 emit_insn (pat);
1693 volatile_ok = 0;
1694 return 0;
1696 else
1697 delete_insns_since (last);
1701 volatile_ok = 0;
1703 /* X, Y, or SIZE may have been passed through protect_from_queue.
1705 It is unsafe to save the value generated by protect_from_queue
1706 and reuse it later. Consider what happens if emit_queue is
1707 called before the return value from protect_from_queue is used.
1709 Expansion of the CALL_EXPR below will call emit_queue before
1710 we are finished emitting RTL for argument setup. So if we are
1711 not careful we could get the wrong value for an argument.
1713 To avoid this problem we go ahead and emit code to copy X, Y &
1714 SIZE into new pseudos. We can then place those new pseudos
1715 into an RTL_EXPR and use them later, even after a call to
1716 emit_queue.
1718 Note this is not strictly needed for library calls since they
1719 do not call emit_queue before loading their arguments. However,
1720 we may need to have library calls call emit_queue in the future
1721 since failing to do so could cause problems for targets which
1722 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1723 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1724 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1726 #ifdef TARGET_MEM_FUNCTIONS
1727 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1728 #else
1729 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1730 TREE_UNSIGNED (integer_type_node));
1731 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1732 #endif
1734 #ifdef TARGET_MEM_FUNCTIONS
1735 /* It is incorrect to use the libcall calling conventions to call
1736 memcpy in this context.
1738 This could be a user call to memcpy and the user may wish to
1739 examine the return value from memcpy.
1741 For targets where libcalls and normal calls have different conventions
1742 for returning pointers, we could end up generating incorrect code.
1744 So instead of using a libcall sequence we build up a suitable
1745 CALL_EXPR and expand the call in the normal fashion. */
1746 if (fn == NULL_TREE)
1748 tree fntype;
1750 /* This was copied from except.c, I don't know if all this is
1751 necessary in this context or not. */
1752 fn = get_identifier ("memcpy");
1753 fntype = build_pointer_type (void_type_node);
1754 fntype = build_function_type (fntype, NULL_TREE);
1755 fn = build_decl (FUNCTION_DECL, fn, fntype);
1756 ggc_add_tree_root (&fn, 1);
1757 DECL_EXTERNAL (fn) = 1;
1758 TREE_PUBLIC (fn) = 1;
1759 DECL_ARTIFICIAL (fn) = 1;
1760 make_decl_rtl (fn, NULL_PTR);
1761 assemble_external (fn);
1764 /* We need to make an argument list for the function call.
1766 memcpy has three arguments, the first two are void * addresses and
1767 the last is a size_t byte count for the copy. */
1768 arg_list
1769 = build_tree_list (NULL_TREE,
1770 make_tree (build_pointer_type (void_type_node), x));
1771 TREE_CHAIN (arg_list)
1772 = build_tree_list (NULL_TREE,
1773 make_tree (build_pointer_type (void_type_node), y));
1774 TREE_CHAIN (TREE_CHAIN (arg_list))
1775 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1776 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1778 /* Now we have to build up the CALL_EXPR itself. */
1779 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1780 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1781 call_expr, arg_list, NULL_TREE);
1782 TREE_SIDE_EFFECTS (call_expr) = 1;
1784 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1785 #else
1786 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1787 VOIDmode, 3, y, Pmode, x, Pmode,
1788 convert_to_mode (TYPE_MODE (integer_type_node), size,
1789 TREE_UNSIGNED (integer_type_node)),
1790 TYPE_MODE (integer_type_node));
1791 #endif
1794 return retval;
1797 /* Copy all or part of a value X into registers starting at REGNO.
1798 The number of registers to be filled is NREGS. */
1800 void
1801 move_block_to_reg (regno, x, nregs, mode)
1802 int regno;
1803 rtx x;
1804 int nregs;
1805 enum machine_mode mode;
1807 int i;
1808 #ifdef HAVE_load_multiple
1809 rtx pat;
1810 rtx last;
1811 #endif
1813 if (nregs == 0)
1814 return;
1816 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1817 x = validize_mem (force_const_mem (mode, x));
1819 /* See if the machine can do this with a load multiple insn. */
1820 #ifdef HAVE_load_multiple
1821 if (HAVE_load_multiple)
1823 last = get_last_insn ();
1824 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1825 GEN_INT (nregs));
1826 if (pat)
1828 emit_insn (pat);
1829 return;
1831 else
1832 delete_insns_since (last);
1834 #endif
1836 for (i = 0; i < nregs; i++)
1837 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1838 operand_subword_force (x, i, mode));
1841 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1842 The number of registers to be filled is NREGS. SIZE indicates the number
1843 of bytes in the object X. */
1845 void
1846 move_block_from_reg (regno, x, nregs, size)
1847 int regno;
1848 rtx x;
1849 int nregs;
1850 int size;
1852 int i;
1853 #ifdef HAVE_store_multiple
1854 rtx pat;
1855 rtx last;
1856 #endif
1857 enum machine_mode mode;
1859 if (nregs == 0)
1860 return;
1862 /* If SIZE is that of a mode no bigger than a word, just use that
1863 mode's store operation. */
1864 if (size <= UNITS_PER_WORD
1865 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1867 emit_move_insn (change_address (x, mode, NULL),
1868 gen_rtx_REG (mode, regno));
1869 return;
1872 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1873 to the left before storing to memory. Note that the previous test
1874 doesn't handle all cases (e.g. SIZE == 3). */
1875 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1877 rtx tem = operand_subword (x, 0, 1, BLKmode);
1878 rtx shift;
1880 if (tem == 0)
1881 abort ();
1883 shift = expand_shift (LSHIFT_EXPR, word_mode,
1884 gen_rtx_REG (word_mode, regno),
1885 build_int_2 ((UNITS_PER_WORD - size)
1886 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1887 emit_move_insn (tem, shift);
1888 return;
1891 /* See if the machine can do this with a store multiple insn. */
1892 #ifdef HAVE_store_multiple
1893 if (HAVE_store_multiple)
1895 last = get_last_insn ();
1896 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1897 GEN_INT (nregs));
1898 if (pat)
1900 emit_insn (pat);
1901 return;
1903 else
1904 delete_insns_since (last);
1906 #endif
1908 for (i = 0; i < nregs; i++)
1910 rtx tem = operand_subword (x, i, 1, BLKmode);
1912 if (tem == 0)
1913 abort ();
1915 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1919 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1920 registers represented by a PARALLEL. SSIZE represents the total size of
1921 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1922 SRC in bits. */
1923 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1924 the balance will be in what would be the low-order memory addresses, i.e.
1925 left justified for big endian, right justified for little endian. This
1926 happens to be true for the targets currently using this support. If this
1927 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1928 would be needed. */
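/* For example, when a BLKmode value is passed or returned partly in
   registers, the PARALLEL in DST lists each hard register together with
   its byte offset within the value; the loop below extracts the piece of
   SRC at each offset into a temporary and then copies the temporaries
   into the hard registers.  */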
1930 void
1931 emit_group_load (dst, orig_src, ssize, align)
1932 rtx dst, orig_src;
1933 unsigned int align;
1934 int ssize;
1936 rtx *tmps, src;
1937 int start, i;
1939 if (GET_CODE (dst) != PARALLEL)
1940 abort ();
1942 /* Check for a NULL entry, used to indicate that the parameter goes
1943 both on the stack and in registers. */
1944 if (XEXP (XVECEXP (dst, 0, 0), 0))
1945 start = 0;
1946 else
1947 start = 1;
1949 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1951 /* If we won't be loading directly from memory, protect the real source
1952 from strange tricks we might play. */
1953 src = orig_src;
1954 if (GET_CODE (src) != MEM && ! CONSTANT_P (src))
1956 if (GET_MODE (src) == VOIDmode)
1957 src = gen_reg_rtx (GET_MODE (dst));
1958 else
1959 src = gen_reg_rtx (GET_MODE (orig_src));
1960 emit_move_insn (src, orig_src);
1963 /* Process the pieces. */
1964 for (i = start; i < XVECLEN (dst, 0); i++)
1966 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1967 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1968 unsigned int bytelen = GET_MODE_SIZE (mode);
1969 int shift = 0;
1971 /* Handle trailing fragments that run over the size of the struct. */
1972 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1974 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1975 bytelen = ssize - bytepos;
1976 if (bytelen <= 0)
1977 abort ();
1980 /* Optimize the access just a bit. */
1981 if (GET_CODE (src) == MEM
1982 && align >= GET_MODE_ALIGNMENT (mode)
1983 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1984 && bytelen == GET_MODE_SIZE (mode))
1986 tmps[i] = gen_reg_rtx (mode);
1987 emit_move_insn (tmps[i],
1988 change_address (src, mode,
1989 plus_constant (XEXP (src, 0),
1990 bytepos)));
1992 else if (GET_CODE (src) == CONCAT)
1994 if (bytepos == 0
1995 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1996 tmps[i] = XEXP (src, 0);
1997 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1998 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1999 tmps[i] = XEXP (src, 1);
2000 else
2001 abort ();
2003 else if ((CONSTANT_P (src)
2004 && (GET_MODE (src) == VOIDmode || GET_MODE (src) == mode))
2005 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2006 tmps[i] = src;
2007 else
2008 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2009 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2010 mode, mode, align, ssize);
2012 if (BYTES_BIG_ENDIAN && shift)
2013 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2014 tmps[i], 0, OPTAB_WIDEN);
2017 emit_queue ();
2019 /* Copy the extracted pieces into the proper (probable) hard regs. */
2020 for (i = start; i < XVECLEN (dst, 0); i++)
2021 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2024 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2025 registers represented by a PARALLEL. SSIZE represents the total size of
2026 block DST in bytes, or -1 if not known. ALIGN is the known alignment of DST in bits. */
2028 void
2029 emit_group_store (orig_dst, src, ssize, align)
2030 rtx orig_dst, src;
2031 int ssize;
2032 unsigned int align;
2034 rtx *tmps, dst;
2035 int start, i;
2037 if (GET_CODE (src) != PARALLEL)
2038 abort ();
2040 /* Check for a NULL entry, used to indicate that the parameter goes
2041 both on the stack and in registers. */
2042 if (XEXP (XVECEXP (src, 0, 0), 0))
2043 start = 0;
2044 else
2045 start = 1;
2047 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2049 /* Copy the (probable) hard regs into pseudos. */
2050 for (i = start; i < XVECLEN (src, 0); i++)
2052 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2053 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2054 emit_move_insn (tmps[i], reg);
2056 emit_queue ();
2058 /* If we won't be storing directly into memory, protect the real destination
2059 from strange tricks we might play. */
2060 dst = orig_dst;
2061 if (GET_CODE (dst) == PARALLEL)
2063 rtx temp;
2065 /* We can get a PARALLEL dst if there is a conditional expression in
2066 a return statement. In that case, the dst and src are the same,
2067 so no action is necessary. */
2068 if (rtx_equal_p (dst, src))
2069 return;
2071 /* It is unclear if we can ever reach here, but we may as well handle
2072 it. Allocate a temporary, and split this into a store/load to/from
2073 the temporary. */
2075 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2076 emit_group_store (temp, src, ssize, align);
2077 emit_group_load (dst, temp, ssize, align);
2078 return;
2080 else if (GET_CODE (dst) != MEM)
2082 dst = gen_reg_rtx (GET_MODE (orig_dst));
2083 /* Make life a bit easier for combine. */
2084 emit_move_insn (dst, const0_rtx);
2087 /* Process the pieces. */
2088 for (i = start; i < XVECLEN (src, 0); i++)
2090 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2091 enum machine_mode mode = GET_MODE (tmps[i]);
2092 unsigned int bytelen = GET_MODE_SIZE (mode);
2094 /* Handle trailing fragments that run over the size of the struct. */
2095 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2097 if (BYTES_BIG_ENDIAN)
2099 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2100 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2101 tmps[i], 0, OPTAB_WIDEN);
2103 bytelen = ssize - bytepos;
2106 /* Optimize the access just a bit. */
2107 if (GET_CODE (dst) == MEM
2108 && align >= GET_MODE_ALIGNMENT (mode)
2109 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2110 && bytelen == GET_MODE_SIZE (mode))
2111 emit_move_insn (change_address (dst, mode,
2112 plus_constant (XEXP (dst, 0),
2113 bytepos)),
2114 tmps[i]);
2115 else
2116 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2117 mode, tmps[i], align, ssize);
2120 emit_queue ();
2122 /* Copy from the pseudo into the (probable) hard reg. */
2123 if (GET_CODE (dst) == REG)
2124 emit_move_insn (orig_dst, dst);
2127 /* Generate code to copy a BLKmode object of TYPE out of a
2128 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2129 is null, a stack temporary is created. TGTBLK is returned.
2131 The primary purpose of this routine is to handle functions
2132 that return BLKmode structures in registers. Some machines
2133 (the PA for example) want to return all small structures
2134 in registers regardless of the structure's alignment. */
2137 copy_blkmode_from_reg (tgtblk, srcreg, type)
2138 rtx tgtblk;
2139 rtx srcreg;
2140 tree type;
2142 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2143 rtx src = NULL, dst = NULL;
2144 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2145 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2147 if (tgtblk == 0)
2149 tgtblk = assign_temp (build_qualified_type (type,
2150 (TYPE_QUALS (type)
2151 | TYPE_QUAL_CONST)),
2152 0, 1, 1);
2153 preserve_temp_slots (tgtblk);
2156 /* This code assumes srcreg is at least a full word. If it isn't,
2157 copy it into a new pseudo which is a full word. */
2158 if (GET_MODE (srcreg) != BLKmode
2159 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2160 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2162 /* Structures whose size is not a multiple of a word are aligned
2163 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2164 machine, this means we must skip the empty high order bytes when
2165 calculating the bit offset. */
2166 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2167 big_endian_correction
2168 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
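  /* Worked example, assuming a 32-bit, big-endian target: for a 6-byte
     structure, bytes % UNITS_PER_WORD is 2, so big_endian_correction is
     32 - 2 * 8 = 16 bits, and the first extraction below starts 16 bits
     into the first source word.  */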
2170 /* Copy the structure BITSIZE bits at a time.
2172 We could probably emit more efficient code for machines which do not use
2173 strict alignment, but it doesn't seem worth the effort at the current
2174 time. */
2175 for (bitpos = 0, xbitpos = big_endian_correction;
2176 bitpos < bytes * BITS_PER_UNIT;
2177 bitpos += bitsize, xbitpos += bitsize)
2179 /* We need a new source operand each time xbitpos is on a
2180 word boundary and when xbitpos == big_endian_correction
2181 (the first time through). */
2182 if (xbitpos % BITS_PER_WORD == 0
2183 || xbitpos == big_endian_correction)
2184 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, BLKmode);
2186 /* We need a new destination operand each time bitpos is on
2187 a word boundary. */
2188 if (bitpos % BITS_PER_WORD == 0)
2189 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2191 /* Use xbitpos for the source extraction (right justified) and
2192 bitpos for the destination store (left justified). */
2193 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2194 extract_bit_field (src, bitsize,
2195 xbitpos % BITS_PER_WORD, 1,
2196 NULL_RTX, word_mode, word_mode,
2197 bitsize, BITS_PER_WORD),
2198 bitsize, BITS_PER_WORD);
2201 return tgtblk;
2204 /* Add a USE expression for REG to the (possibly empty) list pointed
2205 to by CALL_FUSAGE. REG must denote a hard register. */
2207 void
2208 use_reg (call_fusage, reg)
2209 rtx *call_fusage, reg;
2211 if (GET_CODE (reg) != REG
2212 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2213 abort ();
2215 *call_fusage
2216 = gen_rtx_EXPR_LIST (VOIDmode,
2217 gen_rtx_USE (VOIDmode, reg), *call_fusage);
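/* A hypothetical caller building CALL_INSN_FUNCTION_USAGE for a call
   that passes two arguments in hard registers might write

     rtx call_fusage = 0;
     use_reg (&call_fusage, gen_rtx_REG (SImode, 4));
     use_reg (&call_fusage, gen_rtx_REG (SImode, 5));

   (the register numbers are illustrative only), yielding a chain of
   (use (reg:SI ...)) entries.  */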
2220 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2221 starting at REGNO. All of these registers must be hard registers. */
2223 void
2224 use_regs (call_fusage, regno, nregs)
2225 rtx *call_fusage;
2226 int regno;
2227 int nregs;
2229 int i;
2231 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2232 abort ();
2234 for (i = 0; i < nregs; i++)
2235 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2238 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2239 PARALLEL REGS. This is for calls that pass values in multiple
2240 non-contiguous locations. The Irix 6 ABI has examples of this. */
2242 void
2243 use_group_regs (call_fusage, regs)
2244 rtx *call_fusage;
2245 rtx regs;
2247 int i;
2249 for (i = 0; i < XVECLEN (regs, 0); i++)
2251 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2253 /* A NULL entry means the parameter goes both on the stack and in
2254 registers. This can also be a MEM for targets that pass values
2255 partially on the stack and partially in registers. */
2256 if (reg != 0 && GET_CODE (reg) == REG)
2257 use_reg (call_fusage, reg);
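/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.
   Return nonzero if a call to store_by_pieces should succeed.  */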
2263 can_store_by_pieces (len, constfun, constfundata, align)
2264 unsigned HOST_WIDE_INT len;
2265 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2266 PTR constfundata;
2267 unsigned int align;
2269 unsigned HOST_WIDE_INT max_size, l;
2270 HOST_WIDE_INT offset = 0;
2271 enum machine_mode mode, tmode;
2272 enum insn_code icode;
2273 int reverse;
2274 rtx cst;
2276 if (! MOVE_BY_PIECES_P (len, align))
2277 return 0;
2279 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2280 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2281 align = MOVE_MAX * BITS_PER_UNIT;
2283 /* We would first store what we can in the largest integer mode, then go to
2284 successively smaller modes. */
2286 for (reverse = 0;
2287 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2288 reverse++)
2290 l = len;
2291 mode = VOIDmode;
2292 max_size = MOVE_MAX_PIECES + 1;
2293 while (max_size > 1)
2295 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2296 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2297 if (GET_MODE_SIZE (tmode) < max_size)
2298 mode = tmode;
2300 if (mode == VOIDmode)
2301 break;
2303 icode = mov_optab->handlers[(int) mode].insn_code;
2304 if (icode != CODE_FOR_nothing
2305 && align >= GET_MODE_ALIGNMENT (mode))
2307 unsigned int size = GET_MODE_SIZE (mode);
2309 while (l >= size)
2311 if (reverse)
2312 offset -= size;
2314 cst = (*constfun) (constfundata, offset, mode);
2315 if (!LEGITIMATE_CONSTANT_P (cst))
2316 return 0;
2318 if (!reverse)
2319 offset += size;
2321 l -= size;
2325 max_size = GET_MODE_SIZE (mode);
2328 /* The code above should have handled everything. */
2329 if (l != 0)
2330 abort ();
2333 return 1;
2336 /* Generate several move instructions to store LEN bytes generated by
2337 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2338 pointer which will be passed as argument in every CONSTFUN call.
2339 ALIGN is maximum alignment we can assume. */
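/* A minimal sketch of a CONSTFUN (the names below are illustrative and
   not part of this file): a callback that yields successive pieces of a
   constant string, in the style of the string built-ins, could be

     static rtx
     read_str_piece (data, offset, mode)
          PTR data;
          HOST_WIDE_INT offset;
          enum machine_mode mode;
     {
       return c_readstr ((const char *) data + offset, mode);
     }

   assuming a helper such as c_readstr that builds a CONST_INT or
   CONST_DOUBLE of MODE from the bytes at OFFSET.  */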
2341 void
2342 store_by_pieces (to, len, constfun, constfundata, align)
2343 rtx to;
2344 unsigned HOST_WIDE_INT len;
2345 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2346 PTR constfundata;
2347 unsigned int align;
2349 struct store_by_pieces data;
2351 if (! MOVE_BY_PIECES_P (len, align))
2352 abort ();
2353 to = protect_from_queue (to, 1);
2354 data.constfun = constfun;
2355 data.constfundata = constfundata;
2356 data.len = len;
2357 data.to = to;
2358 store_by_pieces_1 (&data, align);
2361 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2362 rtx with BLKmode). The caller must pass TO through protect_from_queue
2363 before calling. ALIGN is maximum alignment we can assume. */
2365 static void
2366 clear_by_pieces (to, len, align)
2367 rtx to;
2368 unsigned HOST_WIDE_INT len;
2369 unsigned int align;
2371 struct store_by_pieces data;
2373 data.constfun = clear_by_pieces_1;
2374 data.constfundata = NULL_PTR;
2375 data.len = len;
2376 data.to = to;
2377 store_by_pieces_1 (&data, align);
2380 /* Callback routine for clear_by_pieces.
2381 Return const0_rtx unconditionally. */
2383 static rtx
2384 clear_by_pieces_1 (data, offset, mode)
2385 PTR data ATTRIBUTE_UNUSED;
2386 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2387 enum machine_mode mode ATTRIBUTE_UNUSED;
2389 return const0_rtx;
2392 /* Subroutine of clear_by_pieces and store_by_pieces.
2393 Generate several move instructions to store LEN bytes of block TO. (A MEM
2394 rtx with BLKmode). The caller must pass TO through protect_from_queue
2395 before calling. ALIGN is maximum alignment we can assume. */
2397 static void
2398 store_by_pieces_1 (data, align)
2399 struct store_by_pieces *data;
2400 unsigned int align;
2402 rtx to_addr = XEXP (data->to, 0);
2403 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2404 enum machine_mode mode = VOIDmode, tmode;
2405 enum insn_code icode;
2407 data->offset = 0;
2408 data->to_addr = to_addr;
2409 data->autinc_to
2410 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2411 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2413 data->explicit_inc_to = 0;
2414 data->reverse
2415 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2416 if (data->reverse)
2417 data->offset = data->len;
2419 /* If storing requires more than two move insns,
2420 copy addresses to registers (to make displacements shorter)
2421 and use post-increment if available. */
2422 if (!data->autinc_to
2423 && move_by_pieces_ninsns (data->len, align) > 2)
2425 /* Determine the main mode we'll be using. */
2426 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2427 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2428 if (GET_MODE_SIZE (tmode) < max_size)
2429 mode = tmode;
2431 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2433 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2434 data->autinc_to = 1;
2435 data->explicit_inc_to = -1;
2438 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2439 && ! data->autinc_to)
2441 data->to_addr = copy_addr_to_reg (to_addr);
2442 data->autinc_to = 1;
2443 data->explicit_inc_to = 1;
2446 if ( !data->autinc_to && CONSTANT_P (to_addr))
2447 data->to_addr = copy_addr_to_reg (to_addr);
2450 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2451 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2452 align = MOVE_MAX * BITS_PER_UNIT;
2454 /* First store what we can in the largest integer mode, then go to
2455 successively smaller modes. */
2457 while (max_size > 1)
2459 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2460 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2461 if (GET_MODE_SIZE (tmode) < max_size)
2462 mode = tmode;
2464 if (mode == VOIDmode)
2465 break;
2467 icode = mov_optab->handlers[(int) mode].insn_code;
2468 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2469 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2471 max_size = GET_MODE_SIZE (mode);
2474 /* The code above should have handled everything. */
2475 if (data->len != 0)
2476 abort ();
2479 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2480 with move instructions for mode MODE. GENFUN is the gen_... function
2481 to make a move insn for that mode. DATA has all the other info. */
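/* For instance, when MODE is SImode on a target providing a movsi
   pattern, GENFUN is effectively gen_movsi, so each iteration of the
   loop below emits one SImode store of a constant piece (illustrative;
   the exact insn comes from the target's mov expander).  */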
2483 static void
2484 store_by_pieces_2 (genfun, mode, data)
2485 rtx (*genfun) PARAMS ((rtx, ...));
2486 enum machine_mode mode;
2487 struct store_by_pieces *data;
2489 unsigned int size = GET_MODE_SIZE (mode);
2490 rtx to1, cst;
2492 while (data->len >= size)
2494 if (data->reverse)
2495 data->offset -= size;
2497 if (data->autinc_to)
2499 to1 = gen_rtx_MEM (mode, data->to_addr);
2500 MEM_COPY_ATTRIBUTES (to1, data->to);
2502 else
2503 to1 = change_address (data->to, mode,
2504 plus_constant (data->to_addr, data->offset));
2506 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2507 emit_insn (gen_add2_insn (data->to_addr,
2508 GEN_INT (-(HOST_WIDE_INT) size)));
2510 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2511 emit_insn ((*genfun) (to1, cst));
2513 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2514 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2516 if (! data->reverse)
2517 data->offset += size;
2519 data->len -= size;
2523 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2524 its length in bytes and ALIGN is the maximum alignment we can assume it has.
2526 If we call a function that returns the length of the block, return it. */
2529 clear_storage (object, size, align)
2530 rtx object;
2531 rtx size;
2532 unsigned int align;
2534 #ifdef TARGET_MEM_FUNCTIONS
2535 static tree fn;
2536 tree call_expr, arg_list;
2537 #endif
2538 rtx retval = 0;
2540 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2541 just move a zero. Otherwise, do this a piece at a time. */
2542 if (GET_MODE (object) != BLKmode
2543 && GET_CODE (size) == CONST_INT
2544 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2545 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2546 else
2548 object = protect_from_queue (object, 1);
2549 size = protect_from_queue (size, 0);
2551 if (GET_CODE (size) == CONST_INT
2552 && MOVE_BY_PIECES_P (INTVAL (size), align))
2553 clear_by_pieces (object, INTVAL (size), align);
2554 else
2556 /* Try the most limited insn first, because there's no point
2557 including more than one in the machine description unless
2558 the more limited one has some advantage. */
2560 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2561 enum machine_mode mode;
2563 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2564 mode = GET_MODE_WIDER_MODE (mode))
2566 enum insn_code code = clrstr_optab[(int) mode];
2567 insn_operand_predicate_fn pred;
2569 if (code != CODE_FOR_nothing
2570 /* We don't need MODE to be narrower than
2571 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2572 the mode mask, as it is returned by the macro, it will
2573 definitely be less than the actual mode mask. */
2574 && ((GET_CODE (size) == CONST_INT
2575 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2576 <= (GET_MODE_MASK (mode) >> 1)))
2577 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2578 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2579 || (*pred) (object, BLKmode))
2580 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2581 || (*pred) (opalign, VOIDmode)))
2583 rtx op1;
2584 rtx last = get_last_insn ();
2585 rtx pat;
2587 op1 = convert_to_mode (mode, size, 1);
2588 pred = insn_data[(int) code].operand[1].predicate;
2589 if (pred != 0 && ! (*pred) (op1, mode))
2590 op1 = copy_to_mode_reg (mode, op1);
2592 pat = GEN_FCN ((int) code) (object, op1, opalign);
2593 if (pat)
2595 emit_insn (pat);
2596 return 0;
2598 else
2599 delete_insns_since (last);
2603 /* OBJECT or SIZE may have been passed through protect_from_queue.
2605 It is unsafe to save the value generated by protect_from_queue
2606 and reuse it later. Consider what happens if emit_queue is
2607 called before the return value from protect_from_queue is used.
2609 Expansion of the CALL_EXPR below will call emit_queue before
2610 we are finished emitting RTL for argument setup. So if we are
2611 not careful we could get the wrong value for an argument.
2613 To avoid this problem we go ahead and emit code to copy OBJECT
2614 and SIZE into new pseudos. We can then place those new pseudos
2615 into an RTL_EXPR and use them later, even after a call to
2616 emit_queue.
2618 Note this is not strictly needed for library calls since they
2619 do not call emit_queue before loading their arguments. However,
2620 we may need to have library calls call emit_queue in the future
2621 since failing to do so could cause problems for targets which
2622 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2623 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2625 #ifdef TARGET_MEM_FUNCTIONS
2626 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2627 #else
2628 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2629 TREE_UNSIGNED (integer_type_node));
2630 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2631 #endif
2633 #ifdef TARGET_MEM_FUNCTIONS
2634 /* It is incorrect to use the libcall calling conventions to call
2635 memset in this context.
2637 This could be a user call to memset and the user may wish to
2638 examine the return value from memset.
2640 For targets where libcalls and normal calls have different
2641 conventions for returning pointers, we could end up generating
2642 incorrect code.
2644 So instead of using a libcall sequence we build up a suitable
2645 CALL_EXPR and expand the call in the normal fashion. */
2646 if (fn == NULL_TREE)
2648 tree fntype;
2650 /* This was copied from except.c; I don't know if all this is
2651 necessary in this context or not. */
2652 fn = get_identifier ("memset");
2653 fntype = build_pointer_type (void_type_node);
2654 fntype = build_function_type (fntype, NULL_TREE);
2655 fn = build_decl (FUNCTION_DECL, fn, fntype);
2656 ggc_add_tree_root (&fn, 1);
2657 DECL_EXTERNAL (fn) = 1;
2658 TREE_PUBLIC (fn) = 1;
2659 DECL_ARTIFICIAL (fn) = 1;
2660 make_decl_rtl (fn, NULL_PTR);
2661 assemble_external (fn);
2664 /* We need to make an argument list for the function call.
2666 memset has three arguments, the first is a void * address, the
2667 second an integer with the initialization value, the last is a
2668 size_t byte count for the copy. */
2669 arg_list
2670 = build_tree_list (NULL_TREE,
2671 make_tree (build_pointer_type (void_type_node),
2672 object));
2673 TREE_CHAIN (arg_list)
2674 = build_tree_list (NULL_TREE,
2675 make_tree (integer_type_node, const0_rtx));
2676 TREE_CHAIN (TREE_CHAIN (arg_list))
2677 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2678 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2680 /* Now we have to build up the CALL_EXPR itself. */
2681 call_expr = build1 (ADDR_EXPR,
2682 build_pointer_type (TREE_TYPE (fn)), fn);
2683 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2684 call_expr, arg_list, NULL_TREE);
2685 TREE_SIDE_EFFECTS (call_expr) = 1;
2687 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2688 #else
2689 emit_library_call (bzero_libfunc, LCT_NORMAL,
2690 VOIDmode, 2, object, Pmode, size,
2691 TYPE_MODE (integer_type_node));
2692 #endif
2696 return retval;
2699 /* Generate code to copy Y into X.
2700 Both Y and X must have the same mode, except that
2701 Y can be a constant with VOIDmode.
2702 This mode cannot be BLKmode; use emit_block_move for that.
2704 Return the last instruction emitted. */
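/* For example (illustrative only), loading a constant into a fresh
   pseudo:

     rtx reg = gen_reg_rtx (SImode);
     emit_move_insn (reg, GEN_INT (42));

   The CONST_INT has VOIDmode, so the move's mode is taken from the
   destination register.  */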
2707 emit_move_insn (x, y)
2708 rtx x, y;
2710 enum machine_mode mode = GET_MODE (x);
2711 rtx y_cst = NULL_RTX;
2712 rtx last_insn;
2714 x = protect_from_queue (x, 1);
2715 y = protect_from_queue (y, 0);
2717 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2718 abort ();
2720 /* Never force constant_p_rtx to memory. */
2721 if (GET_CODE (y) == CONSTANT_P_RTX)
2723 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2725 y_cst = y;
2726 y = force_const_mem (mode, y);
2729 /* If X or Y are memory references, verify that their addresses are valid
2730 for the machine. */
2731 if (GET_CODE (x) == MEM
2732 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2733 && ! push_operand (x, GET_MODE (x)))
2734 || (flag_force_addr
2735 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2736 x = change_address (x, VOIDmode, XEXP (x, 0));
2738 if (GET_CODE (y) == MEM
2739 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2740 || (flag_force_addr
2741 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2742 y = change_address (y, VOIDmode, XEXP (y, 0));
2744 if (mode == BLKmode)
2745 abort ();
2747 last_insn = emit_move_insn_1 (x, y);
2749 if (y_cst && GET_CODE (x) == REG)
2750 REG_NOTES (last_insn)
2751 = gen_rtx_EXPR_LIST (REG_EQUAL, y_cst, REG_NOTES (last_insn));
2753 return last_insn;
2756 /* Low level part of emit_move_insn.
2757 Called just like emit_move_insn, but assumes X and Y
2758 are basically valid. */
2761 emit_move_insn_1 (x, y)
2762 rtx x, y;
2764 enum machine_mode mode = GET_MODE (x);
2765 enum machine_mode submode;
2766 enum mode_class class = GET_MODE_CLASS (mode);
2767 unsigned int i;
2769 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2770 abort ();
2772 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2773 return
2774 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2776 /* Expand complex moves by moving real part and imag part, if possible. */
2777 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2778 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2779 * BITS_PER_UNIT),
2780 (class == MODE_COMPLEX_INT
2781 ? MODE_INT : MODE_FLOAT),
2783 && (mov_optab->handlers[(int) submode].insn_code
2784 != CODE_FOR_nothing))
2786 /* Don't split destination if it is a stack push. */
2787 int stack = push_operand (x, GET_MODE (x));
2789 /* If this is a stack, push the highpart first, so it
2790 will be in the argument order.
2792 In that case, change_address is used only to convert
2793 the mode, not to change the address. */
2794 if (stack)
2796 /* Note that the real part always precedes the imag part in memory
2797 regardless of the machine's endianness. */
2798 #ifdef STACK_GROWS_DOWNWARD
2799 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2800 (gen_rtx_MEM (submode, XEXP (x, 0)),
2801 gen_imagpart (submode, y)));
2802 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2803 (gen_rtx_MEM (submode, XEXP (x, 0)),
2804 gen_realpart (submode, y)));
2805 #else
2806 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2807 (gen_rtx_MEM (submode, XEXP (x, 0)),
2808 gen_realpart (submode, y)));
2809 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2810 (gen_rtx_MEM (submode, XEXP (x, 0)),
2811 gen_imagpart (submode, y)));
2812 #endif
2814 else
2816 rtx realpart_x, realpart_y;
2817 rtx imagpart_x, imagpart_y;
2819 /* If this is a complex value with each part being smaller than a
2820 word, the usual calling sequence will likely pack the pieces into
2821 a single register. Unfortunately, SUBREG of hard registers only
2822 deals in terms of words, so we have a problem converting input
2823 arguments to the CONCAT of two registers that is used elsewhere
2824 for complex values. If this is before reload, we can copy it into
2825 memory and reload. FIXME, we should see about using extract and
2826 insert on integer registers, but complex short and complex char
2827 variables should be rarely used. */
2828 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2829 && (reload_in_progress | reload_completed) == 0)
2831 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2832 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2834 if (packed_dest_p || packed_src_p)
2836 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2837 ? MODE_FLOAT : MODE_INT);
2839 enum machine_mode reg_mode
2840 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2842 if (reg_mode != BLKmode)
2844 rtx mem = assign_stack_temp (reg_mode,
2845 GET_MODE_SIZE (mode), 0);
2846 rtx cmem = change_address (mem, mode, NULL_RTX);
2848 cfun->cannot_inline
2849 = N_("function using short complex types cannot be inline");
2851 if (packed_dest_p)
2853 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2854 emit_move_insn_1 (cmem, y);
2855 return emit_move_insn_1 (sreg, mem);
2857 else
2859 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2860 emit_move_insn_1 (mem, sreg);
2861 return emit_move_insn_1 (x, cmem);
2867 realpart_x = gen_realpart (submode, x);
2868 realpart_y = gen_realpart (submode, y);
2869 imagpart_x = gen_imagpart (submode, x);
2870 imagpart_y = gen_imagpart (submode, y);
2872 /* Show the output dies here. This is necessary for SUBREGs
2873 of pseudos since we cannot track their lifetimes correctly;
2874 hard regs shouldn't appear here except as return values.
2875 We never want to emit such a clobber after reload. */
2876 if (x != y
2877 && ! (reload_in_progress || reload_completed)
2878 && (GET_CODE (realpart_x) == SUBREG
2879 || GET_CODE (imagpart_x) == SUBREG))
2881 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2884 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2885 (realpart_x, realpart_y));
2886 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2887 (imagpart_x, imagpart_y));
2890 return get_last_insn ();
2893 /* This will handle any multi-word mode that lacks a move_insn pattern.
2894 However, you will get better code if you define such patterns,
2895 even if they must turn into multiple assembler instructions. */
2896 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2898 rtx last_insn = 0;
2899 rtx seq, inner;
2900 int need_clobber;
2902 #ifdef PUSH_ROUNDING
2904 /* If X is a push on the stack, do the push now and replace
2905 X with a reference to the stack pointer. */
2906 if (push_operand (x, GET_MODE (x)))
2908 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2909 x = change_address (x, VOIDmode, stack_pointer_rtx);
2911 #endif
2913 /* If we are in reload, see if either operand is a MEM whose address
2914 is scheduled for replacement. */
2915 if (reload_in_progress && GET_CODE (x) == MEM
2916 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2918 rtx new = gen_rtx_MEM (GET_MODE (x), inner);
2920 MEM_COPY_ATTRIBUTES (new, x);
2921 x = new;
2923 if (reload_in_progress && GET_CODE (y) == MEM
2924 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2926 rtx new = gen_rtx_MEM (GET_MODE (y), inner);
2928 MEM_COPY_ATTRIBUTES (new, y);
2929 y = new;
2932 start_sequence ();
2934 need_clobber = 0;
2935 for (i = 0;
2936 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2937 i++)
2939 rtx xpart = operand_subword (x, i, 1, mode);
2940 rtx ypart = operand_subword (y, i, 1, mode);
2942 /* If we can't get a part of Y, put Y into memory if it is a
2943 constant. Otherwise, force it into a register. If we still
2944 can't get a part of Y, abort. */
2945 if (ypart == 0 && CONSTANT_P (y))
2947 y = force_const_mem (mode, y);
2948 ypart = operand_subword (y, i, 1, mode);
2950 else if (ypart == 0)
2951 ypart = operand_subword_force (y, i, mode);
2953 if (xpart == 0 || ypart == 0)
2954 abort ();
2956 need_clobber |= (GET_CODE (xpart) == SUBREG);
2958 last_insn = emit_move_insn (xpart, ypart);
2961 seq = gen_sequence ();
2962 end_sequence ();
2964 /* Show the output dies here. This is necessary for SUBREGs
2965 of pseudos since we cannot track their lifetimes correctly;
2966 hard regs shouldn't appear here except as return values.
2967 We never want to emit such a clobber after reload. */
2968 if (x != y
2969 && ! (reload_in_progress || reload_completed)
2970 && need_clobber != 0)
2972 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2975 emit_insn (seq);
2977 return last_insn;
2979 else
2980 abort ();
2983 /* Pushing data onto the stack. */
2985 /* Push a block of length SIZE (perhaps variable)
2986 and return an rtx to address the beginning of the block.
2987 Note that it is not possible for the value returned to be a QUEUED.
2988 The value may be virtual_outgoing_args_rtx.
2990 EXTRA is the number of bytes of padding to push in addition to SIZE.
2991 BELOW nonzero means this padding comes at low addresses;
2992 otherwise, the padding comes at high addresses. */
2995 push_block (size, extra, below)
2996 rtx size;
2997 int extra, below;
2999 register rtx temp;
3001 size = convert_modes (Pmode, ptr_mode, size, 1);
3002 if (CONSTANT_P (size))
3003 anti_adjust_stack (plus_constant (size, extra));
3004 else if (GET_CODE (size) == REG && extra == 0)
3005 anti_adjust_stack (size);
3006 else
3008 temp = copy_to_mode_reg (Pmode, size);
3009 if (extra != 0)
3010 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3011 temp, 0, OPTAB_LIB_WIDEN);
3012 anti_adjust_stack (temp);
3015 #ifndef STACK_GROWS_DOWNWARD
3016 #ifdef ARGS_GROW_DOWNWARD
3017 if (!ACCUMULATE_OUTGOING_ARGS)
3018 #else
3019 if (0)
3020 #endif
3021 #else
3022 if (1)
3023 #endif
3025 /* Return the lowest stack address when STACK or ARGS grow downward and
3026 we are not accumulating outgoing arguments (the c4x port uses such
3027 conventions). */
3028 temp = virtual_outgoing_args_rtx;
3029 if (extra != 0 && below)
3030 temp = plus_constant (temp, extra);
3032 else
3034 if (GET_CODE (size) == CONST_INT)
3035 temp = plus_constant (virtual_outgoing_args_rtx,
3036 -INTVAL (size) - (below ? 0 : extra));
3037 else if (extra != 0 && !below)
3038 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3039 negate_rtx (Pmode, plus_constant (size, extra)));
3040 else
3041 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3042 negate_rtx (Pmode, size));
3045 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3049 gen_push_operand ()
3051 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
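/* On a 32-bit machine whose stack grows downward, STACK_PUSH_CODE is
   PRE_DEC and the operand built above prints as

     (pre_dec:SI (reg:SI sp))

   Callers wrap it in a MEM, e.g. (mem:BLK (pre_dec:SI (reg:SI sp))),
   to address the newly pushed slot (the modes shown are illustrative).  */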
3054 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3055 block of SIZE bytes. */
3057 static rtx
3058 get_push_address (size)
3059 int size;
3061 register rtx temp;
3063 if (STACK_PUSH_CODE == POST_DEC)
3064 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3065 else if (STACK_PUSH_CODE == POST_INC)
3066 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3067 else
3068 temp = stack_pointer_rtx;
3070 return copy_to_reg (temp);
3073 /* Generate code to push X onto the stack, assuming it has mode MODE and
3074 type TYPE.
3075 MODE is redundant except when X is a CONST_INT (since they don't
3076 carry mode info).
3077 SIZE is an rtx for the size of data to be copied (in bytes),
3078 needed only if X is BLKmode.
3080 ALIGN is maximum alignment we can assume.
3082 If PARTIAL and REG are both nonzero, then copy that many of the first
3083 words of X into registers starting with REG, and push the rest of X.
3084 The amount of space pushed is decreased by PARTIAL words,
3085 rounded *down* to a multiple of PARM_BOUNDARY.
3086 REG must be a hard register in this case.
3087 If REG is zero but PARTIAL is not, take all other actions for an
3088 argument partially in registers, but do not actually load any
3089 registers.
3091 EXTRA is the amount in bytes of extra space to leave next to this arg.
3092 This is ignored if an argument block has already been allocated.
3094 On a machine that lacks real push insns, ARGS_ADDR is the address of
3095 the bottom of the argument block for this call. We use indexing off there
3096 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3097 argument block has not been preallocated.
3099 ARGS_SO_FAR is the size of args previously pushed for this call.
3101 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3102 for arguments passed in registers. If nonzero, it will be the number
3103 of bytes required. */
3105 void
3106 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3107 args_addr, args_so_far, reg_parm_stack_space,
3108 alignment_pad)
3109 register rtx x;
3110 enum machine_mode mode;
3111 tree type;
3112 rtx size;
3113 unsigned int align;
3114 int partial;
3115 rtx reg;
3116 int extra;
3117 rtx args_addr;
3118 rtx args_so_far;
3119 int reg_parm_stack_space;
3120 rtx alignment_pad;
3122 rtx xinner;
3123 enum direction stack_direction
3124 #ifdef STACK_GROWS_DOWNWARD
3125 = downward;
3126 #else
3127 = upward;
3128 #endif
3130 /* Decide where to pad the argument: `downward' for below,
3131 `upward' for above, or `none' for don't pad it.
3132 Default is below for small data on big-endian machines; else above. */
3133 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3135 /* Invert direction if stack is post-update. */
3136 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
3137 if (where_pad != none)
3138 where_pad = (where_pad == downward ? upward : downward);
3140 xinner = x = protect_from_queue (x, 0);
3142 if (mode == BLKmode)
3144 /* Copy a block into the stack, entirely or partially. */
3146 register rtx temp;
3147 int used = partial * UNITS_PER_WORD;
3148 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3149 int skip;
3151 if (size == 0)
3152 abort ();
3154 used -= offset;
3156 /* USED is now the # of bytes we need not copy to the stack
3157 because registers will take care of them. */
3159 if (partial != 0)
3160 xinner = change_address (xinner, BLKmode,
3161 plus_constant (XEXP (xinner, 0), used));
3163 /* If the partial register-part of the arg counts in its stack size,
3164 skip the part of stack space corresponding to the registers.
3165 Otherwise, start copying to the beginning of the stack space,
3166 by setting SKIP to 0. */
3167 skip = (reg_parm_stack_space == 0) ? 0 : used;
3169 #ifdef PUSH_ROUNDING
3170 /* Do it with several push insns if that doesn't take lots of insns
3171 and if there is no difficulty with push insns that skip bytes
3172 on the stack for alignment purposes. */
3173 if (args_addr == 0
3174 && PUSH_ARGS
3175 && GET_CODE (size) == CONST_INT
3176 && skip == 0
3177 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3178 /* Here we avoid the case of a structure whose weak alignment
3179 forces many pushes of a small amount of data,
3180 and such small pushes do rounding that causes trouble. */
3181 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3182 || align >= BIGGEST_ALIGNMENT
3183 || PUSH_ROUNDING (align) == align)
3184 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3186 /* Push padding now if padding above and stack grows down,
3187 or if padding below and stack grows up.
3188 But if space already allocated, this has already been done. */
3189 if (extra && args_addr == 0
3190 && where_pad != none && where_pad != stack_direction)
3191 anti_adjust_stack (GEN_INT (extra));
3193 stack_pointer_delta += INTVAL (size) - used;
3194 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
3195 INTVAL (size) - used, align);
3197 if (current_function_check_memory_usage && ! in_check_memory_usage)
3199 rtx temp;
3201 in_check_memory_usage = 1;
3202 temp = get_push_address (INTVAL (size) - used);
3203 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3204 emit_library_call (chkr_copy_bitmap_libfunc,
3205 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3206 Pmode, XEXP (xinner, 0), Pmode,
3207 GEN_INT (INTVAL (size) - used),
3208 TYPE_MODE (sizetype));
3209 else
3210 emit_library_call (chkr_set_right_libfunc,
3211 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3212 Pmode, GEN_INT (INTVAL (size) - used),
3213 TYPE_MODE (sizetype),
3214 GEN_INT (MEMORY_USE_RW),
3215 TYPE_MODE (integer_type_node));
3216 in_check_memory_usage = 0;
3219 else
3220 #endif /* PUSH_ROUNDING */
3222 rtx target;
3224 /* Otherwise make space on the stack and copy the data
3225 to the address of that space. */
3227 /* Deduct words put into registers from the size we must copy. */
3228 if (partial != 0)
3230 if (GET_CODE (size) == CONST_INT)
3231 size = GEN_INT (INTVAL (size) - used);
3232 else
3233 size = expand_binop (GET_MODE (size), sub_optab, size,
3234 GEN_INT (used), NULL_RTX, 0,
3235 OPTAB_LIB_WIDEN);
3238 /* Get the address of the stack space.
3239 In this case, we do not deal with EXTRA separately.
3240 A single stack adjust will do. */
3241 if (! args_addr)
3243 temp = push_block (size, extra, where_pad == downward);
3244 extra = 0;
3246 else if (GET_CODE (args_so_far) == CONST_INT)
3247 temp = memory_address (BLKmode,
3248 plus_constant (args_addr,
3249 skip + INTVAL (args_so_far)));
3250 else
3251 temp = memory_address (BLKmode,
3252 plus_constant (gen_rtx_PLUS (Pmode,
3253 args_addr,
3254 args_so_far),
3255 skip));
3256 if (current_function_check_memory_usage && ! in_check_memory_usage)
3258 in_check_memory_usage = 1;
3259 target = copy_to_reg (temp);
3260 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3261 emit_library_call (chkr_copy_bitmap_libfunc,
3262 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3263 target, Pmode,
3264 XEXP (xinner, 0), Pmode,
3265 size, TYPE_MODE (sizetype));
3266 else
3267 emit_library_call (chkr_set_right_libfunc,
3268 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3269 target, Pmode,
3270 size, TYPE_MODE (sizetype),
3271 GEN_INT (MEMORY_USE_RW),
3272 TYPE_MODE (integer_type_node));
3273 in_check_memory_usage = 0;
3276 target = gen_rtx_MEM (BLKmode, temp);
3278 if (type != 0)
3280 set_mem_attributes (target, type, 1);
3281 /* Function incoming arguments may overlap with sibling call
3282 outgoing arguments and we cannot allow reordering of reads
3283 from function arguments with stores to outgoing arguments
3284 of sibling calls. */
3285 MEM_ALIAS_SET (target) = 0;
3288 /* TEMP is the address of the block. Copy the data there. */
3289 if (GET_CODE (size) == CONST_INT
3290 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3292 move_by_pieces (target, xinner, INTVAL (size), align);
3293 goto ret;
3295 else
3297 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3298 enum machine_mode mode;
3300 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3301 mode != VOIDmode;
3302 mode = GET_MODE_WIDER_MODE (mode))
3304 enum insn_code code = movstr_optab[(int) mode];
3305 insn_operand_predicate_fn pred;
3307 if (code != CODE_FOR_nothing
3308 && ((GET_CODE (size) == CONST_INT
3309 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3310 <= (GET_MODE_MASK (mode) >> 1)))
3311 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3312 && (!(pred = insn_data[(int) code].operand[0].predicate)
3313 || ((*pred) (target, BLKmode)))
3314 && (!(pred = insn_data[(int) code].operand[1].predicate)
3315 || ((*pred) (xinner, BLKmode)))
3316 && (!(pred = insn_data[(int) code].operand[3].predicate)
3317 || ((*pred) (opalign, VOIDmode))))
3319 rtx op2 = convert_to_mode (mode, size, 1);
3320 rtx last = get_last_insn ();
3321 rtx pat;
3323 pred = insn_data[(int) code].operand[2].predicate;
3324 if (pred != 0 && ! (*pred) (op2, mode))
3325 op2 = copy_to_mode_reg (mode, op2);
3327 pat = GEN_FCN ((int) code) (target, xinner,
3328 op2, opalign);
3329 if (pat)
3331 emit_insn (pat);
3332 goto ret;
3334 else
3335 delete_insns_since (last);
3340 if (!ACCUMULATE_OUTGOING_ARGS)
3342 /* If the source is referenced relative to the stack pointer,
3343 copy it to another register to stabilize it. We do not need
3344 to do this if we know that we won't be changing sp. */
3346 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3347 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3348 temp = copy_to_reg (temp);
3351 /* Make inhibit_defer_pop nonzero around the library call
3352 to force it to pop the bcopy-arguments right away. */
3353 NO_DEFER_POP;
3354 #ifdef TARGET_MEM_FUNCTIONS
3355 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3356 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3357 convert_to_mode (TYPE_MODE (sizetype),
3358 size, TREE_UNSIGNED (sizetype)),
3359 TYPE_MODE (sizetype));
3360 #else
3361 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3362 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3363 convert_to_mode (TYPE_MODE (integer_type_node),
3364 size,
3365 TREE_UNSIGNED (integer_type_node)),
3366 TYPE_MODE (integer_type_node));
3367 #endif
3368 OK_DEFER_POP;
3371 else if (partial > 0)
3373 /* Scalar partly in registers. */
3375 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3376 int i;
3377 int not_stack;
3378 /* # words of start of argument
3379 that we must make space for but need not store. */
3380 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3381 int args_offset = INTVAL (args_so_far);
3382 int skip;
3384 /* Push padding now if padding above and stack grows down,
3385 or if padding below and stack grows up.
3386 But if space already allocated, this has already been done. */
3387 if (extra && args_addr == 0
3388 && where_pad != none && where_pad != stack_direction)
3389 anti_adjust_stack (GEN_INT (extra));
3391 /* If we make space by pushing it, we might as well push
3392 the real data. Otherwise, we can leave OFFSET nonzero
3393 and leave the space uninitialized. */
3394 if (args_addr == 0)
3395 offset = 0;
3397 /* Now NOT_STACK gets the number of words that we don't need to
3398 allocate on the stack. */
3399 not_stack = partial - offset;
3401 /* If the partial register-part of the arg counts in its stack size,
3402 skip the part of stack space corresponding to the registers.
3403 Otherwise, start copying to the beginning of the stack space,
3404 by setting SKIP to 0. */
3405 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3407 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3408 x = validize_mem (force_const_mem (mode, x));
3410 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3411 SUBREGs of such registers are not allowed. */
3412 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3413 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3414 x = copy_to_reg (x);
3416 /* Loop over all the words allocated on the stack for this arg. */
3417 /* We can do it by words, because any scalar bigger than a word
3418 has a size a multiple of a word. */
3419 #ifndef PUSH_ARGS_REVERSED
3420 for (i = not_stack; i < size; i++)
3421 #else
3422 for (i = size - 1; i >= not_stack; i--)
3423 #endif
3424 if (i >= not_stack + offset)
3425 emit_push_insn (operand_subword_force (x, i, mode),
3426 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3427 0, args_addr,
3428 GEN_INT (args_offset + ((i - not_stack + skip)
3429 * UNITS_PER_WORD)),
3430 reg_parm_stack_space, alignment_pad);
3432 else
3434 rtx addr;
3435 rtx target = NULL_RTX;
3436 rtx dest;
3438 /* Push padding now if padding above and stack grows down,
3439 or if padding below and stack grows up.
3440 But if space already allocated, this has already been done. */
3441 if (extra && args_addr == 0
3442 && where_pad != none && where_pad != stack_direction)
3443 anti_adjust_stack (GEN_INT (extra));
3445 #ifdef PUSH_ROUNDING
3446 if (args_addr == 0 && PUSH_ARGS)
3448 addr = gen_push_operand ();
3449 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3451 else
3452 #endif
3454 if (GET_CODE (args_so_far) == CONST_INT)
3455 addr
3456 = memory_address (mode,
3457 plus_constant (args_addr,
3458 INTVAL (args_so_far)));
3459 else
3460 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3461 args_so_far));
3462 target = addr;
3465 dest = gen_rtx_MEM (mode, addr);
3466 if (type != 0)
3468 set_mem_attributes (dest, type, 1);
3469 /* Function incoming arguments may overlap with sibling call
3470 outgoing arguments and we cannot allow reordering of reads
3471 from function arguments with stores to outgoing arguments
3472 of sibling calls. */
3473 MEM_ALIAS_SET (dest) = 0;
3476 emit_move_insn (dest, x);
3478 if (current_function_check_memory_usage && ! in_check_memory_usage)
3480 in_check_memory_usage = 1;
3481 if (target == 0)
3482 target = get_push_address (GET_MODE_SIZE (mode));
3484 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3485 emit_library_call (chkr_copy_bitmap_libfunc,
3486 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3487 Pmode, XEXP (x, 0), Pmode,
3488 GEN_INT (GET_MODE_SIZE (mode)),
3489 TYPE_MODE (sizetype));
3490 else
3491 emit_library_call (chkr_set_right_libfunc,
3492 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3493 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3494 TYPE_MODE (sizetype),
3495 GEN_INT (MEMORY_USE_RW),
3496 TYPE_MODE (integer_type_node));
3497 in_check_memory_usage = 0;
3501 ret:
3502 /* If part should go in registers, copy that part
3503 into the appropriate registers. Do this now, at the end,
3504 since mem-to-mem copies above may do function calls. */
3505 if (partial > 0 && reg != 0)
3507 /* Handle calls that pass values in multiple non-contiguous locations.
3508 The Irix 6 ABI has examples of this. */
3509 if (GET_CODE (reg) == PARALLEL)
3510 emit_group_load (reg, x, -1, align); /* ??? size? */
3511 else
3512 move_block_to_reg (REGNO (reg), x, partial, mode);
3515 if (extra && args_addr == 0 && where_pad == stack_direction)
3516 anti_adjust_stack (GEN_INT (extra));
3518 if (alignment_pad && args_addr == 0)
3519 anti_adjust_stack (alignment_pad);
3522 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3523 operations. */
3525 static rtx
3526 get_subtarget (x)
3527 rtx x;
3529 return ((x == 0
3530 /* Only registers can be subtargets. */
3531 || GET_CODE (x) != REG
3532 /* If the register is readonly, it can't be set more than once. */
3533 || RTX_UNCHANGING_P (x)
3534 /* Don't use hard regs to avoid extending their life. */
3535 || REGNO (x) < FIRST_PSEUDO_REGISTER
3536 /* Avoid subtargets inside loops,
3537 since they hide some invariant expressions. */
3538 || preserve_subexpressions_p ())
3539 ? 0 : x);
3542 /* Expand an assignment that stores the value of FROM into TO.
3543 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3544 (This may contain a QUEUED rtx;
3545 if the value is constant, this rtx is a constant.)
3546 Otherwise, the returned value is NULL_RTX.
3548 SUGGEST_REG is no longer actually used.
3549 It used to mean, copy the value through a register
3550 and return that register, if that is possible.
3551 We now use WANT_VALUE to decide whether to do this. */
3554 expand_assignment (to, from, want_value, suggest_reg)
3555 tree to, from;
3556 int want_value;
3557 int suggest_reg ATTRIBUTE_UNUSED;
3559 register rtx to_rtx = 0;
3560 rtx result;
3562 /* Don't crash if the lhs of the assignment was erroneous. */
3564 if (TREE_CODE (to) == ERROR_MARK)
3566 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3567 return want_value ? result : NULL_RTX;
3570 /* Assignment of a structure component needs special treatment
3571 if the structure component's rtx is not simply a MEM.
3572 Assignment of an array element at a constant index, and assignment of
3573 an array element in an unaligned packed structure field, has the same
3574 problem. */
3576 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3577 || TREE_CODE (to) == ARRAY_REF)
3579 enum machine_mode mode1;
3580 HOST_WIDE_INT bitsize, bitpos;
3581 tree offset;
3582 int unsignedp;
3583 int volatilep = 0;
3584 tree tem;
3585 unsigned int alignment;
3587 push_temp_slots ();
3588 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3589 &unsignedp, &volatilep, &alignment);
3591 /* If we are going to use store_bit_field and extract_bit_field,
3592 make sure to_rtx will be safe for multiple use. */
3594 if (mode1 == VOIDmode && want_value)
3595 tem = stabilize_reference (tem);
3597 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3598 if (offset != 0)
3600 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3602 if (GET_CODE (to_rtx) != MEM)
3603 abort ();
3605 if (GET_MODE (offset_rtx) != ptr_mode)
3607 #ifdef POINTERS_EXTEND_UNSIGNED
3608 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3609 #else
3610 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3611 #endif
3614 /* A constant address in TO_RTX can have VOIDmode; we must not try
3615 to call force_reg for that case, so avoid it. */
3616 if (GET_CODE (to_rtx) == MEM
3617 && GET_MODE (to_rtx) == BLKmode
3618 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3619 && bitsize
3620 && (bitpos % bitsize) == 0
3621 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3622 && alignment == GET_MODE_ALIGNMENT (mode1))
3624 rtx temp = change_address (to_rtx, mode1,
3625 plus_constant (XEXP (to_rtx, 0),
3626 (bitpos /
3627 BITS_PER_UNIT)));
3628 if (GET_CODE (XEXP (temp, 0)) == REG)
3629 to_rtx = temp;
3630 else
3631 to_rtx = change_address (to_rtx, mode1,
3632 force_reg (GET_MODE (XEXP (temp, 0)),
3633 XEXP (temp, 0)));
3634 bitpos = 0;
3637 to_rtx = change_address (to_rtx, VOIDmode,
3638 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3639 force_reg (ptr_mode,
3640 offset_rtx)));
3643 if (volatilep)
3645 if (GET_CODE (to_rtx) == MEM)
3647 /* When the offset is zero, to_rtx is the address of the
3648 structure we are storing into, and hence may be shared.
3649 We must make a new MEM before setting the volatile bit. */
3650 if (offset == 0)
3651 to_rtx = copy_rtx (to_rtx);
3653 MEM_VOLATILE_P (to_rtx) = 1;
3655 #if 0 /* This was turned off because, when a field is volatile
3656 in an object which is not volatile, the object may be in a register,
3657 and then we would abort over here. */
3658 else
3659 abort ();
3660 #endif
3663 if (TREE_CODE (to) == COMPONENT_REF
3664 && TREE_READONLY (TREE_OPERAND (to, 1)))
3666 if (offset == 0)
3667 to_rtx = copy_rtx (to_rtx);
3669 RTX_UNCHANGING_P (to_rtx) = 1;
3672 /* Check the access. */
3673 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3675 rtx to_addr;
3676 int size;
3677 int best_mode_size;
3678 enum machine_mode best_mode;
3680 best_mode = get_best_mode (bitsize, bitpos,
3681 TYPE_ALIGN (TREE_TYPE (tem)),
3682 mode1, volatilep);
3683 if (best_mode == VOIDmode)
3684 best_mode = QImode;
3686 best_mode_size = GET_MODE_BITSIZE (best_mode);
3687 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3688 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3689 size *= GET_MODE_SIZE (best_mode);
3691 /* Check the access right of the pointer. */
3692 in_check_memory_usage = 1;
3693 if (size)
3694 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3695 VOIDmode, 3, to_addr, Pmode,
3696 GEN_INT (size), TYPE_MODE (sizetype),
3697 GEN_INT (MEMORY_USE_WO),
3698 TYPE_MODE (integer_type_node));
3699 in_check_memory_usage = 0;
3702 /* If this is a varying-length object, we must get the address of
3703 the source and do an explicit block move. */
3704 if (bitsize < 0)
3706 unsigned int from_align;
3707 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3708 rtx inner_to_rtx
3709 = change_address (to_rtx, VOIDmode,
3710 plus_constant (XEXP (to_rtx, 0),
3711 bitpos / BITS_PER_UNIT));
3713 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3714 MIN (alignment, from_align));
3715 free_temp_slots ();
3716 pop_temp_slots ();
3717 return to_rtx;
3719 else
3721 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3722 (want_value
3723 /* Spurious cast for HPUX compiler. */
3724 ? ((enum machine_mode)
3725 TYPE_MODE (TREE_TYPE (to)))
3726 : VOIDmode),
3727 unsignedp,
3728 alignment,
3729 int_size_in_bytes (TREE_TYPE (tem)),
3730 get_alias_set (to));
3732 preserve_temp_slots (result);
3733 free_temp_slots ();
3734 pop_temp_slots ();
3736 /* If the value is meaningful, convert RESULT to the proper mode.
3737 Otherwise, return nothing. */
3738 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3739 TYPE_MODE (TREE_TYPE (from)),
3740 result,
3741 TREE_UNSIGNED (TREE_TYPE (to)))
3742 : NULL_RTX);
3746 /* If the rhs is a function call and its value is not an aggregate,
3747 call the function before we start to compute the lhs.
3748 This is needed for correct code for cases such as
3749 val = setjmp (buf) on machines where reference to val
3750 requires loading up part of an address in a separate insn.
3752 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3753 since it might be a promoted variable where the zero- or sign- extension
3754 needs to be done. Handling this in the normal way is safe because no
3755 computation is done before the call. */
3756 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3757 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3758 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3759 && GET_CODE (DECL_RTL (to)) == REG))
3761 rtx value;
3763 push_temp_slots ();
3764 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3765 if (to_rtx == 0)
3766 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3768 /* Handle calls that return values in multiple non-contiguous locations.
3769 The Irix 6 ABI has examples of this. */
3770 if (GET_CODE (to_rtx) == PARALLEL)
3771 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3772 TYPE_ALIGN (TREE_TYPE (from)));
3773 else if (GET_MODE (to_rtx) == BLKmode)
3774 emit_block_move (to_rtx, value, expr_size (from),
3775 TYPE_ALIGN (TREE_TYPE (from)));
3776 else
3778 #ifdef POINTERS_EXTEND_UNSIGNED
3779 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3780 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3781 value = convert_memory_address (GET_MODE (to_rtx), value);
3782 #endif
3783 emit_move_insn (to_rtx, value);
3785 preserve_temp_slots (to_rtx);
3786 free_temp_slots ();
3787 pop_temp_slots ();
3788 return want_value ? to_rtx : NULL_RTX;
3791 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3792 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3794 if (to_rtx == 0)
3796 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3797 if (GET_CODE (to_rtx) == MEM)
3798 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3801 /* Don't move directly into a return register. */
3802 if (TREE_CODE (to) == RESULT_DECL
3803 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3805 rtx temp;
3807 push_temp_slots ();
3808 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3810 if (GET_CODE (to_rtx) == PARALLEL)
3811 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3812 TYPE_ALIGN (TREE_TYPE (from)));
3813 else
3814 emit_move_insn (to_rtx, temp);
3816 preserve_temp_slots (to_rtx);
3817 free_temp_slots ();
3818 pop_temp_slots ();
3819 return want_value ? to_rtx : NULL_RTX;
3822 /* In case we are returning the contents of an object which overlaps
3823 the place the value is being stored, use a safe function when copying
3824 a value through a pointer into a structure value return block. */
3825 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3826 && current_function_returns_struct
3827 && !current_function_returns_pcc_struct)
3829 rtx from_rtx, size;
3831 push_temp_slots ();
3832 size = expr_size (from);
3833 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3834 EXPAND_MEMORY_USE_DONT);
3836 /* Copy the rights of the bitmap. */
3837 if (current_function_check_memory_usage)
3838 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3839 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3840 XEXP (from_rtx, 0), Pmode,
3841 convert_to_mode (TYPE_MODE (sizetype),
3842 size, TREE_UNSIGNED (sizetype)),
3843 TYPE_MODE (sizetype));
3845 #ifdef TARGET_MEM_FUNCTIONS
3846 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3847 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3848 XEXP (from_rtx, 0), Pmode,
3849 convert_to_mode (TYPE_MODE (sizetype),
3850 size, TREE_UNSIGNED (sizetype)),
3851 TYPE_MODE (sizetype));
3852 #else
3853 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3854 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3855 XEXP (to_rtx, 0), Pmode,
3856 convert_to_mode (TYPE_MODE (integer_type_node),
3857 size, TREE_UNSIGNED (integer_type_node)),
3858 TYPE_MODE (integer_type_node));
3859 #endif
3861 preserve_temp_slots (to_rtx);
3862 free_temp_slots ();
3863 pop_temp_slots ();
3864 return want_value ? to_rtx : NULL_RTX;
3867 /* Compute FROM and store the value in the rtx we got. */
3869 push_temp_slots ();
3870 result = store_expr (from, to_rtx, want_value);
3871 preserve_temp_slots (result);
3872 free_temp_slots ();
3873 pop_temp_slots ();
3874 return want_value ? result : NULL_RTX;
3877 /* Generate code for computing expression EXP,
3878 and storing the value into TARGET.
3879 TARGET may contain a QUEUED rtx.
3881 If WANT_VALUE is nonzero, return a copy of the value
3882 not in TARGET, so that we can be sure to use the proper
3883 value in a containing expression even if TARGET has something
3884 else stored in it. If possible, we copy the value through a pseudo
3885 and return that pseudo. Or, if the value is constant, we try to
3886 return the constant. In some cases, we return a pseudo
3887 copied *from* TARGET.
3889 If the mode is BLKmode then we may return TARGET itself.
3890 It turns out that in BLKmode it doesn't cause a problem,
3891 because C has no operators that could combine two different
3892 assignments into the same BLKmode object with different values
3893 with no sequence point. Will other languages need this to
3894 be more thorough?
3896 If WANT_VALUE is 0, we return NULL, to make sure
3897 to catch quickly any cases where the caller uses the value
3898 and fails to set WANT_VALUE. */
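/* For example (an illustrative use with hypothetical C source): in a
   chained assignment such as `a = b = c;', the inner assignment is
   expanded with WANT_VALUE nonzero so the rtx it returns can be used
   by the containing expression without reading B back; a plain
   statement `b = c;' uses WANT_VALUE == 0 and gets NULL_RTX. */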
3900 rtx
3901 store_expr (exp, target, want_value)
3902 register tree exp;
3903 register rtx target;
3904 int want_value;
3906 register rtx temp;
3907 int dont_return_target = 0;
3909 if (TREE_CODE (exp) == COMPOUND_EXPR)
3911 /* Perform first part of compound expression, then assign from second
3912 part. */
3913 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3914 emit_queue ();
3915 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3917 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3919 /* For conditional expression, get safe form of the target. Then
3920 test the condition, doing the appropriate assignment on either
3921 side. This avoids the creation of unnecessary temporaries.
3922 For non-BLKmode, it is more efficient not to do this. */
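/* In outline, the code emitted below has this shape (LAB1 and LAB2
   are the labels generated next):
       if (!cond) goto LAB1;
       <store operand 1 into TARGET>; goto LAB2;
     LAB1:
       <store operand 2 into TARGET>;
     LAB2:  */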
3924 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3926 emit_queue ();
3927 target = protect_from_queue (target, 1);
3929 do_pending_stack_adjust ();
3930 NO_DEFER_POP;
3931 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3932 start_cleanup_deferral ();
3933 store_expr (TREE_OPERAND (exp, 1), target, 0);
3934 end_cleanup_deferral ();
3935 emit_queue ();
3936 emit_jump_insn (gen_jump (lab2));
3937 emit_barrier ();
3938 emit_label (lab1);
3939 start_cleanup_deferral ();
3940 store_expr (TREE_OPERAND (exp, 2), target, 0);
3941 end_cleanup_deferral ();
3942 emit_queue ();
3943 emit_label (lab2);
3944 OK_DEFER_POP;
3946 return want_value ? target : NULL_RTX;
3948 else if (queued_subexp_p (target))
3949 /* If target contains a postincrement, let's not risk
3950 using it as the place to generate the rhs. */
3952 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3954 /* Expand EXP into a new pseudo. */
3955 temp = gen_reg_rtx (GET_MODE (target));
3956 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3958 else
3959 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3961 /* If target is volatile, ANSI requires accessing the value
3962 *from* the target, if it is accessed. So make that happen.
3963 In no case return the target itself. */
3964 if (! MEM_VOLATILE_P (target) && want_value)
3965 dont_return_target = 1;
3967 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3968 && GET_MODE (target) != BLKmode)
3969 /* If target is in memory and caller wants value in a register instead,
3970 arrange that. Pass TARGET as target for expand_expr so that,
3971 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3972 We know expand_expr will not use the target in that case.
3973 Don't do this if TARGET is volatile because we are supposed
3974 to write it and then read it. */
3976 temp = expand_expr (exp, target, GET_MODE (target), 0);
3977 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3978 temp = copy_to_reg (temp);
3979 dont_return_target = 1;
3981 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3982 /* If this is a scalar in a register that is stored in a wider mode
3983 than the declared mode, compute the result into its declared mode
3984 and then convert to the wider mode. Our value is the computed
3985 expression. */
3987 /* If we don't want a value, we can do the conversion inside EXP,
3988 which will often result in some optimizations. Do the conversion
3989 in two steps: first change the signedness, if needed, then
3990 the extend. But don't do this if the type of EXP is a subtype
3991 of something else since then the conversion might involve
3992 more than just converting modes. */
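/* A sketch of the effect (assuming a target that promotes narrow
   variables to a wider register mode): storing a signed char value
   into a variable promoted as unsigned first converts the expression
   to the unsigned variant of its type, so the widening to the
   register's wider mode is a zero-extension, not a sign-extension. */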
3993 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3994 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3996 if (TREE_UNSIGNED (TREE_TYPE (exp))
3997 != SUBREG_PROMOTED_UNSIGNED_P (target))
3998 exp
3999 = convert
4000 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4001 TREE_TYPE (exp)),
4002 exp);
4004 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4005 SUBREG_PROMOTED_UNSIGNED_P (target)),
4006 exp);
4009 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4011 /* If TEMP is a volatile MEM and we want a result value, make
4012 the access now so it gets done only once. Likewise if
4013 it contains TARGET. */
4014 if (GET_CODE (temp) == MEM && want_value
4015 && (MEM_VOLATILE_P (temp)
4016 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4017 temp = copy_to_reg (temp);
4019 /* If TEMP is a VOIDmode constant, use convert_modes to make
4020 sure that we properly convert it. */
4021 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4022 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4023 TYPE_MODE (TREE_TYPE (exp)), temp,
4024 SUBREG_PROMOTED_UNSIGNED_P (target));
4026 convert_move (SUBREG_REG (target), temp,
4027 SUBREG_PROMOTED_UNSIGNED_P (target));
4029 /* If we promoted a constant, change the mode back down to match
4030 target. Otherwise, the caller might get confused by a result whose
4031 mode is larger than expected. */
4033 if (want_value && GET_MODE (temp) != GET_MODE (target)
4034 && GET_MODE (temp) != VOIDmode)
4036 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
4037 SUBREG_PROMOTED_VAR_P (temp) = 1;
4038 SUBREG_PROMOTED_UNSIGNED_P (temp)
4039 = SUBREG_PROMOTED_UNSIGNED_P (target);
4042 return want_value ? temp : NULL_RTX;
4044 else
4046 temp = expand_expr (exp, target, GET_MODE (target), 0);
4047 /* Return TARGET if it's a specified hardware register.
4048 If TARGET is a volatile mem ref, either return TARGET
4049 or return a reg copied *from* TARGET; ANSI requires this.
4051 Otherwise, if TEMP is not TARGET, return TEMP
4052 if it is constant (for efficiency),
4053 or if we really want the correct value. */
4054 if (!(target && GET_CODE (target) == REG
4055 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4056 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4057 && ! rtx_equal_p (temp, target)
4058 && (CONSTANT_P (temp) || want_value))
4059 dont_return_target = 1;
4062 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4063 the same as that of TARGET, adjust the constant. This is needed, for
4064 example, in case it is a CONST_DOUBLE and we want only a word-sized
4065 value. */
4066 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4067 && TREE_CODE (exp) != ERROR_MARK
4068 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4069 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4070 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4072 if (current_function_check_memory_usage
4073 && GET_CODE (target) == MEM
4074 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4076 in_check_memory_usage = 1;
4077 if (GET_CODE (temp) == MEM)
4078 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4079 VOIDmode, 3, XEXP (target, 0), Pmode,
4080 XEXP (temp, 0), Pmode,
4081 expr_size (exp), TYPE_MODE (sizetype));
4082 else
4083 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4084 VOIDmode, 3, XEXP (target, 0), Pmode,
4085 expr_size (exp), TYPE_MODE (sizetype),
4086 GEN_INT (MEMORY_USE_WO),
4087 TYPE_MODE (integer_type_node));
4088 in_check_memory_usage = 0;
4091 /* If value was not generated in the target, store it there.
4092 Convert the value to TARGET's type first if necessary. */
4093 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4094 one or both of them are volatile memory refs, we have to distinguish
4095 two cases:
4096 - expand_expr has used TARGET. In this case, we must not generate
4097 another copy. This can be detected by TEMP being equal to TARGET
4098 according to == .
4099 - expand_expr has not used TARGET - that means that the source just
4100 happens to have the same RTX form. Since temp will have been created
4101 by expand_expr, it will compare unequal according to == .
4102 We must generate a copy in this case, to reach the correct number
4103 of volatile memory references. */
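/* A concrete case of the second situation (illustrative): for a
   self-assignment `v = v;' of a volatile object, expand_expr builds a
   fresh MEM for the right-hand side that is rtx_equal_p to TARGET but
   not == to it, so the copy below is still emitted and both volatile
   accesses take place. */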
4105 if ((! rtx_equal_p (temp, target)
4106 || (temp != target && (side_effects_p (temp)
4107 || side_effects_p (target))))
4108 && TREE_CODE (exp) != ERROR_MARK)
4110 target = protect_from_queue (target, 1);
4111 if (GET_MODE (temp) != GET_MODE (target)
4112 && GET_MODE (temp) != VOIDmode)
4114 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4115 if (dont_return_target)
4117 /* In this case, we will return TEMP,
4118 so make sure it has the proper mode.
4119 But don't forget to store the value into TARGET. */
4120 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4121 emit_move_insn (target, temp);
4123 else
4124 convert_move (target, temp, unsignedp);
4127 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4129 /* Handle copying a string constant into an array.
4130 The string constant may be shorter than the array.
4131 So copy just the string's actual length, and clear the rest. */
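/* For instance (illustrative values): for `char buf[8] = "hi";' the
   string constant occupies 3 bytes including the terminating nul, so
   3 bytes are copied into BUF and the remaining 5 are cleared. */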
4132 rtx size;
4133 rtx addr;
4135 /* Get the size of the data type of the string,
4136 which is actually the size of the target. */
4137 size = expr_size (exp);
4138 if (GET_CODE (size) == CONST_INT
4139 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4140 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
4141 else
4143 /* Compute the size of the data to copy from the string. */
4144 tree copy_size
4145 = size_binop (MIN_EXPR,
4146 make_tree (sizetype, size),
4147 size_int (TREE_STRING_LENGTH (exp)));
4148 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
4149 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4150 VOIDmode, 0);
4151 rtx label = 0;
4153 /* Copy that much. */
4154 emit_block_move (target, temp, copy_size_rtx,
4155 TYPE_ALIGN (TREE_TYPE (exp)));
4157 /* Figure out how much is left in TARGET that we have to clear.
4158 Do all calculations in ptr_mode. */
4160 addr = XEXP (target, 0);
4161 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4163 if (GET_CODE (copy_size_rtx) == CONST_INT)
4165 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4166 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4167 align = MIN (align,
4168 (unsigned int) (BITS_PER_UNIT
4169 * (INTVAL (copy_size_rtx)
4170 & - INTVAL (copy_size_rtx))));
4172 else
4174 addr = force_reg (ptr_mode, addr);
4175 addr = expand_binop (ptr_mode, add_optab, addr,
4176 copy_size_rtx, NULL_RTX, 0,
4177 OPTAB_LIB_WIDEN);
4179 size = expand_binop (ptr_mode, sub_optab, size,
4180 copy_size_rtx, NULL_RTX, 0,
4181 OPTAB_LIB_WIDEN);
4183 align = BITS_PER_UNIT;
4184 label = gen_label_rtx ();
4185 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4186 GET_MODE (size), 0, 0, label);
4188 align = MIN (align, expr_align (copy_size));
4190 if (size != const0_rtx)
4192 rtx dest = gen_rtx_MEM (BLKmode, addr);
4194 MEM_COPY_ATTRIBUTES (dest, target);
4196 /* Be sure we can write on ADDR. */
4197 in_check_memory_usage = 1;
4198 if (current_function_check_memory_usage)
4199 emit_library_call (chkr_check_addr_libfunc,
4200 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4201 addr, Pmode,
4202 size, TYPE_MODE (sizetype),
4203 GEN_INT (MEMORY_USE_WO),
4204 TYPE_MODE (integer_type_node));
4205 in_check_memory_usage = 0;
4206 clear_storage (dest, size, align);
4209 if (label)
4210 emit_label (label);
4213 /* Handle calls that return values in multiple non-contiguous locations.
4214 The Irix 6 ABI has examples of this. */
4215 else if (GET_CODE (target) == PARALLEL)
4216 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4217 TYPE_ALIGN (TREE_TYPE (exp)));
4218 else if (GET_MODE (temp) == BLKmode)
4219 emit_block_move (target, temp, expr_size (exp),
4220 TYPE_ALIGN (TREE_TYPE (exp)));
4221 else
4222 emit_move_insn (target, temp);
4225 /* If we don't want a value, return NULL_RTX. */
4226 if (! want_value)
4227 return NULL_RTX;
4229 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4230 ??? The latter test doesn't seem to make sense. */
4231 else if (dont_return_target && GET_CODE (temp) != MEM)
4232 return temp;
4234 /* Return TARGET itself if it is a hard register. */
4235 else if (want_value && GET_MODE (target) != BLKmode
4236 && ! (GET_CODE (target) == REG
4237 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4238 return copy_to_reg (target);
4240 else
4241 return target;
4244 /* Return 1 if EXP just contains zeros. */
4246 static int
4247 is_zeros_p (exp)
4248 tree exp;
4250 tree elt;
4252 switch (TREE_CODE (exp))
4254 case CONVERT_EXPR:
4255 case NOP_EXPR:
4256 case NON_LVALUE_EXPR:
4257 return is_zeros_p (TREE_OPERAND (exp, 0));
4259 case INTEGER_CST:
4260 return integer_zerop (exp);
4262 case COMPLEX_CST:
4263 return
4264 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4266 case REAL_CST:
4267 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4269 case CONSTRUCTOR:
4270 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4271 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4272 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4273 if (! is_zeros_p (TREE_VALUE (elt)))
4274 return 0;
4276 return 1;
4278 default:
4279 return 0;
4283 /* Return 1 if EXP contains mostly (3/4) zeros. */
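/* For example (worked numbers): a constructor with elements
   { 0, 0, 0, 5 } gives ZEROS == 3 and ELTS == 4, and 4 * 3 >= 3 * 4
   holds, so the test below considers it mostly zero. */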
4285 static int
4286 mostly_zeros_p (exp)
4287 tree exp;
4289 if (TREE_CODE (exp) == CONSTRUCTOR)
4291 int elts = 0, zeros = 0;
4292 tree elt = CONSTRUCTOR_ELTS (exp);
4293 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4295 /* If there are no ranges of true bits, it is all zero. */
4296 return elt == NULL_TREE;
4298 for (; elt; elt = TREE_CHAIN (elt))
4300 /* We do not handle the case where the index is a RANGE_EXPR,
4301 so the statistic will be somewhat inaccurate.
4302 We do make a more accurate count in store_constructor itself,
4303 and since this function is only used for nested array elements,
4304 this should be close enough. */
4305 if (mostly_zeros_p (TREE_VALUE (elt)))
4306 zeros++;
4307 elts++;
4310 return 4 * zeros >= 3 * elts;
4313 return is_zeros_p (exp);
4316 /* Helper function for store_constructor.
4317 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4318 TYPE is the type of the CONSTRUCTOR, not the element type.
4319 ALIGN and CLEARED are as for store_constructor.
4320 ALIAS_SET is the alias set to use for any stores.
4322 This provides a recursive shortcut back to store_constructor when it isn't
4323 necessary to go through store_field. This is so that we can pass through
4324 the cleared field to let store_constructor know that we may not have to
4325 clear a substructure if the outer structure has already been cleared. */
4327 static void
4328 store_constructor_field (target, bitsize, bitpos,
4329 mode, exp, type, align, cleared, alias_set)
4330 rtx target;
4331 unsigned HOST_WIDE_INT bitsize;
4332 HOST_WIDE_INT bitpos;
4333 enum machine_mode mode;
4334 tree exp, type;
4335 unsigned int align;
4336 int cleared;
4337 int alias_set;
4339 if (TREE_CODE (exp) == CONSTRUCTOR
4340 && bitpos % BITS_PER_UNIT == 0
4341 /* If we have a non-zero bitpos for a register target, then we just
4342 let store_field do the bitfield handling. This is unlikely to
4343 generate unnecessary clear instructions anyway. */
4344 && (bitpos == 0 || GET_CODE (target) == MEM))
4346 if (bitpos != 0)
4347 target
4348 = change_address (target,
4349 GET_MODE (target) == BLKmode
4350 || 0 != (bitpos
4351 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4352 ? BLKmode : VOIDmode,
4353 plus_constant (XEXP (target, 0),
4354 bitpos / BITS_PER_UNIT));
4357 /* Show the alignment may no longer be what it was and update the alias
4358 set, if required. */
4359 if (bitpos != 0)
4360 align = MIN (align, (unsigned int) bitpos & - bitpos);
4361 if (GET_CODE (target) == MEM)
4362 MEM_ALIAS_SET (target) = alias_set;
4364 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4366 else
4367 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4368 int_size_in_bytes (type), alias_set);
4371 /* Store the value of constructor EXP into the rtx TARGET.
4372 TARGET is either a REG or a MEM.
4373 ALIGN is the maximum known alignment for TARGET.
4374 CLEARED is true if TARGET is known to have been zero'd.
4375 SIZE is the number of bytes of TARGET we are allowed to modify: this
4376 may not be the same as the size of EXP if we are assigning to a field
4377 which has been packed to exclude padding bits. */
4379 static void
4380 store_constructor (exp, target, align, cleared, size)
4381 tree exp;
4382 rtx target;
4383 unsigned int align;
4384 int cleared;
4385 HOST_WIDE_INT size;
4387 tree type = TREE_TYPE (exp);
4388 #ifdef WORD_REGISTER_OPERATIONS
4389 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4390 #endif
4392 /* We know our target cannot conflict, since safe_from_p has been called. */
4393 #if 0
4394 /* Don't try copying piece by piece into a hard register
4395 since that is vulnerable to being clobbered by EXP.
4396 Instead, construct in a pseudo register and then copy it all. */
4397 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4399 rtx temp = gen_reg_rtx (GET_MODE (target));
4400 store_constructor (exp, temp, align, cleared, size);
4401 emit_move_insn (target, temp);
4402 return;
4404 #endif
4406 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4407 || TREE_CODE (type) == QUAL_UNION_TYPE)
4409 register tree elt;
4411 /* Inform later passes that the whole union value is dead. */
4412 if ((TREE_CODE (type) == UNION_TYPE
4413 || TREE_CODE (type) == QUAL_UNION_TYPE)
4414 && ! cleared)
4416 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4418 /* If the constructor is empty, clear the union. */
4419 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4420 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4423 /* If we are building a static constructor into a register,
4424 set the initial value as zero so we can fold the value into
4425 a constant. But if more than one register is involved,
4426 this probably loses. */
4427 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4428 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4430 if (! cleared)
4431 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4433 cleared = 1;
4436 /* If the constructor has fewer fields than the structure
4437 or if we are initializing the structure to mostly zeros,
4438 clear the whole structure first. Don't do this if TARGET is a
4439 register whose mode size isn't equal to SIZE since clear_storage
4440 can't handle this case. */
4441 else if (size > 0
4442 && ((list_length (CONSTRUCTOR_ELTS (exp))
4443 != fields_length (type))
4444 || mostly_zeros_p (exp))
4445 && (GET_CODE (target) != REG
4446 || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
4448 if (! cleared)
4449 clear_storage (target, GEN_INT (size), align);
4451 cleared = 1;
4453 else if (! cleared)
4454 /* Inform later passes that the old value is dead. */
4455 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4457 /* Store each element of the constructor into
4458 the corresponding field of TARGET. */
4460 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4462 register tree field = TREE_PURPOSE (elt);
4463 #ifdef WORD_REGISTER_OPERATIONS
4464 tree value = TREE_VALUE (elt);
4465 #endif
4466 register enum machine_mode mode;
4467 HOST_WIDE_INT bitsize;
4468 HOST_WIDE_INT bitpos = 0;
4469 int unsignedp;
4470 tree offset;
4471 rtx to_rtx = target;
4473 /* Just ignore missing fields.
4474 We cleared the whole structure, above,
4475 if any fields are missing. */
4476 if (field == 0)
4477 continue;
4479 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4480 continue;
4482 if (host_integerp (DECL_SIZE (field), 1))
4483 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4484 else
4485 bitsize = -1;
4487 unsignedp = TREE_UNSIGNED (field);
4488 mode = DECL_MODE (field);
4489 if (DECL_BIT_FIELD (field))
4490 mode = VOIDmode;
4492 offset = DECL_FIELD_OFFSET (field);
4493 if (host_integerp (offset, 0)
4494 && host_integerp (bit_position (field), 0))
4496 bitpos = int_bit_position (field);
4497 offset = 0;
4499 else
4500 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4502 if (offset)
4504 rtx offset_rtx;
4506 if (contains_placeholder_p (offset))
4507 offset = build (WITH_RECORD_EXPR, sizetype,
4508 offset, make_tree (TREE_TYPE (exp), target));
4510 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4511 if (GET_CODE (to_rtx) != MEM)
4512 abort ();
4514 if (GET_MODE (offset_rtx) != ptr_mode)
4516 #ifdef POINTERS_EXTEND_UNSIGNED
4517 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4518 #else
4519 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4520 #endif
4523 to_rtx
4524 = change_address (to_rtx, VOIDmode,
4525 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4526 force_reg (ptr_mode,
4527 offset_rtx)));
4528 align = DECL_OFFSET_ALIGN (field);
4531 if (TREE_READONLY (field))
4533 if (GET_CODE (to_rtx) == MEM)
4534 to_rtx = copy_rtx (to_rtx);
4536 RTX_UNCHANGING_P (to_rtx) = 1;
4539 #ifdef WORD_REGISTER_OPERATIONS
4540 /* If this initializes a field that is smaller than a word, at the
4541 start of a word, try to widen it to a full word.
4542 This special case allows us to output C++ member function
4543 initializations in a form that the optimizers can understand. */
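/* For instance (illustrative, assuming a 32-bit big-endian word): a
   16-bit INTEGER_CST initializing bits 0..15 of a register target is
   converted to a full-word type and shifted left by 16 below, so the
   optimizers see a single word-sized store. */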
4544 if (GET_CODE (target) == REG
4545 && bitsize < BITS_PER_WORD
4546 && bitpos % BITS_PER_WORD == 0
4547 && GET_MODE_CLASS (mode) == MODE_INT
4548 && TREE_CODE (value) == INTEGER_CST
4549 && exp_size >= 0
4550 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4552 tree type = TREE_TYPE (value);
4553 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4555 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4556 value = convert (type, value);
4558 if (BYTES_BIG_ENDIAN)
4559 value
4560 = fold (build (LSHIFT_EXPR, type, value,
4561 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4562 bitsize = BITS_PER_WORD;
4563 mode = word_mode;
4565 #endif
4566 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4567 TREE_VALUE (elt), type, align, cleared,
4568 (DECL_NONADDRESSABLE_P (field)
4569 && GET_CODE (to_rtx) == MEM)
4570 ? MEM_ALIAS_SET (to_rtx)
4571 : get_alias_set (TREE_TYPE (field)));
4574 else if (TREE_CODE (type) == ARRAY_TYPE)
4576 register tree elt;
4577 register int i;
4578 int need_to_clear;
4579 tree domain = TYPE_DOMAIN (type);
4580 tree elttype = TREE_TYPE (type);
4581 int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0)
4582 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4583 HOST_WIDE_INT minelt;
4584 HOST_WIDE_INT maxelt;
4586 /* If we have constant bounds for the range of the type, get them. */
4587 if (const_bounds_p)
4589 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4590 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4593 /* If the constructor has fewer elements than the array,
4594 clear the whole array first. Similarly if this is
4595 a static constructor of a non-BLKmode object. */
4596 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4597 need_to_clear = 1;
4598 else
4600 HOST_WIDE_INT count = 0, zero_count = 0;
4601 need_to_clear = ! const_bounds_p;
4603 /* This loop is a more accurate version of the loop in
4604 mostly_zeros_p (it handles RANGE_EXPR in an index).
4605 It is also needed to check for missing elements. */
4606 for (elt = CONSTRUCTOR_ELTS (exp);
4607 elt != NULL_TREE && ! need_to_clear;
4608 elt = TREE_CHAIN (elt))
4610 tree index = TREE_PURPOSE (elt);
4611 HOST_WIDE_INT this_node_count;
4613 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4615 tree lo_index = TREE_OPERAND (index, 0);
4616 tree hi_index = TREE_OPERAND (index, 1);
4618 if (! host_integerp (lo_index, 1)
4619 || ! host_integerp (hi_index, 1))
4621 need_to_clear = 1;
4622 break;
4625 this_node_count = (tree_low_cst (hi_index, 1)
4626 - tree_low_cst (lo_index, 1) + 1);
4628 else
4629 this_node_count = 1;
4631 count += this_node_count;
4632 if (mostly_zeros_p (TREE_VALUE (elt)))
4633 zero_count += this_node_count;
4636 /* Clear the entire array first if there are any missing elements,
4637 or if the incidence of zero elements is >= 75%. */
4638 if (! need_to_clear
4639 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4640 need_to_clear = 1;
4643 if (need_to_clear && size > 0)
4645 if (! cleared)
4646 clear_storage (target, GEN_INT (size), align);
4647 cleared = 1;
4649 else
4650 /* Inform later passes that the old value is dead. */
4651 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4653 /* Store each element of the constructor into
4654 the corresponding element of TARGET, determined
4655 by counting the elements. */
4656 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4657 elt;
4658 elt = TREE_CHAIN (elt), i++)
4660 register enum machine_mode mode;
4661 HOST_WIDE_INT bitsize;
4662 HOST_WIDE_INT bitpos;
4663 int unsignedp;
4664 tree value = TREE_VALUE (elt);
4665 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4666 tree index = TREE_PURPOSE (elt);
4667 rtx xtarget = target;
4669 if (cleared && is_zeros_p (value))
4670 continue;
4672 unsignedp = TREE_UNSIGNED (elttype);
4673 mode = TYPE_MODE (elttype);
4674 if (mode == BLKmode)
4675 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4676 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4677 : -1);
4678 else
4679 bitsize = GET_MODE_BITSIZE (mode);
4681 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4683 tree lo_index = TREE_OPERAND (index, 0);
4684 tree hi_index = TREE_OPERAND (index, 1);
4685 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4686 struct nesting *loop;
4687 HOST_WIDE_INT lo, hi, count;
4688 tree position;
4690 /* If the range is constant and "small", unroll the loop. */
4691 if (const_bounds_p
4692 && host_integerp (lo_index, 0)
4693 && host_integerp (hi_index, 0)
4694 && (lo = tree_low_cst (lo_index, 0),
4695 hi = tree_low_cst (hi_index, 0),
4696 count = hi - lo + 1,
4697 (GET_CODE (target) != MEM
4698 || count <= 2
4699 || (host_integerp (TYPE_SIZE (elttype), 1)
4700 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4701 <= 40 * 8)))))
4703 lo -= minelt; hi -= minelt;
4704 for (; lo <= hi; lo++)
4706 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4707 store_constructor_field
4708 (target, bitsize, bitpos, mode, value, type, align,
4709 cleared,
4710 TYPE_NONALIASED_COMPONENT (type)
4711 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
4714 else
4716 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4717 loop_top = gen_label_rtx ();
4718 loop_end = gen_label_rtx ();
4720 unsignedp = TREE_UNSIGNED (domain);
4722 index = build_decl (VAR_DECL, NULL_TREE, domain);
4724 DECL_RTL (index) = index_r
4725 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4726 &unsignedp, 0));
4728 if (TREE_CODE (value) == SAVE_EXPR
4729 && SAVE_EXPR_RTL (value) == 0)
4731 /* Make sure value gets expanded once before the
4732 loop. */
4733 expand_expr (value, const0_rtx, VOIDmode, 0);
4734 emit_queue ();
4736 store_expr (lo_index, index_r, 0);
4737 loop = expand_start_loop (0);
4739 /* Assign value to element index. */
4740 position
4741 = convert (ssizetype,
4742 fold (build (MINUS_EXPR, TREE_TYPE (index),
4743 index, TYPE_MIN_VALUE (domain))));
4744 position = size_binop (MULT_EXPR, position,
4745 convert (ssizetype,
4746 TYPE_SIZE_UNIT (elttype)));
4748 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4749 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4750 xtarget = change_address (target, mode, addr);
4751 if (TREE_CODE (value) == CONSTRUCTOR)
4752 store_constructor (value, xtarget, align, cleared,
4753 bitsize / BITS_PER_UNIT);
4754 else
4755 store_expr (value, xtarget, 0);
4757 expand_exit_loop_if_false (loop,
4758 build (LT_EXPR, integer_type_node,
4759 index, hi_index));
4761 expand_increment (build (PREINCREMENT_EXPR,
4762 TREE_TYPE (index),
4763 index, integer_one_node), 0, 0);
4764 expand_end_loop ();
4765 emit_label (loop_end);
4768 else if ((index != 0 && ! host_integerp (index, 0))
4769 || ! host_integerp (TYPE_SIZE (elttype), 1))
4771 rtx pos_rtx, addr;
4772 tree position;
4774 if (index == 0)
4775 index = ssize_int (1);
4777 if (minelt)
4778 index = convert (ssizetype,
4779 fold (build (MINUS_EXPR, index,
4780 TYPE_MIN_VALUE (domain))));
4782 position = size_binop (MULT_EXPR, index,
4783 convert (ssizetype,
4784 TYPE_SIZE_UNIT (elttype)));
4785 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4786 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4787 xtarget = change_address (target, mode, addr);
4788 store_expr (value, xtarget, 0);
4790 else
4792 if (index != 0)
4793 bitpos = ((tree_low_cst (index, 0) - minelt)
4794 * tree_low_cst (TYPE_SIZE (elttype), 1));
4795 else
4796 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4798 store_constructor_field (target, bitsize, bitpos, mode, value,
4799 type, align, cleared,
4800 TYPE_NONALIASED_COMPONENT (type)
4801 && GET_CODE (target) == MEM
4802 ? MEM_ALIAS_SET (target) :
4803 get_alias_set (elttype));
4809 /* Set constructor assignments. */
4810 else if (TREE_CODE (type) == SET_TYPE)
4812 tree elt = CONSTRUCTOR_ELTS (exp);
4813 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4814 tree domain = TYPE_DOMAIN (type);
4815 tree domain_min, domain_max, bitlength;
4817 /* The default implementation strategy is to extract the constant
4818 parts of the constructor, use that to initialize the target,
4819 and then "or" in whatever non-constant ranges we need in addition.
4821 If a large set is all zero or all ones, it is
4822 probably better to set it using memset (if available) or bzero.
4823 Also, if a large set has just a single range, it may be
4824 better to first clear the whole set (using
4825 bzero/memset) and then set the bits we want. */
4827 /* Check for all zeros. */
4828 if (elt == NULL_TREE && size > 0)
4830 if (!cleared)
4831 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4832 return;
4835 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4836 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4837 bitlength = size_binop (PLUS_EXPR,
4838 size_diffop (domain_max, domain_min),
4839 ssize_int (1));
4841 nbits = tree_low_cst (bitlength, 1);
4843 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4844 are "complicated" (more than one range), initialize (the
4845 constant parts) by copying from a constant. */
4846 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4847 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4849 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4850 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4851 char *bit_buffer = (char *) alloca (nbits);
4852 HOST_WIDE_INT word = 0;
4853 unsigned int bit_pos = 0;
4854 unsigned int ibit = 0;
4855 unsigned int offset = 0; /* In bytes from beginning of set. */
4857 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4858 for (;;)
4860 if (bit_buffer[ibit])
4862 if (BYTES_BIG_ENDIAN)
4863 word |= (1 << (set_word_size - 1 - bit_pos));
4864 else
4865 word |= 1 << bit_pos;
4868 bit_pos++; ibit++;
4869 if (bit_pos >= set_word_size || ibit == nbits)
4871 if (word != 0 || ! cleared)
4873 rtx datum = GEN_INT (word);
4874 rtx to_rtx;
4876 /* The assumption here is that it is safe to use
4877 XEXP if the set is multi-word, but not if
4878 it's single-word. */
4879 if (GET_CODE (target) == MEM)
4881 to_rtx = plus_constant (XEXP (target, 0), offset);
4882 to_rtx = change_address (target, mode, to_rtx);
4884 else if (offset == 0)
4885 to_rtx = target;
4886 else
4887 abort ();
4888 emit_move_insn (to_rtx, datum);
4891 if (ibit == nbits)
4892 break;
4893 word = 0;
4894 bit_pos = 0;
4895 offset += set_word_size / BITS_PER_UNIT;
4899 else if (!cleared)
4900 /* Don't bother clearing storage if the set is all ones. */
4901 if (TREE_CHAIN (elt) != NULL_TREE
4902 || (TREE_PURPOSE (elt) == NULL_TREE
4903 ? nbits != 1
4904 : ( ! host_integerp (TREE_VALUE (elt), 0)
4905 || ! host_integerp (TREE_PURPOSE (elt), 0)
4906 || (tree_low_cst (TREE_VALUE (elt), 0)
4907 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4908 != (HOST_WIDE_INT) nbits))))
4909 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4911 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4913 /* Start of range of element or NULL. */
4914 tree startbit = TREE_PURPOSE (elt);
4915 /* End of range of element, or element value. */
4916 tree endbit = TREE_VALUE (elt);
4917 #ifdef TARGET_MEM_FUNCTIONS
4918 HOST_WIDE_INT startb, endb;
4919 #endif
4920 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4922 bitlength_rtx = expand_expr (bitlength,
4923 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4925 /* Handle non-range tuple element like [ expr ]. */
4926 if (startbit == NULL_TREE)
4928 startbit = save_expr (endbit);
4929 endbit = startbit;
4932 startbit = convert (sizetype, startbit);
4933 endbit = convert (sizetype, endbit);
4934 if (! integer_zerop (domain_min))
4936 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4937 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4939 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4940 EXPAND_CONST_ADDRESS);
4941 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4942 EXPAND_CONST_ADDRESS);
4944 if (REG_P (target))
4946 targetx
4947 = assign_temp
4948 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
4949 TYPE_QUAL_CONST)),
4950 0, 1, 1);
4951 emit_move_insn (targetx, target);
4954 else if (GET_CODE (target) == MEM)
4955 targetx = target;
4956 else
4957 abort ();
4959 #ifdef TARGET_MEM_FUNCTIONS
4960 /* Optimization: If startbit and endbit are
4961 constants divisible by BITS_PER_UNIT,
4962 call memset instead. */
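/* For example (worked numbers, assuming 8-bit bytes): a constant
   range covering bits 8 through 31 gives STARTB == 8 and ENDB == 32,
   both divisible by BITS_PER_UNIT, so memset fills the 3 bytes
   starting at byte offset 1 of the set with all-ones. */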
4963 if (TREE_CODE (startbit) == INTEGER_CST
4964 && TREE_CODE (endbit) == INTEGER_CST
4965 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4966 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4968 emit_library_call (memset_libfunc, LCT_NORMAL,
4969 VOIDmode, 3,
4970 plus_constant (XEXP (targetx, 0),
4971 startb / BITS_PER_UNIT),
4972 Pmode,
4973 constm1_rtx, TYPE_MODE (integer_type_node),
4974 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4975 TYPE_MODE (sizetype));
4977 else
4978 #endif
4979 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4980 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
4981 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
4982 startbit_rtx, TYPE_MODE (sizetype),
4983 endbit_rtx, TYPE_MODE (sizetype));
4985 if (REG_P (target))
4986 emit_move_insn (target, targetx);
4990 else
4991 abort ();
4994 /* Store the value of EXP (an expression tree)
4995 into a subfield of TARGET which has mode MODE and occupies
4996 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4997 If MODE is VOIDmode, it means that we are storing into a bit-field.
4999 If VALUE_MODE is VOIDmode, return nothing in particular.
5000 UNSIGNEDP is not used in this case.
5002 Otherwise, return an rtx for the value stored. This rtx
5003 has mode VALUE_MODE if that is convenient to do.
5004 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5006 ALIGN is the alignment that TARGET is known to have.
5007 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
5009 ALIAS_SET is the alias set for the destination. This value will
5010 (in general) be different from that for TARGET, since TARGET is a
5011 reference to the containing structure. */
5013 static rtx
5014 store_field (target, bitsize, bitpos, mode, exp, value_mode,
5015 unsignedp, align, total_size, alias_set)
5016 rtx target;
5017 HOST_WIDE_INT bitsize;
5018 HOST_WIDE_INT bitpos;
5019 enum machine_mode mode;
5020 tree exp;
5021 enum machine_mode value_mode;
5022 int unsignedp;
5023 unsigned int align;
5024 HOST_WIDE_INT total_size;
5025 int alias_set;
5027 HOST_WIDE_INT width_mask = 0;
5029 if (TREE_CODE (exp) == ERROR_MARK)
5030 return const0_rtx;
5032 if (bitsize < HOST_BITS_PER_WIDE_INT)
5033 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5035 /* If we are storing into an unaligned field of an aligned union that is
5036 in a register, we may have the mode of TARGET being an integer mode but
5037 MODE == BLKmode. In that case, get an aligned object whose size and
5038 alignment are the same as TARGET and store TARGET into it (we can avoid
5039 the store if the field being stored is the entire width of TARGET). Then
5040 call ourselves recursively to store the field into a BLKmode version of
5041 that object. Finally, load from the object into TARGET. This is not
5042 very efficient in general, but should only be slightly more expensive
5043 than the otherwise-required unaligned accesses. Perhaps this can be
5044 cleaned up later. */
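/* In outline, the fallback below is: allocate a stack temporary
   OBJECT with TARGET's mode, copy TARGET into it unless the store
   covers TARGET's full width, store the field into a BLKmode view of
   OBJECT recursively, then copy OBJECT back into TARGET. */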
5046 if (mode == BLKmode
5047 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5049 rtx object
5050 = assign_temp
5051 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5052 TYPE_QUAL_CONST),
5053 0, 1, 1);
5054 rtx blk_object = copy_rtx (object);
5056 PUT_MODE (blk_object, BLKmode);
5058 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5059 emit_move_insn (object, target);
5061 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5062 align, total_size, alias_set);
5064 /* Even though we aren't returning target, we need to
5065 give it the updated value. */
5066 emit_move_insn (target, object);
5068 return blk_object;
5071 if (GET_CODE (target) == CONCAT)
5073 /* We're storing into a struct containing a single __complex. */
5075 if (bitpos != 0)
5076 abort ();
5077 return store_expr (exp, target, 0);
5080 /* If the structure is in a register or if the component
5081 is a bit field, we cannot use addressing to access it.
5082 Use bit-field techniques or SUBREG to store in it. */
5084 if (mode == VOIDmode
5085 || (mode != BLKmode && ! direct_store[(int) mode]
5086 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5087 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5088 || GET_CODE (target) == REG
5089 || GET_CODE (target) == SUBREG
5090 /* If the field isn't aligned enough to store as an ordinary memref,
5091 store it as a bit field. */
5092 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5093 && (align < GET_MODE_ALIGNMENT (mode)
5094 || bitpos % GET_MODE_ALIGNMENT (mode)))
5095 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5096 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
5097 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5098 /* If the RHS and field are a constant size and the size of the
5099 RHS isn't the same size as the bitfield, we must use bitfield
5100 operations. */
5101 || (bitsize >= 0
5102 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5103 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5105 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5107 /* If BITSIZE is narrower than the size of the type of EXP
5108 we will be narrowing TEMP. Normally, what's wanted are the
5109 low-order bits. However, if EXP's type is a record and this is
5110 a big-endian machine, we want the upper BITSIZE bits. */
5111 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5112 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5113 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5114 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5115 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5116 - bitsize),
5117 temp, 1);
5119 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5120 MODE. */
5121 if (mode != VOIDmode && mode != BLKmode
5122 && mode != TYPE_MODE (TREE_TYPE (exp)))
5123 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5125 /* If the modes of TARGET and TEMP are both BLKmode, both
5126 must be in memory and BITPOS must be aligned on a byte
5127 boundary. If so, we simply do a block copy. */
5128 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5130 unsigned int exp_align = expr_align (exp);
5132 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5133 || bitpos % BITS_PER_UNIT != 0)
5134 abort ();
5136 target = change_address (target, VOIDmode,
5137 plus_constant (XEXP (target, 0),
5138 bitpos / BITS_PER_UNIT));
5140 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5141 align = MIN (exp_align, align);
5143 /* Find an alignment that is consistent with the bit position. */
5144 while ((bitpos % align) != 0)
5145 align >>= 1;
5147 emit_block_move (target, temp,
5148 bitsize == -1 ? expr_size (exp)
5149 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5150 / BITS_PER_UNIT),
5151 align);
5153 return value_mode == VOIDmode ? const0_rtx : target;
5156 /* Store the value in the bitfield. */
5157 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5158 if (value_mode != VOIDmode)
5160 /* The caller wants an rtx for the value. */
5161 /* If possible, avoid refetching from the bitfield itself. */
5162 if (width_mask != 0
5163 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5165 tree count;
5166 enum machine_mode tmode;
5168 if (unsignedp)
5169 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
5170 tmode = GET_MODE (temp);
5171 if (tmode == VOIDmode)
5172 tmode = value_mode;
5173 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5174 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5175 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5177 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5178 NULL_RTX, value_mode, 0, align,
5179 total_size);
5181 return const0_rtx;
5183 else
5185 rtx addr = XEXP (target, 0);
5186 rtx to_rtx;
5188 /* If a value is wanted, it must be the lhs;
5189 so make the address stable for multiple use. */
5191 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5192 && ! CONSTANT_ADDRESS_P (addr)
5193 /* A frame-pointer reference is already stable. */
5194 && ! (GET_CODE (addr) == PLUS
5195 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5196 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5197 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5198 addr = copy_to_reg (addr);
5200 /* Now build a reference to just the desired component. */
5202 to_rtx = copy_rtx (change_address (target, mode,
5203 plus_constant (addr,
5204 (bitpos
5205 / BITS_PER_UNIT))));
5206 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5207 MEM_ALIAS_SET (to_rtx) = alias_set;
5209 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5213 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5214 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
5215 ARRAY_REFs and find the ultimate containing object, which we return.
5217 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5218 bit position, and *PUNSIGNEDP to the signedness of the field.
5219 If the position of the field is variable, we store a tree
5220 giving the variable offset (in units) in *POFFSET.
5221 This offset is in addition to the bit position.
5222 If the position is not variable, we store 0 in *POFFSET.
5223 We set *PALIGNMENT to the alignment of the address that will be
5224 computed. This is the alignment of the thing we return if *POFFSET
5225 is zero, but may be less strictly aligned if *POFFSET is nonzero.
5227 If any of the extraction expressions is volatile,
5228 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5230 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5231 is a mode that can be used to access the field. In that case, *PBITSIZE
5232 is redundant.
5234 If the field describes a variable-sized object, *PMODE is set to
5235 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5236 this case, but the address of the object can be found. */
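/* For instance (an illustrative case): for a COMPONENT_REF `s.f'
   where F is a 3-bit bit-field placed 17 bits into S, this returns
   the tree for S with *PBITSIZE == 3, *PBITPOS == 17, *POFFSET == 0
   and *PMODE == VOIDmode. */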
5238 tree
5239 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5240 punsignedp, pvolatilep, palignment)
5241 tree exp;
5242 HOST_WIDE_INT *pbitsize;
5243 HOST_WIDE_INT *pbitpos;
5244 tree *poffset;
5245 enum machine_mode *pmode;
5246 int *punsignedp;
5247 int *pvolatilep;
5248 unsigned int *palignment;
5250 tree size_tree = 0;
5251 enum machine_mode mode = VOIDmode;
5252 tree offset = size_zero_node;
5253 tree bit_offset = bitsize_zero_node;
5254 unsigned int alignment = BIGGEST_ALIGNMENT;
5255 tree tem;
5257 /* First get the mode, signedness, and size. We do this from just the
5258 outermost expression. */
5259 if (TREE_CODE (exp) == COMPONENT_REF)
5261 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5262 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5263 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5265 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5267 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5269 size_tree = TREE_OPERAND (exp, 1);
5270 *punsignedp = TREE_UNSIGNED (exp);
5272 else
5274 mode = TYPE_MODE (TREE_TYPE (exp));
5275 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5277 if (mode == BLKmode)
5278 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5279 else
5280 *pbitsize = GET_MODE_BITSIZE (mode);
5283 if (size_tree != 0)
5285 if (! host_integerp (size_tree, 1))
5286 mode = BLKmode, *pbitsize = -1;
5287 else
5288 *pbitsize = tree_low_cst (size_tree, 1);
5291 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5292 and find the ultimate containing object. */
5293 while (1)
5295 if (TREE_CODE (exp) == BIT_FIELD_REF)
5296 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5297 else if (TREE_CODE (exp) == COMPONENT_REF)
5299 tree field = TREE_OPERAND (exp, 1);
5300 tree this_offset = DECL_FIELD_OFFSET (field);
5302 /* If this field hasn't been filled in yet, don't go
5303 past it. This should only happen when folding expressions
5304 made during type construction. */
5305 if (this_offset == 0)
5306 break;
5307 else if (! TREE_CONSTANT (this_offset)
5308 && contains_placeholder_p (this_offset))
5309 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5311 offset = size_binop (PLUS_EXPR, offset, this_offset);
5312 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5313 DECL_FIELD_BIT_OFFSET (field));
5315 if (! host_integerp (offset, 0))
5316 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5319 else if (TREE_CODE (exp) == ARRAY_REF)
5321 tree index = TREE_OPERAND (exp, 1);
5322 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5323 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5324 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (exp));
5326 /* We assume all arrays have sizes that are a multiple of a byte.
5327 First subtract the lower bound, if any, in the type of the
5328 index, then convert to sizetype and multiply by the size of the
5329 array element. */
5330 if (low_bound != 0 && ! integer_zerop (low_bound))
5331 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5332 index, low_bound));
5334 /* If the index has a self-referential type, pass it to a
5335 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5336 component to one. */
5337 if (! TREE_CONSTANT (index)
5338 && contains_placeholder_p (index))
5339 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5340 if (! TREE_CONSTANT (unit_size)
5341 && contains_placeholder_p (unit_size))
5342 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size,
5343 TREE_OPERAND (exp, 0));
5345 offset = size_binop (PLUS_EXPR, offset,
5346 size_binop (MULT_EXPR,
5347 convert (sizetype, index),
5348 unit_size));
5351 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5352 && ! ((TREE_CODE (exp) == NOP_EXPR
5353 || TREE_CODE (exp) == CONVERT_EXPR)
5354 && (TYPE_MODE (TREE_TYPE (exp))
5355 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5356 break;
5358 /* If any reference in the chain is volatile, the effect is volatile. */
5359 if (TREE_THIS_VOLATILE (exp))
5360 *pvolatilep = 1;
5362 /* If the offset is non-constant already, then we can't assume any
5363 alignment more than the alignment here. */
5364 if (! TREE_CONSTANT (offset))
5365 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5367 exp = TREE_OPERAND (exp, 0);
5370 if (DECL_P (exp))
5371 alignment = MIN (alignment, DECL_ALIGN (exp));
5372 else if (TREE_TYPE (exp) != 0)
5373 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5375 /* If OFFSET is constant, see if we can return the whole thing as a
5376 constant bit position. Otherwise, split it up. */
5377 if (host_integerp (offset, 0)
5378 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5379 bitsize_unit_node))
5380 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5381 && host_integerp (tem, 0))
5382 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5383 else
5384 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5386 *pmode = mode;
5387 *palignment = alignment;
5388 return exp;
5391 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5393 static enum memory_use_mode
5394 get_memory_usage_from_modifier (modifier)
5395 enum expand_modifier modifier;
5397 switch (modifier)
5399 case EXPAND_NORMAL:
5400 case EXPAND_SUM:
5401 return MEMORY_USE_RO;
5402 break;
5403 case EXPAND_MEMORY_USE_WO:
5404 return MEMORY_USE_WO;
5405 break;
5406 case EXPAND_MEMORY_USE_RW:
5407 return MEMORY_USE_RW;
5408 break;
5409 case EXPAND_MEMORY_USE_DONT:
5410 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5411 MEMORY_USE_DONT, because they are modifiers to a call of
5412 expand_expr in the ADDR_EXPR case of expand_expr. */
5413 case EXPAND_CONST_ADDRESS:
5414 case EXPAND_INITIALIZER:
5415 return MEMORY_USE_DONT;
5416 case EXPAND_MEMORY_USE_BAD:
5417 default:
5418 abort ();
5422 /* Given an rtx VALUE that may contain additions and multiplications, return
5423 an equivalent value that just refers to a register, memory, or constant.
5424 This is done by generating instructions to perform the arithmetic and
5425 returning a pseudo-register containing the value.
5427 The returned value may be a REG, SUBREG, MEM or constant. */
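/* For example (a hedged RTL sketch): given
     (plus:SI (reg:SI 70) (mult:SI (reg:SI 71) (const_int 4)))
   this emits a multiply (or an equivalent shift) followed by an add
   and returns a pseudo register holding the sum. */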
5429 rtx
5430 force_operand (value, target)
5431 rtx value, target;
5433 register optab binoptab = 0;
5434 /* Use a temporary to force order of execution of calls to
5435 `force_operand'. */
5436 rtx tmp;
5437 register rtx op2;
5438 /* Use subtarget as the target for operand 0 of a binary operation. */
5439 register rtx subtarget = get_subtarget (target);
5441 /* Check for a PIC address load. */
5442 if (flag_pic
5443 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5444 && XEXP (value, 0) == pic_offset_table_rtx
5445 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5446 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5447 || GET_CODE (XEXP (value, 1)) == CONST))
5449 if (!subtarget)
5450 subtarget = gen_reg_rtx (GET_MODE (value));
5451 emit_move_insn (subtarget, value);
5452 return subtarget;
5455 if (GET_CODE (value) == PLUS)
5456 binoptab = add_optab;
5457 else if (GET_CODE (value) == MINUS)
5458 binoptab = sub_optab;
5459 else if (GET_CODE (value) == MULT)
5461 op2 = XEXP (value, 1);
5462 if (!CONSTANT_P (op2)
5463 && !(GET_CODE (op2) == REG && op2 != subtarget))
5464 subtarget = 0;
5465 tmp = force_operand (XEXP (value, 0), subtarget);
5466 return expand_mult (GET_MODE (value), tmp,
5467 force_operand (op2, NULL_RTX),
5468 target, 1);
5471 if (binoptab)
5473 op2 = XEXP (value, 1);
5474 if (!CONSTANT_P (op2)
5475 && !(GET_CODE (op2) == REG && op2 != subtarget))
5476 subtarget = 0;
5477 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5479 binoptab = add_optab;
5480 op2 = negate_rtx (GET_MODE (value), op2);
5483 /* Check for an addition with OP2 a constant integer and our first
5484 operand a PLUS of a virtual register and something else. In that
5485 case, we want to emit the sum of the virtual register and the
5486 constant first and then add the other value. This allows virtual
5487 register instantiation to simply modify the constant rather than
5488 creating another one around this addition. */
5489 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5490 && GET_CODE (XEXP (value, 0)) == PLUS
5491 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5492 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5493 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5495 rtx temp = expand_binop (GET_MODE (value), binoptab,
5496 XEXP (XEXP (value, 0), 0), op2,
5497 subtarget, 0, OPTAB_LIB_WIDEN);
5498 return expand_binop (GET_MODE (value), binoptab, temp,
5499 force_operand (XEXP (XEXP (value, 0), 1), 0),
5500 target, 0, OPTAB_LIB_WIDEN);
5503 tmp = force_operand (XEXP (value, 0), subtarget);
5504 return expand_binop (GET_MODE (value), binoptab, tmp,
5505 force_operand (op2, NULL_RTX),
5506 target, 0, OPTAB_LIB_WIDEN);
5507 /* We give UNSIGNEDP = 0 to expand_binop
5508 because the only operations we are expanding here are signed ones. */
5510 return value;
5513 /* Subroutine of expand_expr:
5514 save the non-copied parts (LIST) of an expr (LHS), and return a list
5515 which can restore these values to their previous values,
5516 should something modify their storage. */
5518 static tree
5519 save_noncopied_parts (lhs, list)
5520 tree lhs;
5521 tree list;
5523 tree tail;
5524 tree parts = 0;
5526 for (tail = list; tail; tail = TREE_CHAIN (tail))
5527 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5528 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5529 else
5531 tree part = TREE_VALUE (tail);
5532 tree part_type = TREE_TYPE (part);
5533 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5534 rtx target
5535 = assign_temp (build_qualified_type (part_type,
5536 (TYPE_QUALS (part_type)
5537 | TYPE_QUAL_CONST)),
5538 0, 1, 1);
5540 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5541 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5542 parts = tree_cons (to_be_saved,
5543 build (RTL_EXPR, part_type, NULL_TREE,
5544 (tree) target),
5545 parts);
5546 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5548 return parts;
5551 /* Subroutine of expand_expr:
5552 record the non-copied parts (LIST) of an expr (LHS), and return a list
5553 which specifies the initial values of these parts. */
5555 static tree
5556 init_noncopied_parts (lhs, list)
5557 tree lhs;
5558 tree list;
5560 tree tail;
5561 tree parts = 0;
5563 for (tail = list; tail; tail = TREE_CHAIN (tail))
5564 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5565 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5566 else if (TREE_PURPOSE (tail))
5568 tree part = TREE_VALUE (tail);
5569 tree part_type = TREE_TYPE (part);
5570 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5571 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5573 return parts;
5576 /* Subroutine of expand_expr: return nonzero iff there is no way that
5577 EXP can reference X, which is being modified. TOP_P is nonzero if this
5578 call is going to be used to determine whether we need a temporary
5579 for EXP, as opposed to a recursive call to this function.
5581 It is always safe for this routine to return zero since it merely
5582 searches for optimization opportunities. */
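/* Hypothetical usage sketch (not part of the original text): a caller that
   wants to reuse TARGET while expanding EXP might write
       if (target == 0 || ! safe_from_p (target, exp, 1))
         target = assign_temp (type, 0, 1, 1);
   relying on the conservative 0 return merely costing an extra temporary.  */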
5585 safe_from_p (x, exp, top_p)
5586 rtx x;
5587 tree exp;
5588 int top_p;
5590 rtx exp_rtl = 0;
5591 int i, nops;
5592 static tree save_expr_list;
5594 if (x == 0
5595 /* If EXP has varying size, we MUST use a target since we currently
5596 have no way of allocating temporaries of variable size
5597 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5598 So we assume here that something at a higher level has prevented a
5599 clash. This is somewhat bogus, but the best we can do. Only
5600 do this when X is BLKmode and when we are at the top level. */
5601 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5602 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5603 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5604 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5605 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5606 != INTEGER_CST)
5607 && GET_MODE (x) == BLKmode)
5608 /* If X is in the outgoing argument area, it is always safe. */
5609 || (GET_CODE (x) == MEM
5610 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5611 || (GET_CODE (XEXP (x, 0)) == PLUS
5612 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5613 return 1;
5615 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5616 find the underlying pseudo. */
5617 if (GET_CODE (x) == SUBREG)
5619 x = SUBREG_REG (x);
5620 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5621 return 0;
5624 /* A SAVE_EXPR might appear many times in the expression passed to the
5625 top-level safe_from_p call, and if it has a complex subexpression,
5626 examining it multiple times could result in a combinatorial explosion.
5627 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5628 with optimization took about 28 minutes to compile -- even though it was
5629 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5630 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5631 we have processed. Note that the only test of top_p was above. */
5633 if (top_p)
5635 int rtn;
5636 tree t;
5638 save_expr_list = 0;
5640 rtn = safe_from_p (x, exp, 0);
5642 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5643 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5645 return rtn;
5648 /* Now look at our tree code and possibly recurse. */
5649 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5651 case 'd':
5652 exp_rtl = DECL_RTL (exp);
5653 break;
5655 case 'c':
5656 return 1;
5658 case 'x':
5659 if (TREE_CODE (exp) == TREE_LIST)
5660 return ((TREE_VALUE (exp) == 0
5661 || safe_from_p (x, TREE_VALUE (exp), 0))
5662 && (TREE_CHAIN (exp) == 0
5663 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5664 else if (TREE_CODE (exp) == ERROR_MARK)
5665 return 1; /* An already-visited SAVE_EXPR? */
5666 else
5667 return 0;
5669 case '1':
5670 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5672 case '2':
5673 case '<':
5674 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5675 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5677 case 'e':
5678 case 'r':
5679 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5680 the expression. If it is set, we conflict iff we are that rtx or
5681 both are in memory. Otherwise, we check all operands of the
5682 expression recursively. */
5684 switch (TREE_CODE (exp))
5686 case ADDR_EXPR:
5687 return (staticp (TREE_OPERAND (exp, 0))
5688 || TREE_STATIC (exp)
5689 || safe_from_p (x, TREE_OPERAND (exp, 0), 0));
5691 case INDIRECT_REF:
5692 if (GET_CODE (x) == MEM
5693 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5694 get_alias_set (exp)))
5695 return 0;
5696 break;
5698 case CALL_EXPR:
5699 /* Assume that the call will clobber all hard registers and
5700 all of memory. */
5701 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5702 || GET_CODE (x) == MEM)
5703 return 0;
5704 break;
5706 case RTL_EXPR:
5707 /* If a sequence exists, we would have to scan every instruction
5708 in the sequence to see if it was safe. This is probably not
5709 worthwhile. */
5710 if (RTL_EXPR_SEQUENCE (exp))
5711 return 0;
5713 exp_rtl = RTL_EXPR_RTL (exp);
5714 break;
5716 case WITH_CLEANUP_EXPR:
5717 exp_rtl = RTL_EXPR_RTL (exp);
5718 break;
5720 case CLEANUP_POINT_EXPR:
5721 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5723 case SAVE_EXPR:
5724 exp_rtl = SAVE_EXPR_RTL (exp);
5725 if (exp_rtl)
5726 break;
5728 /* If we've already scanned this, don't do it again. Otherwise,
5729 show we've scanned it and record for clearing the flag if we're
5730 going on. */
5731 if (TREE_PRIVATE (exp))
5732 return 1;
5734 TREE_PRIVATE (exp) = 1;
5735 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5737 TREE_PRIVATE (exp) = 0;
5738 return 0;
5741 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5742 return 1;
5744 case BIND_EXPR:
5745 /* The only operand we look at is operand 1. The rest aren't
5746 part of the expression. */
5747 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5749 case METHOD_CALL_EXPR:
5750 /* This takes a rtx argument, but shouldn't appear here. */
5751 abort ();
5753 default:
5754 break;
5757 /* If we have an rtx, we do not need to scan our operands. */
5758 if (exp_rtl)
5759 break;
5761 nops = first_rtl_op (TREE_CODE (exp));
5762 for (i = 0; i < nops; i++)
5763 if (TREE_OPERAND (exp, i) != 0
5764 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5765 return 0;
5767 /* If this is a language-specific tree code, it may require
5768 special handling. */
5769 if ((unsigned int) TREE_CODE (exp)
5770 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5771 && lang_safe_from_p
5772 && !(*lang_safe_from_p) (x, exp))
5773 return 0;
5776 /* If we have an rtl, find any enclosed object. Then see if we conflict
5777 with it. */
5778 if (exp_rtl)
5780 if (GET_CODE (exp_rtl) == SUBREG)
5782 exp_rtl = SUBREG_REG (exp_rtl);
5783 if (GET_CODE (exp_rtl) == REG
5784 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5785 return 0;
5788 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5789 are memory and they conflict. */
5790 return ! (rtx_equal_p (x, exp_rtl)
5791 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5792 && true_dependence (exp_rtl, GET_MODE (x), x,
5793 rtx_addr_varies_p)));
5796 /* If we reach here, it is safe. */
5797 return 1;
5800 /* Subroutine of expand_expr: return nonzero iff EXP is an
5801 expression whose type is statically determinable. */
5803 static int
5804 fixed_type_p (exp)
5805 tree exp;
5807 if (TREE_CODE (exp) == PARM_DECL
5808 || TREE_CODE (exp) == VAR_DECL
5809 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5810 || TREE_CODE (exp) == COMPONENT_REF
5811 || TREE_CODE (exp) == ARRAY_REF)
5812 return 1;
5813 return 0;
5816 /* Subroutine of expand_expr: return rtx if EXP is a
5817 variable or parameter; else return 0. */
5819 static rtx
5820 var_rtx (exp)
5821 tree exp;
5823 STRIP_NOPS (exp);
5824 switch (TREE_CODE (exp))
5826 case PARM_DECL:
5827 case VAR_DECL:
5828 return DECL_RTL (exp);
5829 default:
5830 return 0;
5834 #ifdef MAX_INTEGER_COMPUTATION_MODE
5836 void
5837 check_max_integer_computation_mode (exp)
5838 tree exp;
5840 enum tree_code code;
5841 enum machine_mode mode;
5843 /* Strip any NOPs that don't change the mode. */
5844 STRIP_NOPS (exp);
5845 code = TREE_CODE (exp);
5847 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5848 if (code == NOP_EXPR
5849 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5850 return;
5852 /* First check the type of the overall operation. We need only look at
5853 unary, binary and relational operations. */
5854 if (TREE_CODE_CLASS (code) == '1'
5855 || TREE_CODE_CLASS (code) == '2'
5856 || TREE_CODE_CLASS (code) == '<')
5858 mode = TYPE_MODE (TREE_TYPE (exp));
5859 if (GET_MODE_CLASS (mode) == MODE_INT
5860 && mode > MAX_INTEGER_COMPUTATION_MODE)
5861 internal_error ("unsupported wide integer operation");
5864 /* Check operand of a unary op. */
5865 if (TREE_CODE_CLASS (code) == '1')
5867 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5868 if (GET_MODE_CLASS (mode) == MODE_INT
5869 && mode > MAX_INTEGER_COMPUTATION_MODE)
5870 internal_error ("unsupported wide integer operation");
5873 /* Check operands of a binary/comparison op. */
5874 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5876 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5877 if (GET_MODE_CLASS (mode) == MODE_INT
5878 && mode > MAX_INTEGER_COMPUTATION_MODE)
5879 internal_error ("unsupported wide integer operation");
5881 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5882 if (GET_MODE_CLASS (mode) == MODE_INT
5883 && mode > MAX_INTEGER_COMPUTATION_MODE)
5884 internal_error ("unsupported wide integer operation");
5887 #endif
5889 /* expand_expr: generate code for computing expression EXP.
5890 An rtx for the computed value is returned. The value is never null.
5891 In the case of a void EXP, const0_rtx is returned.
5893 The value may be stored in TARGET if TARGET is nonzero.
5894 TARGET is just a suggestion; callers must assume that
5895 the rtx returned may not be the same as TARGET.
5897 If TARGET is CONST0_RTX, it means that the value will be ignored.
5899 If TMODE is not VOIDmode, it suggests generating the
5900 result in mode TMODE. But this is done only when convenient.
5901 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5902 TMODE is just a suggestion; callers must assume that
5903 the rtx returned may not have mode TMODE.
5905 Note that TARGET may have neither TMODE nor MODE. In that case, it
5906 probably will not be used.
5908 If MODIFIER is EXPAND_SUM then when EXP is an addition
5909 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5910 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5911 products as above, or REG or MEM, or constant.
5912 Ordinarily in such cases we would output mul or add instructions
5913 and then return a pseudo reg containing the sum.
5915 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5916 it also marks a label as absolutely required (it can't be dead).
5917 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5918 This is used for outputting expressions used in initializers.
5920 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5921 with a constant address even if that address is not normally legitimate.
5922 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
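/* Hedged usage sketch (added for illustration): a caller with no preference
   might write
       rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
   and must then accept VAL in any form -- a REG, a MEM, a constant, or for
   EXPAND_SUM callers even a (plus ...) -- since TARGET and TMODE above are
   only suggestions.  */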
5925 expand_expr (exp, target, tmode, modifier)
5926 register tree exp;
5927 rtx target;
5928 enum machine_mode tmode;
5929 enum expand_modifier modifier;
5931 register rtx op0, op1, temp;
5932 tree type = TREE_TYPE (exp);
5933 int unsignedp = TREE_UNSIGNED (type);
5934 register enum machine_mode mode;
5935 register enum tree_code code = TREE_CODE (exp);
5936 optab this_optab;
5937 rtx subtarget, original_target;
5938 int ignore;
5939 tree context;
5940 /* Used by check-memory-usage to make modifier read only. */
5941 enum expand_modifier ro_modifier;
5943 /* Handle ERROR_MARK before anybody tries to access its type. */
5944 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
5946 op0 = CONST0_RTX (tmode);
5947 if (op0 != 0)
5948 return op0;
5949 return const0_rtx;
5952 mode = TYPE_MODE (type);
5953 /* Use subtarget as the target for operand 0 of a binary operation. */
5954 subtarget = get_subtarget (target);
5955 original_target = target;
5956 ignore = (target == const0_rtx
5957 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5958 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5959 || code == COND_EXPR)
5960 && TREE_CODE (type) == VOID_TYPE));
5962 /* Make a read-only version of the modifier. */
5963 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5964 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5965 ro_modifier = modifier;
5966 else
5967 ro_modifier = EXPAND_NORMAL;
5969 /* If we are going to ignore this result, we need only do something
5970 if there is a side-effect somewhere in the expression. If there
5971 is, short-circuit the most common cases here. Note that we must
5972 not call expand_expr with anything but const0_rtx in case this
5973 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5975 if (ignore)
5977 if (! TREE_SIDE_EFFECTS (exp))
5978 return const0_rtx;
5980 /* Ensure we reference a volatile object even if value is ignored, but
5981 don't do this if all we are doing is taking its address. */
5982 if (TREE_THIS_VOLATILE (exp)
5983 && TREE_CODE (exp) != FUNCTION_DECL
5984 && mode != VOIDmode && mode != BLKmode
5985 && modifier != EXPAND_CONST_ADDRESS)
5987 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5988 if (GET_CODE (temp) == MEM)
5989 temp = copy_to_reg (temp);
5990 return const0_rtx;
5993 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5994 || code == INDIRECT_REF || code == BUFFER_REF)
5995 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5996 VOIDmode, ro_modifier);
5997 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5998 || code == ARRAY_REF)
6000 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
6001 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
6002 return const0_rtx;
6004 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6005 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6006 /* If the second operand has no side effects, just evaluate
6007 the first. */
6008 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6009 VOIDmode, ro_modifier);
6010 else if (code == BIT_FIELD_REF)
6012 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
6013 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
6014 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
6015 return const0_rtx;
6018 target = 0;
6021 #ifdef MAX_INTEGER_COMPUTATION_MODE
6022 /* Only check stuff here if the mode we want is different from the mode
6023 of the expression; if it's the same, check_max_integer_computation_mode
6024 will handle it. Do we really need to check this stuff at all? */
6026 if (target
6027 && GET_MODE (target) != mode
6028 && TREE_CODE (exp) != INTEGER_CST
6029 && TREE_CODE (exp) != PARM_DECL
6030 && TREE_CODE (exp) != ARRAY_REF
6031 && TREE_CODE (exp) != COMPONENT_REF
6032 && TREE_CODE (exp) != BIT_FIELD_REF
6033 && TREE_CODE (exp) != INDIRECT_REF
6034 && TREE_CODE (exp) != CALL_EXPR
6035 && TREE_CODE (exp) != VAR_DECL
6036 && TREE_CODE (exp) != RTL_EXPR)
6038 enum machine_mode mode = GET_MODE (target);
6040 if (GET_MODE_CLASS (mode) == MODE_INT
6041 && mode > MAX_INTEGER_COMPUTATION_MODE)
6042 internal_error ("unsupported wide integer operation");
6045 if (tmode != mode
6046 && TREE_CODE (exp) != INTEGER_CST
6047 && TREE_CODE (exp) != PARM_DECL
6048 && TREE_CODE (exp) != ARRAY_REF
6049 && TREE_CODE (exp) != COMPONENT_REF
6050 && TREE_CODE (exp) != BIT_FIELD_REF
6051 && TREE_CODE (exp) != INDIRECT_REF
6052 && TREE_CODE (exp) != VAR_DECL
6053 && TREE_CODE (exp) != CALL_EXPR
6054 && TREE_CODE (exp) != RTL_EXPR
6055 && GET_MODE_CLASS (tmode) == MODE_INT
6056 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6057 internal_error ("unsupported wide integer operation");
6059 check_max_integer_computation_mode (exp);
6060 #endif
6062 /* If we will do cse, generate all results into pseudo registers
6063 since 1) that allows cse to find more things
6064 and 2) otherwise cse could produce an insn the machine
6065 cannot support. */
6067 if (! cse_not_expected && mode != BLKmode && target
6068 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6069 target = subtarget;
6071 switch (code)
6073 case LABEL_DECL:
6075 tree function = decl_function_context (exp);
6076 /* Handle using a label in a containing function. */
6077 if (function != current_function_decl
6078 && function != inline_function_decl && function != 0)
6080 struct function *p = find_function_data (function);
6081 p->expr->x_forced_labels
6082 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6083 p->expr->x_forced_labels);
6085 else
6087 if (modifier == EXPAND_INITIALIZER)
6088 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6089 label_rtx (exp),
6090 forced_labels);
6093 temp = gen_rtx_MEM (FUNCTION_MODE,
6094 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6095 if (function != current_function_decl
6096 && function != inline_function_decl && function != 0)
6097 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6098 return temp;
6101 case PARM_DECL:
6102 if (DECL_RTL (exp) == 0)
6104 error_with_decl (exp, "prior parameter's size depends on `%s'");
6105 return CONST0_RTX (mode);
6108 /* ... fall through ... */
6110 case VAR_DECL:
6111 /* If a static var's type was incomplete when the decl was written,
6112 but the type is complete now, lay out the decl now. */
6113 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6114 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6116 layout_decl (exp, 0);
6117 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6120 /* Although static-storage variables start off initialized, according to
6121 ANSI C, a memcpy could overwrite them with uninitialized values. So
6122 we check them too. This also lets us check for read-only variables
6123 accessed via a non-const declaration, in case it won't be detected
6124 any other way (e.g., in an embedded system or OS kernel without
6125 memory protection).
6127 Aggregates are not checked here; they're handled elsewhere. */
6128 if (cfun && current_function_check_memory_usage
6129 && code == VAR_DECL
6130 && GET_CODE (DECL_RTL (exp)) == MEM
6131 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6133 enum memory_use_mode memory_usage;
6134 memory_usage = get_memory_usage_from_modifier (modifier);
6136 in_check_memory_usage = 1;
6137 if (memory_usage != MEMORY_USE_DONT)
6138 emit_library_call (chkr_check_addr_libfunc,
6139 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6140 XEXP (DECL_RTL (exp), 0), Pmode,
6141 GEN_INT (int_size_in_bytes (type)),
6142 TYPE_MODE (sizetype),
6143 GEN_INT (memory_usage),
6144 TYPE_MODE (integer_type_node));
6145 in_check_memory_usage = 0;
6148 /* ... fall through ... */
6150 case FUNCTION_DECL:
6151 case RESULT_DECL:
6152 if (DECL_RTL (exp) == 0)
6153 abort ();
6155 /* Ensure the variable is marked as used even if it doesn't go through
6156 a parser. If it hasn't been used yet, write out an external
6157 definition. */
6158 if (! TREE_USED (exp))
6160 assemble_external (exp);
6161 TREE_USED (exp) = 1;
6164 /* Show we haven't gotten RTL for this yet. */
6165 temp = 0;
6167 /* Handle variables inherited from containing functions. */
6168 context = decl_function_context (exp);
6170 /* We treat inline_function_decl as an alias for the current function
6171 because that is the inline function whose vars, types, etc.
6172 are being merged into the current function.
6173 See expand_inline_function. */
6175 if (context != 0 && context != current_function_decl
6176 && context != inline_function_decl
6177 /* If var is static, we don't need a static chain to access it. */
6178 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6179 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6181 rtx addr;
6183 /* Mark as non-local and addressable. */
6184 DECL_NONLOCAL (exp) = 1;
6185 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6186 abort ();
6187 mark_addressable (exp);
6188 if (GET_CODE (DECL_RTL (exp)) != MEM)
6189 abort ();
6190 addr = XEXP (DECL_RTL (exp), 0);
6191 if (GET_CODE (addr) == MEM)
6192 addr = change_address (addr, Pmode,
6193 fix_lexical_addr (XEXP (addr, 0), exp));
6194 else
6195 addr = fix_lexical_addr (addr, exp);
6197 temp = change_address (DECL_RTL (exp), mode, addr);
6200 /* This is the case of an array whose size is to be determined
6201 from its initializer, while the initializer is still being parsed.
6202 See expand_decl. */
6204 else if (GET_CODE (DECL_RTL (exp)) == MEM
6205 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6206 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
6207 XEXP (DECL_RTL (exp), 0));
6209 /* If DECL_RTL is memory, we are in the normal case: if either
6210 the address is not valid, or it is not a register and -fforce-addr
6211 is specified, get the address into a register. */
6213 else if (GET_CODE (DECL_RTL (exp)) == MEM
6214 && modifier != EXPAND_CONST_ADDRESS
6215 && modifier != EXPAND_SUM
6216 && modifier != EXPAND_INITIALIZER
6217 && (! memory_address_p (DECL_MODE (exp),
6218 XEXP (DECL_RTL (exp), 0))
6219 || (flag_force_addr
6220 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6221 temp = change_address (DECL_RTL (exp), VOIDmode,
6222 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6224 /* If we got something, return it. But first, set the alignment
6225 if the address is a register. */
6226 if (temp != 0)
6228 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6229 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6231 return temp;
6234 /* If the mode of DECL_RTL does not match that of the decl, it
6235 must be a promoted value. We return a SUBREG of the wanted mode,
6236 but mark it so that we know that it was already extended. */
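/* Illustrative example (assumption about the target): on a machine whose
   PROMOTE_MODE widens QImode variables to SImode, DECL_RTL may be (reg:SI N)
   while the decl's mode is QImode; we then hand back (subreg:QI (reg:SI N) 0)
   with SUBREG_PROMOTED_VAR_P set so later code knows the extension exists.  */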
6238 if (GET_CODE (DECL_RTL (exp)) == REG
6239 && GET_MODE (DECL_RTL (exp)) != mode)
6241 /* Get the signedness used for this variable. Ensure we get the
6242 same mode we got when the variable was declared. */
6243 if (GET_MODE (DECL_RTL (exp))
6244 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6245 abort ();
6247 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
6248 SUBREG_PROMOTED_VAR_P (temp) = 1;
6249 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6250 return temp;
6253 return DECL_RTL (exp);
6255 case INTEGER_CST:
6256 return immed_double_const (TREE_INT_CST_LOW (exp),
6257 TREE_INT_CST_HIGH (exp), mode);
6259 case CONST_DECL:
6260 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6261 EXPAND_MEMORY_USE_BAD);
6263 case REAL_CST:
6264 /* If optimized, generate immediate CONST_DOUBLE
6265 which will be turned into memory by reload if necessary.
6267 We used to force a register so that loop.c could see it. But
6268 this does not allow gen_* patterns to perform optimizations with
6269 the constants. It also produces two insns in cases like "x = 1.0;".
6270 On most machines, floating-point constants are not permitted in
6271 many insns, so we'd end up copying it to a register in any case.
6273 Now, we do the copying in expand_binop, if appropriate. */
6274 return immed_real_const (exp);
6276 case COMPLEX_CST:
6277 case STRING_CST:
6278 if (! TREE_CST_RTL (exp))
6279 output_constant_def (exp, 1);
6281 /* TREE_CST_RTL probably contains a constant address.
6282 On RISC machines where a constant address isn't valid,
6283 make some insns to get that address into a register. */
6284 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6285 && modifier != EXPAND_CONST_ADDRESS
6286 && modifier != EXPAND_INITIALIZER
6287 && modifier != EXPAND_SUM
6288 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6289 || (flag_force_addr
6290 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6291 return change_address (TREE_CST_RTL (exp), VOIDmode,
6292 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6293 return TREE_CST_RTL (exp);
6295 case EXPR_WITH_FILE_LOCATION:
6297 rtx to_return;
6298 const char *saved_input_filename = input_filename;
6299 int saved_lineno = lineno;
6300 input_filename = EXPR_WFL_FILENAME (exp);
6301 lineno = EXPR_WFL_LINENO (exp);
6302 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6303 emit_line_note (input_filename, lineno);
6304 /* Possibly avoid switching back and forth here. */
6305 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6306 input_filename = saved_input_filename;
6307 lineno = saved_lineno;
6308 return to_return;
6311 case SAVE_EXPR:
6312 context = decl_function_context (exp);
6314 /* If this SAVE_EXPR was at global context, assume we are an
6315 initialization function and move it into our context. */
6316 if (context == 0)
6317 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6319 /* We treat inline_function_decl as an alias for the current function
6320 because that is the inline function whose vars, types, etc.
6321 are being merged into the current function.
6322 See expand_inline_function. */
6323 if (context == current_function_decl || context == inline_function_decl)
6324 context = 0;
6326 /* If this is non-local, handle it. */
6327 if (context)
6329 /* The following call just exists to abort if the context is
6330 not of a containing function. */
6331 find_function_data (context);
6333 temp = SAVE_EXPR_RTL (exp);
6334 if (temp && GET_CODE (temp) == REG)
6336 put_var_into_stack (exp);
6337 temp = SAVE_EXPR_RTL (exp);
6339 if (temp == 0 || GET_CODE (temp) != MEM)
6340 abort ();
6341 return change_address (temp, mode,
6342 fix_lexical_addr (XEXP (temp, 0), exp));
6344 if (SAVE_EXPR_RTL (exp) == 0)
6346 if (mode == VOIDmode)
6347 temp = const0_rtx;
6348 else
6349 temp = assign_temp (build_qualified_type (type,
6350 (TYPE_QUALS (type)
6351 | TYPE_QUAL_CONST)),
6352 3, 0, 0);
6354 SAVE_EXPR_RTL (exp) = temp;
6355 if (!optimize && GET_CODE (temp) == REG)
6356 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6357 save_expr_regs);
6359 /* If the mode of TEMP does not match that of the expression, it
6360 must be a promoted value. We pass store_expr a SUBREG of the
6361 wanted mode but mark it so that we know that it was already
6362 extended. Note that `unsignedp' was modified above in
6363 this case. */
6365 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6367 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6368 SUBREG_PROMOTED_VAR_P (temp) = 1;
6369 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6372 if (temp == const0_rtx)
6373 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6374 EXPAND_MEMORY_USE_BAD);
6375 else
6376 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6378 TREE_USED (exp) = 1;
6381 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6382 must be a promoted value. We return a SUBREG of the wanted mode,
6383 but mark it so that we know that it was already extended. */
6385 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6386 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6388 /* Compute the signedness and make the proper SUBREG. */
6389 promote_mode (type, mode, &unsignedp, 0);
6390 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6391 SUBREG_PROMOTED_VAR_P (temp) = 1;
6392 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6393 return temp;
6396 return SAVE_EXPR_RTL (exp);
6398 case UNSAVE_EXPR:
6400 rtx temp;
6401 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6402 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6403 return temp;
6406 case PLACEHOLDER_EXPR:
6408 tree placeholder_expr;
6410 /* If there is an object on the head of the placeholder list,
6411 see if some object in it is of type TYPE, or a pointer to it. For
6412 further information, see tree.def. */
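/* Background sketch (paraphrasing tree.def, not original to this file): a
   PLACEHOLDER_EXPR stands for the object of its type that a surrounding
   WITH_RECORD_EXPR pushed on placeholder_list -- e.g. an Ada record whose
   field sizes refer to the record itself -- and the loops below pick either
   that object or an INDIRECT_REF of a pointer to it.  */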
6413 for (placeholder_expr = placeholder_list;
6414 placeholder_expr != 0;
6415 placeholder_expr = TREE_CHAIN (placeholder_expr))
6417 tree need_type = TYPE_MAIN_VARIANT (type);
6418 tree object = 0;
6419 tree old_list = placeholder_list;
6420 tree elt;
6422 /* Find the outermost reference that is of the type we want.
6423 If none, see if any object has a type that is a pointer to
6424 the type we want. */
6425 for (elt = TREE_PURPOSE (placeholder_expr);
6426 elt != 0 && object == 0;
6428 = ((TREE_CODE (elt) == COMPOUND_EXPR
6429 || TREE_CODE (elt) == COND_EXPR)
6430 ? TREE_OPERAND (elt, 1)
6431 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6432 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6433 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6434 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6435 ? TREE_OPERAND (elt, 0) : 0))
6436 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6437 object = elt;
6439 for (elt = TREE_PURPOSE (placeholder_expr);
6440 elt != 0 && object == 0;
6442 = ((TREE_CODE (elt) == COMPOUND_EXPR
6443 || TREE_CODE (elt) == COND_EXPR)
6444 ? TREE_OPERAND (elt, 1)
6445 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6446 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6447 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6448 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6449 ? TREE_OPERAND (elt, 0) : 0))
6450 if (POINTER_TYPE_P (TREE_TYPE (elt))
6451 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6452 == need_type))
6453 object = build1 (INDIRECT_REF, need_type, elt);
6455 if (object != 0)
6457 /* Expand this object, skipping the list entries before
6458 the one in which it was found, in case it is also a PLACEHOLDER_EXPR.
6459 In that case, we want to translate it using subsequent
6460 entries. */
6461 placeholder_list = TREE_CHAIN (placeholder_expr);
6462 temp = expand_expr (object, original_target, tmode,
6463 ro_modifier);
6464 placeholder_list = old_list;
6465 return temp;
6470 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6471 abort ();
6473 case WITH_RECORD_EXPR:
6474 /* Put the object on the placeholder list, expand our first operand,
6475 and pop the list. */
6476 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6477 placeholder_list);
6478 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6479 tmode, ro_modifier);
6480 placeholder_list = TREE_CHAIN (placeholder_list);
6481 return target;
6483 case GOTO_EXPR:
6484 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6485 expand_goto (TREE_OPERAND (exp, 0));
6486 else
6487 expand_computed_goto (TREE_OPERAND (exp, 0));
6488 return const0_rtx;
6490 case EXIT_EXPR:
6491 expand_exit_loop_if_false (NULL_PTR,
6492 invert_truthvalue (TREE_OPERAND (exp, 0)));
6493 return const0_rtx;
6495 case LABELED_BLOCK_EXPR:
6496 if (LABELED_BLOCK_BODY (exp))
6497 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6498 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6499 return const0_rtx;
6501 case EXIT_BLOCK_EXPR:
6502 if (EXIT_BLOCK_RETURN (exp))
6503 sorry ("returned value in block_exit_expr");
6504 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6505 return const0_rtx;
6507 case LOOP_EXPR:
6508 push_temp_slots ();
6509 expand_start_loop (1);
6510 expand_expr_stmt (TREE_OPERAND (exp, 0));
6511 expand_end_loop ();
6512 pop_temp_slots ();
6514 return const0_rtx;
6516 case BIND_EXPR:
6518 tree vars = TREE_OPERAND (exp, 0);
6519 int vars_need_expansion = 0;
6521 /* Need to open a binding contour here because
6522 if there are any cleanups they must be contained here. */
6523 expand_start_bindings (2);
6525 /* Mark the corresponding BLOCK for output in its proper place. */
6526 if (TREE_OPERAND (exp, 2) != 0
6527 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6528 insert_block (TREE_OPERAND (exp, 2));
6530 /* If VARS have not yet been expanded, expand them now. */
6531 while (vars)
6533 if (DECL_RTL (vars) == 0)
6535 vars_need_expansion = 1;
6536 expand_decl (vars);
6538 expand_decl_init (vars);
6539 vars = TREE_CHAIN (vars);
6542 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6544 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6546 return temp;
6549 case RTL_EXPR:
6550 if (RTL_EXPR_SEQUENCE (exp))
6552 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6553 abort ();
6554 emit_insns (RTL_EXPR_SEQUENCE (exp));
6555 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6557 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6558 free_temps_for_rtl_expr (exp);
6559 return RTL_EXPR_RTL (exp);
6561 case CONSTRUCTOR:
6562 /* If we don't need the result, just ensure we evaluate any
6563 subexpressions. */
6564 if (ignore)
6566 tree elt;
6567 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6568 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6569 EXPAND_MEMORY_USE_BAD);
6570 return const0_rtx;
6573 /* All elts simple constants => refer to a constant in memory. But
6574 if this is a non-BLKmode mode, let it store a field at a time
6575 since that should make a CONST_INT or CONST_DOUBLE when we
6576 fold. Likewise, if we have a target we can use, it is best to
6577 store directly into the target unless the type is large enough
6578 that memcpy will be used. If we are making an initializer and
6579 all operands are constant, put it in memory as well. */
6580 else if ((TREE_STATIC (exp)
6581 && ((mode == BLKmode
6582 && ! (target != 0 && safe_from_p (target, exp, 1)))
6583 || TREE_ADDRESSABLE (exp)
6584 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6585 && (! MOVE_BY_PIECES_P
6586 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6587 TYPE_ALIGN (type)))
6588 && ! mostly_zeros_p (exp))))
6589 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6591 rtx constructor = output_constant_def (exp, 1);
6593 if (modifier != EXPAND_CONST_ADDRESS
6594 && modifier != EXPAND_INITIALIZER
6595 && modifier != EXPAND_SUM
6596 && (! memory_address_p (GET_MODE (constructor),
6597 XEXP (constructor, 0))
6598 || (flag_force_addr
6599 && GET_CODE (XEXP (constructor, 0)) != REG)))
6600 constructor = change_address (constructor, VOIDmode,
6601 XEXP (constructor, 0));
6602 return constructor;
6604 else
6606 /* Handle calls that pass values in multiple non-contiguous
6607 locations. The Irix 6 ABI has examples of this. */
6608 if (target == 0 || ! safe_from_p (target, exp, 1)
6609 || GET_CODE (target) == PARALLEL)
6610 target
6611 = assign_temp (build_qualified_type (type,
6612 (TYPE_QUALS (type)
6613 | (TREE_READONLY (exp)
6614 * TYPE_QUAL_CONST))),
6615 TREE_ADDRESSABLE (exp), 1, 1);
6617 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6618 int_size_in_bytes (TREE_TYPE (exp)));
6619 return target;
6622 case INDIRECT_REF:
6624 tree exp1 = TREE_OPERAND (exp, 0);
6625 tree index;
6626 tree string = string_constant (exp1, &index);
6628 /* Try to optimize reads from const strings. */
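/* For example, a read such as *("abc" + 1) can be folded here to the
   constant 'b', provided the index is a known in-range INTEGER_CST and the
   access is a one-byte integer mode.  */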
6629 if (string
6630 && TREE_CODE (string) == STRING_CST
6631 && TREE_CODE (index) == INTEGER_CST
6632 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6633 && GET_MODE_CLASS (mode) == MODE_INT
6634 && GET_MODE_SIZE (mode) == 1
6635 && modifier != EXPAND_MEMORY_USE_WO)
6636 return
6637 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6639 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6640 op0 = memory_address (mode, op0);
6642 if (cfun && current_function_check_memory_usage
6643 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6645 enum memory_use_mode memory_usage;
6646 memory_usage = get_memory_usage_from_modifier (modifier);
6648 if (memory_usage != MEMORY_USE_DONT)
6650 in_check_memory_usage = 1;
6651 emit_library_call (chkr_check_addr_libfunc,
6652 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6653 Pmode, GEN_INT (int_size_in_bytes (type)),
6654 TYPE_MODE (sizetype),
6655 GEN_INT (memory_usage),
6656 TYPE_MODE (integer_type_node));
6657 in_check_memory_usage = 0;
6661 temp = gen_rtx_MEM (mode, op0);
6662 set_mem_attributes (temp, exp, 0);
6664 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6665 here, because, in C and C++, the fact that a location is accessed
6666 through a pointer to const does not mean that the value there can
6667 never change. Languages where it can never change should
6668 also set TREE_STATIC. */
6669 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6671 /* If we are writing to this object and its type is a record with
6672 readonly fields, we must mark it as readonly so it will
6673 conflict with readonly references to those fields. */
6674 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
6675 RTX_UNCHANGING_P (temp) = 1;
6677 return temp;
6680 case ARRAY_REF:
6681 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6682 abort ();
6685 tree array = TREE_OPERAND (exp, 0);
6686 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6687 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6688 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6689 HOST_WIDE_INT i;
6691 /* Optimize the special-case of a zero lower bound.
6693 We convert the low_bound to sizetype to avoid some problems
6694 with constant folding. (E.g. suppose the lower bound is 1,
6695 and its mode is QI. Without the conversion, (ARRAY
6696 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6697 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6699 if (! integer_zerop (low_bound))
6700 index = size_diffop (index, convert (sizetype, low_bound));
6702 /* Fold an expression like: "foo"[2].
6703 This is not done in fold so it won't happen inside &.
6704 Don't fold if this is for wide characters since it's too
6705 difficult to do correctly and this is a very rare case. */
6707 if (TREE_CODE (array) == STRING_CST
6708 && TREE_CODE (index) == INTEGER_CST
6709 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6710 && GET_MODE_CLASS (mode) == MODE_INT
6711 && GET_MODE_SIZE (mode) == 1)
6712 return
6713 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6715 /* If this is a constant index into a constant array,
6716 just get the value from the array. Handle both the cases when
6717 we have an explicit constructor and when our operand is a variable
6718 that was declared const. */
6720 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6721 && TREE_CODE (index) == INTEGER_CST
6722 && 0 > compare_tree_int (index,
6723 list_length (CONSTRUCTOR_ELTS
6724 (TREE_OPERAND (exp, 0)))))
6726 tree elem;
6728 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6729 i = TREE_INT_CST_LOW (index);
6730 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6733 if (elem)
6734 return expand_expr (fold (TREE_VALUE (elem)), target,
6735 tmode, ro_modifier);
6738 else if (optimize >= 1
6739 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6740 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6741 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6743 if (TREE_CODE (index) == INTEGER_CST)
6745 tree init = DECL_INITIAL (array);
6747 if (TREE_CODE (init) == CONSTRUCTOR)
6749 tree elem;
6751 for (elem = CONSTRUCTOR_ELTS (init);
6752 (elem
6753 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6754 elem = TREE_CHAIN (elem))
6757 if (elem)
6758 return expand_expr (fold (TREE_VALUE (elem)), target,
6759 tmode, ro_modifier);
6761 else if (TREE_CODE (init) == STRING_CST
6762 && 0 > compare_tree_int (index,
6763 TREE_STRING_LENGTH (init)))
6765 tree type = TREE_TYPE (TREE_TYPE (init));
6766 enum machine_mode mode = TYPE_MODE (type);
6768 if (GET_MODE_CLASS (mode) == MODE_INT
6769 && GET_MODE_SIZE (mode) == 1)
6770 return (GEN_INT
6771 (TREE_STRING_POINTER
6772 (init)[TREE_INT_CST_LOW (index)]));
6777 /* Fall through. */
6779 case COMPONENT_REF:
6780 case BIT_FIELD_REF:
6781 /* If the operand is a CONSTRUCTOR, we can just extract the
6782 appropriate field if it is present. Don't do this if we have
6783 already written the data since we want to refer to that copy
6784 and varasm.c assumes that's what we'll do. */
6785 if (code != ARRAY_REF
6786 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6787 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6789 tree elt;
6791 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6792 elt = TREE_CHAIN (elt))
6793 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6794 /* We can normally use the value of the field in the
6795 CONSTRUCTOR. However, if this is a bitfield in
6796 an integral mode that we can fit in a HOST_WIDE_INT,
6797 we must mask only the number of bits in the bitfield,
6798 since this is done implicitly by the constructor. If
6799 the bitfield does not meet either of those conditions,
6800 we can't do this optimization. */
6801 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6802 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6803 == MODE_INT)
6804 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6805 <= HOST_BITS_PER_WIDE_INT))))
6807 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6808 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6810 HOST_WIDE_INT bitsize
6811 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6813 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6815 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6816 op0 = expand_and (op0, op1, target);
6818 else
6820 enum machine_mode imode
6821 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6822 tree count
6823 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6826 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6827 target, 0);
6828 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6829 target, 0);
6833 return op0;
6838 enum machine_mode mode1;
6839 HOST_WIDE_INT bitsize, bitpos;
6840 tree offset;
6841 int volatilep = 0;
6842 unsigned int alignment;
6843 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6844 &mode1, &unsignedp, &volatilep,
6845 &alignment);
6847 /* If we got back the original object, something is wrong. Perhaps
6848 we are evaluating an expression too early. In any event, don't
6849 infinitely recurse. */
6850 if (tem == exp)
6851 abort ();
6853 /* If TEM's type is a union of variable size, pass TARGET to the inner
6854 computation, since it will need a temporary and TARGET is known
6855 to be adequate. This occurs in unchecked conversion in Ada. */
6857 op0 = expand_expr (tem,
6858 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6859 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6860 != INTEGER_CST)
6861 ? target : NULL_RTX),
6862 VOIDmode,
6863 (modifier == EXPAND_INITIALIZER
6864 || modifier == EXPAND_CONST_ADDRESS)
6865 ? modifier : EXPAND_NORMAL);
6867 /* If this is a constant, put it into a register if it is a
6868 legitimate constant and OFFSET is 0, and into memory if it isn't. */
6869 if (CONSTANT_P (op0))
6871 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6872 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6873 && offset == 0)
6874 op0 = force_reg (mode, op0);
6875 else
6876 op0 = validize_mem (force_const_mem (mode, op0));
6879 if (offset != 0)
6881 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6883 /* If this object is in a register, put it into memory.
6884 This case can't occur in C, but can in Ada if we have
6885 unchecked conversion of an expression from a scalar type to
6886 an array or record type. */
6887 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6888 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6890 tree nt = build_qualified_type (TREE_TYPE (tem),
6891 (TYPE_QUALS (TREE_TYPE (tem))
6892 | TYPE_QUAL_CONST));
6893 rtx memloc = assign_temp (nt, 1, 1, 1);
6895 mark_temp_addr_taken (memloc);
6896 emit_move_insn (memloc, op0);
6897 op0 = memloc;
6900 if (GET_CODE (op0) != MEM)
6901 abort ();
6903 if (GET_MODE (offset_rtx) != ptr_mode)
6905 #ifdef POINTERS_EXTEND_UNSIGNED
6906 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6907 #else
6908 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6909 #endif
6912 /* A constant address in OP0 can have VOIDmode; we must not try
6913 to call force_reg on it. Avoid that case. */
6914 if (GET_CODE (op0) == MEM
6915 && GET_MODE (op0) == BLKmode
6916 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6917 && bitsize != 0
6918 && (bitpos % bitsize) == 0
6919 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6920 && alignment == GET_MODE_ALIGNMENT (mode1))
6922 rtx temp = change_address (op0, mode1,
6923 plus_constant (XEXP (op0, 0),
6924 (bitpos /
6925 BITS_PER_UNIT)));
6926 if (GET_CODE (XEXP (temp, 0)) == REG)
6927 op0 = temp;
6928 else
6929 op0 = change_address (op0, mode1,
6930 force_reg (GET_MODE (XEXP (temp, 0)),
6931 XEXP (temp, 0)));
6932 bitpos = 0;
6935 op0 = change_address (op0, VOIDmode,
6936 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6937 force_reg (ptr_mode,
6938 offset_rtx)));
6941 /* Don't forget about volatility even if this is a bitfield. */
6942 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6944 op0 = copy_rtx (op0);
6945 MEM_VOLATILE_P (op0) = 1;
6948 /* Check the access. */
6949 if (cfun != 0 && current_function_check_memory_usage
6950 && GET_CODE (op0) == MEM)
6952 enum memory_use_mode memory_usage;
6953 memory_usage = get_memory_usage_from_modifier (modifier);
6955 if (memory_usage != MEMORY_USE_DONT)
6957 rtx to;
6958 int size;
6960 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6961 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6963 /* Check the access right of the pointer. */
6964 in_check_memory_usage = 1;
6965 if (size > BITS_PER_UNIT)
6966 emit_library_call (chkr_check_addr_libfunc,
6967 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
6968 Pmode, GEN_INT (size / BITS_PER_UNIT),
6969 TYPE_MODE (sizetype),
6970 GEN_INT (memory_usage),
6971 TYPE_MODE (integer_type_node));
6972 in_check_memory_usage = 0;
6976 /* In cases where an aligned union has an unaligned object
6977 as a field, we might be extracting a BLKmode value from
6978 an integer-mode (e.g., SImode) object. Handle this case
6979 by doing the extract into an object as wide as the field
6980 (which we know to be the width of a basic mode), then
6981 storing into memory, and changing the mode to BLKmode.
6982 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6983 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6984 if (mode1 == VOIDmode
6985 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6986 || (modifier != EXPAND_CONST_ADDRESS
6987 && modifier != EXPAND_INITIALIZER
6988 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6989 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6990 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6991 /* If the field isn't aligned enough to fetch as a memref,
6992 fetch it as a bit field. */
6993 || (mode1 != BLKmode
6994 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
6995 && ((TYPE_ALIGN (TREE_TYPE (tem))
6996 < GET_MODE_ALIGNMENT (mode))
6997 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6998 /* If the type and the field are a constant size and the
6999 size of the type isn't the same size as the bitfield,
7000 we must use bitfield operations. */
7001 || ((bitsize >= 0
7002 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7003 == INTEGER_CST)
7004 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7005 bitsize)))))
7006 || (modifier != EXPAND_CONST_ADDRESS
7007 && modifier != EXPAND_INITIALIZER
7008 && mode == BLKmode
7009 && SLOW_UNALIGNED_ACCESS (mode, alignment)
7010 && (TYPE_ALIGN (type) > alignment
7011 || bitpos % TYPE_ALIGN (type) != 0)))
7013 enum machine_mode ext_mode = mode;
7015 if (ext_mode == BLKmode
7016 && ! (target != 0 && GET_CODE (op0) == MEM
7017 && GET_CODE (target) == MEM
7018 && bitpos % BITS_PER_UNIT == 0))
7019 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7021 if (ext_mode == BLKmode)
7023 /* In this case, BITPOS must start at a byte boundary and
7024 TARGET, if specified, must be a MEM. */
7025 if (GET_CODE (op0) != MEM
7026 || (target != 0 && GET_CODE (target) != MEM)
7027 || bitpos % BITS_PER_UNIT != 0)
7028 abort ();
7030 op0 = change_address (op0, VOIDmode,
7031 plus_constant (XEXP (op0, 0),
7032 bitpos / BITS_PER_UNIT));
7033 if (target == 0)
7034 target = assign_temp (type, 0, 1, 1);
7036 emit_block_move (target, op0,
7037 bitsize == -1 ? expr_size (exp)
7038 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7039 / BITS_PER_UNIT),
7040 BITS_PER_UNIT);
7042 return target;
7045 op0 = validize_mem (op0);
7047 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7048 mark_reg_pointer (XEXP (op0, 0), alignment);
7050 op0 = extract_bit_field (op0, bitsize, bitpos,
7051 unsignedp, target, ext_mode, ext_mode,
7052 alignment,
7053 int_size_in_bytes (TREE_TYPE (tem)));
7055 /* If the result is a record type and BITSIZE is narrower than
7056 the mode of OP0, an integral mode, and this is a big endian
7057 machine, we must put the field into the high-order bits. */
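/* Worked example (illustrative only): extracting a 5-bit record field into
   an SImode OP0 on a 32-bit big-endian target shifts the field left by
   32 - 5 = 27 bits, so the value sits in the high-order bits where BLKmode
   record layout expects it.  */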
7058 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7059 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7060 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7061 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7062 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7063 - bitsize),
7064 op0, 1);
7066 if (mode == BLKmode)
7068 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
7069 TYPE_QUAL_CONST);
7070 rtx new = assign_temp (nt, 0, 1, 1);
7072 emit_move_insn (new, op0);
7073 op0 = copy_rtx (new);
7074 PUT_MODE (op0, BLKmode);
7077 return op0;
7080 /* If the result is BLKmode, use that to access the object
7081 now as well. */
7082 if (mode == BLKmode)
7083 mode1 = BLKmode;
7085 /* Get a reference to just this component. */
7086 if (modifier == EXPAND_CONST_ADDRESS
7087 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7089 rtx new = gen_rtx_MEM (mode1,
7090 plus_constant (XEXP (op0, 0),
7091 (bitpos / BITS_PER_UNIT)));
7093 MEM_COPY_ATTRIBUTES (new, op0);
7094 op0 = new;
7096 else
7097 op0 = change_address (op0, mode1,
7098 plus_constant (XEXP (op0, 0),
7099 (bitpos / BITS_PER_UNIT)));
7101 set_mem_attributes (op0, exp, 0);
7102 if (GET_CODE (XEXP (op0, 0)) == REG)
7103 mark_reg_pointer (XEXP (op0, 0), alignment);
7105 MEM_VOLATILE_P (op0) |= volatilep;
7106 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7107 || modifier == EXPAND_CONST_ADDRESS
7108 || modifier == EXPAND_INITIALIZER)
7109 return op0;
7110 else if (target == 0)
7111 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7113 convert_move (target, op0, unsignedp);
7114 return target;
7117 /* Intended for a reference to a buffer of a file-object in Pascal.
7118 But it's not certain that a special tree code will really be
7119 necessary for these. INDIRECT_REF might work for them. */
7120 case BUFFER_REF:
7121 abort ();
7123 case IN_EXPR:
7125 /* Pascal set IN expression.
7127 Algorithm:
7128 rlo = set_low - (set_low%bits_per_word);
7129 the_word = set [ (index - rlo)/bits_per_word ];
7130 bit_index = index % bits_per_word;
7131 bitmask = 1 << bit_index;
7132 return !!(the_word & bitmask); */
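/* Worked instance of the algorithm above (hypothetical numbers): with
   set_low = 3, index = 13 and 32-bit words, rlo = 3 - (3 % 32) = 0,
   the_word = set[(13 - 0) / 32] = set[0], bit_index = 13 % 32 = 13, and the
   result is !!(set[0] & (1 << 13)).  (The code below actually works in
   BITS_PER_UNIT-sized chunks rather than words.)  */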
7134 tree set = TREE_OPERAND (exp, 0);
7135 tree index = TREE_OPERAND (exp, 1);
7136 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7137 tree set_type = TREE_TYPE (set);
7138 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7139 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7140 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7141 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7142 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7143 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7144 rtx setaddr = XEXP (setval, 0);
7145 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7146 rtx rlow;
7147 rtx diff, quo, rem, addr, bit, result;
7149 /* If domain is empty, answer is no. Likewise if index is constant
7150 and out of bounds. */
7151 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7152 && TREE_CODE (set_low_bound) == INTEGER_CST
7153 && tree_int_cst_lt (set_high_bound, set_low_bound))
7154 || (TREE_CODE (index) == INTEGER_CST
7155 && TREE_CODE (set_low_bound) == INTEGER_CST
7156 && tree_int_cst_lt (index, set_low_bound))
7157 || (TREE_CODE (set_high_bound) == INTEGER_CST
7158 && TREE_CODE (index) == INTEGER_CST
7159 && tree_int_cst_lt (set_high_bound, index))))
7160 return const0_rtx;
7162 if (target == 0)
7163 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7165 /* If we get here, we have to generate the code for both cases
7166 (in range and out of range). */
7168 op0 = gen_label_rtx ();
7169 op1 = gen_label_rtx ();
7171 if (! (GET_CODE (index_val) == CONST_INT
7172 && GET_CODE (lo_r) == CONST_INT))
7174 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7175 GET_MODE (index_val), iunsignedp, 0, op1);
7178 if (! (GET_CODE (index_val) == CONST_INT
7179 && GET_CODE (hi_r) == CONST_INT))
7181 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7182 GET_MODE (index_val), iunsignedp, 0, op1);
7185 /* Calculate the element number of bit zero in the first word
7186 of the set. */
7187 if (GET_CODE (lo_r) == CONST_INT)
7188 rlow = GEN_INT (INTVAL (lo_r)
7189 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7190 else
7191 rlow = expand_binop (index_mode, and_optab, lo_r,
7192 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7193 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7195 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7196 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7198 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7199 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7200 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7201 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7203 addr = memory_address (byte_mode,
7204 expand_binop (index_mode, add_optab, diff,
7205 setaddr, NULL_RTX, iunsignedp,
7206 OPTAB_LIB_WIDEN));
7208 /* Extract the bit we want to examine. */
7209 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7210 gen_rtx_MEM (byte_mode, addr),
7211 make_tree (TREE_TYPE (index), rem),
7212 NULL_RTX, 1);
7213 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7214 GET_MODE (target) == byte_mode ? target : 0,
7215 1, OPTAB_LIB_WIDEN);
7217 if (result != target)
7218 convert_move (target, result, 1);
7220 /* Output the code to handle the out-of-range case. */
7221 emit_jump (op0);
7222 emit_label (op1);
7223 emit_move_insn (target, const0_rtx);
7224 emit_label (op0);
7225 return target;
7228 case WITH_CLEANUP_EXPR:
7229 if (RTL_EXPR_RTL (exp) == 0)
7231 RTL_EXPR_RTL (exp)
7232 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7233 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7235 /* That's it for this cleanup. */
7236 TREE_OPERAND (exp, 2) = 0;
7238 return RTL_EXPR_RTL (exp);
7240 case CLEANUP_POINT_EXPR:
7242 /* Start a new binding layer that will keep track of all cleanup
7243 actions to be performed. */
7244 expand_start_bindings (2);
7246 target_temp_slot_level = temp_slot_level;
7248 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7249 /* If we're going to use this value, load it up now. */
7250 if (! ignore)
7251 op0 = force_not_mem (op0);
7252 preserve_temp_slots (op0);
7253 expand_end_bindings (NULL_TREE, 0, 0);
7255 return op0;
7257 case CALL_EXPR:
7258 /* Check for a built-in function. */
7259 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7260 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7261 == FUNCTION_DECL)
7262 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7264 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7265 == BUILT_IN_FRONTEND)
7266 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7267 else
7268 return expand_builtin (exp, target, subtarget, tmode, ignore);
7271 return expand_call (exp, target, ignore);
7273 case NON_LVALUE_EXPR:
7274 case NOP_EXPR:
7275 case CONVERT_EXPR:
7276 case REFERENCE_EXPR:
7277 if (TREE_OPERAND (exp, 0) == error_mark_node)
7278 return const0_rtx;
7280 if (TREE_CODE (type) == UNION_TYPE)
7282 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7284 /* If both input and output are BLKmode, this conversion
7285 isn't actually doing anything unless we need to make the
7286 alignment stricter. */
7287 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7288 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7289 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7290 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7291 modifier);
7293 if (target == 0)
7294 target = assign_temp (type, 0, 1, 1);
7296 if (GET_CODE (target) == MEM)
7297 /* Store data into beginning of memory target. */
7298 store_expr (TREE_OPERAND (exp, 0),
7299 change_address (target, TYPE_MODE (valtype), 0), 0);
7301 else if (GET_CODE (target) == REG)
7302 /* Store this field into a union of the proper type. */
7303 store_field (target,
7304 MIN ((int_size_in_bytes (TREE_TYPE
7305 (TREE_OPERAND (exp, 0)))
7306 * BITS_PER_UNIT),
7307 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7308 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7309 VOIDmode, 0, BITS_PER_UNIT,
7310 int_size_in_bytes (type), 0);
7311 else
7312 abort ();
7314 /* Return the entire union. */
7315 return target;
7318 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7320 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7321 ro_modifier);
7323 /* If the signedness of the conversion differs and OP0 is
7324 a promoted SUBREG, clear that indication since we now
7325 have to do the proper extension. */
7326 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7327 && GET_CODE (op0) == SUBREG)
7328 SUBREG_PROMOTED_VAR_P (op0) = 0;
7330 return op0;
7333 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7334 if (GET_MODE (op0) == mode)
7335 return op0;
7337 /* If OP0 is a constant, just convert it into the proper mode. */
7338 if (CONSTANT_P (op0))
7339 return
7340 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7341 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7343 if (modifier == EXPAND_INITIALIZER)
7344 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7346 if (target == 0)
7347 return
7348 convert_to_mode (mode, op0,
7349 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7350 else
7351 convert_move (target, op0,
7352 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7353 return target;
7355 case PLUS_EXPR:
7356 /* We come here from MINUS_EXPR when the second operand is a
7357 constant. */
7358 plus_expr:
7359 this_optab = ! unsignedp && flag_trapv
7360 && (GET_MODE_CLASS(mode) == MODE_INT)
7361 ? addv_optab : add_optab;
7363 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7364 something else, make sure we add the register to the constant and
7365 then to the other thing. This case can occur during strength
7366 reduction and doing it this way will produce better code if the
7367 frame pointer or argument pointer is eliminated.
7369 fold-const.c will ensure that the constant is always in the inner
7370 PLUS_EXPR, so the only case we need to do anything about is if
7371 sp, ap, or fp is our second argument, in which case we must swap
7372 the innermost first argument and our second argument. */
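/* For example, the tree (X + 4) + FP is rearranged here into
   (FP + 4) + X, so the register and the constant are combined
   first when FP is later eliminated.  */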
7374 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7375 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7376 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7377 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7378 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7379 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7381 tree t = TREE_OPERAND (exp, 1);
7383 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7384 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7387 /* If the result is to be ptr_mode and we are adding an integer to
7388 something, we might be forming a constant. So try to use
7389 plus_constant. If it produces a sum and we can't accept it,
7390 use force_operand. This allows P = &ARR[const] to generate
7391 efficient code on machines where a SYMBOL_REF is not a valid
7392 address.
7394 If this is an EXPAND_SUM call, always return the sum. */
7395 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7396 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7398 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7399 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7400 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7402 rtx constant_part;
7404 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7405 EXPAND_SUM);
7406 /* Use immed_double_const to ensure that the constant is
7407 truncated according to the mode of OP1, then sign extended
7408 to a HOST_WIDE_INT. Using the constant directly can result
7409 in non-canonical RTL in a 64x32 cross compile. */
7410 constant_part
7411 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7412 (HOST_WIDE_INT) 0,
7413 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7414 op1 = plus_constant (op1, INTVAL (constant_part));
7415 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7416 op1 = force_operand (op1, target);
7417 return op1;
7420 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7421 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7422 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7424 rtx constant_part;
7426 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7427 EXPAND_SUM);
7428 if (! CONSTANT_P (op0))
7430 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7431 VOIDmode, modifier);
7432 /* Don't go to both_summands if modifier
7433 says it's not right to return a PLUS. */
7434 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7435 goto binop2;
7436 goto both_summands;
7438 /* Use immed_double_const to ensure that the constant is
7439 truncated according to the mode of OP0, then sign extended
7440 to a HOST_WIDE_INT. Using the constant directly can result
7441 in non-canonical RTL in a 64x32 cross compile. */
7442 constant_part
7443 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7444 (HOST_WIDE_INT) 0,
7445 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7446 op0 = plus_constant (op0, INTVAL (constant_part));
7447 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7448 op0 = force_operand (op0, target);
7449 return op0;
7453 /* No sense saving up arithmetic to be done
7454 if it's all in the wrong mode to form part of an address.
7455 And force_operand won't know whether to sign-extend or
7456 zero-extend. */
7457 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7458 || mode != ptr_mode)
7459 goto binop;
7461 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7462 subtarget = 0;
7464 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7465 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7467 both_summands:
7468 /* Make sure any term that's a sum with a constant comes last. */
7469 if (GET_CODE (op0) == PLUS
7470 && CONSTANT_P (XEXP (op0, 1)))
7472 temp = op0;
7473 op0 = op1;
7474 op1 = temp;
7476 /* If adding to a sum including a constant,
7477 associate it to put the constant outside. */
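/* For example, adding OP0 to (X + 7) is rearranged below into
   (OP0 + X) + 7, so the constant ends up outermost.  */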
7478 if (GET_CODE (op1) == PLUS
7479 && CONSTANT_P (XEXP (op1, 1)))
7481 rtx constant_term = const0_rtx;
7483 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7484 if (temp != 0)
7485 op0 = temp;
7486 /* Ensure that MULT comes first if there is one. */
7487 else if (GET_CODE (op0) == MULT)
7488 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7489 else
7490 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7492 /* Let's also eliminate constants from op0 if possible. */
7493 op0 = eliminate_constant_term (op0, &constant_term);
7495 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7496 their sum should be a constant. Form it into OP1, since the
7497 result we want will then be OP0 + OP1. */
7499 temp = simplify_binary_operation (PLUS, mode, constant_term,
7500 XEXP (op1, 1));
7501 if (temp != 0)
7502 op1 = temp;
7503 else
7504 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7507 /* Put a constant term last and put a multiplication first. */
7508 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7509 temp = op1, op1 = op0, op0 = temp;
7511 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7512 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7514 case MINUS_EXPR:
7515 /* For initializers, we are allowed to return a MINUS of two
7516 symbolic constants. Here we handle all cases when both operands
7517 are constant. */
7518 /* Handle difference of two symbolic constants,
7519 for the sake of an initializer. */
7520 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7521 && really_constant_p (TREE_OPERAND (exp, 0))
7522 && really_constant_p (TREE_OPERAND (exp, 1)))
7524 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7525 VOIDmode, ro_modifier);
7526 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7527 VOIDmode, ro_modifier);
7529 /* If the last operand is a CONST_INT, use plus_constant of
7530 the negated constant. Else make the MINUS. */
7531 if (GET_CODE (op1) == CONST_INT)
7532 return plus_constant (op0, - INTVAL (op1));
7533 else
7534 return gen_rtx_MINUS (mode, op0, op1);
7536 /* Convert A - const to A + (-const). */
7537 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7539 tree negated = fold (build1 (NEGATE_EXPR, type,
7540 TREE_OPERAND (exp, 1)));
7542 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7543 /* If we can't negate the constant in TYPE, leave it alone and
7544 expand_binop will negate it for us. We used to try to do it
7545 here in the signed version of TYPE, but that doesn't work
7546 on POINTER_TYPEs. */;
7547 else
7549 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7550 goto plus_expr;
7553 this_optab = ! unsignedp && flag_trapv
7554 && (GET_MODE_CLASS(mode) == MODE_INT)
7555 ? subv_optab : sub_optab;
7556 goto binop;
7558 case MULT_EXPR:
7559 /* If first operand is constant, swap them.
7560 Thus the following special case checks need only
7561 check the second operand. */
7562 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7564 register tree t1 = TREE_OPERAND (exp, 0);
7565 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7566 TREE_OPERAND (exp, 1) = t1;
7569 /* Attempt to return something suitable for generating an
7570 indexed address, for machines that support that. */
7572 if (modifier == EXPAND_SUM && mode == ptr_mode
7573 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7574 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7576 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7577 EXPAND_SUM);
7579 /* Apply distributive law if OP0 is x+c. */
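/* For example, if OP0 is (X + 4) and the multiplier is 3, this
   returns (X * 3) + 12 rather than multiplying the sum.  */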
7580 if (GET_CODE (op0) == PLUS
7581 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7582 return
7583 gen_rtx_PLUS
7584 (mode,
7585 gen_rtx_MULT
7586 (mode, XEXP (op0, 0),
7587 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7588 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7589 * INTVAL (XEXP (op0, 1))));
7591 if (GET_CODE (op0) != REG)
7592 op0 = force_operand (op0, NULL_RTX);
7593 if (GET_CODE (op0) != REG)
7594 op0 = copy_to_mode_reg (mode, op0);
7596 return
7597 gen_rtx_MULT (mode, op0,
7598 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7601 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7602 subtarget = 0;
7604 /* Check for multiplying things that have been extended
7605 from a narrower type. If this machine supports multiplying
7606 in that narrower type with a result in the desired type,
7607 do it that way, and avoid the explicit type-conversion. */
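/* For example, (int) (short) A * (int) (short) B can then be done
   with a single widening multiply (e.g. HImode operands producing
   an SImode product) instead of extending both operands first;
   the actual modes depend on the target.  */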
7608 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7609 && TREE_CODE (type) == INTEGER_TYPE
7610 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7611 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7612 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7613 && int_fits_type_p (TREE_OPERAND (exp, 1),
7614 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7615 /* Don't use a widening multiply if a shift will do. */
7616 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7617 > HOST_BITS_PER_WIDE_INT)
7618 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7620 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7621 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7623 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7624 /* If both operands are extended, they must either both
7625 be zero-extended or both be sign-extended. */
7626 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7628 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7630 enum machine_mode innermode
7631 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7632 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7633 ? smul_widen_optab : umul_widen_optab);
7634 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7635 ? umul_widen_optab : smul_widen_optab);
7636 if (mode == GET_MODE_WIDER_MODE (innermode))
7638 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7640 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7641 NULL_RTX, VOIDmode, 0);
7642 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7643 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7644 VOIDmode, 0);
7645 else
7646 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7647 NULL_RTX, VOIDmode, 0);
7648 goto binop2;
7650 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7651 && innermode == word_mode)
7653 rtx htem;
7654 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7655 NULL_RTX, VOIDmode, 0);
7656 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7657 op1 = convert_modes (innermode, mode,
7658 expand_expr (TREE_OPERAND (exp, 1),
7659 NULL_RTX, VOIDmode, 0),
7660 unsignedp);
7661 else
7662 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7663 NULL_RTX, VOIDmode, 0);
7664 temp = expand_binop (mode, other_optab, op0, op1, target,
7665 unsignedp, OPTAB_LIB_WIDEN);
7666 htem = expand_mult_highpart_adjust (innermode,
7667 gen_highpart (innermode, temp),
7668 op0, op1,
7669 gen_highpart (innermode, temp),
7670 unsignedp);
7671 emit_move_insn (gen_highpart (innermode, temp), htem);
7672 return temp;
7676 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7677 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7678 return expand_mult (mode, op0, op1, target, unsignedp);
7680 case TRUNC_DIV_EXPR:
7681 case FLOOR_DIV_EXPR:
7682 case CEIL_DIV_EXPR:
7683 case ROUND_DIV_EXPR:
7684 case EXACT_DIV_EXPR:
7685 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7686 subtarget = 0;
7687 /* Possible optimization: compute the dividend with EXPAND_SUM
7688 then, if the divisor is constant, we can optimize the case
7689 where some terms of the dividend have coefficients divisible by it. */
7690 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7691 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7692 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7694 case RDIV_EXPR:
7695 this_optab = flodiv_optab;
7696 goto binop;
7698 case TRUNC_MOD_EXPR:
7699 case FLOOR_MOD_EXPR:
7700 case CEIL_MOD_EXPR:
7701 case ROUND_MOD_EXPR:
7702 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7703 subtarget = 0;
7704 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7705 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7706 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7708 case FIX_ROUND_EXPR:
7709 case FIX_FLOOR_EXPR:
7710 case FIX_CEIL_EXPR:
7711 abort (); /* Not used for C. */
7713 case FIX_TRUNC_EXPR:
7714 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7715 if (target == 0)
7716 target = gen_reg_rtx (mode);
7717 expand_fix (target, op0, unsignedp);
7718 return target;
7720 case FLOAT_EXPR:
7721 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7722 if (target == 0)
7723 target = gen_reg_rtx (mode);
7724 /* expand_float can't figure out what to do if FROM has VOIDmode.
7725 So give it the correct mode. With -O, cse will optimize this. */
7726 if (GET_MODE (op0) == VOIDmode)
7727 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7728 op0);
7729 expand_float (target, op0,
7730 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7731 return target;
7733 case NEGATE_EXPR:
7734 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7735 temp = expand_unop (mode,
7736 ! unsignedp && flag_trapv
7737 && (GET_MODE_CLASS(mode) == MODE_INT)
7738 ? negv_optab : neg_optab, op0, target, 0);
7739 if (temp == 0)
7740 abort ();
7741 return temp;
7743 case ABS_EXPR:
7744 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7746 /* Handle complex values specially. */
7747 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7748 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7749 return expand_complex_abs (mode, op0, target, unsignedp);
7751 /* Unsigned abs is simply the operand. Testing here means we don't
7752 risk generating incorrect code below. */
7753 if (TREE_UNSIGNED (type))
7754 return op0;
7756 return expand_abs (mode, op0, target, unsignedp,
7757 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7759 case MAX_EXPR:
7760 case MIN_EXPR:
7761 target = original_target;
7762 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7763 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7764 || GET_MODE (target) != mode
7765 || (GET_CODE (target) == REG
7766 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7767 target = gen_reg_rtx (mode);
7768 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7769 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7771 /* First try to do it with a special MIN or MAX instruction.
7772 If that does not win, use a conditional jump to select the proper
7773 value. */
7774 this_optab = (TREE_UNSIGNED (type)
7775 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7776 : (code == MIN_EXPR ? smin_optab : smax_optab));
7778 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7779 OPTAB_WIDEN);
7780 if (temp != 0)
7781 return temp;
7783 /* At this point, a MEM target is no longer useful; we will get better
7784 code without it. */
7786 if (GET_CODE (target) == MEM)
7787 target = gen_reg_rtx (mode);
7789 if (target != op0)
7790 emit_move_insn (target, op0);
7792 op0 = gen_label_rtx ();
7794 /* If this mode is an integer too wide to compare properly,
7795 compare word by word. Rely on cse to optimize constant cases. */
7796 if (GET_MODE_CLASS (mode) == MODE_INT
7797 && ! can_compare_p (GE, mode, ccp_jump))
7799 if (code == MAX_EXPR)
7800 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7801 target, op1, NULL_RTX, op0);
7802 else
7803 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7804 op1, target, NULL_RTX, op0);
7806 else
7808 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7809 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7810 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7811 op0);
7813 emit_move_insn (target, op1);
7814 emit_label (op0);
7815 return target;
7817 case BIT_NOT_EXPR:
7818 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7819 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7820 if (temp == 0)
7821 abort ();
7822 return temp;
7824 case FFS_EXPR:
7825 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7826 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7827 if (temp == 0)
7828 abort ();
7829 return temp;
7831 /* ??? Can optimize bitwise operations with one arg constant.
7832 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7833 and (a bitwise1 b) bitwise2 b (etc)
7834 but that is probably not worthwhile. */
7836 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7837 boolean values when we want in all cases to compute both of them. In
7838 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7839 as actual zero-or-1 values and then bitwise anding. In cases where
7840 there cannot be any side effects, better code would be made by
7841 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7842 how to recognize those cases. */
7844 case TRUTH_AND_EXPR:
7845 case BIT_AND_EXPR:
7846 this_optab = and_optab;
7847 goto binop;
7849 case TRUTH_OR_EXPR:
7850 case BIT_IOR_EXPR:
7851 this_optab = ior_optab;
7852 goto binop;
7854 case TRUTH_XOR_EXPR:
7855 case BIT_XOR_EXPR:
7856 this_optab = xor_optab;
7857 goto binop;
7859 case LSHIFT_EXPR:
7860 case RSHIFT_EXPR:
7861 case LROTATE_EXPR:
7862 case RROTATE_EXPR:
7863 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7864 subtarget = 0;
7865 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7866 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7867 unsignedp);
7869 /* Could determine the answer when only additive constants differ. Also,
7870 the addition of one can be handled by changing the condition. */
7871 case LT_EXPR:
7872 case LE_EXPR:
7873 case GT_EXPR:
7874 case GE_EXPR:
7875 case EQ_EXPR:
7876 case NE_EXPR:
7877 case UNORDERED_EXPR:
7878 case ORDERED_EXPR:
7879 case UNLT_EXPR:
7880 case UNLE_EXPR:
7881 case UNGT_EXPR:
7882 case UNGE_EXPR:
7883 case UNEQ_EXPR:
7884 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7885 if (temp != 0)
7886 return temp;
7888 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7889 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7890 && original_target
7891 && GET_CODE (original_target) == REG
7892 && (GET_MODE (original_target)
7893 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7895 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7896 VOIDmode, 0);
7898 if (temp != original_target)
7899 temp = copy_to_reg (temp);
7901 op1 = gen_label_rtx ();
7902 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7903 GET_MODE (temp), unsignedp, 0, op1);
7904 emit_move_insn (temp, const1_rtx);
7905 emit_label (op1);
7906 return temp;
7909 /* If no set-flag instruction, must generate a conditional
7910 store into a temporary variable. Drop through
7911 and handle this like && and ||. */
7913 case TRUTH_ANDIF_EXPR:
7914 case TRUTH_ORIF_EXPR:
7915 if (! ignore
7916 && (target == 0 || ! safe_from_p (target, exp, 1)
7917 /* Make sure we don't have a hard reg (such as function's return
7918 value) live across basic blocks, if not optimizing. */
7919 || (!optimize && GET_CODE (target) == REG
7920 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7921 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7923 if (target)
7924 emit_clr_insn (target);
7926 op1 = gen_label_rtx ();
7927 jumpifnot (exp, op1);
7929 if (target)
7930 emit_0_to_1_insn (target);
7932 emit_label (op1);
7933 return ignore ? const0_rtx : target;
7935 case TRUTH_NOT_EXPR:
7936 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7937 /* The parser is careful to generate TRUTH_NOT_EXPR
7938 only with operands that are always zero or one. */
7939 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7940 target, 1, OPTAB_LIB_WIDEN);
7941 if (temp == 0)
7942 abort ();
7943 return temp;
7945 case COMPOUND_EXPR:
7946 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7947 emit_queue ();
7948 return expand_expr (TREE_OPERAND (exp, 1),
7949 (ignore ? const0_rtx : target),
7950 VOIDmode, 0);
7952 case COND_EXPR:
7953 /* If we would have a "singleton" (see below) were it not for a
7954 conversion in each arm, bring that conversion back out. */
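/* For example, X ? (TYPE) (A + B) : (TYPE) A is rewritten here as
   (TYPE) (X ? A + B : A), so the singleton handling below applies.  */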
7955 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7956 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7957 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7958 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7960 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7961 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7963 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
7964 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
7965 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
7966 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
7967 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
7968 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
7969 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
7970 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
7971 return expand_expr (build1 (NOP_EXPR, type,
7972 build (COND_EXPR, TREE_TYPE (iftrue),
7973 TREE_OPERAND (exp, 0),
7974 iftrue, iffalse)),
7975 target, tmode, modifier);
7979 /* Note that COND_EXPRs whose type is a structure or union
7980 are required to be constructed to contain assignments of
7981 a temporary variable, so that we can evaluate them here
7982 for side effect only. If type is void, we must do likewise. */
7984 /* If an arm of the branch requires a cleanup,
7985 only that cleanup is performed. */
7987 tree singleton = 0;
7988 tree binary_op = 0, unary_op = 0;
7990 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7991 convert it to our mode, if necessary. */
7992 if (integer_onep (TREE_OPERAND (exp, 1))
7993 && integer_zerop (TREE_OPERAND (exp, 2))
7994 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7996 if (ignore)
7998 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7999 ro_modifier);
8000 return const0_rtx;
8003 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
8004 if (GET_MODE (op0) == mode)
8005 return op0;
8007 if (target == 0)
8008 target = gen_reg_rtx (mode);
8009 convert_move (target, op0, unsignedp);
8010 return target;
8013 /* Check for X ? A + B : A. If we have this, we can copy A to the
8014 output and conditionally add B. Similarly for unary operations.
8015 Don't do this if X has side-effects because those side effects
8016 might affect A or B and the "?" operation is a sequence point in
8017 ANSI. (operand_equal_p tests for side effects.) */
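/* For example, in X ? A + B : A, SINGLETON is A and BINARY_OP is
   A + B: A is computed unconditionally and B is added only when
   X is true.  */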
8019 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8020 && operand_equal_p (TREE_OPERAND (exp, 2),
8021 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8022 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8023 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8024 && operand_equal_p (TREE_OPERAND (exp, 1),
8025 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8026 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8027 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8028 && operand_equal_p (TREE_OPERAND (exp, 2),
8029 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8030 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8031 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8032 && operand_equal_p (TREE_OPERAND (exp, 1),
8033 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8034 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8036 /* If we are not to produce a result, we have no target. Otherwise,
8037 if a target was specified use it; it will not be used as an
8038 intermediate target unless it is safe. If no target, use a
8039 temporary. */
8041 if (ignore)
8042 temp = 0;
8043 else if (original_target
8044 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8045 || (singleton && GET_CODE (original_target) == REG
8046 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8047 && original_target == var_rtx (singleton)))
8048 && GET_MODE (original_target) == mode
8049 #ifdef HAVE_conditional_move
8050 && (! can_conditionally_move_p (mode)
8051 || GET_CODE (original_target) == REG
8052 || TREE_ADDRESSABLE (type))
8053 #endif
8054 && ! (GET_CODE (original_target) == MEM
8055 && MEM_VOLATILE_P (original_target)))
8056 temp = original_target;
8057 else if (TREE_ADDRESSABLE (type))
8058 abort ();
8059 else
8060 temp = assign_temp (type, 0, 0, 1);
8062 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8063 do the test of X as a store-flag operation, do this as
8064 A + ((X != 0) << log C). Similarly for other simple binary
8065 operators. Only do for C == 1 if BRANCH_COST is low. */
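/* For example, X ? A + 4 : A can then be computed without a branch
   as A + ((X != 0) << 2) when the store-flag sequence is cheap.  */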
8066 if (temp && singleton && binary_op
8067 && (TREE_CODE (binary_op) == PLUS_EXPR
8068 || TREE_CODE (binary_op) == MINUS_EXPR
8069 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8070 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8071 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8072 : integer_onep (TREE_OPERAND (binary_op, 1)))
8073 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8075 rtx result;
8076 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8077 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8078 ? addv_optab : add_optab)
8079 : TREE_CODE (binary_op) == MINUS_EXPR
8080 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8081 ? subv_optab : sub_optab)
8082 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8083 : xor_optab);
8085 /* If we had X ? A : A + 1, do this as A + (X == 0).
8087 We have to invert the truth value here and then put it
8088 back later if do_store_flag fails. We cannot simply copy
8089 TREE_OPERAND (exp, 0) to another variable and modify that
8090 because invert_truthvalue can modify the tree pointed to
8091 by its argument. */
8092 if (singleton == TREE_OPERAND (exp, 1))
8093 TREE_OPERAND (exp, 0)
8094 = invert_truthvalue (TREE_OPERAND (exp, 0));
8096 result = do_store_flag (TREE_OPERAND (exp, 0),
8097 (safe_from_p (temp, singleton, 1)
8098 ? temp : NULL_RTX),
8099 mode, BRANCH_COST <= 1);
8101 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8102 result = expand_shift (LSHIFT_EXPR, mode, result,
8103 build_int_2 (tree_log2
8104 (TREE_OPERAND
8105 (binary_op, 1)),
8107 (safe_from_p (temp, singleton, 1)
8108 ? temp : NULL_RTX), 0);
8110 if (result)
8112 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8113 return expand_binop (mode, boptab, op1, result, temp,
8114 unsignedp, OPTAB_LIB_WIDEN);
8116 else if (singleton == TREE_OPERAND (exp, 1))
8117 TREE_OPERAND (exp, 0)
8118 = invert_truthvalue (TREE_OPERAND (exp, 0));
8121 do_pending_stack_adjust ();
8122 NO_DEFER_POP;
8123 op0 = gen_label_rtx ();
8125 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8127 if (temp != 0)
8129 /* If the target conflicts with the other operand of the
8130 binary op, we can't use it. Also, we can't use the target
8131 if it is a hard register, because evaluating the condition
8132 might clobber it. */
8133 if ((binary_op
8134 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8135 || (GET_CODE (temp) == REG
8136 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8137 temp = gen_reg_rtx (mode);
8138 store_expr (singleton, temp, 0);
8140 else
8141 expand_expr (singleton,
8142 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8143 if (singleton == TREE_OPERAND (exp, 1))
8144 jumpif (TREE_OPERAND (exp, 0), op0);
8145 else
8146 jumpifnot (TREE_OPERAND (exp, 0), op0);
8148 start_cleanup_deferral ();
8149 if (binary_op && temp == 0)
8150 /* Just touch the other operand. */
8151 expand_expr (TREE_OPERAND (binary_op, 1),
8152 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8153 else if (binary_op)
8154 store_expr (build (TREE_CODE (binary_op), type,
8155 make_tree (type, temp),
8156 TREE_OPERAND (binary_op, 1)),
8157 temp, 0);
8158 else
8159 store_expr (build1 (TREE_CODE (unary_op), type,
8160 make_tree (type, temp)),
8161 temp, 0);
8162 op1 = op0;
8164 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8165 comparison operator. If we have one of these cases, set the
8166 output to A, branch on A (cse will merge these two references),
8167 then set the output to FOO. */
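/* For example, for X != 0 ? X : Y we store X into the result, test
   X again (cse merges the two references), and store Y only if the
   test fails.  */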
8168 else if (temp
8169 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8170 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8171 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8172 TREE_OPERAND (exp, 1), 0)
8173 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8174 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8175 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8177 if (GET_CODE (temp) == REG
8178 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8179 temp = gen_reg_rtx (mode);
8180 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8181 jumpif (TREE_OPERAND (exp, 0), op0);
8183 start_cleanup_deferral ();
8184 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8185 op1 = op0;
8187 else if (temp
8188 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8189 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8190 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8191 TREE_OPERAND (exp, 2), 0)
8192 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8193 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8194 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8196 if (GET_CODE (temp) == REG
8197 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8198 temp = gen_reg_rtx (mode);
8199 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8200 jumpifnot (TREE_OPERAND (exp, 0), op0);
8202 start_cleanup_deferral ();
8203 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8204 op1 = op0;
8206 else
8208 op1 = gen_label_rtx ();
8209 jumpifnot (TREE_OPERAND (exp, 0), op0);
8211 start_cleanup_deferral ();
8213 /* One branch of the cond can be void, if it never returns. For
8214 example A ? throw : E */
8215 if (temp != 0
8216 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8217 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8218 else
8219 expand_expr (TREE_OPERAND (exp, 1),
8220 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8221 end_cleanup_deferral ();
8222 emit_queue ();
8223 emit_jump_insn (gen_jump (op1));
8224 emit_barrier ();
8225 emit_label (op0);
8226 start_cleanup_deferral ();
8227 if (temp != 0
8228 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8229 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8230 else
8231 expand_expr (TREE_OPERAND (exp, 2),
8232 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8235 end_cleanup_deferral ();
8237 emit_queue ();
8238 emit_label (op1);
8239 OK_DEFER_POP;
8241 return temp;
8244 case TARGET_EXPR:
8246 /* Something needs to be initialized, but we didn't know
8247 where that thing was when building the tree. For example,
8248 it could be the return value of a function, or a parameter
8249 to a function which is laid down on the stack, or a temporary
8250 variable which must be passed by reference.
8252 We guarantee that the expression will either be constructed
8253 or copied into our original target. */
8255 tree slot = TREE_OPERAND (exp, 0);
8256 tree cleanups = NULL_TREE;
8257 tree exp1;
8259 if (TREE_CODE (slot) != VAR_DECL)
8260 abort ();
8262 if (! ignore)
8263 target = original_target;
8265 /* Set this here so that if we get a target that refers to a
8266 register variable that's already been used, put_reg_into_stack
8267 knows that it should fix up those uses. */
8268 TREE_USED (slot) = 1;
8270 if (target == 0)
8272 if (DECL_RTL (slot) != 0)
8274 target = DECL_RTL (slot);
8275 /* If we have already expanded the slot, don't do
8276 it again. (mrs) */
8277 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8278 return target;
8280 else
8282 target = assign_temp (type, 2, 0, 1);
8283 /* All temp slots at this level must not conflict. */
8284 preserve_temp_slots (target);
8285 DECL_RTL (slot) = target;
8286 if (TREE_ADDRESSABLE (slot))
8287 put_var_into_stack (slot);
8289 /* Since SLOT is not known to the called function
8290 to belong to its stack frame, we must build an explicit
8291 cleanup. This case occurs when we must build up a reference
8292 to pass the reference as an argument. In this case,
8293 it is very likely that such a reference need not be
8294 built here. */
8296 if (TREE_OPERAND (exp, 2) == 0)
8297 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8298 cleanups = TREE_OPERAND (exp, 2);
8301 else
8303 /* This case does occur, when expanding a parameter which
8304 needs to be constructed on the stack. The target
8305 is the actual stack address that we want to initialize.
8306 The function we call will perform the cleanup in this case. */
8308 /* If we have already assigned it space, use that space,
8309 not the target that we were passed in, as our target
8310 parameter is only a hint. */
8311 if (DECL_RTL (slot) != 0)
8313 target = DECL_RTL (slot);
8314 /* If we have already expanded the slot, don't do
8315 it again. (mrs) */
8316 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8317 return target;
8319 else
8321 DECL_RTL (slot) = target;
8322 /* If we must have an addressable slot, then make sure that
8323 the RTL that we just stored in slot is OK. */
8324 if (TREE_ADDRESSABLE (slot))
8325 put_var_into_stack (slot);
8329 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8330 /* Mark it as expanded. */
8331 TREE_OPERAND (exp, 1) = NULL_TREE;
8333 store_expr (exp1, target, 0);
8335 expand_decl_cleanup (NULL_TREE, cleanups);
8337 return target;
8340 case INIT_EXPR:
8342 tree lhs = TREE_OPERAND (exp, 0);
8343 tree rhs = TREE_OPERAND (exp, 1);
8344 tree noncopied_parts = 0;
8345 tree lhs_type = TREE_TYPE (lhs);
8347 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8348 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8349 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8350 TYPE_NONCOPIED_PARTS (lhs_type));
8351 while (noncopied_parts != 0)
8353 expand_assignment (TREE_VALUE (noncopied_parts),
8354 TREE_PURPOSE (noncopied_parts), 0, 0);
8355 noncopied_parts = TREE_CHAIN (noncopied_parts);
8357 return temp;
8360 case MODIFY_EXPR:
8362 /* If lhs is complex, expand calls in rhs before computing it.
8363 That's so we don't compute a pointer and save it over a call.
8364 If lhs is simple, compute it first so we can give it as a
8365 target if the rhs is just a call. This avoids an extra temp and copy
8366 and that prevents a partial subsumption, which makes bad code.
8367 Actually we could treat component_ref's of vars like vars. */
8369 tree lhs = TREE_OPERAND (exp, 0);
8370 tree rhs = TREE_OPERAND (exp, 1);
8371 tree noncopied_parts = 0;
8372 tree lhs_type = TREE_TYPE (lhs);
8374 temp = 0;
8376 if (TREE_CODE (lhs) != VAR_DECL
8377 && TREE_CODE (lhs) != RESULT_DECL
8378 && TREE_CODE (lhs) != PARM_DECL
8379 && ! (TREE_CODE (lhs) == INDIRECT_REF
8380 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8382 /* Check for |= or &= of a bitfield of size one into another bitfield
8383 of size 1. In this case, (unless we need the result of the
8384 assignment) we can do this more efficiently with a
8385 test followed by an assignment, if necessary.
8387 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8388 things change so we do, this code should be enhanced to
8389 support it. */
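/* For example, with one-bit fields, A.B |= C.D becomes
   "if (C.D) A.B = 1;" and A.B &= C.D becomes
   "if (! C.D) A.B = 0;".  */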
8390 if (ignore
8391 && TREE_CODE (lhs) == COMPONENT_REF
8392 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8393 || TREE_CODE (rhs) == BIT_AND_EXPR)
8394 && TREE_OPERAND (rhs, 0) == lhs
8395 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8396 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8397 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8399 rtx label = gen_label_rtx ();
8401 do_jump (TREE_OPERAND (rhs, 1),
8402 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8403 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8404 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8405 (TREE_CODE (rhs) == BIT_IOR_EXPR
8406 ? integer_one_node
8407 : integer_zero_node)),
8408 0, 0);
8409 do_pending_stack_adjust ();
8410 emit_label (label);
8411 return const0_rtx;
8414 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8415 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8416 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8417 TYPE_NONCOPIED_PARTS (lhs_type));
8419 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8420 while (noncopied_parts != 0)
8422 expand_assignment (TREE_PURPOSE (noncopied_parts),
8423 TREE_VALUE (noncopied_parts), 0, 0);
8424 noncopied_parts = TREE_CHAIN (noncopied_parts);
8426 return temp;
8429 case RETURN_EXPR:
8430 if (!TREE_OPERAND (exp, 0))
8431 expand_null_return ();
8432 else
8433 expand_return (TREE_OPERAND (exp, 0));
8434 return const0_rtx;
8436 case PREINCREMENT_EXPR:
8437 case PREDECREMENT_EXPR:
8438 return expand_increment (exp, 0, ignore);
8440 case POSTINCREMENT_EXPR:
8441 case POSTDECREMENT_EXPR:
8442 /* Faster to treat as pre-increment if result is not used. */
8443 return expand_increment (exp, ! ignore, ignore);
8445 case ADDR_EXPR:
8446 /* If nonzero, TEMP will be the address of something that might
8447 be a MEM corresponding to a stack slot. */
8448 temp = 0;
8450 /* Are we taking the address of a nested function? */
8451 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8452 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8453 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8454 && ! TREE_STATIC (exp))
8456 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8457 op0 = force_operand (op0, target);
8459 /* If we are taking the address of something erroneous, just
8460 return a zero. */
8461 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8462 return const0_rtx;
8463 else
8465 /* We make sure to pass const0_rtx down if we came in with
8466 ignore set, to avoid doing the cleanups twice for something. */
8467 op0 = expand_expr (TREE_OPERAND (exp, 0),
8468 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8469 (modifier == EXPAND_INITIALIZER
8470 ? modifier : EXPAND_CONST_ADDRESS));
8472 /* If we are going to ignore the result, OP0 will have been set
8473 to const0_rtx, so just return it. Don't get confused and
8474 think we are taking the address of the constant. */
8475 if (ignore)
8476 return op0;
8478 op0 = protect_from_queue (op0, 0);
8480 /* We would like the object in memory. If it is a constant, we can
8481 have it be statically allocated into memory. For a non-constant,
8482 we need to allocate some memory and store the value into it. */
8484 if (CONSTANT_P (op0))
8485 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8486 op0);
8487 else if (GET_CODE (op0) == MEM)
8489 mark_temp_addr_taken (op0);
8490 temp = XEXP (op0, 0);
8493 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8494 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8495 || GET_CODE (op0) == PARALLEL)
8497 /* If this object is in a register, it must not
8498 be BLKmode. */
8499 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8500 tree nt = build_qualified_type (inner_type,
8501 (TYPE_QUALS (inner_type)
8502 | TYPE_QUAL_CONST));
8503 rtx memloc = assign_temp (nt, 1, 1, 1);
8505 mark_temp_addr_taken (memloc);
8506 if (GET_CODE (op0) == PARALLEL)
8507 /* Handle calls that pass values in multiple non-contiguous
8508 locations. The Irix 6 ABI has examples of this. */
8509 emit_group_store (memloc, op0,
8510 int_size_in_bytes (inner_type),
8511 TYPE_ALIGN (inner_type));
8512 else
8513 emit_move_insn (memloc, op0);
8514 op0 = memloc;
8517 if (GET_CODE (op0) != MEM)
8518 abort ();
8520 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8522 temp = XEXP (op0, 0);
8523 #ifdef POINTERS_EXTEND_UNSIGNED
8524 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8525 && mode == ptr_mode)
8526 temp = convert_memory_address (ptr_mode, temp);
8527 #endif
8528 return temp;
8531 op0 = force_operand (XEXP (op0, 0), target);
8534 if (flag_force_addr && GET_CODE (op0) != REG)
8535 op0 = force_reg (Pmode, op0);
8537 if (GET_CODE (op0) == REG
8538 && ! REG_USERVAR_P (op0))
8539 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8541 /* If we might have had a temp slot, add an equivalent address
8542 for it. */
8543 if (temp != 0)
8544 update_temp_slot_address (temp, op0);
8546 #ifdef POINTERS_EXTEND_UNSIGNED
8547 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8548 && mode == ptr_mode)
8549 op0 = convert_memory_address (ptr_mode, op0);
8550 #endif
8552 return op0;
8554 case ENTRY_VALUE_EXPR:
8555 abort ();
8557 /* COMPLEX type for Extended Pascal & Fortran */
8558 case COMPLEX_EXPR:
8560 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8561 rtx insns;
8563 /* Get the rtx code of the operands. */
8564 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8565 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8567 if (! target)
8568 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8570 start_sequence ();
8572 /* Move the real (op0) and imaginary (op1) parts to their location. */
8573 emit_move_insn (gen_realpart (mode, target), op0);
8574 emit_move_insn (gen_imagpart (mode, target), op1);
8576 insns = get_insns ();
8577 end_sequence ();
8579 /* Complex construction should appear as a single unit. */
8580 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8581 each with a separate pseudo as destination.
8582 It's not correct for flow to treat them as a unit. */
8583 if (GET_CODE (target) != CONCAT)
8584 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8585 else
8586 emit_insns (insns);
8588 return target;
8591 case REALPART_EXPR:
8592 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8593 return gen_realpart (mode, op0);
8595 case IMAGPART_EXPR:
8596 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8597 return gen_imagpart (mode, op0);
8599 case CONJ_EXPR:
8601 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8602 rtx imag_t;
8603 rtx insns;
8605 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8607 if (! target)
8608 target = gen_reg_rtx (mode);
8610 start_sequence ();
8612 /* Store the realpart and the negated imagpart to target. */
8613 emit_move_insn (gen_realpart (partmode, target),
8614 gen_realpart (partmode, op0));
8616 imag_t = gen_imagpart (partmode, target);
8617 temp = expand_unop (partmode,
8618 ! unsignedp && flag_trapv
8619 && (GET_MODE_CLASS(partmode) == MODE_INT)
8620 ? negv_optab : neg_optab,
8621 gen_imagpart (partmode, op0), imag_t, 0);
8622 if (temp != imag_t)
8623 emit_move_insn (imag_t, temp);
8625 insns = get_insns ();
8626 end_sequence ();
8628 /* Conjugate should appear as a single unit.
8629 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8630 each with a separate pseudo as destination.
8631 It's not correct for flow to treat them as a unit. */
8632 if (GET_CODE (target) != CONCAT)
8633 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8634 else
8635 emit_insns (insns);
8637 return target;
8640 case TRY_CATCH_EXPR:
8642 tree handler = TREE_OPERAND (exp, 1);
8644 expand_eh_region_start ();
8646 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8648 expand_eh_region_end (handler);
8650 return op0;
8653 case TRY_FINALLY_EXPR:
8655 tree try_block = TREE_OPERAND (exp, 0);
8656 tree finally_block = TREE_OPERAND (exp, 1);
8657 rtx finally_label = gen_label_rtx ();
8658 rtx done_label = gen_label_rtx ();
8659 rtx return_link = gen_reg_rtx (Pmode);
8660 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8661 (tree) finally_label, (tree) return_link);
8662 TREE_SIDE_EFFECTS (cleanup) = 1;
8664 /* Start a new binding layer that will keep track of all cleanup
8665 actions to be performed. */
8666 expand_start_bindings (2);
8668 target_temp_slot_level = temp_slot_level;
8670 expand_decl_cleanup (NULL_TREE, cleanup);
8671 op0 = expand_expr (try_block, target, tmode, modifier);
8673 preserve_temp_slots (op0);
8674 expand_end_bindings (NULL_TREE, 0, 0);
8675 emit_jump (done_label);
8676 emit_label (finally_label);
8677 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8678 emit_indirect_jump (return_link);
8679 emit_label (done_label);
8680 return op0;
8683 case GOTO_SUBROUTINE_EXPR:
8685 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8686 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8687 rtx return_address = gen_label_rtx ();
8688 emit_move_insn (return_link,
8689 gen_rtx_LABEL_REF (Pmode, return_address));
8690 emit_jump (subr);
8691 emit_label (return_address);
8692 return const0_rtx;
8695 case POPDCC_EXPR:
8697 rtx dcc = get_dynamic_cleanup_chain ();
8698 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8699 return const0_rtx;
8702 case POPDHC_EXPR:
8704 rtx dhc = get_dynamic_handler_chain ();
8705 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8706 return const0_rtx;
8709 case VA_ARG_EXPR:
8710 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8712 default:
8713 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8716 /* Here to do an ordinary binary operator, generating an instruction
8717 from the optab already placed in `this_optab'. */
8718 binop:
8719 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8720 subtarget = 0;
8721 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8722 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8723 binop2:
8724 temp = expand_binop (mode, this_optab, op0, op1, target,
8725 unsignedp, OPTAB_LIB_WIDEN);
8726 if (temp == 0)
8727 abort ();
8728 return temp;
8731 /* Similar to expand_expr, except that we don't specify a target, target
8732 mode, or modifier and we return the alignment of the inner type. This is
8733 used in cases where it is not necessary to align the result to the
8734 alignment of its type as long as we know the alignment of the result, for
8735 example for comparisons of BLKmode values. */
8737 static rtx
8738 expand_expr_unaligned (exp, palign)
8739 register tree exp;
8740 unsigned int *palign;
8742 register rtx op0;
8743 tree type = TREE_TYPE (exp);
8744 register enum machine_mode mode = TYPE_MODE (type);
8746 /* Default the alignment we return to that of the type. */
8747 *palign = TYPE_ALIGN (type);
8749 /* The only case in which we do anything special is if the resulting mode
8750 is BLKmode. */
8751 if (mode != BLKmode)
8752 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8754 switch (TREE_CODE (exp))
8756 case CONVERT_EXPR:
8757 case NOP_EXPR:
8758 case NON_LVALUE_EXPR:
8759 /* Conversions between BLKmode values don't change the underlying
8760 alignment or value. */
8761 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8762 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8763 break;
8765 case ARRAY_REF:
8766 /* Much of the code for this case is copied directly from expand_expr.
8767 We need to duplicate it here because we will do something different
8768 in the fall-through case, so we need to handle the same exceptions
8769 it does. */
8771 tree array = TREE_OPERAND (exp, 0);
8772 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8773 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8774 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8775 HOST_WIDE_INT i;
8777 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8778 abort ();
8780 /* Optimize the special-case of a zero lower bound.
8782 We convert the low_bound to sizetype to avoid some problems
8783 with constant folding. (E.g. suppose the lower bound is 1,
8784 and its mode is QI. Without the conversion, (ARRAY
8785 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8786 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8788 if (! integer_zerop (low_bound))
8789 index = size_diffop (index, convert (sizetype, low_bound));
8791 /* If this is a constant index into a constant array,
8792 just get the value from the array. Handle both the cases when
8793 we have an explicit constructor and when our operand is a variable
8794 that was declared const. */
8796 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8797 && host_integerp (index, 0)
8798 && 0 > compare_tree_int (index,
8799 list_length (CONSTRUCTOR_ELTS
8800 (TREE_OPERAND (exp, 0)))))
8802 tree elem;
8804 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8805 i = tree_low_cst (index, 0);
8806 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8809 if (elem)
8810 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8813 else if (optimize >= 1
8814 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8815 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8816 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8818 if (TREE_CODE (index) == INTEGER_CST)
8820 tree init = DECL_INITIAL (array);
8822 if (TREE_CODE (init) == CONSTRUCTOR)
8824 tree elem;
8826 for (elem = CONSTRUCTOR_ELTS (init);
8827 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8828 elem = TREE_CHAIN (elem))
8831 if (elem)
8832 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8833 palign);
8838 /* Fall through. */
8840 case COMPONENT_REF:
8841 case BIT_FIELD_REF:
8842 /* If the operand is a CONSTRUCTOR, we can just extract the
8843 appropriate field if it is present. Don't do this if we have
8844 already written the data since we want to refer to that copy
8845 and varasm.c assumes that's what we'll do. */
8846 if (TREE_CODE (exp) != ARRAY_REF
8847 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8848 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8850 tree elt;
8852 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8853 elt = TREE_CHAIN (elt))
8854 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8855 /* Note that unlike the case in expand_expr, we know this is
8856 BLKmode and hence not an integer. */
8857 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8861 enum machine_mode mode1;
8862 HOST_WIDE_INT bitsize, bitpos;
8863 tree offset;
8864 int volatilep = 0;
8865 unsigned int alignment;
8866 int unsignedp;
8867 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8868 &mode1, &unsignedp, &volatilep,
8869 &alignment);
8871 /* If we got back the original object, something is wrong. Perhaps
8872 we are evaluating an expression too early. In any event, don't
8873 infinitely recurse. */
8874 if (tem == exp)
8875 abort ();
8877 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8879 /* If this is a constant, put it into a register if it is a
8880 legitimate constant and OFFSET is 0, and into memory if it isn't. */
8881 if (CONSTANT_P (op0))
8883 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8885 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8886 && offset == 0)
8887 op0 = force_reg (inner_mode, op0);
8888 else
8889 op0 = validize_mem (force_const_mem (inner_mode, op0));
8892 if (offset != 0)
8894 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8896 /* If this object is in a register, put it into memory.
8897 This case can't occur in C, but can in Ada if we have
8898 unchecked conversion of an expression from a scalar type to
8899 an array or record type. */
8900 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8901 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8903 tree nt = build_qualified_type (TREE_TYPE (tem),
8904 (TYPE_QUALS (TREE_TYPE (tem))
8905 | TYPE_QUAL_CONST));
8906 rtx memloc = assign_temp (nt, 1, 1, 1);
8908 mark_temp_addr_taken (memloc);
8909 emit_move_insn (memloc, op0);
8910 op0 = memloc;
8913 if (GET_CODE (op0) != MEM)
8914 abort ();
8916 if (GET_MODE (offset_rtx) != ptr_mode)
8918 #ifdef POINTERS_EXTEND_UNSIGNED
8919 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8920 #else
8921 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8922 #endif
8925 op0 = change_address (op0, VOIDmode,
8926 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8927 force_reg (ptr_mode,
8928 offset_rtx)));
8931 /* Don't forget about volatility even if this is a bitfield. */
8932 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8934 op0 = copy_rtx (op0);
8935 MEM_VOLATILE_P (op0) = 1;
8938 /* Check the access. */
8939 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8941 rtx to;
8942 int size;
8944 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8945 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8947 /* Check the access right of the pointer. */
8948 in_check_memory_usage = 1;
8949 if (size > BITS_PER_UNIT)
8950 emit_library_call (chkr_check_addr_libfunc,
8951 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
8952 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8953 TYPE_MODE (sizetype),
8954 GEN_INT (MEMORY_USE_RO),
8955 TYPE_MODE (integer_type_node));
8956 in_check_memory_usage = 0;
8959 /* In cases where an aligned union has an unaligned object
8960 as a field, we might be extracting a BLKmode value from
8961 an integer-mode (e.g., SImode) object. Handle this case
8962 by doing the extract into an object as wide as the field
8963 (which we know to be the width of a basic mode), then
8964 storing into memory, and changing the mode to BLKmode.
8965 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8966 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8967 if (mode1 == VOIDmode
8968 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8969 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8970 && (TYPE_ALIGN (type) > alignment
8971 || bitpos % TYPE_ALIGN (type) != 0)))
8973 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8975 if (ext_mode == BLKmode)
8977 /* In this case, BITPOS must start at a byte boundary. */
8978 if (GET_CODE (op0) != MEM
8979 || bitpos % BITS_PER_UNIT != 0)
8980 abort ();
8982 op0 = change_address (op0, VOIDmode,
8983 plus_constant (XEXP (op0, 0),
8984 bitpos / BITS_PER_UNIT));
8986 else
8988 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
8989 TYPE_QUAL_CONST);
8990 rtx new = assign_temp (nt, 0, 1, 1);
8992 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8993 unsignedp, NULL_RTX, ext_mode,
8994 ext_mode, alignment,
8995 int_size_in_bytes (TREE_TYPE (tem)));
8997 /* If the result is a record type and BITSIZE is narrower than
8998 the mode of OP0, an integral mode, and this is a big endian
8999 machine, we must put the field into the high-order bits. */
9000 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9001 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9002 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
9003 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9004 size_int (GET_MODE_BITSIZE
9005 (GET_MODE (op0))
9006 - bitsize),
9007 op0, 1);
9009 emit_move_insn (new, op0);
9010 op0 = copy_rtx (new);
9011 PUT_MODE (op0, BLKmode);
9014 else
9015 /* Get a reference to just this component. */
9016 op0 = change_address (op0, mode1,
9017 plus_constant (XEXP (op0, 0),
9018 (bitpos / BITS_PER_UNIT)));
9020 MEM_ALIAS_SET (op0) = get_alias_set (exp);
9022 /* Adjust the alignment in case the bit position is not
9023 a multiple of the alignment of the inner object. */
9024 while (bitpos % alignment != 0)
9025 alignment >>= 1;
9027 if (GET_CODE (XEXP (op0, 0)) == REG)
9028 mark_reg_pointer (XEXP (op0, 0), alignment);
9030 MEM_IN_STRUCT_P (op0) = 1;
9031 MEM_VOLATILE_P (op0) |= volatilep;
9033 *palign = alignment;
9034 return op0;
9037 default:
9038 break;
9042 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
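/* Illustrative sketch only (not compiler code): the extraction handled
   above for an unaligned field, written in plain C, together with the
   extra left shift that moves a narrow record field into the high-order
   bits of the extraction mode on a big-endian machine.  The function
   name and parameters are made up; it assumes a 32-bit extraction mode
   and 0 < BITSIZE < 32.  */
#if 0
static unsigned int
sketch_extract_field (word, bitpos, bitsize, bytes_big_endian)
     unsigned int word;
     int bitpos, bitsize, bytes_big_endian;
{
  /* Shift the field down to bit 0 and mask off everything above it.  */
  unsigned int field
    = (word >> bitpos) & (((unsigned int) 1 << bitsize) - 1);

  /* A record narrower than the extraction mode must end up in the
     high-order bits before being stored back to memory as BLKmode.  */
  if (bytes_big_endian)
    field <<= 32 - bitsize;

  return field;
}
#endif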
9045 /* Return the tree node if ARG corresponds to a string constant, or zero
9046 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9047 in bytes within the string that ARG is accessing. The type of the
9048 offset will be `sizetype'. */
9050 tree
9051 string_constant (arg, ptr_offset)
9052 tree arg;
9053 tree *ptr_offset;
9055 STRIP_NOPS (arg);
9057 if (TREE_CODE (arg) == ADDR_EXPR
9058 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9060 *ptr_offset = size_zero_node;
9061 return TREE_OPERAND (arg, 0);
9063 else if (TREE_CODE (arg) == PLUS_EXPR)
9065 tree arg0 = TREE_OPERAND (arg, 0);
9066 tree arg1 = TREE_OPERAND (arg, 1);
9068 STRIP_NOPS (arg0);
9069 STRIP_NOPS (arg1);
9071 if (TREE_CODE (arg0) == ADDR_EXPR
9072 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9074 *ptr_offset = convert (sizetype, arg1);
9075 return TREE_OPERAND (arg0, 0);
9077 else if (TREE_CODE (arg1) == ADDR_EXPR
9078 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9080 *ptr_offset = convert (sizetype, arg0);
9081 return TREE_OPERAND (arg1, 0);
9085 return 0;
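/* Illustrative sketch only: the two address forms the function above
   recognizes, written as C initializers.  For P1 the tree is an
   ADDR_EXPR of a STRING_CST, so the offset returned is zero; for P2 the
   front end builds a PLUS_EXPR of that ADDR_EXPR and the constant 3,
   which the PLUS_EXPR arm matches with the operands in either order and
   returns with *PTR_OFFSET set to 3.  */
#if 0
static const char *p1 = "abcdef";       /* offset 0 */
static const char *p2 = "abcdef" + 3;   /* offset 3 */
#endif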
9088 /* Expand code for a post- or pre-increment or decrement
9089 and return the RTX for the result.
9090 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9092 static rtx
9093 expand_increment (exp, post, ignore)
9094 register tree exp;
9095 int post, ignore;
9097 register rtx op0, op1;
9098 register rtx temp, value;
9099 register tree incremented = TREE_OPERAND (exp, 0);
9100 optab this_optab = add_optab;
9101 int icode;
9102 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9103 int op0_is_copy = 0;
9104 int single_insn = 0;
9105 /* 1 means we can't store into OP0 directly,
9106 because it is a subreg narrower than a word,
9107 and we don't dare clobber the rest of the word. */
9108 int bad_subreg = 0;
9110 /* Stabilize any component ref that might need to be
9111 evaluated more than once below. */
9112 if (!post
9113 || TREE_CODE (incremented) == BIT_FIELD_REF
9114 || (TREE_CODE (incremented) == COMPONENT_REF
9115 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9116 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9117 incremented = stabilize_reference (incremented);
9118 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9119 ones into save exprs so that they don't accidentally get evaluated
9120 more than once by the code below. */
9121 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9122 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9123 incremented = save_expr (incremented);
9125 /* Compute the operands as RTX.
9126 Note whether OP0 is the actual lvalue or a copy of it:
9127 I believe it is a copy iff it is a register or subreg
9128 and insns were generated in computing it. */
9130 temp = get_last_insn ();
9131 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9133 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9134 in place but instead must do sign- or zero-extension during assignment,
9135 so we copy it into a new register and let the code below use it as
9136 a copy.
9138 Note that we can safely modify this SUBREG since it is known not to be
9139 shared (it was made by the expand_expr call above). */
9141 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9143 if (post)
9144 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9145 else
9146 bad_subreg = 1;
9148 else if (GET_CODE (op0) == SUBREG
9149 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9151 /* We cannot increment this SUBREG in place. If we are
9152 post-incrementing, get a copy of the old value. Otherwise,
9153 just mark that we cannot increment in place. */
9154 if (post)
9155 op0 = copy_to_reg (op0);
9156 else
9157 bad_subreg = 1;
9160 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9161 && temp != get_last_insn ());
9162 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9163 EXPAND_MEMORY_USE_BAD);
9165 /* Decide whether incrementing or decrementing. */
9166 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9167 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9168 this_optab = sub_optab;
9170 /* Convert decrement by a constant into a negative increment. */
9171 if (this_optab == sub_optab
9172 && GET_CODE (op1) == CONST_INT)
9174 op1 = GEN_INT (-INTVAL (op1));
9175 this_optab = add_optab;
9178 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9179 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9181 /* For a preincrement, see if we can do this with a single instruction. */
9182 if (!post)
9184 icode = (int) this_optab->handlers[(int) mode].insn_code;
9185 if (icode != (int) CODE_FOR_nothing
9186 /* Make sure that OP0 is valid for operands 0 and 1
9187 of the insn we want to queue. */
9188 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9189 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9190 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9191 single_insn = 1;
9194 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9195 then we cannot just increment OP0. We must therefore contrive to
9196 increment the original value. Then, for postincrement, we can return
9197 OP0 since it is a copy of the old value. For preincrement, expand here
9198 unless we can do it with a single insn.
9200 Likewise if storing directly into OP0 would clobber high bits
9201 we need to preserve (bad_subreg). */
9202 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9204 /* This is the easiest way to increment the value wherever it is.
9205 Problems with multiple evaluation of INCREMENTED are prevented
9206 because either (1) it is a component_ref or preincrement,
9207 in which case it was stabilized above, or (2) it is an array_ref
9208 with constant index in an array in a register, which is
9209 safe to reevaluate. */
9210 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9211 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9212 ? MINUS_EXPR : PLUS_EXPR),
9213 TREE_TYPE (exp),
9214 incremented,
9215 TREE_OPERAND (exp, 1));
9217 while (TREE_CODE (incremented) == NOP_EXPR
9218 || TREE_CODE (incremented) == CONVERT_EXPR)
9220 newexp = convert (TREE_TYPE (incremented), newexp);
9221 incremented = TREE_OPERAND (incremented, 0);
9224 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9225 return post ? op0 : temp;
9228 if (post)
9230 /* We have a true reference to the value in OP0.
9231 If there is an insn to add or subtract in this mode, queue it.
9232 Queueing the increment insn avoids the register shuffling
9233 that often results if we must increment now and first save
9234 the old value for subsequent use. */
9236 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9237 op0 = stabilize (op0);
9238 #endif
9240 icode = (int) this_optab->handlers[(int) mode].insn_code;
9241 if (icode != (int) CODE_FOR_nothing
9242 /* Make sure that OP0 is valid for operands 0 and 1
9243 of the insn we want to queue. */
9244 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9245 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9247 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9248 op1 = force_reg (mode, op1);
9250 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9252 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9254 rtx addr = (general_operand (XEXP (op0, 0), mode)
9255 ? force_reg (Pmode, XEXP (op0, 0))
9256 : copy_to_reg (XEXP (op0, 0)));
9257 rtx temp, result;
9259 op0 = change_address (op0, VOIDmode, addr);
9260 temp = force_reg (GET_MODE (op0), op0);
9261 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9262 op1 = force_reg (mode, op1);
9264 /* The increment queue is LIFO, thus we have to `queue'
9265 the instructions in reverse order. */
9266 enqueue_insn (op0, gen_move_insn (op0, temp));
9267 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9268 return result;
9272 /* Preincrement, or we can't increment with one simple insn. */
9273 if (post)
9274 /* Save a copy of the value before inc or dec, to return it later. */
9275 temp = value = copy_to_reg (op0);
9276 else
9277 /* Arrange to return the incremented value. */
9278 /* Copy the rtx because expand_binop will protect from the queue,
9279 and the results of that would be invalid for us to return
9280 if our caller does emit_queue before using our result. */
9281 temp = copy_rtx (value = op0);
9283 /* Increment however we can. */
9284 op1 = expand_binop (mode, this_optab, value, op1,
9285 current_function_check_memory_usage ? NULL_RTX : op0,
9286 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9287 /* Make sure the value is stored into OP0. */
9288 if (op1 != op0)
9289 emit_move_insn (op0, op1);
9291 return temp;
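/* Illustrative sketch only: the source-level semantics the code above
   implements, in plain C.  The function names are made up.  */
#if 0
static int
sketch_postinc (x)
     int *x;
{
  int old = *x;                 /* save a copy of the old value */
  *x = *x + 1;                  /* store the incremented value */
  return old;                   /* postincrement yields the old value */
}

static int
sketch_preinc (x)
     int *x;
{
  *x = *x + 1;                  /* store the incremented value */
  return *x;                    /* preincrement yields the new value */
}
#endif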
9294 /* At the start of a function, record that we have no previously-pushed
9295 arguments waiting to be popped. */
9297 void
9298 init_pending_stack_adjust ()
9300 pending_stack_adjust = 0;
9303 /* When exiting from a function, if safe, clear out any pending stack adjust
9304 so the adjustment won't get done.
9306 Note, if the current function calls alloca, then it must have a
9307 frame pointer regardless of the value of flag_omit_frame_pointer. */
9309 void
9310 clear_pending_stack_adjust ()
9312 #ifdef EXIT_IGNORE_STACK
9313 if (optimize > 0
9314 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9315 && EXIT_IGNORE_STACK
9316 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9317 && ! flag_inline_functions)
9319 stack_pointer_delta -= pending_stack_adjust,
9320 pending_stack_adjust = 0;
9322 #endif
9325 /* Pop any previously-pushed arguments that have not been popped yet. */
9327 void
9328 do_pending_stack_adjust ()
9330 if (inhibit_defer_pop == 0)
9332 if (pending_stack_adjust != 0)
9333 adjust_stack (GEN_INT (pending_stack_adjust));
9334 pending_stack_adjust = 0;
9338 /* Expand conditional expressions. */
9340 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9341 LABEL is an rtx of code CODE_LABEL, in this function and all the
9342 functions here. */
9344 void
9345 jumpifnot (exp, label)
9346 tree exp;
9347 rtx label;
9349 do_jump (exp, label, NULL_RTX);
9352 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9354 void
9355 jumpif (exp, label)
9356 tree exp;
9357 rtx label;
9359 do_jump (exp, NULL_RTX, label);
9362 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9363 the result is zero, or IF_TRUE_LABEL if the result is one.
9364 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9365 meaning fall through in that case.
9367 do_jump always does any pending stack adjust except when it does not
9368 actually perform a jump. An example where there is no jump
9369 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9371 This function is responsible for optimizing cases such as
9372 &&, || and comparison operators in EXP. */
9374 void
9375 do_jump (exp, if_false_label, if_true_label)
9376 tree exp;
9377 rtx if_false_label, if_true_label;
9379 register enum tree_code code = TREE_CODE (exp);
9380 /* Some cases need to create a label to jump to
9381 in order to properly fall through.
9382 These cases set DROP_THROUGH_LABEL nonzero. */
9383 rtx drop_through_label = 0;
9384 rtx temp;
9385 int i;
9386 tree type;
9387 enum machine_mode mode;
9389 #ifdef MAX_INTEGER_COMPUTATION_MODE
9390 check_max_integer_computation_mode (exp);
9391 #endif
9393 emit_queue ();
9395 switch (code)
9397 case ERROR_MARK:
9398 break;
9400 case INTEGER_CST:
9401 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9402 if (temp)
9403 emit_jump (temp);
9404 break;
9406 #if 0
9407 /* This is not true with #pragma weak */
9408 case ADDR_EXPR:
9409 /* The address of something can never be zero. */
9410 if (if_true_label)
9411 emit_jump (if_true_label);
9412 break;
9413 #endif
9415 case NOP_EXPR:
9416 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9417 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9418 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9419 goto normal;
9420 case CONVERT_EXPR:
9421 /* If we are narrowing the operand, we have to do the compare in the
9422 narrower mode. */
9423 if ((TYPE_PRECISION (TREE_TYPE (exp))
9424 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9425 goto normal;
9426 case NON_LVALUE_EXPR:
9427 case REFERENCE_EXPR:
9428 case ABS_EXPR:
9429 case NEGATE_EXPR:
9430 case LROTATE_EXPR:
9431 case RROTATE_EXPR:
9432 /* These cannot change zero->non-zero or vice versa. */
9433 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9434 break;
9436 case WITH_RECORD_EXPR:
9437 /* Put the object on the placeholder list, recurse through our first
9438 operand, and pop the list. */
9439 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9440 placeholder_list);
9441 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9442 placeholder_list = TREE_CHAIN (placeholder_list);
9443 break;
9445 #if 0
9446 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9447 a test, and can be longer if the test is eliminated. */
9448 case PLUS_EXPR:
9449 /* Reduce to minus. */
9450 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9451 TREE_OPERAND (exp, 0),
9452 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9453 TREE_OPERAND (exp, 1))));
9454 /* Process as MINUS. */
9455 #endif
9457 case MINUS_EXPR:
9458 /* Non-zero iff operands of minus differ. */
9459 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9460 TREE_OPERAND (exp, 0),
9461 TREE_OPERAND (exp, 1)),
9462 NE, NE, if_false_label, if_true_label);
9463 break;
9465 case BIT_AND_EXPR:
9466 /* If we are AND'ing with a small constant, do this comparison in the
9467 smallest type that fits. If the machine doesn't have comparisons
9468 that small, it will be converted back to the wider comparison.
9469 This helps if we are testing the sign bit of a narrower object.
9470 combine can't do this for us because it can't know whether a
9471 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9473 if (! SLOW_BYTE_ACCESS
9474 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9475 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9476 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9477 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9478 && (type = type_for_mode (mode, 1)) != 0
9479 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9480 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9481 != CODE_FOR_nothing))
9483 do_jump (convert (type, exp), if_false_label, if_true_label);
9484 break;
9486 goto normal;
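#if 0
      /* Illustrative sketch only (inert): the narrowing this case
         performs, in source terms, using a made-up operand X.  */
      {
        int x = 0x9c;                                   /* made-up value */
        int wide = (x & 0x80) != 0;                     /* SImode test */
        int narrow = ((unsigned char) x & 0x80) != 0;   /* QImode test */
        /* WIDE and NARROW agree for every X, so the sign bit of the
           narrower object can be tested in the smallest mode that holds
           the mask.  */
      }
#endif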
9488 case TRUTH_NOT_EXPR:
9489 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9490 break;
9492 case TRUTH_ANDIF_EXPR:
9493 if (if_false_label == 0)
9494 if_false_label = drop_through_label = gen_label_rtx ();
9495 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9496 start_cleanup_deferral ();
9497 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9498 end_cleanup_deferral ();
9499 break;
9501 case TRUTH_ORIF_EXPR:
9502 if (if_true_label == 0)
9503 if_true_label = drop_through_label = gen_label_rtx ();
9504 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9505 start_cleanup_deferral ();
9506 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9507 end_cleanup_deferral ();
9508 break;
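#if 0
      /* Illustrative sketch only (inert): the jump chains the two cases
         above produce for short-circuit operators, with made-up labels.
         For `a && b' both tests share the false label; for `a || b' they
         dually share the true label, and the second operand is never
         evaluated once the first one decides the result.  */
      {
        int a = 1, b = 0, result;

        if (a == 0)
          goto sketch_false;
        if (b == 0)
          goto sketch_false;
        result = 1;                     /* a && b is true */
        goto sketch_done;
      sketch_false:
        result = 0;                     /* a && b is false */
      sketch_done:
        ;
      }
#endif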
9510 case COMPOUND_EXPR:
9511 push_temp_slots ();
9512 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9513 preserve_temp_slots (NULL_RTX);
9514 free_temp_slots ();
9515 pop_temp_slots ();
9516 emit_queue ();
9517 do_pending_stack_adjust ();
9518 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9519 break;
9521 case COMPONENT_REF:
9522 case BIT_FIELD_REF:
9523 case ARRAY_REF:
9525 HOST_WIDE_INT bitsize, bitpos;
9526 int unsignedp;
9527 enum machine_mode mode;
9528 tree type;
9529 tree offset;
9530 int volatilep = 0;
9531 unsigned int alignment;
9533 /* Get description of this reference. We don't actually care
9534 about the underlying object here. */
9535 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9536 &unsignedp, &volatilep, &alignment);
9538 type = type_for_size (bitsize, unsignedp);
9539 if (! SLOW_BYTE_ACCESS
9540 && type != 0 && bitsize >= 0
9541 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9542 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9543 != CODE_FOR_nothing))
9545 do_jump (convert (type, exp), if_false_label, if_true_label);
9546 break;
9548 goto normal;
9551 case COND_EXPR:
9552 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9553 if (integer_onep (TREE_OPERAND (exp, 1))
9554 && integer_zerop (TREE_OPERAND (exp, 2)))
9555 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9557 else if (integer_zerop (TREE_OPERAND (exp, 1))
9558 && integer_onep (TREE_OPERAND (exp, 2)))
9559 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9561 else
9563 register rtx label1 = gen_label_rtx ();
9564 drop_through_label = gen_label_rtx ();
9566 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9568 start_cleanup_deferral ();
9569 /* Now the THEN-expression. */
9570 do_jump (TREE_OPERAND (exp, 1),
9571 if_false_label ? if_false_label : drop_through_label,
9572 if_true_label ? if_true_label : drop_through_label);
9573 /* In case the do_jump just above never jumps. */
9574 do_pending_stack_adjust ();
9575 emit_label (label1);
9577 /* Now the ELSE-expression. */
9578 do_jump (TREE_OPERAND (exp, 2),
9579 if_false_label ? if_false_label : drop_through_label,
9580 if_true_label ? if_true_label : drop_through_label);
9581 end_cleanup_deferral ();
9583 break;
9585 case EQ_EXPR:
9587 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9589 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9590 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9592 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9593 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9594 do_jump
9595 (fold
9596 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9597 fold (build (EQ_EXPR, TREE_TYPE (exp),
9598 fold (build1 (REALPART_EXPR,
9599 TREE_TYPE (inner_type),
9600 exp0)),
9601 fold (build1 (REALPART_EXPR,
9602 TREE_TYPE (inner_type),
9603 exp1)))),
9604 fold (build (EQ_EXPR, TREE_TYPE (exp),
9605 fold (build1 (IMAGPART_EXPR,
9606 TREE_TYPE (inner_type),
9607 exp0)),
9608 fold (build1 (IMAGPART_EXPR,
9609 TREE_TYPE (inner_type),
9610 exp1)))))),
9611 if_false_label, if_true_label);
9614 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9615 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9617 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9618 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9619 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9620 else
9621 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9622 break;
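#if 0
      /* Illustrative sketch only (inert): complex equality is rewritten
         above as a conjunction of part-wise comparisons; NE_EXPR below
         uses the dual disjunction.  The struct stands in for a complex
         value.  */
      {
        struct { double re, im; } a = { 1.0, 2.0 }, b = { 1.0, 2.0 };
        int eq = a.re == b.re && a.im == b.im;          /* a == b */
        int ne = a.re != b.re || a.im != b.im;          /* a != b */
      }
#endif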
9625 case NE_EXPR:
9627 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9629 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9630 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9632 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9633 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9634 do_jump
9635 (fold
9636 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9637 fold (build (NE_EXPR, TREE_TYPE (exp),
9638 fold (build1 (REALPART_EXPR,
9639 TREE_TYPE (inner_type),
9640 exp0)),
9641 fold (build1 (REALPART_EXPR,
9642 TREE_TYPE (inner_type),
9643 exp1)))),
9644 fold (build (NE_EXPR, TREE_TYPE (exp),
9645 fold (build1 (IMAGPART_EXPR,
9646 TREE_TYPE (inner_type),
9647 exp0)),
9648 fold (build1 (IMAGPART_EXPR,
9649 TREE_TYPE (inner_type),
9650 exp1)))))),
9651 if_false_label, if_true_label);
9654 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9655 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9657 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9658 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9659 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9660 else
9661 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9662 break;
9665 case LT_EXPR:
9666 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9667 if (GET_MODE_CLASS (mode) == MODE_INT
9668 && ! can_compare_p (LT, mode, ccp_jump))
9669 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9670 else
9671 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9672 break;
9674 case LE_EXPR:
9675 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9676 if (GET_MODE_CLASS (mode) == MODE_INT
9677 && ! can_compare_p (LE, mode, ccp_jump))
9678 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9679 else
9680 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9681 break;
9683 case GT_EXPR:
9684 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9685 if (GET_MODE_CLASS (mode) == MODE_INT
9686 && ! can_compare_p (GT, mode, ccp_jump))
9687 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9688 else
9689 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9690 break;
9692 case GE_EXPR:
9693 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9694 if (GET_MODE_CLASS (mode) == MODE_INT
9695 && ! can_compare_p (GE, mode, ccp_jump))
9696 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9697 else
9698 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9699 break;
9701 case UNORDERED_EXPR:
9702 case ORDERED_EXPR:
9704 enum rtx_code cmp, rcmp;
9705 int do_rev;
9707 if (code == UNORDERED_EXPR)
9708 cmp = UNORDERED, rcmp = ORDERED;
9709 else
9710 cmp = ORDERED, rcmp = UNORDERED;
9711 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9713 do_rev = 0;
9714 if (! can_compare_p (cmp, mode, ccp_jump)
9715 && (can_compare_p (rcmp, mode, ccp_jump)
9716 /* If the target doesn't provide either UNORDERED or ORDERED
9717 comparisons, canonicalize on UNORDERED for the library. */
9718 || rcmp == UNORDERED))
9719 do_rev = 1;
9721 if (! do_rev)
9722 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9723 else
9724 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9726 break;
9729 enum rtx_code rcode1;
9730 enum tree_code tcode2;
9732 case UNLT_EXPR:
9733 rcode1 = UNLT;
9734 tcode2 = LT_EXPR;
9735 goto unordered_bcc;
9736 case UNLE_EXPR:
9737 rcode1 = UNLE;
9738 tcode2 = LE_EXPR;
9739 goto unordered_bcc;
9740 case UNGT_EXPR:
9741 rcode1 = UNGT;
9742 tcode2 = GT_EXPR;
9743 goto unordered_bcc;
9744 case UNGE_EXPR:
9745 rcode1 = UNGE;
9746 tcode2 = GE_EXPR;
9747 goto unordered_bcc;
9748 case UNEQ_EXPR:
9749 rcode1 = UNEQ;
9750 tcode2 = EQ_EXPR;
9751 goto unordered_bcc;
9753 unordered_bcc:
9754 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9755 if (can_compare_p (rcode1, mode, ccp_jump))
9756 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9757 if_true_label);
9758 else
9760 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9761 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9762 tree cmp0, cmp1;
9764 /* If the target doesn't support combined unordered
9765 compares, decompose into UNORDERED + comparison. */
9766 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9767 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9768 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9769 do_jump (exp, if_false_label, if_true_label);
9772 break;
9774 default:
9775 normal:
9776 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9777 #if 0
9778 /* This is not needed any more and causes poor code since it causes
9779 comparisons and tests from non-SI objects to have different code
9780 sequences. */
9781 /* Copy to register to avoid generating bad insns by cse
9782 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9783 if (!cse_not_expected && GET_CODE (temp) == MEM)
9784 temp = copy_to_reg (temp);
9785 #endif
9786 do_pending_stack_adjust ();
9787 /* Do any postincrements in the expression that was tested. */
9788 emit_queue ();
9790 if (GET_CODE (temp) == CONST_INT
9791 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9792 || GET_CODE (temp) == LABEL_REF)
9794 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9795 if (target)
9796 emit_jump (target);
9798 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9799 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9800 /* Note swapping the labels gives us not-equal. */
9801 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9802 else if (GET_MODE (temp) != VOIDmode)
9803 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9804 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9805 GET_MODE (temp), NULL_RTX, 0,
9806 if_false_label, if_true_label);
9807 else
9808 abort ();
9811 if (drop_through_label)
9813 /* If do_jump produces code that might be jumped around,
9814 do any stack adjusts from that code, before the place
9815 where control merges in. */
9816 do_pending_stack_adjust ();
9817 emit_label (drop_through_label);
9821 /* Given a comparison expression EXP for values too wide to be compared
9822 with one insn, test the comparison and jump to the appropriate label.
9823 The code of EXP is ignored; we always test GT if SWAP is 0,
9824 and LT if SWAP is 1. */
9826 static void
9827 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9828 tree exp;
9829 int swap;
9830 rtx if_false_label, if_true_label;
9832 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9833 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9834 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9835 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9837 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9840 /* Compare OP0 with OP1, word at a time, in mode MODE.
9841 UNSIGNEDP says to do unsigned comparison.
9842 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9844 void
9845 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9846 enum machine_mode mode;
9847 int unsignedp;
9848 rtx op0, op1;
9849 rtx if_false_label, if_true_label;
9851 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9852 rtx drop_through_label = 0;
9853 int i;
9855 if (! if_true_label || ! if_false_label)
9856 drop_through_label = gen_label_rtx ();
9857 if (! if_true_label)
9858 if_true_label = drop_through_label;
9859 if (! if_false_label)
9860 if_false_label = drop_through_label;
9862 /* Compare a word at a time, high order first. */
9863 for (i = 0; i < nwords; i++)
9865 rtx op0_word, op1_word;
9867 if (WORDS_BIG_ENDIAN)
9869 op0_word = operand_subword_force (op0, i, mode);
9870 op1_word = operand_subword_force (op1, i, mode);
9872 else
9874 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9875 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9878 /* All but the high-order word must be compared as unsigned. */
9879 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9880 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9881 NULL_RTX, if_true_label);
9883 /* Consider lower words only if these are equal. */
9884 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9885 NULL_RTX, 0, NULL_RTX, if_false_label);
9888 if (if_false_label)
9889 emit_jump (if_false_label);
9890 if (drop_through_label)
9891 emit_label (drop_through_label);
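/* Illustrative sketch only: the word-at-a-time comparison above, written
   in plain C for two arrays of NWORDS words with the high-order word at
   the highest index.  It shows the unsigned (GTU) case; the real code
   compares the high-order word signed when UNSIGNEDP is zero.  The
   function name is made up.  */
#if 0
static int
sketch_multiword_gtu (op0, op1, nwords)
     unsigned long *op0, *op1;
     int nwords;
{
  int i;

  /* Compare a word at a time, high order first.  */
  for (i = nwords - 1; i >= 0; i--)
    {
      if (op0[i] > op1[i])
        return 1;               /* greater: the if_true_label path */
      if (op0[i] != op1[i])
        return 0;               /* less: the if_false_label path */
      /* Equal so far: consider the lower words.  */
    }

  return 0;                     /* all words equal, hence not greater */
}
#endif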
9894 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9895 with one insn, test the comparison and jump to the appropriate label. */
9897 static void
9898 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9899 tree exp;
9900 rtx if_false_label, if_true_label;
9902 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9903 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9904 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9905 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9906 int i;
9907 rtx drop_through_label = 0;
9909 if (! if_false_label)
9910 drop_through_label = if_false_label = gen_label_rtx ();
9912 for (i = 0; i < nwords; i++)
9913 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9914 operand_subword_force (op1, i, mode),
9915 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9916 word_mode, NULL_RTX, 0, if_false_label,
9917 NULL_RTX);
9919 if (if_true_label)
9920 emit_jump (if_true_label);
9921 if (drop_through_label)
9922 emit_label (drop_through_label);
9925 /* Jump according to whether OP0 is 0.
9926 We assume that OP0 has an integer mode that is too wide
9927 for the available compare insns. */
9929 void
9930 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9931 rtx op0;
9932 rtx if_false_label, if_true_label;
9934 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9935 rtx part;
9936 int i;
9937 rtx drop_through_label = 0;
9939 /* The fastest way of doing this comparison on almost any machine is to
9940 "or" all the words and compare the result. If all have to be loaded
9941 from memory and this is a very wide item, it's possible this may
9942 be slower, but that's highly unlikely. */
9944 part = gen_reg_rtx (word_mode);
9945 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9946 for (i = 1; i < nwords && part != 0; i++)
9947 part = expand_binop (word_mode, ior_optab, part,
9948 operand_subword_force (op0, i, GET_MODE (op0)),
9949 part, 1, OPTAB_WIDEN);
9951 if (part != 0)
9953 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9954 NULL_RTX, 0, if_false_label, if_true_label);
9956 return;
9959 /* If we couldn't do the "or" simply, do this with a series of compares. */
9960 if (! if_false_label)
9961 drop_through_label = if_false_label = gen_label_rtx ();
9963 for (i = 0; i < nwords; i++)
9964 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9965 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9966 if_false_label, NULL_RTX);
9968 if (if_true_label)
9969 emit_jump (if_true_label);
9971 if (drop_through_label)
9972 emit_label (drop_through_label);
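/* Illustrative sketch only: the "or all the words and compare the
   result" test above, in plain C for an array of NWORDS words.  The
   function name is made up.  */
#if 0
static int
sketch_multiword_is_zero (op0, nwords)
     unsigned long *op0;
     int nwords;
{
  unsigned long part = op0[0];
  int i;

  /* IOR the words together; the result is zero iff every word is.  */
  for (i = 1; i < nwords; i++)
    part |= op0[i];

  return part == 0;
}
#endif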
9975 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
9976 (including code to compute the values to be compared)
9977 and set (CC0) according to the result.
9978 The decision as to signed or unsigned comparison must be made by the caller.
9980 We force a stack adjustment unless there are currently
9981 things pushed on the stack that aren't yet used.
9983 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9984 compared.
9986 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9987 size of MODE should be used. */
9990 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9991 register rtx op0, op1;
9992 enum rtx_code code;
9993 int unsignedp;
9994 enum machine_mode mode;
9995 rtx size;
9996 unsigned int align;
9998 rtx tem;
10000 /* If one operand is constant, make it the second one. Only do this
10001 if the other operand is not constant as well. */
10003 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10004 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10006 tem = op0;
10007 op0 = op1;
10008 op1 = tem;
10009 code = swap_condition (code);
10012 if (flag_force_mem)
10014 op0 = force_not_mem (op0);
10015 op1 = force_not_mem (op1);
10018 do_pending_stack_adjust ();
10020 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10021 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10022 return tem;
10024 #if 0
10025 /* There's no need to do this now that combine.c can eliminate lots of
10026 sign extensions. This can be less efficient in certain cases on other
10027 machines. */
10029 /* If this is a signed equality comparison, we can do it as an
10030 unsigned comparison since zero-extension is cheaper than sign
10031 extension and comparisons with zero are done as unsigned. This is
10032 the case even on machines that can do fast sign extension, since
10033 zero-extension is easier to combine with other operations than
10034 sign-extension is. If we are comparing against a constant, we must
10035 convert it to what it would look like unsigned. */
10036 if ((code == EQ || code == NE) && ! unsignedp
10037 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10039 if (GET_CODE (op1) == CONST_INT
10040 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10041 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10042 unsignedp = 1;
10044 #endif
10046 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10048 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
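/* Illustrative sketch only: why the canonicalization above (and the same
   one in do_compare_rtx_and_jump below) swaps the condition rather than
   reversing it.  The source test compares a constant first; moving the
   constant to the second operand turns LT into GT, which is what
   swap_condition computes, not into GE, which reversing would give.  */
#if 0
static int
sketch_swapped_compare (x)
     int x;
{
  return 5 < x;                 /* canonicalized as  x > 5  */
}
#endif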
10051 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10052 The decision as to signed or unsigned comparison must be made by the caller.
10054 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10055 compared.
10057 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10058 size of MODE should be used. */
10060 void
10061 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
10062 if_false_label, if_true_label)
10063 register rtx op0, op1;
10064 enum rtx_code code;
10065 int unsignedp;
10066 enum machine_mode mode;
10067 rtx size;
10068 unsigned int align;
10069 rtx if_false_label, if_true_label;
10071 rtx tem;
10072 int dummy_true_label = 0;
10074 /* Reverse the comparison if that is safe and we want to jump if it is
10075 false. */
10076 if (! if_true_label && ! FLOAT_MODE_P (mode))
10078 if_true_label = if_false_label;
10079 if_false_label = 0;
10080 code = reverse_condition (code);
10083 /* If one operand is constant, make it the second one. Only do this
10084 if the other operand is not constant as well. */
10086 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10087 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10089 tem = op0;
10090 op0 = op1;
10091 op1 = tem;
10092 code = swap_condition (code);
10095 if (flag_force_mem)
10097 op0 = force_not_mem (op0);
10098 op1 = force_not_mem (op1);
10101 do_pending_stack_adjust ();
10103 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10104 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10106 if (tem == const_true_rtx)
10108 if (if_true_label)
10109 emit_jump (if_true_label);
10111 else
10113 if (if_false_label)
10114 emit_jump (if_false_label);
10116 return;
10119 #if 0
10120 /* There's no need to do this now that combine.c can eliminate lots of
10121 sign extensions. This can be less efficient in certain cases on other
10122 machines. */
10124 /* If this is a signed equality comparison, we can do it as an
10125 unsigned comparison since zero-extension is cheaper than sign
10126 extension and comparisons with zero are done as unsigned. This is
10127 the case even on machines that can do fast sign extension, since
10128 zero-extension is easier to combine with other operations than
10129 sign-extension is. If we are comparing against a constant, we must
10130 convert it to what it would look like unsigned. */
10131 if ((code == EQ || code == NE) && ! unsignedp
10132 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10134 if (GET_CODE (op1) == CONST_INT
10135 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10136 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10137 unsignedp = 1;
10139 #endif
10141 if (! if_true_label)
10143 dummy_true_label = 1;
10144 if_true_label = gen_label_rtx ();
10147 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10148 if_true_label);
10150 if (if_false_label)
10151 emit_jump (if_false_label);
10152 if (dummy_true_label)
10153 emit_label (if_true_label);
10156 /* Generate code for a comparison expression EXP (including code to compute
10157 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10158 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10159 generated code will drop through.
10160 SIGNED_CODE should be the rtx operation for this comparison for
10161 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10163 We force a stack adjustment unless there are currently
10164 things pushed on the stack that aren't yet used. */
10166 static void
10167 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10168 if_true_label)
10169 register tree exp;
10170 enum rtx_code signed_code, unsigned_code;
10171 rtx if_false_label, if_true_label;
10173 unsigned int align0, align1;
10174 register rtx op0, op1;
10175 register tree type;
10176 register enum machine_mode mode;
10177 int unsignedp;
10178 enum rtx_code code;
10180 /* Don't crash if the comparison was erroneous. */
10181 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10182 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10183 return;
10185 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10186 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10187 return;
10189 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10190 mode = TYPE_MODE (type);
10191 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10192 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10193 || (GET_MODE_BITSIZE (mode)
10194 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10195 1)))))))
10197 /* op0 might have been replaced by a promoted constant, in which
10198 case the type of the second argument should be used. */
10199 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10200 mode = TYPE_MODE (type);
10202 unsignedp = TREE_UNSIGNED (type);
10203 code = unsignedp ? unsigned_code : signed_code;
10205 #ifdef HAVE_canonicalize_funcptr_for_compare
10206 /* If function pointers need to be "canonicalized" before they can
10207 be reliably compared, then canonicalize them. */
10208 if (HAVE_canonicalize_funcptr_for_compare
10209 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10210 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10211 == FUNCTION_TYPE))
10213 rtx new_op0 = gen_reg_rtx (mode);
10215 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10216 op0 = new_op0;
10219 if (HAVE_canonicalize_funcptr_for_compare
10220 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10221 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10222 == FUNCTION_TYPE))
10224 rtx new_op1 = gen_reg_rtx (mode);
10226 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10227 op1 = new_op1;
10229 #endif
10231 /* Do any postincrements in the expression that was tested. */
10232 emit_queue ();
10234 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10235 ((mode == BLKmode)
10236 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10237 MIN (align0, align1),
10238 if_false_label, if_true_label);
10241 /* Generate code to calculate EXP using a store-flag instruction
10242 and return an rtx for the result. EXP is either a comparison
10243 or a TRUTH_NOT_EXPR whose operand is a comparison.
10245 If TARGET is nonzero, store the result there if convenient.
10247 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10248 cheap.
10250 Return zero if there is no suitable set-flag instruction
10251 available on this machine.
10253 Once expand_expr has been called on the arguments of the comparison,
10254 we are committed to doing the store flag, since it is not safe to
10255 re-evaluate the expression. We emit the store-flag insn by calling
10256 emit_store_flag, but only expand the arguments if we have a reason
10257 to believe that emit_store_flag will be successful. If we think that
10258 it will, but it isn't, we have to simulate the store-flag with a
10259 set/jump/set sequence. */
10261 static rtx
10262 do_store_flag (exp, target, mode, only_cheap)
10263 tree exp;
10264 rtx target;
10265 enum machine_mode mode;
10266 int only_cheap;
10268 enum rtx_code code;
10269 tree arg0, arg1, type;
10270 tree tem;
10271 enum machine_mode operand_mode;
10272 int invert = 0;
10273 int unsignedp;
10274 rtx op0, op1;
10275 enum insn_code icode;
10276 rtx subtarget = target;
10277 rtx result, label;
10279 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10280 result at the end. We can't simply invert the test since it would
10281 have already been inverted if it were valid. This case occurs for
10282 some floating-point comparisons. */
10284 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10285 invert = 1, exp = TREE_OPERAND (exp, 0);
10287 arg0 = TREE_OPERAND (exp, 0);
10288 arg1 = TREE_OPERAND (exp, 1);
10290 /* Don't crash if the comparison was erroneous. */
10291 if (arg0 == error_mark_node || arg1 == error_mark_node)
10292 return const0_rtx;
10294 type = TREE_TYPE (arg0);
10295 operand_mode = TYPE_MODE (type);
10296 unsignedp = TREE_UNSIGNED (type);
10298 /* We won't bother with BLKmode store-flag operations because it would mean
10299 passing a lot of information to emit_store_flag. */
10300 if (operand_mode == BLKmode)
10301 return 0;
10303 /* We won't bother with store-flag operations involving function pointers
10304 when function pointers must be canonicalized before comparisons. */
10305 #ifdef HAVE_canonicalize_funcptr_for_compare
10306 if (HAVE_canonicalize_funcptr_for_compare
10307 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10308 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10309 == FUNCTION_TYPE))
10310 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10311 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10312 == FUNCTION_TYPE))))
10313 return 0;
10314 #endif
10316 STRIP_NOPS (arg0);
10317 STRIP_NOPS (arg1);
10319 /* Get the rtx comparison code to use. We know that EXP is a comparison
10320 operation of some type. Some comparisons against 1 and -1 can be
10321 converted to comparisons with zero. Do so here so that the tests
10322 below will be aware that we have a comparison with zero. These
10323 tests will not catch constants in the first operand, but constants
10324 are rarely passed as the first operand. */
10326 switch (TREE_CODE (exp))
10328 case EQ_EXPR:
10329 code = EQ;
10330 break;
10331 case NE_EXPR:
10332 code = NE;
10333 break;
10334 case LT_EXPR:
10335 if (integer_onep (arg1))
10336 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10337 else
10338 code = unsignedp ? LTU : LT;
10339 break;
10340 case LE_EXPR:
10341 if (! unsignedp && integer_all_onesp (arg1))
10342 arg1 = integer_zero_node, code = LT;
10343 else
10344 code = unsignedp ? LEU : LE;
10345 break;
10346 case GT_EXPR:
10347 if (! unsignedp && integer_all_onesp (arg1))
10348 arg1 = integer_zero_node, code = GE;
10349 else
10350 code = unsignedp ? GTU : GT;
10351 break;
10352 case GE_EXPR:
10353 if (integer_onep (arg1))
10354 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10355 else
10356 code = unsignedp ? GEU : GE;
10357 break;
10359 case UNORDERED_EXPR:
10360 code = UNORDERED;
10361 break;
10362 case ORDERED_EXPR:
10363 code = ORDERED;
10364 break;
10365 case UNLT_EXPR:
10366 code = UNLT;
10367 break;
10368 case UNLE_EXPR:
10369 code = UNLE;
10370 break;
10371 case UNGT_EXPR:
10372 code = UNGT;
10373 break;
10374 case UNGE_EXPR:
10375 code = UNGE;
10376 break;
10377 case UNEQ_EXPR:
10378 code = UNEQ;
10379 break;
10381 default:
10382 abort ();
10385 /* Put a constant second. */
10386 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10388 tem = arg0; arg0 = arg1; arg1 = tem;
10389 code = swap_condition (code);
10392 /* If this is an equality or inequality test of a single bit, we can
10393 do this by shifting the bit being tested to the low-order bit and
10394 masking the result with the constant 1. If the condition was EQ,
10395 we xor it with 1. This does not require an scc insn and is faster
10396 than an scc insn even if we have it. */
10398 if ((code == NE || code == EQ)
10399 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10400 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10402 tree inner = TREE_OPERAND (arg0, 0);
10403 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10404 int ops_unsignedp;
10406 /* If INNER is a right shift of a constant and it plus BITNUM does
10407 not overflow, adjust BITNUM and INNER. */
10409 if (TREE_CODE (inner) == RSHIFT_EXPR
10410 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10411 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10412 && bitnum < TYPE_PRECISION (type)
10413 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10414 bitnum - TYPE_PRECISION (type)))
10416 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10417 inner = TREE_OPERAND (inner, 0);
10420 /* If we are going to be able to omit the AND below, we must do our
10421 operations as unsigned. If we must use the AND, we have a choice.
10422 Normally unsigned is faster, but for some machines signed is. */
10423 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10424 #ifdef LOAD_EXTEND_OP
10425 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10426 #else
10428 #endif
10431 if (! get_subtarget (subtarget)
10432 || GET_MODE (subtarget) != operand_mode
10433 || ! safe_from_p (subtarget, inner, 1))
10434 subtarget = 0;
10436 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10438 if (bitnum != 0)
10439 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10440 size_int (bitnum), subtarget, ops_unsignedp);
10442 if (GET_MODE (op0) != mode)
10443 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10445 if ((code == EQ && ! invert) || (code == NE && invert))
10446 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10447 ops_unsignedp, OPTAB_LIB_WIDEN);
10449 /* Put the AND last so it can combine with more things. */
10450 if (bitnum != TYPE_PRECISION (type) - 1)
10451 op0 = expand_and (op0, const1_rtx, subtarget);
10453 return op0;
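#if 0
      /* Illustrative sketch only (inert): the value computed by the
         shift-and-mask sequence above, for a made-up operand and bit
         number.  The real code puts the AND last so it can combine with
         other operations; the value is the same.  */
      {
        unsigned int x = 0x24;
        int bitnum = 2;
        int ne_result = (x >> bitnum) & 1;      /* (x & (1 << bitnum)) != 0 */
        int eq_result = ne_result ^ 1;          /* (x & (1 << bitnum)) == 0 */
      }
#endif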
10456 /* Now see if we are likely to be able to do this. Return if not. */
10457 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10458 return 0;
10460 icode = setcc_gen_code[(int) code];
10461 if (icode == CODE_FOR_nothing
10462 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10464 /* We can only do this if it is one of the special cases that
10465 can be handled without an scc insn. */
10466 if ((code == LT && integer_zerop (arg1))
10467 || (! only_cheap && code == GE && integer_zerop (arg1)))
10469 else if (BRANCH_COST >= 0
10470 && ! only_cheap && (code == NE || code == EQ)
10471 && TREE_CODE (type) != REAL_TYPE
10472 && ((abs_optab->handlers[(int) operand_mode].insn_code
10473 != CODE_FOR_nothing)
10474 || (ffs_optab->handlers[(int) operand_mode].insn_code
10475 != CODE_FOR_nothing)))
10477 else
10478 return 0;
10481 if (! get_subtarget (target)
10482 || GET_MODE (subtarget) != operand_mode
10483 || ! safe_from_p (subtarget, arg1, 1))
10484 subtarget = 0;
10486 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10487 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10489 if (target == 0)
10490 target = gen_reg_rtx (mode);
10492 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10493 because, if emit_store_flag does anything, it will succeed and
10494 OP0 and OP1 will not be used subsequently. */
10496 result = emit_store_flag (target, code,
10497 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10498 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10499 operand_mode, unsignedp, 1);
10501 if (result)
10503 if (invert)
10504 result = expand_binop (mode, xor_optab, result, const1_rtx,
10505 result, 0, OPTAB_LIB_WIDEN);
10506 return result;
10509 /* If this failed, we have to do this with set/compare/jump/set code. */
10510 if (GET_CODE (target) != REG
10511 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10512 target = gen_reg_rtx (GET_MODE (target));
10514 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10515 result = compare_from_rtx (op0, op1, code, unsignedp,
10516 operand_mode, NULL_RTX, 0);
10517 if (GET_CODE (result) == CONST_INT)
10518 return (((result == const0_rtx && ! invert)
10519 || (result != const0_rtx && invert))
10520 ? const0_rtx : const1_rtx);
10522 label = gen_label_rtx ();
10523 if (bcc_gen_fctn[(int) code] == 0)
10524 abort ();
10526 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10527 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10528 emit_label (label);
10530 return target;
10533 /* Generate a tablejump instruction (used for switch statements). */
10535 #ifdef HAVE_tablejump
10537 /* INDEX is the value being switched on, with the lowest value
10538 in the table already subtracted.
10539 MODE is its expected mode (needed if INDEX is constant).
10540 RANGE is the length of the jump table.
10541 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10543 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10544 index value is out of range. */
10546 void
10547 do_tablejump (index, mode, range, table_label, default_label)
10548 rtx index, range, table_label, default_label;
10549 enum machine_mode mode;
10551 register rtx temp, vector;
10553 /* Do an unsigned comparison (in the proper mode) between the index
10554 expression and the value which represents the length of the range.
10555 Since we just finished subtracting the lower bound of the range
10556 from the index expression, this comparison allows us to simultaneously
10557 check that the original index expression value is both greater than
10558 or equal to the minimum value of the range and less than or equal to
10559 the maximum value of the range. */
10561 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10562 0, default_label);
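  /* Illustrative sketch only (inert): with made-up bounds, the single
     unsigned comparison just emitted subsumes both range checks, because
     the lower bound has already been subtracted from INDEX.  */
#if 0
  {
    int orig_index = 7, low = 10, high = 20;
    int out_of_range
      = (unsigned) (orig_index - low) > (unsigned) (high - low);
    /* OUT_OF_RANGE is nonzero exactly when ORIG_INDEX < LOW or
       ORIG_INDEX > HIGH, so a single jump to DEFAULT_LABEL covers both
       ends of the range.  */
  }
#endif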
10564 /* If index is in range, it must fit in Pmode.
10565 Convert to Pmode so we can index with it. */
10566 if (mode != Pmode)
10567 index = convert_to_mode (Pmode, index, 1);
10569 /* Don't let a MEM slip through, because then INDEX that comes
10570 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10571 and break_out_memory_refs will go to work on it and mess it up. */
10572 #ifdef PIC_CASE_VECTOR_ADDRESS
10573 if (flag_pic && GET_CODE (index) != REG)
10574 index = copy_to_mode_reg (Pmode, index);
10575 #endif
10577 /* If flag_force_addr were to affect this address
10578 it could interfere with the tricky assumptions made
10579 about addresses that contain label-refs,
10580 which may be valid only very near the tablejump itself. */
10581 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10582 GET_MODE_SIZE, because this indicates how large insns are. The other
10583 uses should all be Pmode, because they are addresses. This code
10584 could fail if addresses and insns are not the same size. */
10585 index = gen_rtx_PLUS (Pmode,
10586 gen_rtx_MULT (Pmode, index,
10587 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10588 gen_rtx_LABEL_REF (Pmode, table_label));
10589 #ifdef PIC_CASE_VECTOR_ADDRESS
10590 if (flag_pic)
10591 index = PIC_CASE_VECTOR_ADDRESS (index);
10592 else
10593 #endif
10594 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10595 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10596 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10597 RTX_UNCHANGING_P (vector) = 1;
10598 convert_move (temp, vector, 0);
10600 emit_jump_insn (gen_tablejump (temp, table_label));
10602 /* If we are generating PIC code or if the table is PC-relative, the
10603 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10604 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10605 emit_barrier ();
10608 #endif /* HAVE_tablejump */