1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
3 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-flags.h"
34 #include "insn-codes.h"
35 #include "insn-config.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "recog.h"
39 #include "reload.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "defaults.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "intl.h"
46 #include "tm_p.h"
48 #ifndef ACCUMULATE_OUTGOING_ARGS
49 #define ACCUMULATE_OUTGOING_ARGS 0
50 #endif
52 /* Supply a default definition for PUSH_ARGS. */
53 #ifndef PUSH_ARGS
54 #ifdef PUSH_ROUNDING
55 #define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
56 #else
57 #define PUSH_ARGS 0
58 #endif
59 #endif
61 /* Decide whether a function's arguments should be processed
62 from first to last or from last to first.
64 They should if the stack and args grow in opposite directions, but
65 only if we have push insns. */
67 #ifdef PUSH_ROUNDING
69 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
70 #define PUSH_ARGS_REVERSED /* If it's last to first. */
71 #endif
73 #endif
75 #ifndef STACK_PUSH_CODE
76 #ifdef STACK_GROWS_DOWNWARD
77 #define STACK_PUSH_CODE PRE_DEC
78 #else
79 #define STACK_PUSH_CODE PRE_INC
80 #endif
81 #endif
83 /* Assume that case vectors are not pc-relative. */
84 #ifndef CASE_VECTOR_PC_RELATIVE
85 #define CASE_VECTOR_PC_RELATIVE 0
86 #endif
88 /* Hook called by safe_from_p for language-specific tree codes. It is
89 up to the language front-end to install a hook if it has any such
90 codes that safe_from_p needs to know about. Since safe_from_p will
91 recursively explore the TREE_OPERANDs of an expression, this hook
92 should not reexamine those pieces. This routine may recursively
93 call safe_from_p; it should always pass `0' as the TOP_P
94 parameter. */
95 int (*lang_safe_from_p) PARAMS ((rtx, tree));
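/* Illustrative sketch (not part of this file): how a language front end
   might install the hook above.  The function name is hypothetical and the
   block is never compiled.  */
#if 0
static int
example_lang_safe_from_p (x, exp)
     rtx x ATTRIBUTE_UNUSED;
     tree exp ATTRIBUTE_UNUSED;
{
  /* Language-specific codes this front end knows have no hidden side
     effects are vacuously safe; safe_from_p has already walked the
     TREE_OPERANDs, so they are not re-examined here.  */
  return 1;
}

/* ...somewhere in the front end's initialization:
     lang_safe_from_p = example_lang_safe_from_p;  */
#endif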
97 /* If this is nonzero, we do not bother generating VOLATILE
98 around volatile memory references, and we are willing to
99 output indirect addresses. If cse is to follow, we reject
100 indirect addresses so a useful potential cse is generated;
101 if it is used only once, instruction combination will produce
102 the same indirect address eventually. */
103 int cse_not_expected;
105 /* Nonzero to generate code for all the subroutines within an
106 expression before generating the upper levels of the expression.
107 Nowadays this is never zero. */
108 int do_preexpand_calls = 1;
110 /* Don't check memory usage, since code is being emitted to check memory
111 usage. Used when current_function_check_memory_usage is true, to avoid
112 infinite recursion. */
113 static int in_check_memory_usage;
115 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
116 static tree placeholder_list = 0;
118 /* This structure is used by move_by_pieces to describe the move to
119 be performed. */
120 struct move_by_pieces
121 {
122 rtx to;
123 rtx to_addr;
124 int autinc_to;
125 int explicit_inc_to;
126 rtx from;
127 rtx from_addr;
128 int autinc_from;
129 int explicit_inc_from;
130 unsigned HOST_WIDE_INT len;
131 HOST_WIDE_INT offset;
132 int reverse;
133 };
135 /* This structure is used by clear_by_pieces to describe the clear to
136 be performed. */
138 struct clear_by_pieces
139 {
140 rtx to;
141 rtx to_addr;
142 int autinc_to;
143 int explicit_inc_to;
144 unsigned HOST_WIDE_INT len;
145 HOST_WIDE_INT offset;
146 int reverse;
147 };
149 extern struct obstack permanent_obstack;
151 static rtx get_push_address PARAMS ((int));
153 static rtx enqueue_insn PARAMS ((rtx, rtx));
154 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
155 PARAMS ((unsigned HOST_WIDE_INT,
156 unsigned int));
157 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
158 struct move_by_pieces *));
159 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
160 unsigned int));
161 static void clear_by_pieces_1 PARAMS ((rtx (*) (rtx, ...),
162 enum machine_mode,
163 struct clear_by_pieces *));
164 static rtx get_subtarget PARAMS ((rtx));
165 static int is_zeros_p PARAMS ((tree));
166 static int mostly_zeros_p PARAMS ((tree));
167 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
168 HOST_WIDE_INT, enum machine_mode,
169 tree, tree, unsigned int, int,
170 int));
171 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
172 HOST_WIDE_INT));
173 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
174 HOST_WIDE_INT, enum machine_mode,
175 tree, enum machine_mode, int,
176 unsigned int, HOST_WIDE_INT, int));
177 static enum memory_use_mode
178 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
179 static tree save_noncopied_parts PARAMS ((tree, tree));
180 static tree init_noncopied_parts PARAMS ((tree, tree));
181 static int fixed_type_p PARAMS ((tree));
182 static rtx var_rtx PARAMS ((tree));
183 static int readonly_fields_p PARAMS ((tree));
184 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
185 static rtx expand_increment PARAMS ((tree, int, int));
186 static void preexpand_calls PARAMS ((tree));
187 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
188 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
189 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
190 rtx, rtx));
191 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
193 /* Record for each mode whether we can move a register directly to or
194 from an object of that mode in memory. If we can't, we won't try
195 to use that mode directly when accessing a field of that mode. */
197 static char direct_load[NUM_MACHINE_MODES];
198 static char direct_store[NUM_MACHINE_MODES];
200 /* If a memory-to-memory move would take MOVE_RATIO or more simple
201 move-instruction sequences, we will do a movstr or libcall instead. */
203 #ifndef MOVE_RATIO
204 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
205 #define MOVE_RATIO 2
206 #else
207 /* If we are optimizing for space (-Os), cut down the default move ratio. */
208 #define MOVE_RATIO (optimize_size ? 3 : 15)
209 #endif
210 #endif
212 /* This macro is used to determine whether move_by_pieces should be called
213 to perform a structure copy. */
214 #ifndef MOVE_BY_PIECES_P
215 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
216 (move_by_pieces_ninsns (SIZE, ALIGN) < MOVE_RATIO)
217 #endif
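/* Illustrative note: with the defaults above, a copy that
   move_by_pieces_ninsns says needs fewer than MOVE_RATIO scalar moves
   (15 normally, 3 with -Os) is expanded inline.  A target wanting a
   different policy can define the macro itself in its tm.h header,
   e.g. (hypothetical):

     #define MOVE_BY_PIECES_P(SIZE, ALIGN)  ((SIZE) <= 4 * UNITS_PER_WORD)  */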
219 /* This array records the insn_code of insns to perform block moves. */
220 enum insn_code movstr_optab[NUM_MACHINE_MODES];
222 /* This array records the insn_code of insns to perform block clears. */
223 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
225 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
227 #ifndef SLOW_UNALIGNED_ACCESS
228 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
229 #endif
231 /* This is run once per compilation to set up which modes can be used
232 directly in memory and to initialize the block move optab. */
234 void
235 init_expr_once ()
237 rtx insn, pat;
238 enum machine_mode mode;
239 int num_clobbers;
240 rtx mem, mem1;
241 char *free_point;
243 start_sequence ();
245 /* Since we are on the permanent obstack, we must be sure we save this
246 spot AFTER we call start_sequence, since it will reuse the rtl it
247 makes. */
248 free_point = (char *) oballoc (0);
250 /* Try indexing by frame ptr and try by stack ptr.
251 It is known that on the Convex the stack ptr isn't a valid index.
252 With luck, one or the other is valid on any machine. */
253 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
254 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
256 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
257 pat = PATTERN (insn);
259 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
260 mode = (enum machine_mode) ((int) mode + 1))
262 int regno;
263 rtx reg;
265 direct_load[(int) mode] = direct_store[(int) mode] = 0;
266 PUT_MODE (mem, mode);
267 PUT_MODE (mem1, mode);
269 /* See if there is some register that can be used in this mode and
270 directly loaded or stored from memory. */
272 if (mode != VOIDmode && mode != BLKmode)
273 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
274 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
275 regno++)
277 if (! HARD_REGNO_MODE_OK (regno, mode))
278 continue;
280 reg = gen_rtx_REG (mode, regno);
282 SET_SRC (pat) = mem;
283 SET_DEST (pat) = reg;
284 if (recog (pat, insn, &num_clobbers) >= 0)
285 direct_load[(int) mode] = 1;
287 SET_SRC (pat) = mem1;
288 SET_DEST (pat) = reg;
289 if (recog (pat, insn, &num_clobbers) >= 0)
290 direct_load[(int) mode] = 1;
292 SET_SRC (pat) = reg;
293 SET_DEST (pat) = mem;
294 if (recog (pat, insn, &num_clobbers) >= 0)
295 direct_store[(int) mode] = 1;
297 SET_SRC (pat) = reg;
298 SET_DEST (pat) = mem1;
299 if (recog (pat, insn, &num_clobbers) >= 0)
300 direct_store[(int) mode] = 1;
304 end_sequence ();
305 obfree (free_point);
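/* Illustrative note: after this runs, direct_load[(int) SImode] is 1 on a
   target whose movsi pattern recognizes (set (reg:SI r) (mem:SI ...)); a
   mode with no such recognizable move keeps 0, and fields of that mode are
   then accessed indirectly rather than with a direct memory reference.  */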
308 /* This is run at the start of compiling a function. */
310 void
311 init_expr ()
313 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
315 pending_chain = 0;
316 pending_stack_adjust = 0;
317 stack_pointer_delta = 0;
318 inhibit_defer_pop = 0;
319 saveregs_value = 0;
320 apply_args_value = 0;
321 forced_labels = 0;
324 void
325 mark_expr_status (p)
326 struct expr_status *p;
328 if (p == NULL)
329 return;
331 ggc_mark_rtx (p->x_saveregs_value);
332 ggc_mark_rtx (p->x_apply_args_value);
333 ggc_mark_rtx (p->x_forced_labels);
336 void
337 free_expr_status (f)
338 struct function *f;
340 free (f->expr);
341 f->expr = NULL;
344 /* Small sanity check that the queue is empty at the end of a function. */
346 void
347 finish_expr_for_function ()
349 if (pending_chain)
350 abort ();
353 /* Manage the queue of increment instructions to be output
354 for POSTINCREMENT_EXPR expressions, etc. */
356 /* Queue up to increment (or change) VAR later. BODY says how:
357 BODY should be the same thing you would pass to emit_insn
358 to increment right away. It will go to emit_insn later on.
360 The value is a QUEUED expression to be used in place of VAR
361 where you want to guarantee the pre-incrementation value of VAR. */
363 static rtx
364 enqueue_insn (var, body)
365 rtx var, body;
367 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
368 body, pending_chain);
369 return pending_chain;
372 /* Use protect_from_queue to convert a QUEUED expression
373 into something that you can put immediately into an instruction.
374 If the queued incrementation has not happened yet,
375 protect_from_queue returns the variable itself.
376 If the incrementation has happened, protect_from_queue returns a temp
377 that contains a copy of the old value of the variable.
379 Any time an rtx which might possibly be a QUEUED is to be put
380 into an instruction, it must be passed through protect_from_queue first.
381 QUEUED expressions are not meaningful in instructions.
383 Do not pass a value through protect_from_queue and then hold
384 on to it for a while before putting it in an instruction!
385 If the queue is flushed in between, incorrect code will result. */
388 protect_from_queue (x, modify)
389 register rtx x;
390 int modify;
392 register RTX_CODE code = GET_CODE (x);
394 #if 0 /* A QUEUED can hang around after the queue is forced out. */
395 /* Shortcut for most common case. */
396 if (pending_chain == 0)
397 return x;
398 #endif
400 if (code != QUEUED)
402 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
403 use of autoincrement. Make a copy of the contents of the memory
404 location rather than a copy of the address, but not if the value is
405 of mode BLKmode. Don't modify X in place since it might be
406 shared. */
407 if (code == MEM && GET_MODE (x) != BLKmode
408 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
410 register rtx y = XEXP (x, 0);
411 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
413 MEM_COPY_ATTRIBUTES (new, x);
415 if (QUEUED_INSN (y))
417 register rtx temp = gen_reg_rtx (GET_MODE (new));
418 emit_insn_before (gen_move_insn (temp, new),
419 QUEUED_INSN (y));
420 return temp;
422 return new;
424 /* Otherwise, recursively protect the subexpressions of all
425 the kinds of rtx's that can contain a QUEUED. */
426 if (code == MEM)
428 rtx tem = protect_from_queue (XEXP (x, 0), 0);
429 if (tem != XEXP (x, 0))
431 x = copy_rtx (x);
432 XEXP (x, 0) = tem;
435 else if (code == PLUS || code == MULT)
437 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
438 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
439 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
441 x = copy_rtx (x);
442 XEXP (x, 0) = new0;
443 XEXP (x, 1) = new1;
446 return x;
448 /* If the increment has not happened, use the variable itself. */
449 if (QUEUED_INSN (x) == 0)
450 return QUEUED_VAR (x);
451 /* If the increment has happened and a pre-increment copy exists,
452 use that copy. */
453 if (QUEUED_COPY (x) != 0)
454 return QUEUED_COPY (x);
455 /* The increment has happened but we haven't set up a pre-increment copy.
456 Set one up now, and use it. */
457 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
458 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
459 QUEUED_INSN (x));
460 return QUEUED_COPY (x);
463 /* Return nonzero if X contains a QUEUED expression:
464 if it contains anything that will be altered by a queued increment.
465 We handle only combinations of MEM, PLUS, MINUS and MULT operators
466 since memory addresses generally contain only those. */
469 queued_subexp_p (x)
470 rtx x;
472 register enum rtx_code code = GET_CODE (x);
473 switch (code)
475 case QUEUED:
476 return 1;
477 case MEM:
478 return queued_subexp_p (XEXP (x, 0));
479 case MULT:
480 case PLUS:
481 case MINUS:
482 return (queued_subexp_p (XEXP (x, 0))
483 || queued_subexp_p (XEXP (x, 1)));
484 default:
485 return 0;
489 /* Perform all the pending incrementations. */
491 void
492 emit_queue ()
494 register rtx p;
495 while ((p = pending_chain))
497 rtx body = QUEUED_BODY (p);
499 if (GET_CODE (body) == SEQUENCE)
501 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
502 emit_insn (QUEUED_BODY (p));
504 else
505 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
506 pending_chain = QUEUED_NEXT (p);
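/* Illustrative sketch (never compiled, not called from anywhere): the
   typical life cycle of a queued increment, mirroring what
   expand_increment does for VAR++ when the pre-increment value is still
   needed.  */
#if 0
static rtx
example_queued_post_increment (var)
     rtx var;
{
  /* Queue "var = var + 1" instead of emitting it immediately.  */
  rtx queued = enqueue_insn (var, gen_add2_insn (var, const1_rtx));

  /* Every later use must go through protect_from_queue; before
     emit_queue runs it returns VAR itself, afterwards it returns a copy
     of the pre-increment value.  */
  return protect_from_queue (queued, 0);
}
#endif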
510 /* Copy data from FROM to TO, where the machine modes are not the same.
511 Both modes may be integer, or both may be floating.
512 UNSIGNEDP should be nonzero if FROM is an unsigned type.
513 This causes zero-extension instead of sign-extension. */
515 void
516 convert_move (to, from, unsignedp)
517 register rtx to, from;
518 int unsignedp;
520 enum machine_mode to_mode = GET_MODE (to);
521 enum machine_mode from_mode = GET_MODE (from);
522 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
523 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
524 enum insn_code code;
525 rtx libcall;
527 /* rtx code for making an equivalent value. */
528 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
530 to = protect_from_queue (to, 1);
531 from = protect_from_queue (from, 0);
533 if (to_real != from_real)
534 abort ();
536 /* If FROM is a SUBREG that indicates that we have already done at least
537 the required extension, strip it. We don't handle such SUBREGs as
538 TO here. */
540 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
541 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
542 >= GET_MODE_SIZE (to_mode))
543 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
544 from = gen_lowpart (to_mode, from), from_mode = to_mode;
546 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
547 abort ();
549 if (to_mode == from_mode
550 || (from_mode == VOIDmode && CONSTANT_P (from)))
552 emit_move_insn (to, from);
553 return;
556 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
558 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
559 abort ();
561 if (VECTOR_MODE_P (to_mode))
562 from = gen_rtx_SUBREG (to_mode, from, 0);
563 else
564 to = gen_rtx_SUBREG (from_mode, to, 0);
566 emit_move_insn (to, from);
567 return;
570 if (to_real != from_real)
571 abort ();
573 if (to_real)
575 rtx value, insns;
577 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
579 /* Try converting directly if the insn is supported. */
580 if ((code = can_extend_p (to_mode, from_mode, 0))
581 != CODE_FOR_nothing)
583 emit_unop_insn (code, to, from, UNKNOWN);
584 return;
588 #ifdef HAVE_trunchfqf2
589 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
591 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
592 return;
594 #endif
595 #ifdef HAVE_trunctqfqf2
596 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
598 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
599 return;
601 #endif
602 #ifdef HAVE_truncsfqf2
603 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
605 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
606 return;
608 #endif
609 #ifdef HAVE_truncdfqf2
610 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
612 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
613 return;
615 #endif
616 #ifdef HAVE_truncxfqf2
617 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
619 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
620 return;
622 #endif
623 #ifdef HAVE_trunctfqf2
624 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
626 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
627 return;
629 #endif
631 #ifdef HAVE_trunctqfhf2
632 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
634 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
635 return;
637 #endif
638 #ifdef HAVE_truncsfhf2
639 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
641 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
642 return;
644 #endif
645 #ifdef HAVE_truncdfhf2
646 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
648 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
649 return;
651 #endif
652 #ifdef HAVE_truncxfhf2
653 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
655 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
656 return;
658 #endif
659 #ifdef HAVE_trunctfhf2
660 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
662 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
663 return;
665 #endif
667 #ifdef HAVE_truncsftqf2
668 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
670 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
671 return;
673 #endif
674 #ifdef HAVE_truncdftqf2
675 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
677 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
678 return;
680 #endif
681 #ifdef HAVE_truncxftqf2
682 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
684 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
685 return;
687 #endif
688 #ifdef HAVE_trunctftqf2
689 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
691 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
692 return;
694 #endif
696 #ifdef HAVE_truncdfsf2
697 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
699 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
700 return;
702 #endif
703 #ifdef HAVE_truncxfsf2
704 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
706 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
707 return;
709 #endif
710 #ifdef HAVE_trunctfsf2
711 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
713 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
714 return;
716 #endif
717 #ifdef HAVE_truncxfdf2
718 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
720 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
721 return;
723 #endif
724 #ifdef HAVE_trunctfdf2
725 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
727 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
728 return;
730 #endif
732 libcall = (rtx) 0;
733 switch (from_mode)
735 case SFmode:
736 switch (to_mode)
738 case DFmode:
739 libcall = extendsfdf2_libfunc;
740 break;
742 case XFmode:
743 libcall = extendsfxf2_libfunc;
744 break;
746 case TFmode:
747 libcall = extendsftf2_libfunc;
748 break;
750 default:
751 break;
753 break;
755 case DFmode:
756 switch (to_mode)
758 case SFmode:
759 libcall = truncdfsf2_libfunc;
760 break;
762 case XFmode:
763 libcall = extenddfxf2_libfunc;
764 break;
766 case TFmode:
767 libcall = extenddftf2_libfunc;
768 break;
770 default:
771 break;
773 break;
775 case XFmode:
776 switch (to_mode)
778 case SFmode:
779 libcall = truncxfsf2_libfunc;
780 break;
782 case DFmode:
783 libcall = truncxfdf2_libfunc;
784 break;
786 default:
787 break;
789 break;
791 case TFmode:
792 switch (to_mode)
794 case SFmode:
795 libcall = trunctfsf2_libfunc;
796 break;
798 case DFmode:
799 libcall = trunctfdf2_libfunc;
800 break;
802 default:
803 break;
805 break;
807 default:
808 break;
811 if (libcall == (rtx) 0)
812 /* This conversion is not implemented yet. */
813 abort ();
815 start_sequence ();
816 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
817 1, from, from_mode);
818 insns = get_insns ();
819 end_sequence ();
820 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
821 from));
822 return;
825 /* Now both modes are integers. */
827 /* Handle expanding beyond a word. */
828 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
829 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
831 rtx insns;
832 rtx lowpart;
833 rtx fill_value;
834 rtx lowfrom;
835 int i;
836 enum machine_mode lowpart_mode;
837 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
839 /* Try converting directly if the insn is supported. */
840 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
841 != CODE_FOR_nothing)
843 /* If FROM is a SUBREG, put it into a register. Do this
844 so that we always generate the same set of insns for
845 better cse'ing; if an intermediate assignment occurred,
846 we won't be doing the operation directly on the SUBREG. */
847 if (optimize > 0 && GET_CODE (from) == SUBREG)
848 from = force_reg (from_mode, from);
849 emit_unop_insn (code, to, from, equiv_code);
850 return;
852 /* Next, try converting via full word. */
853 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
854 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
855 != CODE_FOR_nothing))
857 if (GET_CODE (to) == REG)
858 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
859 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
860 emit_unop_insn (code, to,
861 gen_lowpart (word_mode, to), equiv_code);
862 return;
865 /* No special multiword conversion insn; do it by hand. */
866 start_sequence ();
868 /* Since we will turn this into a no conflict block, we must ensure
869 that the source does not overlap the target. */
871 if (reg_overlap_mentioned_p (to, from))
872 from = force_reg (from_mode, from);
874 /* Get a copy of FROM widened to a word, if necessary. */
875 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
876 lowpart_mode = word_mode;
877 else
878 lowpart_mode = from_mode;
880 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
882 lowpart = gen_lowpart (lowpart_mode, to);
883 emit_move_insn (lowpart, lowfrom);
885 /* Compute the value to put in each remaining word. */
886 if (unsignedp)
887 fill_value = const0_rtx;
888 else
890 #ifdef HAVE_slt
891 if (HAVE_slt
892 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
893 && STORE_FLAG_VALUE == -1)
895 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
896 lowpart_mode, 0, 0);
897 fill_value = gen_reg_rtx (word_mode);
898 emit_insn (gen_slt (fill_value));
900 else
901 #endif
903 fill_value
904 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
905 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
906 NULL_RTX, 0);
907 fill_value = convert_to_mode (word_mode, fill_value, 1);
911 /* Fill the remaining words. */
912 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
914 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
915 rtx subword = operand_subword (to, index, 1, to_mode);
917 if (subword == 0)
918 abort ();
920 if (fill_value != subword)
921 emit_move_insn (subword, fill_value);
924 insns = get_insns ();
925 end_sequence ();
927 emit_no_conflict_block (insns, to, from, NULL_RTX,
928 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
929 return;
932 /* Truncating multi-word to a word or less. */
933 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
934 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
936 if (!((GET_CODE (from) == MEM
937 && ! MEM_VOLATILE_P (from)
938 && direct_load[(int) to_mode]
939 && ! mode_dependent_address_p (XEXP (from, 0)))
940 || GET_CODE (from) == REG
941 || GET_CODE (from) == SUBREG))
942 from = force_reg (from_mode, from);
943 convert_move (to, gen_lowpart (word_mode, from), 0);
944 return;
947 /* Handle pointer conversion. */ /* SPEE 900220. */
948 if (to_mode == PQImode)
950 if (from_mode != QImode)
951 from = convert_to_mode (QImode, from, unsignedp);
953 #ifdef HAVE_truncqipqi2
954 if (HAVE_truncqipqi2)
956 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
957 return;
959 #endif /* HAVE_truncqipqi2 */
960 abort ();
963 if (from_mode == PQImode)
965 if (to_mode != QImode)
967 from = convert_to_mode (QImode, from, unsignedp);
968 from_mode = QImode;
970 else
972 #ifdef HAVE_extendpqiqi2
973 if (HAVE_extendpqiqi2)
975 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
976 return;
978 #endif /* HAVE_extendpqiqi2 */
979 abort ();
983 if (to_mode == PSImode)
985 if (from_mode != SImode)
986 from = convert_to_mode (SImode, from, unsignedp);
988 #ifdef HAVE_truncsipsi2
989 if (HAVE_truncsipsi2)
991 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
992 return;
994 #endif /* HAVE_truncsipsi2 */
995 abort ();
998 if (from_mode == PSImode)
1000 if (to_mode != SImode)
1002 from = convert_to_mode (SImode, from, unsignedp);
1003 from_mode = SImode;
1005 else
1007 #ifdef HAVE_extendpsisi2
1008 if (HAVE_extendpsisi2)
1010 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1011 return;
1013 #endif /* HAVE_extendpsisi2 */
1014 abort ();
1018 if (to_mode == PDImode)
1020 if (from_mode != DImode)
1021 from = convert_to_mode (DImode, from, unsignedp);
1023 #ifdef HAVE_truncdipdi2
1024 if (HAVE_truncdipdi2)
1026 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1027 return;
1029 #endif /* HAVE_truncdipdi2 */
1030 abort ();
1033 if (from_mode == PDImode)
1035 if (to_mode != DImode)
1037 from = convert_to_mode (DImode, from, unsignedp);
1038 from_mode = DImode;
1040 else
1042 #ifdef HAVE_extendpdidi2
1043 if (HAVE_extendpdidi2)
1045 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1046 return;
1048 #endif /* HAVE_extendpdidi2 */
1049 abort ();
1053 /* Now follow all the conversions between integers
1054 no more than a word long. */
1056 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1057 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1058 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1059 GET_MODE_BITSIZE (from_mode)))
1061 if (!((GET_CODE (from) == MEM
1062 && ! MEM_VOLATILE_P (from)
1063 && direct_load[(int) to_mode]
1064 && ! mode_dependent_address_p (XEXP (from, 0)))
1065 || GET_CODE (from) == REG
1066 || GET_CODE (from) == SUBREG))
1067 from = force_reg (from_mode, from);
1068 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1069 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1070 from = copy_to_reg (from);
1071 emit_move_insn (to, gen_lowpart (to_mode, from));
1072 return;
1075 /* Handle extension. */
1076 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1078 /* Convert directly if that works. */
1079 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1080 != CODE_FOR_nothing)
1082 emit_unop_insn (code, to, from, equiv_code);
1083 return;
1085 else
1087 enum machine_mode intermediate;
1088 rtx tmp;
1089 tree shift_amount;
1091 /* Search for a mode to convert via. */
1092 for (intermediate = from_mode; intermediate != VOIDmode;
1093 intermediate = GET_MODE_WIDER_MODE (intermediate))
1094 if (((can_extend_p (to_mode, intermediate, unsignedp)
1095 != CODE_FOR_nothing)
1096 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1097 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1098 GET_MODE_BITSIZE (intermediate))))
1099 && (can_extend_p (intermediate, from_mode, unsignedp)
1100 != CODE_FOR_nothing))
1102 convert_move (to, convert_to_mode (intermediate, from,
1103 unsignedp), unsignedp);
1104 return;
1107 /* No suitable intermediate mode.
1108 Generate what we need with shifts. */
1109 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1110 - GET_MODE_BITSIZE (from_mode), 0);
1111 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1112 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1113 to, unsignedp);
1114 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1115 to, unsignedp);
1116 if (tmp != to)
1117 emit_move_insn (to, tmp);
1118 return;
1122 /* Support special truncate insns for certain modes. */
1124 if (from_mode == DImode && to_mode == SImode)
1126 #ifdef HAVE_truncdisi2
1127 if (HAVE_truncdisi2)
1129 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1130 return;
1132 #endif
1133 convert_move (to, force_reg (from_mode, from), unsignedp);
1134 return;
1137 if (from_mode == DImode && to_mode == HImode)
1139 #ifdef HAVE_truncdihi2
1140 if (HAVE_truncdihi2)
1142 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1143 return;
1145 #endif
1146 convert_move (to, force_reg (from_mode, from), unsignedp);
1147 return;
1150 if (from_mode == DImode && to_mode == QImode)
1152 #ifdef HAVE_truncdiqi2
1153 if (HAVE_truncdiqi2)
1155 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1156 return;
1158 #endif
1159 convert_move (to, force_reg (from_mode, from), unsignedp);
1160 return;
1163 if (from_mode == SImode && to_mode == HImode)
1165 #ifdef HAVE_truncsihi2
1166 if (HAVE_truncsihi2)
1168 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1169 return;
1171 #endif
1172 convert_move (to, force_reg (from_mode, from), unsignedp);
1173 return;
1176 if (from_mode == SImode && to_mode == QImode)
1178 #ifdef HAVE_truncsiqi2
1179 if (HAVE_truncsiqi2)
1181 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1182 return;
1184 #endif
1185 convert_move (to, force_reg (from_mode, from), unsignedp);
1186 return;
1189 if (from_mode == HImode && to_mode == QImode)
1191 #ifdef HAVE_trunchiqi2
1192 if (HAVE_trunchiqi2)
1194 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1195 return;
1197 #endif
1198 convert_move (to, force_reg (from_mode, from), unsignedp);
1199 return;
1202 if (from_mode == TImode && to_mode == DImode)
1204 #ifdef HAVE_trunctidi2
1205 if (HAVE_trunctidi2)
1207 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1208 return;
1210 #endif
1211 convert_move (to, force_reg (from_mode, from), unsignedp);
1212 return;
1215 if (from_mode == TImode && to_mode == SImode)
1217 #ifdef HAVE_trunctisi2
1218 if (HAVE_trunctisi2)
1220 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1221 return;
1223 #endif
1224 convert_move (to, force_reg (from_mode, from), unsignedp);
1225 return;
1228 if (from_mode == TImode && to_mode == HImode)
1230 #ifdef HAVE_trunctihi2
1231 if (HAVE_trunctihi2)
1233 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1234 return;
1236 #endif
1237 convert_move (to, force_reg (from_mode, from), unsignedp);
1238 return;
1241 if (from_mode == TImode && to_mode == QImode)
1243 #ifdef HAVE_trunctiqi2
1244 if (HAVE_trunctiqi2)
1246 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1247 return;
1249 #endif
1250 convert_move (to, force_reg (from_mode, from), unsignedp);
1251 return;
1254 /* Handle truncation of volatile memrefs, and so on;
1255 the things that couldn't be truncated directly,
1256 and for which there was no special instruction. */
1257 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1259 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1260 emit_move_insn (to, temp);
1261 return;
1264 /* Mode combination is not recognized. */
1265 abort ();
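/* Illustrative sketch of the calling convention (hypothetical pseudos,
   never compiled): sign-extend a QImode value into an SImode
   destination.  */
#if 0
static void
example_convert_move ()
{
  rtx src = gen_reg_rtx (QImode);
  rtx dst = gen_reg_rtx (SImode);

  /* UNSIGNEDP is zero, so the QImode value is sign-extended.  */
  convert_move (dst, src, 0);
}
#endif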
1268 /* Return an rtx for a value that would result
1269 from converting X to mode MODE.
1270 Both X and MODE may be floating, or both integer.
1271 UNSIGNEDP is nonzero if X is an unsigned value.
1272 This can be done by referring to a part of X in place
1273 or by copying to a new temporary with conversion.
1275 This function *must not* call protect_from_queue
1276 except when putting X into an insn (in which case convert_move does it). */
1279 convert_to_mode (mode, x, unsignedp)
1280 enum machine_mode mode;
1281 rtx x;
1282 int unsignedp;
1284 return convert_modes (mode, VOIDmode, x, unsignedp);
1287 /* Return an rtx for a value that would result
1288 from converting X from mode OLDMODE to mode MODE.
1289 Both modes may be floating, or both integer.
1290 UNSIGNEDP is nonzero if X is an unsigned value.
1292 This can be done by referring to a part of X in place
1293 or by copying to a new temporary with conversion.
1295 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1297 This function *must not* call protect_from_queue
1298 except when putting X into an insn (in which case convert_move does it). */
1301 convert_modes (mode, oldmode, x, unsignedp)
1302 enum machine_mode mode, oldmode;
1303 rtx x;
1304 int unsignedp;
1306 register rtx temp;
1308 /* If FROM is a SUBREG that indicates that we have already done at least
1309 the required extension, strip it. */
1311 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1312 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1313 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1314 x = gen_lowpart (mode, x);
1316 if (GET_MODE (x) != VOIDmode)
1317 oldmode = GET_MODE (x);
1319 if (mode == oldmode)
1320 return x;
1322 /* There is one case that we must handle specially: If we are converting
1323 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1324 we are to interpret the constant as unsigned, gen_lowpart will do
1325 the wrong thing if the constant appears negative. What we want to do is
1326 make the high-order word of the constant zero, not all ones. */
1328 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1329 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1330 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1332 HOST_WIDE_INT val = INTVAL (x);
1334 if (oldmode != VOIDmode
1335 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1337 int width = GET_MODE_BITSIZE (oldmode);
1339 /* We need to zero extend VAL. */
1340 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1343 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1346 /* We can do this with a gen_lowpart if both desired and current modes
1347 are integer, and this is either a constant integer, a register, or a
1348 non-volatile MEM. Except for the constant case where MODE is no
1349 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1351 if ((GET_CODE (x) == CONST_INT
1352 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1353 || (GET_MODE_CLASS (mode) == MODE_INT
1354 && GET_MODE_CLASS (oldmode) == MODE_INT
1355 && (GET_CODE (x) == CONST_DOUBLE
1356 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1357 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1358 && direct_load[(int) mode])
1359 || (GET_CODE (x) == REG
1360 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1361 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1363 /* ?? If we don't know OLDMODE, we have to assume here that
1364 X does not need sign- or zero-extension. This may not be
1365 the case, but it's the best we can do. */
1366 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1367 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1369 HOST_WIDE_INT val = INTVAL (x);
1370 int width = GET_MODE_BITSIZE (oldmode);
1372 /* We must sign or zero-extend in this case. Start by
1373 zero-extending, then sign extend if we need to. */
1374 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1375 if (! unsignedp
1376 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1377 val |= (HOST_WIDE_INT) (-1) << width;
1379 return GEN_INT (val);
1382 return gen_lowpart (mode, x);
1385 temp = gen_reg_rtx (mode);
1386 convert_move (temp, x, unsignedp);
1387 return temp;
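/* Illustrative sketch (hypothetical values, never compiled): unlike
   convert_move, these return an rtx and may avoid emitting any insns,
   for example by simply narrowing a constant.  */
#if 0
static void
example_convert_modes ()
{
  /* Narrow an SImode constant to HImode; no insns are needed.  */
  rtx narrowed = convert_modes (HImode, SImode, GEN_INT (0x1234), 1);

  /* Widen a HImode pseudo to SImode with sign extension; OLDMODE can be
     read from X here, so convert_to_mode suffices.  */
  rtx widened = convert_to_mode (SImode, gen_reg_rtx (HImode), 0);
}
#endif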
1390 /* This macro is used to determine what the largest unit size that
1391 move_by_pieces can use is. */
1393 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1394 move efficiently, as opposed to MOVE_MAX which is the maximum
1395 number of bytes we can move with a single instruction. */
1397 #ifndef MOVE_MAX_PIECES
1398 #define MOVE_MAX_PIECES MOVE_MAX
1399 #endif
1401 /* Generate several move instructions to copy LEN bytes
1402 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1403 The caller must pass FROM and TO
1404 through protect_from_queue before calling.
1405 ALIGN is maximum alignment we can assume. */
1407 void
1408 move_by_pieces (to, from, len, align)
1409 rtx to, from;
1410 unsigned HOST_WIDE_INT len;
1411 unsigned int align;
1413 struct move_by_pieces data;
1414 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1415 unsigned int max_size = MOVE_MAX_PIECES + 1;
1416 enum machine_mode mode = VOIDmode, tmode;
1417 enum insn_code icode;
1419 data.offset = 0;
1420 data.to_addr = to_addr;
1421 data.from_addr = from_addr;
1422 data.to = to;
1423 data.from = from;
1424 data.autinc_to
1425 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1426 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1427 data.autinc_from
1428 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1429 || GET_CODE (from_addr) == POST_INC
1430 || GET_CODE (from_addr) == POST_DEC);
1432 data.explicit_inc_from = 0;
1433 data.explicit_inc_to = 0;
1434 data.reverse
1435 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1436 if (data.reverse) data.offset = len;
1437 data.len = len;
1439 /* If copying requires more than two move insns,
1440 copy addresses to registers (to make displacements shorter)
1441 and use post-increment if available. */
1442 if (!(data.autinc_from && data.autinc_to)
1443 && move_by_pieces_ninsns (len, align) > 2)
1445 /* Find the mode of the largest move... */
1446 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1447 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1448 if (GET_MODE_SIZE (tmode) < max_size)
1449 mode = tmode;
1451 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1453 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1454 data.autinc_from = 1;
1455 data.explicit_inc_from = -1;
1457 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1459 data.from_addr = copy_addr_to_reg (from_addr);
1460 data.autinc_from = 1;
1461 data.explicit_inc_from = 1;
1463 if (!data.autinc_from && CONSTANT_P (from_addr))
1464 data.from_addr = copy_addr_to_reg (from_addr);
1465 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1467 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1468 data.autinc_to = 1;
1469 data.explicit_inc_to = -1;
1471 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1473 data.to_addr = copy_addr_to_reg (to_addr);
1474 data.autinc_to = 1;
1475 data.explicit_inc_to = 1;
1477 if (!data.autinc_to && CONSTANT_P (to_addr))
1478 data.to_addr = copy_addr_to_reg (to_addr);
1481 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1482 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1483 align = MOVE_MAX * BITS_PER_UNIT;
1485 /* First move what we can in the largest integer mode, then go to
1486 successively smaller modes. */
1488 while (max_size > 1)
1490 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1491 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1492 if (GET_MODE_SIZE (tmode) < max_size)
1493 mode = tmode;
1495 if (mode == VOIDmode)
1496 break;
1498 icode = mov_optab->handlers[(int) mode].insn_code;
1499 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1500 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1502 max_size = GET_MODE_SIZE (mode);
1505 /* The code above should have handled everything. */
1506 if (data.len > 0)
1507 abort ();
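/* Illustrative sketch (hypothetical operands, never compiled): copy a
   16-byte block between two BLKmode MEMs known to be word-aligned.
   ALIGN is given in bits, and both MEMs must already have been passed
   through protect_from_queue by the caller.  */
#if 0
static void
example_move_by_pieces (dest_mem, src_mem)
     rtx dest_mem, src_mem;
{
  move_by_pieces (dest_mem, src_mem, 16, BITS_PER_WORD);
}
#endif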
1510 /* Return number of insns required to move L bytes by pieces.
1511 ALIGN (in bits) is the maximum alignment we can assume. */
1513 static unsigned HOST_WIDE_INT
1514 move_by_pieces_ninsns (l, align)
1515 unsigned HOST_WIDE_INT l;
1516 unsigned int align;
1518 unsigned HOST_WIDE_INT n_insns = 0;
1519 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1521 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1522 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1523 align = MOVE_MAX * BITS_PER_UNIT;
1525 while (max_size > 1)
1527 enum machine_mode mode = VOIDmode, tmode;
1528 enum insn_code icode;
1530 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1531 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1532 if (GET_MODE_SIZE (tmode) < max_size)
1533 mode = tmode;
1535 if (mode == VOIDmode)
1536 break;
1538 icode = mov_optab->handlers[(int) mode].insn_code;
1539 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1540 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1542 max_size = GET_MODE_SIZE (mode);
1545 return n_insns;
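/* Worked example (illustrative): on a 32-bit target with word-aligned
   operands, L = 10 decomposes into 2 SImode moves (8 bytes) plus 1 HImode
   move (2 bytes), so this returns 3; MOVE_BY_PIECES_P then compares that
   3 against MOVE_RATIO to decide whether to expand the copy inline.  */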
1548 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1549 with move instructions for mode MODE. GENFUN is the gen_... function
1550 to make a move insn for that mode. DATA has all the other info. */
1552 static void
1553 move_by_pieces_1 (genfun, mode, data)
1554 rtx (*genfun) PARAMS ((rtx, ...));
1555 enum machine_mode mode;
1556 struct move_by_pieces *data;
1558 unsigned int size = GET_MODE_SIZE (mode);
1559 rtx to1, from1;
1561 while (data->len >= size)
1563 if (data->reverse)
1564 data->offset -= size;
1566 if (data->autinc_to)
1568 to1 = gen_rtx_MEM (mode, data->to_addr);
1569 MEM_COPY_ATTRIBUTES (to1, data->to);
1571 else
1572 to1 = change_address (data->to, mode,
1573 plus_constant (data->to_addr, data->offset));
1575 if (data->autinc_from)
1577 from1 = gen_rtx_MEM (mode, data->from_addr);
1578 MEM_COPY_ATTRIBUTES (from1, data->from);
1580 else
1581 from1 = change_address (data->from, mode,
1582 plus_constant (data->from_addr, data->offset));
1584 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1585 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1586 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1587 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1589 emit_insn ((*genfun) (to1, from1));
1591 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1592 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1593 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1594 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1596 if (! data->reverse)
1597 data->offset += size;
1599 data->len -= size;
1603 /* Emit code to move a block Y to a block X.
1604 This may be done with string-move instructions,
1605 with multiple scalar move instructions, or with a library call.
1607 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1608 with mode BLKmode.
1609 SIZE is an rtx that says how long they are.
1610 ALIGN is the maximum alignment we can assume they have.
1612 Return the address of the new block, if memcpy is called and returns it,
1613 0 otherwise. */
1616 emit_block_move (x, y, size, align)
1617 rtx x, y;
1618 rtx size;
1619 unsigned int align;
1621 rtx retval = 0;
1622 #ifdef TARGET_MEM_FUNCTIONS
1623 static tree fn;
1624 tree call_expr, arg_list;
1625 #endif
1627 if (GET_MODE (x) != BLKmode)
1628 abort ();
1630 if (GET_MODE (y) != BLKmode)
1631 abort ();
1633 x = protect_from_queue (x, 1);
1634 y = protect_from_queue (y, 0);
1635 size = protect_from_queue (size, 0);
1637 if (GET_CODE (x) != MEM)
1638 abort ();
1639 if (GET_CODE (y) != MEM)
1640 abort ();
1641 if (size == 0)
1642 abort ();
1644 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1645 move_by_pieces (x, y, INTVAL (size), align);
1646 else
1648 /* Try the most limited insn first, because there's no point
1649 including more than one in the machine description unless
1650 the more limited one has some advantage. */
1652 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1653 enum machine_mode mode;
1655 /* Since this is a move insn, we don't care about volatility. */
1656 volatile_ok = 1;
1658 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1659 mode = GET_MODE_WIDER_MODE (mode))
1661 enum insn_code code = movstr_optab[(int) mode];
1662 insn_operand_predicate_fn pred;
1664 if (code != CODE_FOR_nothing
1665 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1666 here because if SIZE is less than the mode mask, as it is
1667 returned by the macro, it will definitely be less than the
1668 actual mode mask. */
1669 && ((GET_CODE (size) == CONST_INT
1670 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1671 <= (GET_MODE_MASK (mode) >> 1)))
1672 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1673 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1674 || (*pred) (x, BLKmode))
1675 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1676 || (*pred) (y, BLKmode))
1677 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1678 || (*pred) (opalign, VOIDmode)))
1680 rtx op2;
1681 rtx last = get_last_insn ();
1682 rtx pat;
1684 op2 = convert_to_mode (mode, size, 1);
1685 pred = insn_data[(int) code].operand[2].predicate;
1686 if (pred != 0 && ! (*pred) (op2, mode))
1687 op2 = copy_to_mode_reg (mode, op2);
1689 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1690 if (pat)
1692 emit_insn (pat);
1693 volatile_ok = 0;
1694 return 0;
1696 else
1697 delete_insns_since (last);
1701 volatile_ok = 0;
1703 /* X, Y, or SIZE may have been passed through protect_from_queue.
1705 It is unsafe to save the value generated by protect_from_queue
1706 and reuse it later. Consider what happens if emit_queue is
1707 called before the return value from protect_from_queue is used.
1709 Expansion of the CALL_EXPR below will call emit_queue before
1710 we are finished emitting RTL for argument setup. So if we are
1711 not careful we could get the wrong value for an argument.
1713 To avoid this problem we go ahead and emit code to copy X, Y &
1714 SIZE into new pseudos. We can then place those new pseudos
1715 into an RTL_EXPR and use them later, even after a call to
1716 emit_queue.
1718 Note this is not strictly needed for library calls since they
1719 do not call emit_queue before loading their arguments. However,
1720 we may need to have library calls call emit_queue in the future
1721 since failing to do so could cause problems for targets which
1722 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1723 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1724 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1726 #ifdef TARGET_MEM_FUNCTIONS
1727 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1728 #else
1729 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1730 TREE_UNSIGNED (integer_type_node));
1731 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1732 #endif
1734 #ifdef TARGET_MEM_FUNCTIONS
1735 /* It is incorrect to use the libcall calling conventions to call
1736 memcpy in this context.
1738 This could be a user call to memcpy and the user may wish to
1739 examine the return value from memcpy.
1741 For targets where libcalls and normal calls have different conventions
1742 for returning pointers, we could end up generating incorrect code.
1744 So instead of using a libcall sequence we build up a suitable
1745 CALL_EXPR and expand the call in the normal fashion. */
1746 if (fn == NULL_TREE)
1748 tree fntype;
1750 /* This was copied from except.c, I don't know if all this is
1751 necessary in this context or not. */
1752 fn = get_identifier ("memcpy");
1753 push_obstacks_nochange ();
1754 end_temporary_allocation ();
1755 fntype = build_pointer_type (void_type_node);
1756 fntype = build_function_type (fntype, NULL_TREE);
1757 fn = build_decl (FUNCTION_DECL, fn, fntype);
1758 ggc_add_tree_root (&fn, 1);
1759 DECL_EXTERNAL (fn) = 1;
1760 TREE_PUBLIC (fn) = 1;
1761 DECL_ARTIFICIAL (fn) = 1;
1762 make_decl_rtl (fn, NULL_PTR, 1);
1763 assemble_external (fn);
1764 pop_obstacks ();
1767 /* We need to make an argument list for the function call.
1769 memcpy has three arguments, the first two are void * addresses and
1770 the last is a size_t byte count for the copy. */
1771 arg_list
1772 = build_tree_list (NULL_TREE,
1773 make_tree (build_pointer_type (void_type_node), x));
1774 TREE_CHAIN (arg_list)
1775 = build_tree_list (NULL_TREE,
1776 make_tree (build_pointer_type (void_type_node), y));
1777 TREE_CHAIN (TREE_CHAIN (arg_list))
1778 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1779 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1781 /* Now we have to build up the CALL_EXPR itself. */
1782 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1783 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1784 call_expr, arg_list, NULL_TREE);
1785 TREE_SIDE_EFFECTS (call_expr) = 1;
1787 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1788 #else
1789 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1790 VOIDmode, 3, y, Pmode, x, Pmode,
1791 convert_to_mode (TYPE_MODE (integer_type_node), size,
1792 TREE_UNSIGNED (integer_type_node)),
1793 TYPE_MODE (integer_type_node));
1794 #endif
1797 return retval;
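/* Illustrative sketch (hypothetical operands, never compiled): copy a
   32-byte BLKmode object.  Depending on the target this becomes a movstr
   pattern, a move_by_pieces expansion, or a call to memcpy/bcopy; only in
   the library-call case can a nonzero address be returned.  */
#if 0
static void
example_emit_block_move (dest, src)
     rtx dest, src;
{
  emit_block_move (dest, src, GEN_INT (32), 32 /* bit alignment */);
}
#endif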
1800 /* Copy all or part of a value X into registers starting at REGNO.
1801 The number of registers to be filled is NREGS. */
1803 void
1804 move_block_to_reg (regno, x, nregs, mode)
1805 int regno;
1806 rtx x;
1807 int nregs;
1808 enum machine_mode mode;
1810 int i;
1811 #ifdef HAVE_load_multiple
1812 rtx pat;
1813 rtx last;
1814 #endif
1816 if (nregs == 0)
1817 return;
1819 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1820 x = validize_mem (force_const_mem (mode, x));
1822 /* See if the machine can do this with a load multiple insn. */
1823 #ifdef HAVE_load_multiple
1824 if (HAVE_load_multiple)
1826 last = get_last_insn ();
1827 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1828 GEN_INT (nregs));
1829 if (pat)
1831 emit_insn (pat);
1832 return;
1834 else
1835 delete_insns_since (last);
1837 #endif
1839 for (i = 0; i < nregs; i++)
1840 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1841 operand_subword_force (x, i, mode));
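/* Illustrative sketch (hypothetical register number, never compiled):
   load a two-word DImode value into the consecutive hard registers
   starting at register 3, e.g. while setting up a register-passed
   argument.  */
#if 0
static void
example_move_block_to_reg (x)
     rtx x;
{
  move_block_to_reg (3, x, 2, DImode);
}
#endif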
1844 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1845 The number of registers to be filled is NREGS. SIZE indicates the number
1846 of bytes in the object X. */
1848 void
1849 move_block_from_reg (regno, x, nregs, size)
1850 int regno;
1851 rtx x;
1852 int nregs;
1853 int size;
1855 int i;
1856 #ifdef HAVE_store_multiple
1857 rtx pat;
1858 rtx last;
1859 #endif
1860 enum machine_mode mode;
1862 /* If SIZE is that of a mode no bigger than a word, just use that
1863 mode's store operation. */
1864 if (size <= UNITS_PER_WORD
1865 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1867 emit_move_insn (change_address (x, mode, NULL),
1868 gen_rtx_REG (mode, regno));
1869 return;
1872 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1873 to the left before storing to memory. Note that the previous test
1874 doesn't handle all cases (e.g. SIZE == 3). */
1875 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1877 rtx tem = operand_subword (x, 0, 1, BLKmode);
1878 rtx shift;
1880 if (tem == 0)
1881 abort ();
1883 shift = expand_shift (LSHIFT_EXPR, word_mode,
1884 gen_rtx_REG (word_mode, regno),
1885 build_int_2 ((UNITS_PER_WORD - size)
1886 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1887 emit_move_insn (tem, shift);
1888 return;
1891 /* See if the machine can do this with a store multiple insn. */
1892 #ifdef HAVE_store_multiple
1893 if (HAVE_store_multiple)
1895 last = get_last_insn ();
1896 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1897 GEN_INT (nregs));
1898 if (pat)
1900 emit_insn (pat);
1901 return;
1903 else
1904 delete_insns_since (last);
1906 #endif
1908 for (i = 0; i < nregs; i++)
1910 rtx tem = operand_subword (x, i, 1, BLKmode);
1912 if (tem == 0)
1913 abort ();
1915 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1919 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1920 registers represented by a PARALLEL. SSIZE represents the total size of
1921 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1922 SRC in bits. */
1923 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1924 the balance will be in what would be the low-order memory addresses, i.e.
1925 left justified for big endian, right justified for little endian. This
1926 happens to be true for the targets currently using this support. If this
1927 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1928 would be needed. */
1930 void
1931 emit_group_load (dst, orig_src, ssize, align)
1932 rtx dst, orig_src;
1933 unsigned int align;
1934 int ssize;
1936 rtx *tmps, src;
1937 int start, i;
1939 if (GET_CODE (dst) != PARALLEL)
1940 abort ();
1942 /* Check for a NULL entry, used to indicate that the parameter goes
1943 both on the stack and in registers. */
1944 if (XEXP (XVECEXP (dst, 0, 0), 0))
1945 start = 0;
1946 else
1947 start = 1;
1949 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1951 /* If we won't be loading directly from memory, protect the real source
1952 from strange tricks we might play. */
1953 src = orig_src;
1954 if (GET_CODE (src) != MEM && ! CONSTANT_P (src))
1956 if (GET_MODE (src) == VOIDmode)
1957 src = gen_reg_rtx (GET_MODE (dst));
1958 else
1959 src = gen_reg_rtx (GET_MODE (orig_src));
1960 emit_move_insn (src, orig_src);
1963 /* Process the pieces. */
1964 for (i = start; i < XVECLEN (dst, 0); i++)
1966 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1967 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1968 unsigned int bytelen = GET_MODE_SIZE (mode);
1969 int shift = 0;
1971 /* Handle trailing fragments that run over the size of the struct. */
1972 if (ssize >= 0 && bytepos + bytelen > ssize)
1974 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1975 bytelen = ssize - bytepos;
1976 if (bytelen <= 0)
1977 abort ();
1980 /* Optimize the access just a bit. */
1981 if (GET_CODE (src) == MEM
1982 && align >= GET_MODE_ALIGNMENT (mode)
1983 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1984 && bytelen == GET_MODE_SIZE (mode))
1986 tmps[i] = gen_reg_rtx (mode);
1987 emit_move_insn (tmps[i],
1988 change_address (src, mode,
1989 plus_constant (XEXP (src, 0),
1990 bytepos)));
1992 else if (GET_CODE (src) == CONCAT)
1994 if (bytepos == 0
1995 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1996 tmps[i] = XEXP (src, 0);
1997 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1998 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1999 tmps[i] = XEXP (src, 1);
2000 else
2001 abort ();
2003 else if ((CONSTANT_P (src)
2004 && (GET_MODE (src) == VOIDmode || GET_MODE (src) == mode))
2005 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2006 tmps[i] = src;
2007 else
2008 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2009 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2010 mode, mode, align, ssize);
2012 if (BYTES_BIG_ENDIAN && shift)
2013 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2014 tmps[i], 0, OPTAB_WIDEN);
2017 emit_queue ();
2019 /* Copy the extracted pieces into the proper (probable) hard regs. */
2020 for (i = start; i < XVECLEN (dst, 0); i++)
2021 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
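/* Illustrative sketch (not part of the original source): a typical caller
   loads an argument that the target described as a PARALLEL (for instance
   via FUNCTION_ARG).  Assuming ARG_RTX is that PARALLEL, ARG_MEM a BLKmode
   MEM holding the value, and ARG_TYPE its tree type, the call would look
   roughly like

     emit_group_load (arg_rtx, arg_mem, int_size_in_bytes (arg_type),
                      TYPE_ALIGN (arg_type));

   ARG_RTX, ARG_MEM and ARG_TYPE are hypothetical names used only here.  */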
2024 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2025 registers represented by a PARALLEL. SSIZE represents the total size of
2026 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
2028 void
2029 emit_group_store (orig_dst, src, ssize, align)
2030 rtx orig_dst, src;
2031 int ssize;
2032 unsigned int align;
2034 rtx *tmps, dst;
2035 int start, i;
2037 if (GET_CODE (src) != PARALLEL)
2038 abort ();
2040 /* Check for a NULL entry, used to indicate that the parameter goes
2041 both on the stack and in registers. */
2042 if (XEXP (XVECEXP (src, 0, 0), 0))
2043 start = 0;
2044 else
2045 start = 1;
2047 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2049 /* Copy the (probable) hard regs into pseudos. */
2050 for (i = start; i < XVECLEN (src, 0); i++)
2052 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2053 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2054 emit_move_insn (tmps[i], reg);
2056 emit_queue ();
2058 /* If we won't be storing directly into memory, protect the real destination
2059 from strange tricks we might play. */
2060 dst = orig_dst;
2061 if (GET_CODE (dst) == PARALLEL)
2063 rtx temp;
2065 /* We can get a PARALLEL dst if there is a conditional expression in
2066 a return statement. In that case, the dst and src are the same,
2067 so no action is necessary. */
2068 if (rtx_equal_p (dst, src))
2069 return;
2071 /* It is unclear if we can ever reach here, but we may as well handle
2072 it. Allocate a temporary, and split this into a store/load to/from
2073 the temporary. */
2075 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2076 emit_group_store (temp, src, ssize, align);
2077 emit_group_load (dst, temp, ssize, align);
2078 return;
2080 else if (GET_CODE (dst) != MEM)
2082 dst = gen_reg_rtx (GET_MODE (orig_dst));
2083 /* Make life a bit easier for combine. */
2084 emit_move_insn (dst, const0_rtx);
2087 /* Process the pieces. */
2088 for (i = start; i < XVECLEN (src, 0); i++)
2090 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2091 enum machine_mode mode = GET_MODE (tmps[i]);
2092 unsigned int bytelen = GET_MODE_SIZE (mode);
2094 /* Handle trailing fragments that run over the size of the struct. */
2095 if (ssize >= 0 && bytepos + bytelen > ssize)
2097 if (BYTES_BIG_ENDIAN)
2099 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2100 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2101 tmps[i], 0, OPTAB_WIDEN);
2103 bytelen = ssize - bytepos;
2106 /* Optimize the access just a bit. */
2107 if (GET_CODE (dst) == MEM
2108 && align >= GET_MODE_ALIGNMENT (mode)
2109 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2110 && bytelen == GET_MODE_SIZE (mode))
2111 emit_move_insn (change_address (dst, mode,
2112 plus_constant (XEXP (dst, 0),
2113 bytepos)),
2114 tmps[i]);
2115 else
2116 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2117 mode, tmps[i], align, ssize);
2120 emit_queue ();
2122 /* Copy from the pseudo into the (probable) hard reg. */
2123 if (GET_CODE (dst) == REG)
2124 emit_move_insn (orig_dst, dst);
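/* Illustrative sketch (not part of the original source): the mirror image
   of emit_group_load, used for example when a value arrives in a register
   group and must be copied into memory.  Assuming RESULT_RTX is the
   PARALLEL and RESULT_TYPE its tree type:

     emit_group_store (target_mem, result_rtx,
                       int_size_in_bytes (result_type),
                       TYPE_ALIGN (result_type));

   TARGET_MEM, RESULT_RTX and RESULT_TYPE are hypothetical names.  */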
2127 /* Generate code to copy a BLKmode object of TYPE out of a
2128 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2129 is null, a stack temporary is created. TGTBLK is returned.
2131 The primary purpose of this routine is to handle functions
2132 that return BLKmode structures in registers. Some machines
2133 (the PA for example) want to return all small structures
2134 in registers regardless of the structure's alignment. */
2137 copy_blkmode_from_reg (tgtblk, srcreg, type)
2138 rtx tgtblk;
2139 rtx srcreg;
2140 tree type;
2142 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2143 rtx src = NULL, dst = NULL;
2144 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2145 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2147 if (tgtblk == 0)
2149 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2150 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2151 preserve_temp_slots (tgtblk);
2154 /* This code assumes srcreg is at least a full word. If it isn't,
2155 copy it into a new pseudo which is a full word. */
2156 if (GET_MODE (srcreg) != BLKmode
2157 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2158 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2160 /* Structures whose size is not a multiple of a word are aligned
2161 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2162 machine, this means we must skip the empty high order bytes when
2163 calculating the bit offset. */
2164 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2165 big_endian_correction
2166 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2168 /* Copy the structure BITSIZE bits at a time.
2170 We could probably emit more efficient code for machines which do not use
2171 strict alignment, but it doesn't seem worth the effort at the current
2172 time. */
2173 for (bitpos = 0, xbitpos = big_endian_correction;
2174 bitpos < bytes * BITS_PER_UNIT;
2175 bitpos += bitsize, xbitpos += bitsize)
2177 /* We need a new source operand each time xbitpos is on a
2178 word boundary and when xbitpos == big_endian_correction
2179 (the first time through). */
2180 if (xbitpos % BITS_PER_WORD == 0
2181 || xbitpos == big_endian_correction)
2182 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, BLKmode);
2184 /* We need a new destination operand each time bitpos is on
2185 a word boundary. */
2186 if (bitpos % BITS_PER_WORD == 0)
2187 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2189 /* Use xbitpos for the source extraction (right justified) and
2190 bitpos for the destination store (left justified). */
2191 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2192 extract_bit_field (src, bitsize,
2193 xbitpos % BITS_PER_WORD, 1,
2194 NULL_RTX, word_mode, word_mode,
2195 bitsize, BITS_PER_WORD),
2196 bitsize, BITS_PER_WORD);
2199 return tgtblk;
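/* Illustrative sketch (not part of the original source): expanding a call
   whose BLKmode return value arrives in registers might use this routine
   roughly as

     target = copy_blkmode_from_reg (target, value_reg, TREE_TYPE (exp));

   Passing a null TARGET lets the routine allocate the stack temporary
   itself; VALUE_REG and EXP are hypothetical names for this example.  */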
2202 /* Add a USE expression for REG to the (possibly empty) list pointed
2203 to by CALL_FUSAGE. REG must denote a hard register. */
2205 void
2206 use_reg (call_fusage, reg)
2207 rtx *call_fusage, reg;
2209 if (GET_CODE (reg) != REG
2210 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2211 abort ();
2213 *call_fusage
2214 = gen_rtx_EXPR_LIST (VOIDmode,
2215 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2218 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2219 starting at REGNO. All of these registers must be hard registers. */
2221 void
2222 use_regs (call_fusage, regno, nregs)
2223 rtx *call_fusage;
2224 int regno;
2225 int nregs;
2227 int i;
2229 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2230 abort ();
2232 for (i = 0; i < nregs; i++)
2233 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2236 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2237 PARALLEL REGS. This is for calls that pass values in multiple
2238 non-contiguous locations. The Irix 6 ABI has examples of this. */
2240 void
2241 use_group_regs (call_fusage, regs)
2242 rtx *call_fusage;
2243 rtx regs;
2245 int i;
2247 for (i = 0; i < XVECLEN (regs, 0); i++)
2249 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2251 /* A NULL entry means the parameter goes both on the stack and in
2252 registers. This can also be a MEM for targets that pass values
2253 partially on the stack and partially in registers. */
2254 if (reg != 0 && GET_CODE (reg) == REG)
2255 use_reg (call_fusage, reg);
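/* Illustrative sketch (not part of the original source): call expansion
   accumulates these USEs into a list that is later attached to the call
   insn.  Starting from an empty list, the calls might look like

     rtx fusage = NULL_RTX;
     use_reg (&fusage, gen_rtx_REG (Pmode, 3));
     use_regs (&fusage, 4, 2);

   The hard register numbers 3, 4 and 5 are arbitrary and chosen purely
   for illustration; they must all be below FIRST_PSEUDO_REGISTER.  */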
2259 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2260 rtx with BLKmode). The caller must pass TO through protect_from_queue
2261 before calling. ALIGN is maximum alignment we can assume. */
2263 static void
2264 clear_by_pieces (to, len, align)
2265 rtx to;
2266 unsigned HOST_WIDE_INT len;
2267 unsigned int align;
2269 struct clear_by_pieces data;
2270 rtx to_addr = XEXP (to, 0);
2271 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2272 enum machine_mode mode = VOIDmode, tmode;
2273 enum insn_code icode;
2275 data.offset = 0;
2276 data.to_addr = to_addr;
2277 data.to = to;
2278 data.autinc_to
2279 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2280 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2282 data.explicit_inc_to = 0;
2283 data.reverse
2284 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2285 if (data.reverse)
2286 data.offset = len;
2287 data.len = len;
2289 /* If clearing requires more than two move insns,
2290 copy addresses to registers (to make displacements shorter)
2291 and use post-increment if available. */
2292 if (!data.autinc_to
2293 && move_by_pieces_ninsns (len, align) > 2)
2295 /* Determine the main mode we'll be using. */
2296 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2297 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2298 if (GET_MODE_SIZE (tmode) < max_size)
2299 mode = tmode;
2301 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2303 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2304 data.autinc_to = 1;
2305 data.explicit_inc_to = -1;
2308 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse
2309 && ! data.autinc_to)
2311 data.to_addr = copy_addr_to_reg (to_addr);
2312 data.autinc_to = 1;
2313 data.explicit_inc_to = 1;
2316 if ( !data.autinc_to && CONSTANT_P (to_addr))
2317 data.to_addr = copy_addr_to_reg (to_addr);
2320 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2321 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2322 align = MOVE_MAX * BITS_PER_UNIT;
2324 /* First move what we can in the largest integer mode, then go to
2325 successively smaller modes. */
2327 while (max_size > 1)
2329 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2330 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2331 if (GET_MODE_SIZE (tmode) < max_size)
2332 mode = tmode;
2334 if (mode == VOIDmode)
2335 break;
2337 icode = mov_optab->handlers[(int) mode].insn_code;
2338 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2339 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2341 max_size = GET_MODE_SIZE (mode);
2344 /* The code above should have handled everything. */
2345 if (data.len != 0)
2346 abort ();
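/* Worked example (not part of the original source): assuming a 32-bit
   target where MOVE_MAX_PIECES is 4 and the alignment permits each mode,
   clearing LEN == 7 bytes proceeds as follows: the loop first selects
   SImode and stores one zero word (4 bytes), then drops MAX_SIZE to 4 and
   clears 2 bytes in HImode, and finally clears the last byte in QImode,
   leaving data.len at 0 as the sanity check at the end of clear_by_pieces
   requires.  */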
2349 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2350 with move instructions for mode MODE. GENFUN is the gen_... function
2351 to make a move insn for that mode. DATA has all the other info. */
2353 static void
2354 clear_by_pieces_1 (genfun, mode, data)
2355 rtx (*genfun) PARAMS ((rtx, ...));
2356 enum machine_mode mode;
2357 struct clear_by_pieces *data;
2359 unsigned int size = GET_MODE_SIZE (mode);
2360 rtx to1;
2362 while (data->len >= size)
2364 if (data->reverse)
2365 data->offset -= size;
2367 if (data->autinc_to)
2369 to1 = gen_rtx_MEM (mode, data->to_addr);
2370 MEM_COPY_ATTRIBUTES (to1, data->to);
2372 else
2373 to1 = change_address (data->to, mode,
2374 plus_constant (data->to_addr, data->offset));
2376 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2377 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2379 emit_insn ((*genfun) (to1, const0_rtx));
2381 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2382 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2384 if (! data->reverse)
2385 data->offset += size;
2387 data->len -= size;
2391 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2392 its length in bytes and ALIGN is the maximum alignment we can assume it has.
2394 If we call a function that returns the length of the block, return it. */
2397 clear_storage (object, size, align)
2398 rtx object;
2399 rtx size;
2400 unsigned int align;
2402 #ifdef TARGET_MEM_FUNCTIONS
2403 static tree fn;
2404 tree call_expr, arg_list;
2405 #endif
2406 rtx retval = 0;
2408 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2409 just move a zero. Otherwise, do this a piece at a time. */
2410 if (GET_MODE (object) != BLKmode
2411 && GET_CODE (size) == CONST_INT
2412 && GET_MODE_SIZE (GET_MODE (object)) == INTVAL (size))
2413 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2414 else
2416 object = protect_from_queue (object, 1);
2417 size = protect_from_queue (size, 0);
2419 if (GET_CODE (size) == CONST_INT
2420 && MOVE_BY_PIECES_P (INTVAL (size), align))
2421 clear_by_pieces (object, INTVAL (size), align);
2422 else
2424 /* Try the most limited insn first, because there's no point
2425 including more than one in the machine description unless
2426 the more limited one has some advantage. */
2428 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2429 enum machine_mode mode;
2431 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2432 mode = GET_MODE_WIDER_MODE (mode))
2434 enum insn_code code = clrstr_optab[(int) mode];
2435 insn_operand_predicate_fn pred;
2437 if (code != CODE_FOR_nothing
2438 /* We don't need MODE to be narrower than
2439 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2440 the mode mask, as it is returned by the macro, it will
2441 definitely be less than the actual mode mask. */
2442 && ((GET_CODE (size) == CONST_INT
2443 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2444 <= (GET_MODE_MASK (mode) >> 1)))
2445 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2446 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2447 || (*pred) (object, BLKmode))
2448 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2449 || (*pred) (opalign, VOIDmode)))
2451 rtx op1;
2452 rtx last = get_last_insn ();
2453 rtx pat;
2455 op1 = convert_to_mode (mode, size, 1);
2456 pred = insn_data[(int) code].operand[1].predicate;
2457 if (pred != 0 && ! (*pred) (op1, mode))
2458 op1 = copy_to_mode_reg (mode, op1);
2460 pat = GEN_FCN ((int) code) (object, op1, opalign);
2461 if (pat)
2463 emit_insn (pat);
2464 return 0;
2466 else
2467 delete_insns_since (last);
2471 /* OBJECT or SIZE may have been passed through protect_from_queue.
2473 It is unsafe to save the value generated by protect_from_queue
2474 and reuse it later. Consider what happens if emit_queue is
2475 called before the return value from protect_from_queue is used.
2477 Expansion of the CALL_EXPR below will call emit_queue before
2478 we are finished emitting RTL for argument setup. So if we are
2479 not careful we could get the wrong value for an argument.
2481 To avoid this problem we go ahead and emit code to copy OBJECT
2482 and SIZE into new pseudos. We can then place those new pseudos
2483 into an RTL_EXPR and use them later, even after a call to
2484 emit_queue.
2486 Note this is not strictly needed for library calls since they
2487 do not call emit_queue before loading their arguments. However,
2488 we may need to have library calls call emit_queue in the future
2489 since failing to do so could cause problems for targets which
2490 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2491 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2493 #ifdef TARGET_MEM_FUNCTIONS
2494 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2495 #else
2496 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2497 TREE_UNSIGNED (integer_type_node));
2498 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2499 #endif
2501 #ifdef TARGET_MEM_FUNCTIONS
2502 /* It is incorrect to use the libcall calling conventions to call
2503 memset in this context.
2505 This could be a user call to memset and the user may wish to
2506 examine the return value from memset.
2508 For targets where libcalls and normal calls have different
2509 conventions for returning pointers, we could end up generating
2510 incorrect code.
2512 So instead of using a libcall sequence we build up a suitable
2513 CALL_EXPR and expand the call in the normal fashion. */
2514 if (fn == NULL_TREE)
2516 tree fntype;
2518 /* This was copied from except.c, I don't know if all this is
2519 necessary in this context or not. */
2520 fn = get_identifier ("memset");
2521 push_obstacks_nochange ();
2522 end_temporary_allocation ();
2523 fntype = build_pointer_type (void_type_node);
2524 fntype = build_function_type (fntype, NULL_TREE);
2525 fn = build_decl (FUNCTION_DECL, fn, fntype);
2526 ggc_add_tree_root (&fn, 1);
2527 DECL_EXTERNAL (fn) = 1;
2528 TREE_PUBLIC (fn) = 1;
2529 DECL_ARTIFICIAL (fn) = 1;
2530 make_decl_rtl (fn, NULL_PTR, 1);
2531 assemble_external (fn);
2532 pop_obstacks ();
2535 /* We need to make an argument list for the function call.
2537 memset has three arguments: the first is a void * address, the
2538 second an integer with the initialization value, the last is a
2539 size_t byte count for the copy. */
2540 arg_list
2541 = build_tree_list (NULL_TREE,
2542 make_tree (build_pointer_type (void_type_node),
2543 object));
2544 TREE_CHAIN (arg_list)
2545 = build_tree_list (NULL_TREE,
2546 make_tree (integer_type_node, const0_rtx));
2547 TREE_CHAIN (TREE_CHAIN (arg_list))
2548 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2549 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2551 /* Now we have to build up the CALL_EXPR itself. */
2552 call_expr = build1 (ADDR_EXPR,
2553 build_pointer_type (TREE_TYPE (fn)), fn);
2554 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2555 call_expr, arg_list, NULL_TREE);
2556 TREE_SIDE_EFFECTS (call_expr) = 1;
2558 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2559 #else
2560 emit_library_call (bzero_libfunc, LCT_NORMAL,
2561 VOIDmode, 2, object, Pmode, size,
2562 TYPE_MODE (integer_type_node));
2563 #endif
2567 return retval;
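/* Illustrative sketch (not part of the original source): zeroing a BLKmode
   object, for instance the untouched tail of an aggregate being
   initialized, might look like

     clear_storage (blk_mem, GEN_INT (int_size_in_bytes (type)),
                    TYPE_ALIGN (type));

   BLK_MEM and TYPE are hypothetical names; the return value is only
   interesting when the memset call path above was taken.  */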
2570 /* Generate code to copy Y into X.
2571 Both Y and X must have the same mode, except that
2572 Y can be a constant with VOIDmode.
2573 This mode cannot be BLKmode; use emit_block_move for that.
2575 Return the last instruction emitted. */
2578 emit_move_insn (x, y)
2579 rtx x, y;
2581 enum machine_mode mode = GET_MODE (x);
2583 x = protect_from_queue (x, 1);
2584 y = protect_from_queue (y, 0);
2586 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2587 abort ();
2589 /* Never force constant_p_rtx to memory. */
2590 if (GET_CODE (y) == CONSTANT_P_RTX)
2592 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2593 y = force_const_mem (mode, y);
2595 /* If X or Y are memory references, verify that their addresses are valid
2596 for the machine. */
2597 if (GET_CODE (x) == MEM
2598 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2599 && ! push_operand (x, GET_MODE (x)))
2600 || (flag_force_addr
2601 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2602 x = change_address (x, VOIDmode, XEXP (x, 0));
2604 if (GET_CODE (y) == MEM
2605 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2606 || (flag_force_addr
2607 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2608 y = change_address (y, VOIDmode, XEXP (y, 0));
2610 if (mode == BLKmode)
2611 abort ();
2613 return emit_move_insn_1 (x, y);
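/* Illustrative sketch (not part of the original source): the common way to
   materialize a constant in a fresh pseudo is simply

     rtx reg = gen_reg_rtx (SImode);
     emit_move_insn (reg, GEN_INT (42));

   emit_move_insn takes care of forcing an illegitimate constant into
   memory and of validating memory addresses, as the code above shows.  */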
2616 /* Low level part of emit_move_insn.
2617 Called just like emit_move_insn, but assumes X and Y
2618 are basically valid. */
2621 emit_move_insn_1 (x, y)
2622 rtx x, y;
2624 enum machine_mode mode = GET_MODE (x);
2625 enum machine_mode submode;
2626 enum mode_class class = GET_MODE_CLASS (mode);
2627 unsigned int i;
2629 if (mode >= MAX_MACHINE_MODE)
2630 abort ();
2632 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2633 return
2634 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2636 /* Expand complex moves by moving real part and imag part, if possible. */
2637 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2638 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2639 * BITS_PER_UNIT),
2640 (class == MODE_COMPLEX_INT
2641 ? MODE_INT : MODE_FLOAT),
2643 && (mov_optab->handlers[(int) submode].insn_code
2644 != CODE_FOR_nothing))
2646 /* Don't split destination if it is a stack push. */
2647 int stack = push_operand (x, GET_MODE (x));
2649 /* If this is a stack push, push the highpart first, so it
2650 will be in the argument order.
2652 In that case, change_address is used only to convert
2653 the mode, not to change the address. */
2654 if (stack)
2656 /* Note that the real part always precedes the imag part in memory
2657 regardless of machine's endianness. */
2658 #ifdef STACK_GROWS_DOWNWARD
2659 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2660 (gen_rtx_MEM (submode, XEXP (x, 0)),
2661 gen_imagpart (submode, y)));
2662 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2663 (gen_rtx_MEM (submode, XEXP (x, 0)),
2664 gen_realpart (submode, y)));
2665 #else
2666 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2667 (gen_rtx_MEM (submode, XEXP (x, 0)),
2668 gen_realpart (submode, y)));
2669 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2670 (gen_rtx_MEM (submode, XEXP (x, 0)),
2671 gen_imagpart (submode, y)));
2672 #endif
2674 else
2676 rtx realpart_x, realpart_y;
2677 rtx imagpart_x, imagpart_y;
2679 /* If this is a complex value with each part being smaller than a
2680 word, the usual calling sequence will likely pack the pieces into
2681 a single register. Unfortunately, SUBREG of hard registers only
2682 deals in terms of words, so we have a problem converting input
2683 arguments to the CONCAT of two registers that is used elsewhere
2684 for complex values. If this is before reload, we can copy it into
2685 memory and reload. FIXME, we should see about using extract and
2686 insert on integer registers, but complex short and complex char
2687 variables should be rarely used. */
2688 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2689 && (reload_in_progress | reload_completed) == 0)
2691 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2692 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2694 if (packed_dest_p || packed_src_p)
2696 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2697 ? MODE_FLOAT : MODE_INT);
2699 enum machine_mode reg_mode =
2700 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2702 if (reg_mode != BLKmode)
2704 rtx mem = assign_stack_temp (reg_mode,
2705 GET_MODE_SIZE (mode), 0);
2707 rtx cmem = change_address (mem, mode, NULL_RTX);
2709 cfun->cannot_inline = N_("function using short complex types cannot be inline");
2711 if (packed_dest_p)
2713 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2714 emit_move_insn_1 (cmem, y);
2715 return emit_move_insn_1 (sreg, mem);
2717 else
2719 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2720 emit_move_insn_1 (mem, sreg);
2721 return emit_move_insn_1 (x, cmem);
2727 realpart_x = gen_realpart (submode, x);
2728 realpart_y = gen_realpart (submode, y);
2729 imagpart_x = gen_imagpart (submode, x);
2730 imagpart_y = gen_imagpart (submode, y);
2732 /* Show the output dies here. This is necessary for SUBREGs
2733 of pseudos since we cannot track their lifetimes correctly;
2734 hard regs shouldn't appear here except as return values.
2735 We never want to emit such a clobber after reload. */
2736 if (x != y
2737 && ! (reload_in_progress || reload_completed)
2738 && (GET_CODE (realpart_x) == SUBREG
2739 || GET_CODE (imagpart_x) == SUBREG))
2741 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2744 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2745 (realpart_x, realpart_y));
2746 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2747 (imagpart_x, imagpart_y));
2750 return get_last_insn ();
2753 /* This will handle any multi-word mode that lacks a move_insn pattern.
2754 However, you will get better code if you define such patterns,
2755 even if they must turn into multiple assembler instructions. */
2756 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2758 rtx last_insn = 0;
2759 rtx seq, inner;
2760 int need_clobber;
2762 #ifdef PUSH_ROUNDING
2764 /* If X is a push on the stack, do the push now and replace
2765 X with a reference to the stack pointer. */
2766 if (push_operand (x, GET_MODE (x)))
2768 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2769 x = change_address (x, VOIDmode, stack_pointer_rtx);
2771 #endif
2773 /* If we are in reload, see if either operand is a MEM whose address
2774 is scheduled for replacement. */
2775 if (reload_in_progress && GET_CODE (x) == MEM
2776 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2778 rtx new = gen_rtx_MEM (GET_MODE (x), inner);
2780 MEM_COPY_ATTRIBUTES (new, x);
2781 x = new;
2783 if (reload_in_progress && GET_CODE (y) == MEM
2784 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2786 rtx new = gen_rtx_MEM (GET_MODE (y), inner);
2788 MEM_COPY_ATTRIBUTES (new, y);
2789 y = new;
2792 start_sequence ();
2794 need_clobber = 0;
2795 for (i = 0;
2796 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2797 i++)
2799 rtx xpart = operand_subword (x, i, 1, mode);
2800 rtx ypart = operand_subword (y, i, 1, mode);
2802 /* If we can't get a part of Y, put Y into memory if it is a
2803 constant. Otherwise, force it into a register. If we still
2804 can't get a part of Y, abort. */
2805 if (ypart == 0 && CONSTANT_P (y))
2807 y = force_const_mem (mode, y);
2808 ypart = operand_subword (y, i, 1, mode);
2810 else if (ypart == 0)
2811 ypart = operand_subword_force (y, i, mode);
2813 if (xpart == 0 || ypart == 0)
2814 abort ();
2816 need_clobber |= (GET_CODE (xpart) == SUBREG);
2818 last_insn = emit_move_insn (xpart, ypart);
2821 seq = gen_sequence ();
2822 end_sequence ();
2824 /* Show the output dies here. This is necessary for SUBREGs
2825 of pseudos since we cannot track their lifetimes correctly;
2826 hard regs shouldn't appear here except as return values.
2827 We never want to emit such a clobber after reload. */
2828 if (x != y
2829 && ! (reload_in_progress || reload_completed)
2830 && need_clobber != 0)
2832 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2835 emit_insn (seq);
2837 return last_insn;
2839 else
2840 abort ();
2843 /* Pushing data onto the stack. */
2845 /* Push a block of length SIZE (perhaps variable)
2846 and return an rtx to address the beginning of the block.
2847 Note that it is not possible for the value returned to be a QUEUED.
2848 The value may be virtual_outgoing_args_rtx.
2850 EXTRA is the number of bytes of padding to push in addition to SIZE.
2851 BELOW nonzero means this padding comes at low addresses;
2852 otherwise, the padding comes at high addresses. */
2855 push_block (size, extra, below)
2856 rtx size;
2857 int extra, below;
2859 register rtx temp;
2861 size = convert_modes (Pmode, ptr_mode, size, 1);
2862 if (CONSTANT_P (size))
2863 anti_adjust_stack (plus_constant (size, extra));
2864 else if (GET_CODE (size) == REG && extra == 0)
2865 anti_adjust_stack (size);
2866 else
2868 temp = copy_to_mode_reg (Pmode, size);
2869 if (extra != 0)
2870 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2871 temp, 0, OPTAB_LIB_WIDEN);
2872 anti_adjust_stack (temp);
2875 #ifndef STACK_GROWS_DOWNWARD
2876 #ifdef ARGS_GROW_DOWNWARD
2877 if (!ACCUMULATE_OUTGOING_ARGS)
2878 #else
2879 if (0)
2880 #endif
2881 #else
2882 if (1)
2883 #endif
2885 /* Return the lowest stack address when STACK or ARGS grow downward and
2886 we are not accumulating outgoing arguments (the c4x port uses such
2887 conventions). */
2888 temp = virtual_outgoing_args_rtx;
2889 if (extra != 0 && below)
2890 temp = plus_constant (temp, extra);
2892 else
2894 if (GET_CODE (size) == CONST_INT)
2895 temp = plus_constant (virtual_outgoing_args_rtx,
2896 -INTVAL (size) - (below ? 0 : extra));
2897 else if (extra != 0 && !below)
2898 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2899 negate_rtx (Pmode, plus_constant (size, extra)));
2900 else
2901 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2902 negate_rtx (Pmode, size));
2905 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
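/* Illustrative sketch (not part of the original source): a caller that
   needs scratch space in the outgoing argument area might write

     rtx block = push_block (GEN_INT (16), 0, 0);

   and then store through the returned address; the 16-byte size and the
   zero EXTRA/BELOW values are arbitrary choices for this example.  */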
2909 gen_push_operand ()
2911 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2914 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2915 block of SIZE bytes. */
2917 static rtx
2918 get_push_address (size)
2919 int size;
2921 register rtx temp;
2923 if (STACK_PUSH_CODE == POST_DEC)
2924 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2925 else if (STACK_PUSH_CODE == POST_INC)
2926 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2927 else
2928 temp = stack_pointer_rtx;
2930 return copy_to_reg (temp);
2933 /* Generate code to push X onto the stack, assuming it has mode MODE and
2934 type TYPE.
2935 MODE is redundant except when X is a CONST_INT (since they don't
2936 carry mode info).
2937 SIZE is an rtx for the size of data to be copied (in bytes),
2938 needed only if X is BLKmode.
2940 ALIGN is maximum alignment we can assume.
2942 If PARTIAL and REG are both nonzero, then copy that many of the first
2943 words of X into registers starting with REG, and push the rest of X.
2944 The amount of space pushed is decreased by PARTIAL words,
2945 rounded *down* to a multiple of PARM_BOUNDARY.
2946 REG must be a hard register in this case.
2947 If REG is zero but PARTIAL is not, take all other actions for an
2948 argument partially in registers, but do not actually load any
2949 registers.
2951 EXTRA is the amount in bytes of extra space to leave next to this arg.
2952 This is ignored if an argument block has already been allocated.
2954 On a machine that lacks real push insns, ARGS_ADDR is the address of
2955 the bottom of the argument block for this call. We use indexing off there
2956 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2957 argument block has not been preallocated.
2959 ARGS_SO_FAR is the size of args previously pushed for this call.
2961 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2962 for arguments passed in registers. If nonzero, it will be the number
2963 of bytes required. */
2965 void
2966 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2967 args_addr, args_so_far, reg_parm_stack_space,
2968 alignment_pad)
2969 register rtx x;
2970 enum machine_mode mode;
2971 tree type;
2972 rtx size;
2973 unsigned int align;
2974 int partial;
2975 rtx reg;
2976 int extra;
2977 rtx args_addr;
2978 rtx args_so_far;
2979 int reg_parm_stack_space;
2980 rtx alignment_pad;
2982 rtx xinner;
2983 enum direction stack_direction
2984 #ifdef STACK_GROWS_DOWNWARD
2985 = downward;
2986 #else
2987 = upward;
2988 #endif
2990 /* Decide where to pad the argument: `downward' for below,
2991 `upward' for above, or `none' for don't pad it.
2992 Default is below for small data on big-endian machines; else above. */
2993 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2995 /* Invert direction if stack is post-update. */
2996 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2997 if (where_pad != none)
2998 where_pad = (where_pad == downward ? upward : downward);
3000 xinner = x = protect_from_queue (x, 0);
3002 if (mode == BLKmode)
3004 /* Copy a block into the stack, entirely or partially. */
3006 register rtx temp;
3007 int used = partial * UNITS_PER_WORD;
3008 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3009 int skip;
3011 if (size == 0)
3012 abort ();
3014 used -= offset;
3016 /* USED is now the # of bytes we need not copy to the stack
3017 because registers will take care of them. */
3019 if (partial != 0)
3020 xinner = change_address (xinner, BLKmode,
3021 plus_constant (XEXP (xinner, 0), used));
3023 /* If the partial register-part of the arg counts in its stack size,
3024 skip the part of stack space corresponding to the registers.
3025 Otherwise, start copying to the beginning of the stack space,
3026 by setting SKIP to 0. */
3027 skip = (reg_parm_stack_space == 0) ? 0 : used;
3029 #ifdef PUSH_ROUNDING
3030 /* Do it with several push insns if that doesn't take lots of insns
3031 and if there is no difficulty with push insns that skip bytes
3032 on the stack for alignment purposes. */
3033 if (args_addr == 0
3034 && PUSH_ARGS
3035 && GET_CODE (size) == CONST_INT
3036 && skip == 0
3037 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3038 /* Here we avoid the case of a structure whose weak alignment
3039 forces many pushes of a small amount of data,
3040 and such small pushes do rounding that causes trouble. */
3041 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3042 || align >= BIGGEST_ALIGNMENT
3043 || PUSH_ROUNDING (align) == align)
3044 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3046 /* Push padding now if padding above and stack grows down,
3047 or if padding below and stack grows up.
3048 But if space already allocated, this has already been done. */
3049 if (extra && args_addr == 0
3050 && where_pad != none && where_pad != stack_direction)
3051 anti_adjust_stack (GEN_INT (extra));
3053 stack_pointer_delta += INTVAL (size) - used;
3054 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
3055 INTVAL (size) - used, align);
3057 if (current_function_check_memory_usage && ! in_check_memory_usage)
3059 rtx temp;
3061 in_check_memory_usage = 1;
3062 temp = get_push_address (INTVAL (size) - used);
3063 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3064 emit_library_call (chkr_copy_bitmap_libfunc,
3065 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3066 Pmode, XEXP (xinner, 0), Pmode,
3067 GEN_INT (INTVAL (size) - used),
3068 TYPE_MODE (sizetype));
3069 else
3070 emit_library_call (chkr_set_right_libfunc,
3071 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3072 Pmode, GEN_INT (INTVAL (size) - used),
3073 TYPE_MODE (sizetype),
3074 GEN_INT (MEMORY_USE_RW),
3075 TYPE_MODE (integer_type_node));
3076 in_check_memory_usage = 0;
3079 else
3080 #endif /* PUSH_ROUNDING */
3082 rtx target;
3084 /* Otherwise make space on the stack and copy the data
3085 to the address of that space. */
3087 /* Deduct words put into registers from the size we must copy. */
3088 if (partial != 0)
3090 if (GET_CODE (size) == CONST_INT)
3091 size = GEN_INT (INTVAL (size) - used);
3092 else
3093 size = expand_binop (GET_MODE (size), sub_optab, size,
3094 GEN_INT (used), NULL_RTX, 0,
3095 OPTAB_LIB_WIDEN);
3098 /* Get the address of the stack space.
3099 In this case, we do not deal with EXTRA separately.
3100 A single stack adjust will do. */
3101 if (! args_addr)
3103 temp = push_block (size, extra, where_pad == downward);
3104 extra = 0;
3106 else if (GET_CODE (args_so_far) == CONST_INT)
3107 temp = memory_address (BLKmode,
3108 plus_constant (args_addr,
3109 skip + INTVAL (args_so_far)));
3110 else
3111 temp = memory_address (BLKmode,
3112 plus_constant (gen_rtx_PLUS (Pmode,
3113 args_addr,
3114 args_so_far),
3115 skip));
3116 if (current_function_check_memory_usage && ! in_check_memory_usage)
3118 in_check_memory_usage = 1;
3119 target = copy_to_reg (temp);
3120 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3121 emit_library_call (chkr_copy_bitmap_libfunc,
3122 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3123 target, Pmode,
3124 XEXP (xinner, 0), Pmode,
3125 size, TYPE_MODE (sizetype));
3126 else
3127 emit_library_call (chkr_set_right_libfunc,
3128 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3129 target, Pmode,
3130 size, TYPE_MODE (sizetype),
3131 GEN_INT (MEMORY_USE_RW),
3132 TYPE_MODE (integer_type_node));
3133 in_check_memory_usage = 0;
3136 target = gen_rtx_MEM (BLKmode, temp);
3138 if (type != 0)
3140 set_mem_attributes (target, type, 1);
3141 /* Function incoming arguments may overlap with sibling call
3142 outgoing arguments and we cannot allow reordering of reads
3143 from function arguments with stores to outgoing arguments
3144 of sibling calls. */
3145 MEM_ALIAS_SET (target) = 0;
3148 /* TEMP is the address of the block. Copy the data there. */
3149 if (GET_CODE (size) == CONST_INT
3150 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3152 move_by_pieces (target, xinner, INTVAL (size), align);
3153 goto ret;
3155 else
3157 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3158 enum machine_mode mode;
3160 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3161 mode != VOIDmode;
3162 mode = GET_MODE_WIDER_MODE (mode))
3164 enum insn_code code = movstr_optab[(int) mode];
3165 insn_operand_predicate_fn pred;
3167 if (code != CODE_FOR_nothing
3168 && ((GET_CODE (size) == CONST_INT
3169 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3170 <= (GET_MODE_MASK (mode) >> 1)))
3171 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3172 && (!(pred = insn_data[(int) code].operand[0].predicate)
3173 || ((*pred) (target, BLKmode)))
3174 && (!(pred = insn_data[(int) code].operand[1].predicate)
3175 || ((*pred) (xinner, BLKmode)))
3176 && (!(pred = insn_data[(int) code].operand[3].predicate)
3177 || ((*pred) (opalign, VOIDmode))))
3179 rtx op2 = convert_to_mode (mode, size, 1);
3180 rtx last = get_last_insn ();
3181 rtx pat;
3183 pred = insn_data[(int) code].operand[2].predicate;
3184 if (pred != 0 && ! (*pred) (op2, mode))
3185 op2 = copy_to_mode_reg (mode, op2);
3187 pat = GEN_FCN ((int) code) (target, xinner,
3188 op2, opalign);
3189 if (pat)
3191 emit_insn (pat);
3192 goto ret;
3194 else
3195 delete_insns_since (last);
3200 if (!ACCUMULATE_OUTGOING_ARGS)
3202 /* If the source is referenced relative to the stack pointer,
3203 copy it to another register to stabilize it. We do not need
3204 to do this if we know that we won't be changing sp. */
3206 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3207 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3208 temp = copy_to_reg (temp);
3211 /* Make inhibit_defer_pop nonzero around the library call
3212 to force it to pop the bcopy-arguments right away. */
3213 NO_DEFER_POP;
3214 #ifdef TARGET_MEM_FUNCTIONS
3215 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3216 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3217 convert_to_mode (TYPE_MODE (sizetype),
3218 size, TREE_UNSIGNED (sizetype)),
3219 TYPE_MODE (sizetype));
3220 #else
3221 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3222 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3223 convert_to_mode (TYPE_MODE (integer_type_node),
3224 size,
3225 TREE_UNSIGNED (integer_type_node)),
3226 TYPE_MODE (integer_type_node));
3227 #endif
3228 OK_DEFER_POP;
3231 else if (partial > 0)
3233 /* Scalar partly in registers. */
3235 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3236 int i;
3237 int not_stack;
3238 /* # words of start of argument
3239 that we must make space for but need not store. */
3240 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3241 int args_offset = INTVAL (args_so_far);
3242 int skip;
3244 /* Push padding now if padding above and stack grows down,
3245 or if padding below and stack grows up.
3246 But if space already allocated, this has already been done. */
3247 if (extra && args_addr == 0
3248 && where_pad != none && where_pad != stack_direction)
3249 anti_adjust_stack (GEN_INT (extra));
3251 /* If we make space by pushing it, we might as well push
3252 the real data. Otherwise, we can leave OFFSET nonzero
3253 and leave the space uninitialized. */
3254 if (args_addr == 0)
3255 offset = 0;
3257 /* Now NOT_STACK gets the number of words that we don't need to
3258 allocate on the stack. */
3259 not_stack = partial - offset;
3261 /* If the partial register-part of the arg counts in its stack size,
3262 skip the part of stack space corresponding to the registers.
3263 Otherwise, start copying to the beginning of the stack space,
3264 by setting SKIP to 0. */
3265 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3267 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3268 x = validize_mem (force_const_mem (mode, x));
3270 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3271 SUBREGs of such registers are not allowed. */
3272 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3273 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3274 x = copy_to_reg (x);
3276 /* Loop over all the words allocated on the stack for this arg. */
3277 /* We can do it by words, because any scalar bigger than a word
3278 has a size a multiple of a word. */
3279 #ifndef PUSH_ARGS_REVERSED
3280 for (i = not_stack; i < size; i++)
3281 #else
3282 for (i = size - 1; i >= not_stack; i--)
3283 #endif
3284 if (i >= not_stack + offset)
3285 emit_push_insn (operand_subword_force (x, i, mode),
3286 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3287 0, args_addr,
3288 GEN_INT (args_offset + ((i - not_stack + skip)
3289 * UNITS_PER_WORD)),
3290 reg_parm_stack_space, alignment_pad);
3292 else
3294 rtx addr;
3295 rtx target = NULL_RTX;
3296 rtx dest;
3298 /* Push padding now if padding above and stack grows down,
3299 or if padding below and stack grows up.
3300 But if space already allocated, this has already been done. */
3301 if (extra && args_addr == 0
3302 && where_pad != none && where_pad != stack_direction)
3303 anti_adjust_stack (GEN_INT (extra));
3305 #ifdef PUSH_ROUNDING
3306 if (args_addr == 0 && PUSH_ARGS)
3308 addr = gen_push_operand ();
3309 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3311 else
3312 #endif
3314 if (GET_CODE (args_so_far) == CONST_INT)
3315 addr
3316 = memory_address (mode,
3317 plus_constant (args_addr,
3318 INTVAL (args_so_far)));
3319 else
3320 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3321 args_so_far));
3322 target = addr;
3325 dest = gen_rtx_MEM (mode, addr);
3326 if (type != 0)
3328 set_mem_attributes (dest, type, 1);
3329 /* Function incoming arguments may overlap with sibling call
3330 outgoing arguments and we cannot allow reordering of reads
3331 from function arguments with stores to outgoing arguments
3332 of sibling calls. */
3333 MEM_ALIAS_SET (dest) = 0;
3336 emit_move_insn (dest, x);
3338 if (current_function_check_memory_usage && ! in_check_memory_usage)
3340 in_check_memory_usage = 1;
3341 if (target == 0)
3342 target = get_push_address (GET_MODE_SIZE (mode));
3344 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3345 emit_library_call (chkr_copy_bitmap_libfunc,
3346 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3347 Pmode, XEXP (x, 0), Pmode,
3348 GEN_INT (GET_MODE_SIZE (mode)),
3349 TYPE_MODE (sizetype));
3350 else
3351 emit_library_call (chkr_set_right_libfunc,
3352 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3353 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3354 TYPE_MODE (sizetype),
3355 GEN_INT (MEMORY_USE_RW),
3356 TYPE_MODE (integer_type_node));
3357 in_check_memory_usage = 0;
3361 ret:
3362 /* If part should go in registers, copy that part
3363 into the appropriate registers. Do this now, at the end,
3364 since mem-to-mem copies above may do function calls. */
3365 if (partial > 0 && reg != 0)
3367 /* Handle calls that pass values in multiple non-contiguous locations.
3368 The Irix 6 ABI has examples of this. */
3369 if (GET_CODE (reg) == PARALLEL)
3370 emit_group_load (reg, x, -1, align); /* ??? size? */
3371 else
3372 move_block_to_reg (REGNO (reg), x, partial, mode);
3375 if (extra && args_addr == 0 && where_pad == stack_direction)
3376 anti_adjust_stack (GEN_INT (extra));
3378 if (alignment_pad && args_addr == 0)
3379 anti_adjust_stack (alignment_pad);
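/* Illustrative sketch (not part of the original source): pushing a simple
   word-sized argument with no partial-register component might look like

     emit_push_insn (val, SImode, integer_type_node, NULL_RTX,
                     PARM_BOUNDARY, 0, NULL_RTX, 0, args_addr, args_so_far,
                     reg_parm_stack_space, NULL_RTX);

   VAL and the trailing bookkeeping arguments are hypothetical placeholders;
   real calls are made from the argument-pushing machinery with values taken
   from the current call's argument data.  */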
3382 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3383 operations. */
3385 static rtx
3386 get_subtarget (x)
3387 rtx x;
3389 return ((x == 0
3390 /* Only registers can be subtargets. */
3391 || GET_CODE (x) != REG
3392 /* If the register is readonly, it can't be set more than once. */
3393 || RTX_UNCHANGING_P (x)
3394 /* Don't use hard regs to avoid extending their life. */
3395 || REGNO (x) < FIRST_PSEUDO_REGISTER
3396 /* Avoid subtargets inside loops,
3397 since they hide some invariant expressions. */
3398 || preserve_subexpressions_p ())
3399 ? 0 : x);
3402 /* Expand an assignment that stores the value of FROM into TO.
3403 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3404 (This may contain a QUEUED rtx;
3405 if the value is constant, this rtx is a constant.)
3406 Otherwise, the returned value is NULL_RTX.
3408 SUGGEST_REG is no longer actually used.
3409 It used to mean, copy the value through a register
3410 and return that register, if that is possible.
3411 We now use WANT_VALUE to decide whether to do this. */
3414 expand_assignment (to, from, want_value, suggest_reg)
3415 tree to, from;
3416 int want_value;
3417 int suggest_reg ATTRIBUTE_UNUSED;
3419 register rtx to_rtx = 0;
3420 rtx result;
3422 /* Don't crash if the lhs of the assignment was erroneous. */
3424 if (TREE_CODE (to) == ERROR_MARK)
3426 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3427 return want_value ? result : NULL_RTX;
3430 /* Assignment of a structure component needs special treatment
3431 if the structure component's rtx is not simply a MEM.
3432 Assignment of an array element at a constant index, and assignment of
3433 an array element in an unaligned packed structure field, has the same
3434 problem. */
3436 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3437 || TREE_CODE (to) == ARRAY_REF)
3439 enum machine_mode mode1;
3440 HOST_WIDE_INT bitsize, bitpos;
3441 tree offset;
3442 int unsignedp;
3443 int volatilep = 0;
3444 tree tem;
3445 unsigned int alignment;
3447 push_temp_slots ();
3448 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3449 &unsignedp, &volatilep, &alignment);
3451 /* If we are going to use store_bit_field and extract_bit_field,
3452 make sure to_rtx will be safe for multiple use. */
3454 if (mode1 == VOIDmode && want_value)
3455 tem = stabilize_reference (tem);
3457 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3458 if (offset != 0)
3460 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3462 if (GET_CODE (to_rtx) != MEM)
3463 abort ();
3465 if (GET_MODE (offset_rtx) != ptr_mode)
3467 #ifdef POINTERS_EXTEND_UNSIGNED
3468 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3469 #else
3470 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3471 #endif
3474 /* A constant address in TO_RTX can have VOIDmode, we must not try
3475 to call force_reg for that case. Avoid that case. */
3476 if (GET_CODE (to_rtx) == MEM
3477 && GET_MODE (to_rtx) == BLKmode
3478 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3479 && bitsize
3480 && (bitpos % bitsize) == 0
3481 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3482 && alignment == GET_MODE_ALIGNMENT (mode1))
3484 rtx temp = change_address (to_rtx, mode1,
3485 plus_constant (XEXP (to_rtx, 0),
3486 (bitpos /
3487 BITS_PER_UNIT)));
3488 if (GET_CODE (XEXP (temp, 0)) == REG)
3489 to_rtx = temp;
3490 else
3491 to_rtx = change_address (to_rtx, mode1,
3492 force_reg (GET_MODE (XEXP (temp, 0)),
3493 XEXP (temp, 0)));
3494 bitpos = 0;
3497 to_rtx = change_address (to_rtx, VOIDmode,
3498 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3499 force_reg (ptr_mode,
3500 offset_rtx)));
3503 if (volatilep)
3505 if (GET_CODE (to_rtx) == MEM)
3507 /* When the offset is zero, to_rtx is the address of the
3508 structure we are storing into, and hence may be shared.
3509 We must make a new MEM before setting the volatile bit. */
3510 if (offset == 0)
3511 to_rtx = copy_rtx (to_rtx);
3513 MEM_VOLATILE_P (to_rtx) = 1;
3515 #if 0 /* This was turned off because, when a field is volatile
3516 in an object which is not volatile, the object may be in a register,
3517 and then we would abort over here. */
3518 else
3519 abort ();
3520 #endif
3523 if (TREE_CODE (to) == COMPONENT_REF
3524 && TREE_READONLY (TREE_OPERAND (to, 1)))
3526 if (offset == 0)
3527 to_rtx = copy_rtx (to_rtx);
3529 RTX_UNCHANGING_P (to_rtx) = 1;
3532 /* Check the access. */
3533 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3535 rtx to_addr;
3536 int size;
3537 int best_mode_size;
3538 enum machine_mode best_mode;
3540 best_mode = get_best_mode (bitsize, bitpos,
3541 TYPE_ALIGN (TREE_TYPE (tem)),
3542 mode1, volatilep);
3543 if (best_mode == VOIDmode)
3544 best_mode = QImode;
3546 best_mode_size = GET_MODE_BITSIZE (best_mode);
3547 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3548 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3549 size *= GET_MODE_SIZE (best_mode);
3551 /* Check the access rights of the pointer. */
3552 in_check_memory_usage = 1;
3553 if (size)
3554 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3555 VOIDmode, 3, to_addr, Pmode,
3556 GEN_INT (size), TYPE_MODE (sizetype),
3557 GEN_INT (MEMORY_USE_WO),
3558 TYPE_MODE (integer_type_node));
3559 in_check_memory_usage = 0;
3562 /* If this is a varying-length object, we must get the address of
3563 the source and do an explicit block move. */
3564 if (bitsize < 0)
3566 unsigned int from_align;
3567 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3568 rtx inner_to_rtx
3569 = change_address (to_rtx, VOIDmode,
3570 plus_constant (XEXP (to_rtx, 0),
3571 bitpos / BITS_PER_UNIT));
3573 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3574 MIN (alignment, from_align));
3575 free_temp_slots ();
3576 pop_temp_slots ();
3577 return to_rtx;
3579 else
3581 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3582 (want_value
3583 /* Spurious cast for HPUX compiler. */
3584 ? ((enum machine_mode)
3585 TYPE_MODE (TREE_TYPE (to)))
3586 : VOIDmode),
3587 unsignedp,
3588 alignment,
3589 int_size_in_bytes (TREE_TYPE (tem)),
3590 get_alias_set (to));
3592 preserve_temp_slots (result);
3593 free_temp_slots ();
3594 pop_temp_slots ();
3596 /* If the value is meaningful, convert RESULT to the proper mode.
3597 Otherwise, return nothing. */
3598 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3599 TYPE_MODE (TREE_TYPE (from)),
3600 result,
3601 TREE_UNSIGNED (TREE_TYPE (to)))
3602 : NULL_RTX);
3606 /* If the rhs is a function call and its value is not an aggregate,
3607 call the function before we start to compute the lhs.
3608 This is needed for correct code for cases such as
3609 val = setjmp (buf) on machines where reference to val
3610 requires loading up part of an address in a separate insn.
3612 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3613 since it might be a promoted variable where the zero- or sign- extension
3614 needs to be done. Handling this in the normal way is safe because no
3615 computation is done before the call. */
3616 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3617 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3618 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3619 && GET_CODE (DECL_RTL (to)) == REG))
3621 rtx value;
3623 push_temp_slots ();
3624 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3625 if (to_rtx == 0)
3626 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3628 /* Handle calls that return values in multiple non-contiguous locations.
3629 The Irix 6 ABI has examples of this. */
3630 if (GET_CODE (to_rtx) == PARALLEL)
3631 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3632 TYPE_ALIGN (TREE_TYPE (from)));
3633 else if (GET_MODE (to_rtx) == BLKmode)
3634 emit_block_move (to_rtx, value, expr_size (from),
3635 TYPE_ALIGN (TREE_TYPE (from)));
3636 else
3638 #ifdef POINTERS_EXTEND_UNSIGNED
3639 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3640 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3641 value = convert_memory_address (GET_MODE (to_rtx), value);
3642 #endif
3643 emit_move_insn (to_rtx, value);
3645 preserve_temp_slots (to_rtx);
3646 free_temp_slots ();
3647 pop_temp_slots ();
3648 return want_value ? to_rtx : NULL_RTX;
3651 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3652 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3654 if (to_rtx == 0)
3656 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3657 if (GET_CODE (to_rtx) == MEM)
3658 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3661 /* Don't move directly into a return register. */
3662 if (TREE_CODE (to) == RESULT_DECL
3663 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3665 rtx temp;
3667 push_temp_slots ();
3668 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3670 if (GET_CODE (to_rtx) == PARALLEL)
3671 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3672 TYPE_ALIGN (TREE_TYPE (from)));
3673 else
3674 emit_move_insn (to_rtx, temp);
3676 preserve_temp_slots (to_rtx);
3677 free_temp_slots ();
3678 pop_temp_slots ();
3679 return want_value ? to_rtx : NULL_RTX;
3682 /* In case we are returning the contents of an object which overlaps
3683 the place the value is being stored, use a safe function when copying
3684 a value through a pointer into a structure value return block. */
3685 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3686 && current_function_returns_struct
3687 && !current_function_returns_pcc_struct)
3689 rtx from_rtx, size;
3691 push_temp_slots ();
3692 size = expr_size (from);
3693 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3694 EXPAND_MEMORY_USE_DONT);
3696 /* Copy the rights of the bitmap. */
3697 if (current_function_check_memory_usage)
3698 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3699 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3700 XEXP (from_rtx, 0), Pmode,
3701 convert_to_mode (TYPE_MODE (sizetype),
3702 size, TREE_UNSIGNED (sizetype)),
3703 TYPE_MODE (sizetype));
3705 #ifdef TARGET_MEM_FUNCTIONS
3706 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3707 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3708 XEXP (from_rtx, 0), Pmode,
3709 convert_to_mode (TYPE_MODE (sizetype),
3710 size, TREE_UNSIGNED (sizetype)),
3711 TYPE_MODE (sizetype));
3712 #else
3713 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3714 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3715 XEXP (to_rtx, 0), Pmode,
3716 convert_to_mode (TYPE_MODE (integer_type_node),
3717 size, TREE_UNSIGNED (integer_type_node)),
3718 TYPE_MODE (integer_type_node));
3719 #endif
3721 preserve_temp_slots (to_rtx);
3722 free_temp_slots ();
3723 pop_temp_slots ();
3724 return want_value ? to_rtx : NULL_RTX;
3727 /* Compute FROM and store the value in the rtx we got. */
3729 push_temp_slots ();
3730 result = store_expr (from, to_rtx, want_value);
3731 preserve_temp_slots (result);
3732 free_temp_slots ();
3733 pop_temp_slots ();
3734 return want_value ? result : NULL_RTX;
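/* Illustrative sketch (not part of the original source): a front end
   expanding the statement form of `lhs = rhs' calls

     expand_assignment (lhs, rhs, 0, 0);

   passing a WANT_VALUE of 1 instead when the assignment is used as an
   expression and the value of LHS is needed afterwards.  */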
3737 /* Generate code for computing expression EXP,
3738 and storing the value into TARGET.
3739 TARGET may contain a QUEUED rtx.
3741 If WANT_VALUE is nonzero, return a copy of the value
3742 not in TARGET, so that we can be sure to use the proper
3743 value in a containing expression even if TARGET has something
3744 else stored in it. If possible, we copy the value through a pseudo
3745 and return that pseudo. Or, if the value is constant, we try to
3746 return the constant. In some cases, we return a pseudo
3747 copied *from* TARGET.
3749 If the mode is BLKmode then we may return TARGET itself.
3750 It turns out that in BLKmode it doesn't cause a problem,
3751 because C has no operators that could combine two different
3752 assignments into the same BLKmode object with different values
3753 with no sequence point. Will other languages need this to
3754 be more thorough?
3756 If WANT_VALUE is 0, we return NULL, to make sure
3757 to catch quickly any cases where the caller uses the value
3758 and fails to set WANT_VALUE. */
3761 store_expr (exp, target, want_value)
3762 register tree exp;
3763 register rtx target;
3764 int want_value;
3766 register rtx temp;
3767 int dont_return_target = 0;
3769 if (TREE_CODE (exp) == COMPOUND_EXPR)
3771 /* Perform first part of compound expression, then assign from second
3772 part. */
3773 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3774 emit_queue ();
3775 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3777 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3779 /* For conditional expression, get safe form of the target. Then
3780 test the condition, doing the appropriate assignment on either
3781 side. This avoids the creation of unnecessary temporaries.
3782 For non-BLKmode, it is more efficient not to do this. */
3784 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3786 emit_queue ();
3787 target = protect_from_queue (target, 1);
3789 do_pending_stack_adjust ();
3790 NO_DEFER_POP;
3791 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3792 start_cleanup_deferral ();
3793 store_expr (TREE_OPERAND (exp, 1), target, 0);
3794 end_cleanup_deferral ();
3795 emit_queue ();
3796 emit_jump_insn (gen_jump (lab2));
3797 emit_barrier ();
3798 emit_label (lab1);
3799 start_cleanup_deferral ();
3800 store_expr (TREE_OPERAND (exp, 2), target, 0);
3801 end_cleanup_deferral ();
3802 emit_queue ();
3803 emit_label (lab2);
3804 OK_DEFER_POP;
3806 return want_value ? target : NULL_RTX;
3808 else if (queued_subexp_p (target))
3809 /* If target contains a postincrement, let's not risk
3810 using it as the place to generate the rhs. */
3812 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3814 /* Expand EXP into a new pseudo. */
3815 temp = gen_reg_rtx (GET_MODE (target));
3816 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3818 else
3819 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3821 /* If target is volatile, ANSI requires accessing the value
3822 *from* the target, if it is accessed. So make that happen.
3823 In no case return the target itself. */
3824 if (! MEM_VOLATILE_P (target) && want_value)
3825 dont_return_target = 1;
3827 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3828 && GET_MODE (target) != BLKmode)
3829 /* If target is in memory and caller wants value in a register instead,
3830 arrange that. Pass TARGET as target for expand_expr so that,
3831 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3832 We know expand_expr will not use the target in that case.
3833 Don't do this if TARGET is volatile because we are supposed
3834 to write it and then read it. */
3836 temp = expand_expr (exp, target, GET_MODE (target), 0);
3837 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3838 temp = copy_to_reg (temp);
3839 dont_return_target = 1;
3841 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3842 /* If this is a scalar in a register that is stored in a wider mode
3843 than the declared mode, compute the result into its declared mode
3844 and then convert to the wider mode. Our value is the computed
3845 expression. */
3847 /* If we don't want a value, we can do the conversion inside EXP,
3848 which will often result in some optimizations. Do the conversion
3849 in two steps: first change the signedness, if needed, then
3850 the extend. But don't do this if the type of EXP is a subtype
3851 of something else since then the conversion might involve
3852 more than just converting modes. */
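/* For instance: if EXP has type `signed char' but TARGET was promoted
   as unsigned, EXP is first converted to `unsigned char' (the
   signedness change) and then to the unsigned type whose mode is that
   of SUBREG_REG (TARGET) (the widening), so the second conversion only
   changes modes.  */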
3853 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3854 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3856 if (TREE_UNSIGNED (TREE_TYPE (exp))
3857 != SUBREG_PROMOTED_UNSIGNED_P (target))
3858 exp
3859 = convert
3860 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3861 TREE_TYPE (exp)),
3862 exp);
3864 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3865 SUBREG_PROMOTED_UNSIGNED_P (target)),
3866 exp);
3869 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3871 /* If TEMP is a volatile MEM and we want a result value, make
3872 the access now so it gets done only once. Likewise if
3873 it contains TARGET. */
3874 if (GET_CODE (temp) == MEM && want_value
3875 && (MEM_VOLATILE_P (temp)
3876 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3877 temp = copy_to_reg (temp);
3879 /* If TEMP is a VOIDmode constant, use convert_modes to make
3880 sure that we properly convert it. */
3881 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3882 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3883 TYPE_MODE (TREE_TYPE (exp)), temp,
3884 SUBREG_PROMOTED_UNSIGNED_P (target));
3886 convert_move (SUBREG_REG (target), temp,
3887 SUBREG_PROMOTED_UNSIGNED_P (target));
3889 /* If we promoted a constant, change the mode back down to match
3890 target. Otherwise, the caller might get confused by a result whose
3891 mode is larger than expected. */
3893 if (want_value && GET_MODE (temp) != GET_MODE (target)
3894 && GET_MODE (temp) != VOIDmode)
3896 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3897 SUBREG_PROMOTED_VAR_P (temp) = 1;
3898 SUBREG_PROMOTED_UNSIGNED_P (temp)
3899 = SUBREG_PROMOTED_UNSIGNED_P (target);
3902 return want_value ? temp : NULL_RTX;
3904 else
3906 temp = expand_expr (exp, target, GET_MODE (target), 0);
3907 /* Return TARGET if it's a specified hardware register.
3908 If TARGET is a volatile mem ref, either return TARGET
3909 or return a reg copied *from* TARGET; ANSI requires this.
3911 Otherwise, if TEMP is not TARGET, return TEMP
3912 if it is constant (for efficiency),
3913 or if we really want the correct value. */
3914 if (!(target && GET_CODE (target) == REG
3915 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3916 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3917 && ! rtx_equal_p (temp, target)
3918 && (CONSTANT_P (temp) || want_value))
3919 dont_return_target = 1;
3922 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3923 the same as that of TARGET, adjust the constant. This is needed, for
3924 example, in case it is a CONST_DOUBLE and we want only a word-sized
3925 value. */
3926 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3927 && TREE_CODE (exp) != ERROR_MARK
3928 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3929 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3930 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3932 if (current_function_check_memory_usage
3933 && GET_CODE (target) == MEM
3934 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3936 in_check_memory_usage = 1;
3937 if (GET_CODE (temp) == MEM)
3938 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3939 VOIDmode, 3, XEXP (target, 0), Pmode,
3940 XEXP (temp, 0), Pmode,
3941 expr_size (exp), TYPE_MODE (sizetype));
3942 else
3943 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3944 VOIDmode, 3, XEXP (target, 0), Pmode,
3945 expr_size (exp), TYPE_MODE (sizetype),
3946 GEN_INT (MEMORY_USE_WO),
3947 TYPE_MODE (integer_type_node));
3948 in_check_memory_usage = 0;
3951 /* If value was not generated in the target, store it there.
3952 Convert the value to TARGET's type first if necessary. */
3953 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3954 one or both of them are volatile memory refs, we have to distinguish
3955 two cases:
3956 - expand_expr has used TARGET. In this case, we must not generate
3957 another copy. This can be detected by TARGET being equal according
3958 to == .
3959 - expand_expr has not used TARGET - that means that the source just
3960 happens to have the same RTX form. Since temp will have been created
3961 by expand_expr, it will compare unequal according to == .
3962 We must generate a copy in this case, to reach the correct number
3963 of volatile memory references. */
3965 if ((! rtx_equal_p (temp, target)
3966 || (temp != target && (side_effects_p (temp)
3967 || side_effects_p (target))))
3968 && TREE_CODE (exp) != ERROR_MARK)
3970 target = protect_from_queue (target, 1);
3971 if (GET_MODE (temp) != GET_MODE (target)
3972 && GET_MODE (temp) != VOIDmode)
3974 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3975 if (dont_return_target)
3977 /* In this case, we will return TEMP,
3978 so make sure it has the proper mode.
3979 But don't forget to store the value into TARGET. */
3980 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3981 emit_move_insn (target, temp);
3983 else
3984 convert_move (target, temp, unsignedp);
3987 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3989 /* Handle copying a string constant into an array.
3990 The string constant may be shorter than the array.
3991 So copy just the string's actual length, and clear the rest. */
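/* Worked example: for `char buf[8] = "abc";' the string constant is
   4 bytes long (counting the terminating null), so 4 bytes are
   block-copied into the array and the remaining 4 bytes are cleared
   by clear_storage below.  */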
3992 rtx size;
3993 rtx addr;
3995 /* Get the size of the data type of the string,
3996 which is actually the size of the target. */
3997 size = expr_size (exp);
3998 if (GET_CODE (size) == CONST_INT
3999 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4000 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
4001 else
4003 /* Compute the size of the data to copy from the string. */
4004 tree copy_size
4005 = size_binop (MIN_EXPR,
4006 make_tree (sizetype, size),
4007 size_int (TREE_STRING_LENGTH (exp)));
4008 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
4009 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4010 VOIDmode, 0);
4011 rtx label = 0;
4013 /* Copy that much. */
4014 emit_block_move (target, temp, copy_size_rtx,
4015 TYPE_ALIGN (TREE_TYPE (exp)));
4017 /* Figure out how much is left in TARGET that we have to clear.
4018 Do all calculations in ptr_mode. */
4020 addr = XEXP (target, 0);
4021 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4023 if (GET_CODE (copy_size_rtx) == CONST_INT)
4025 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4026 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4027 align = MIN (align, (BITS_PER_UNIT
4028 * (INTVAL (copy_size_rtx)
4029 & - INTVAL (copy_size_rtx))));
4031 else
4033 addr = force_reg (ptr_mode, addr);
4034 addr = expand_binop (ptr_mode, add_optab, addr,
4035 copy_size_rtx, NULL_RTX, 0,
4036 OPTAB_LIB_WIDEN);
4038 size = expand_binop (ptr_mode, sub_optab, size,
4039 copy_size_rtx, NULL_RTX, 0,
4040 OPTAB_LIB_WIDEN);
4042 align = BITS_PER_UNIT;
4043 label = gen_label_rtx ();
4044 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4045 GET_MODE (size), 0, 0, label);
4047 align = MIN (align, expr_align (copy_size));
4049 if (size != const0_rtx)
4051 rtx dest = gen_rtx_MEM (BLKmode, addr);
4053 MEM_COPY_ATTRIBUTES (dest, target);
4055 /* Be sure we can write on ADDR. */
4056 in_check_memory_usage = 1;
4057 if (current_function_check_memory_usage)
4058 emit_library_call (chkr_check_addr_libfunc,
4059 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4060 addr, Pmode,
4061 size, TYPE_MODE (sizetype),
4062 GEN_INT (MEMORY_USE_WO),
4063 TYPE_MODE (integer_type_node));
4064 in_check_memory_usage = 0;
4065 clear_storage (dest, size, align);
4068 if (label)
4069 emit_label (label);
4072 /* Handle calls that return values in multiple non-contiguous locations.
4073 The Irix 6 ABI has examples of this. */
4074 else if (GET_CODE (target) == PARALLEL)
4075 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4076 TYPE_ALIGN (TREE_TYPE (exp)));
4077 else if (GET_MODE (temp) == BLKmode)
4078 emit_block_move (target, temp, expr_size (exp),
4079 TYPE_ALIGN (TREE_TYPE (exp)));
4080 else
4081 emit_move_insn (target, temp);
4084 /* If we don't want a value, return NULL_RTX. */
4085 if (! want_value)
4086 return NULL_RTX;
4088 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4089 ??? The latter test doesn't seem to make sense. */
4090 else if (dont_return_target && GET_CODE (temp) != MEM)
4091 return temp;
4093 /* Otherwise, if a value is wanted, return a copy of TARGET in a pseudo,
unless TARGET is BLKmode or a hard register, in which case TARGET
itself is returned below. */
4094 else if (want_value && GET_MODE (target) != BLKmode
4095 && ! (GET_CODE (target) == REG
4096 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4097 return copy_to_reg (target);
4099 else
4100 return target;
4103 /* Return 1 if EXP just contains zeros. */
4105 static int
4106 is_zeros_p (exp)
4107 tree exp;
4109 tree elt;
4111 switch (TREE_CODE (exp))
4113 case CONVERT_EXPR:
4114 case NOP_EXPR:
4115 case NON_LVALUE_EXPR:
4116 return is_zeros_p (TREE_OPERAND (exp, 0));
4118 case INTEGER_CST:
4119 return integer_zerop (exp);
4121 case COMPLEX_CST:
4122 return
4123 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4125 case REAL_CST:
4126 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4128 case CONSTRUCTOR:
4129 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4130 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4131 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4132 if (! is_zeros_p (TREE_VALUE (elt)))
4133 return 0;
4135 return 1;
4137 default:
4138 return 0;
4142 /* Return 1 if EXP contains mostly (3/4) zeros. */
4144 static int
4145 mostly_zeros_p (exp)
4146 tree exp;
4148 if (TREE_CODE (exp) == CONSTRUCTOR)
4150 int elts = 0, zeros = 0;
4151 tree elt = CONSTRUCTOR_ELTS (exp);
4152 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4154 /* If there are no ranges of true bits, it is all zero. */
4155 return elt == NULL_TREE;
4157 for (; elt; elt = TREE_CHAIN (elt))
4159 /* We do not handle the case where the index is a RANGE_EXPR,
4160 so the statistic will be somewhat inaccurate.
4161 We do make a more accurate count in store_constructor itself,
4162 and since this function is only used for nested array elements,
4163 this should be close enough. */
4164 if (mostly_zeros_p (TREE_VALUE (elt)))
4165 zeros++;
4166 elts++;
4169 return 4 * zeros >= 3 * elts;
4172 return is_zeros_p (exp);
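/* Worked example of the threshold above: a CONSTRUCTOR with 16 elements
   of which 12 are themselves mostly zero gives 4 * 12 = 48 >= 3 * 16 = 48,
   so it counts as mostly zero; with only 11 such elements, 44 < 48 and
   it does not.  */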
4175 /* Helper function for store_constructor.
4176 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4177 TYPE is the type of the CONSTRUCTOR, not the element type.
4178 ALIGN and CLEARED are as for store_constructor.
4179 ALIAS_SET is the alias set to use for any stores.
4181 This provides a recursive shortcut back to store_constructor when it isn't
4182 necessary to go through store_field. This is so that we can pass through
4183 the cleared field to let store_constructor know that we may not have to
4184 clear a substructure if the outer structure has already been cleared. */
4186 static void
4187 store_constructor_field (target, bitsize, bitpos,
4188 mode, exp, type, align, cleared, alias_set)
4189 rtx target;
4190 unsigned HOST_WIDE_INT bitsize;
4191 HOST_WIDE_INT bitpos;
4192 enum machine_mode mode;
4193 tree exp, type;
4194 unsigned int align;
4195 int cleared;
4196 int alias_set;
4198 if (TREE_CODE (exp) == CONSTRUCTOR
4199 && bitpos % BITS_PER_UNIT == 0
4200 /* If we have a non-zero bitpos for a register target, then we just
4201 let store_field do the bitfield handling. This is unlikely to
4202 generate unnecessary clear instructions anyway. */
4203 && (bitpos == 0 || GET_CODE (target) == MEM))
4205 if (bitpos != 0)
4206 target
4207 = change_address (target,
4208 GET_MODE (target) == BLKmode
4209 || 0 != (bitpos
4210 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4211 ? BLKmode : VOIDmode,
4212 plus_constant (XEXP (target, 0),
4213 bitpos / BITS_PER_UNIT));
4215 MEM_ALIAS_SET (target) = alias_set;
4216 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4218 else
4219 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4220 int_size_in_bytes (type), alias_set);
4223 /* Store the value of constructor EXP into the rtx TARGET.
4224 TARGET is either a REG or a MEM.
4225 ALIGN is the maximum known alignment for TARGET.
4226 CLEARED is true if TARGET is known to have been zero'd.
4227 SIZE is the number of bytes of TARGET we are allowed to modify: this
4228 may not be the same as the size of EXP if we are assigning to a field
4229 which has been packed to exclude padding bits. */
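/* Illustrative example of the clearing heuristic below: a structure
   with ten fields whose constructor supplies only two non-zero values
   has fewer constructor elements than the type has fields, so the
   whole object is cleared first and only the two given fields are then
   stored individually.  */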
4231 static void
4232 store_constructor (exp, target, align, cleared, size)
4233 tree exp;
4234 rtx target;
4235 unsigned int align;
4236 int cleared;
4237 HOST_WIDE_INT size;
4239 tree type = TREE_TYPE (exp);
4240 #ifdef WORD_REGISTER_OPERATIONS
4241 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4242 #endif
4244 /* We know our target cannot conflict, since safe_from_p has been called. */
4245 #if 0
4246 /* Don't try copying piece by piece into a hard register
4247 since that is vulnerable to being clobbered by EXP.
4248 Instead, construct in a pseudo register and then copy it all. */
4249 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4251 rtx temp = gen_reg_rtx (GET_MODE (target));
4252 store_constructor (exp, temp, align, cleared, size);
4253 emit_move_insn (target, temp);
4254 return;
4256 #endif
4258 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4259 || TREE_CODE (type) == QUAL_UNION_TYPE)
4261 register tree elt;
4263 /* Inform later passes that the whole union value is dead. */
4264 if ((TREE_CODE (type) == UNION_TYPE
4265 || TREE_CODE (type) == QUAL_UNION_TYPE)
4266 && ! cleared)
4268 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4270 /* If the constructor is empty, clear the union. */
4271 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4272 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4275 /* If we are building a static constructor into a register,
4276 set the initial value as zero so we can fold the value into
4277 a constant. But if more than one register is involved,
4278 this probably loses. */
4279 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4280 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4282 if (! cleared)
4283 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4285 cleared = 1;
4288 /* If the constructor has fewer fields than the structure
4289 or if we are initializing the structure to mostly zeros,
4290 clear the whole structure first. Don't do this if TARGET is a
4291 register whose mode size isn't equal to SIZE since clear_storage
4292 can't handle this case. */
4293 else if (size > 0
4294 && ((list_length (CONSTRUCTOR_ELTS (exp))
4295 != fields_length (type))
4296 || mostly_zeros_p (exp))
4297 && (GET_CODE (target) != REG
4298 || GET_MODE_SIZE (GET_MODE (target)) == size))
4300 if (! cleared)
4301 clear_storage (target, GEN_INT (size), align);
4303 cleared = 1;
4305 else if (! cleared)
4306 /* Inform later passes that the old value is dead. */
4307 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4309 /* Store each element of the constructor into
4310 the corresponding field of TARGET. */
4312 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4314 register tree field = TREE_PURPOSE (elt);
4315 #ifdef WORD_REGISTER_OPERATIONS
4316 tree value = TREE_VALUE (elt);
4317 #endif
4318 register enum machine_mode mode;
4319 HOST_WIDE_INT bitsize;
4320 HOST_WIDE_INT bitpos = 0;
4321 int unsignedp;
4322 tree offset;
4323 rtx to_rtx = target;
4325 /* Just ignore missing fields.
4326 We cleared the whole structure, above,
4327 if any fields are missing. */
4328 if (field == 0)
4329 continue;
4331 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4332 continue;
4334 if (host_integerp (DECL_SIZE (field), 1))
4335 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4336 else
4337 bitsize = -1;
4339 unsignedp = TREE_UNSIGNED (field);
4340 mode = DECL_MODE (field);
4341 if (DECL_BIT_FIELD (field))
4342 mode = VOIDmode;
4344 offset = DECL_FIELD_OFFSET (field);
4345 if (host_integerp (offset, 0)
4346 && host_integerp (bit_position (field), 0))
4348 bitpos = int_bit_position (field);
4349 offset = 0;
4351 else
4352 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4354 if (offset)
4356 rtx offset_rtx;
4358 if (contains_placeholder_p (offset))
4359 offset = build (WITH_RECORD_EXPR, sizetype,
4360 offset, make_tree (TREE_TYPE (exp), target));
4362 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4363 if (GET_CODE (to_rtx) != MEM)
4364 abort ();
4366 if (GET_MODE (offset_rtx) != ptr_mode)
4368 #ifdef POINTERS_EXTEND_UNSIGNED
4369 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4370 #else
4371 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4372 #endif
4375 to_rtx
4376 = change_address (to_rtx, VOIDmode,
4377 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4378 force_reg (ptr_mode,
4379 offset_rtx)));
4380 align = DECL_OFFSET_ALIGN (field);
4383 if (TREE_READONLY (field))
4385 if (GET_CODE (to_rtx) == MEM)
4386 to_rtx = copy_rtx (to_rtx);
4388 RTX_UNCHANGING_P (to_rtx) = 1;
4391 #ifdef WORD_REGISTER_OPERATIONS
4392 /* If this initializes a field that is smaller than a word, at the
4393 start of a word, try to widen it to a full word.
4394 This special case allows us to output C++ member function
4395 initializations in a form that the optimizers can understand. */
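/* For instance: on a hypothetical 32-bit word target, initializing a
   one-byte field at bit position 0 of a register with the constant 5
   becomes a full word_mode store of 5, shifted into the high-order
   byte first when BYTES_BIG_ENDIAN.  */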
4396 if (GET_CODE (target) == REG
4397 && bitsize < BITS_PER_WORD
4398 && bitpos % BITS_PER_WORD == 0
4399 && GET_MODE_CLASS (mode) == MODE_INT
4400 && TREE_CODE (value) == INTEGER_CST
4401 && exp_size >= 0
4402 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4404 tree type = TREE_TYPE (value);
4405 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4407 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4408 value = convert (type, value);
4410 if (BYTES_BIG_ENDIAN)
4411 value
4412 = fold (build (LSHIFT_EXPR, type, value,
4413 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4414 bitsize = BITS_PER_WORD;
4415 mode = word_mode;
4417 #endif
4418 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4419 TREE_VALUE (elt), type, align, cleared,
4420 DECL_NONADDRESSABLE_P (field)
4421 ? MEM_ALIAS_SET (to_rtx)
4422 : get_alias_set (TREE_TYPE (field)));
4425 else if (TREE_CODE (type) == ARRAY_TYPE)
4427 register tree elt;
4428 register int i;
4429 int need_to_clear;
4430 tree domain = TYPE_DOMAIN (type);
4431 tree elttype = TREE_TYPE (type);
4432 int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0)
4433 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4434 HOST_WIDE_INT minelt;
4435 HOST_WIDE_INT maxelt;
4437 /* If we have constant bounds for the range of the type, get them. */
4438 if (const_bounds_p)
4440 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4441 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4444 /* If the constructor has fewer elements than the array,
4445 clear the whole array first. Similarly if this is a
4446 static constructor of a non-BLKmode object. */
4447 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4448 need_to_clear = 1;
4449 else
4451 HOST_WIDE_INT count = 0, zero_count = 0;
4452 need_to_clear = ! const_bounds_p;
4454 /* This loop is a more accurate version of the loop in
4455 mostly_zeros_p (it handles RANGE_EXPR in an index).
4456 It is also needed to check for missing elements. */
4457 for (elt = CONSTRUCTOR_ELTS (exp);
4458 elt != NULL_TREE && ! need_to_clear;
4459 elt = TREE_CHAIN (elt))
4461 tree index = TREE_PURPOSE (elt);
4462 HOST_WIDE_INT this_node_count;
4464 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4466 tree lo_index = TREE_OPERAND (index, 0);
4467 tree hi_index = TREE_OPERAND (index, 1);
4469 if (! host_integerp (lo_index, 1)
4470 || ! host_integerp (hi_index, 1))
4472 need_to_clear = 1;
4473 break;
4476 this_node_count = (tree_low_cst (hi_index, 1)
4477 - tree_low_cst (lo_index, 1) + 1);
4479 else
4480 this_node_count = 1;
4482 count += this_node_count;
4483 if (mostly_zeros_p (TREE_VALUE (elt)))
4484 zero_count += this_node_count;
4487 /* Clear the entire array first if there are any missing elements,
4488 or if the incidence of zero elements is >= 75%. */
4489 if (! need_to_clear
4490 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4491 need_to_clear = 1;
4494 if (need_to_clear && size > 0)
4496 if (! cleared)
4497 clear_storage (target, GEN_INT (size), align);
4498 cleared = 1;
4500 else
4501 /* Inform later passes that the old value is dead. */
4502 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4504 /* Store each element of the constructor into
4505 the corresponding element of TARGET, determined
4506 by counting the elements. */
4507 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4508 elt;
4509 elt = TREE_CHAIN (elt), i++)
4511 register enum machine_mode mode;
4512 HOST_WIDE_INT bitsize;
4513 HOST_WIDE_INT bitpos;
4514 int unsignedp;
4515 tree value = TREE_VALUE (elt);
4516 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4517 tree index = TREE_PURPOSE (elt);
4518 rtx xtarget = target;
4520 if (cleared && is_zeros_p (value))
4521 continue;
4523 unsignedp = TREE_UNSIGNED (elttype);
4524 mode = TYPE_MODE (elttype);
4525 if (mode == BLKmode)
4526 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4527 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4528 : -1);
4529 else
4530 bitsize = GET_MODE_BITSIZE (mode);
4532 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4534 tree lo_index = TREE_OPERAND (index, 0);
4535 tree hi_index = TREE_OPERAND (index, 1);
4536 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4537 struct nesting *loop;
4538 HOST_WIDE_INT lo, hi, count;
4539 tree position;
4541 /* If the range is constant and "small", unroll the loop. */
4542 if (const_bounds_p
4543 && host_integerp (lo_index, 0)
4544 && host_integerp (hi_index, 0)
4545 && (lo = tree_low_cst (lo_index, 0),
4546 hi = tree_low_cst (hi_index, 0),
4547 count = hi - lo + 1,
4548 (GET_CODE (target) != MEM
4549 || count <= 2
4550 || (host_integerp (TYPE_SIZE (elttype), 1)
4551 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4552 <= 40 * 8)))))
4554 lo -= minelt; hi -= minelt;
4555 for (; lo <= hi; lo++)
4557 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4558 store_constructor_field
4559 (target, bitsize, bitpos, mode, value, type, align,
4560 cleared,
4561 TYPE_NONALIASED_COMPONENT (type)
4562 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
4565 else
4567 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4568 loop_top = gen_label_rtx ();
4569 loop_end = gen_label_rtx ();
4571 unsignedp = TREE_UNSIGNED (domain);
4573 index = build_decl (VAR_DECL, NULL_TREE, domain);
4575 DECL_RTL (index) = index_r
4576 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4577 &unsignedp, 0));
4579 if (TREE_CODE (value) == SAVE_EXPR
4580 && SAVE_EXPR_RTL (value) == 0)
4582 /* Make sure value gets expanded once before the
4583 loop. */
4584 expand_expr (value, const0_rtx, VOIDmode, 0);
4585 emit_queue ();
4587 store_expr (lo_index, index_r, 0);
4588 loop = expand_start_loop (0);
4590 /* Assign value to element index. */
4591 position
4592 = convert (ssizetype,
4593 fold (build (MINUS_EXPR, TREE_TYPE (index),
4594 index, TYPE_MIN_VALUE (domain))));
4595 position = size_binop (MULT_EXPR, position,
4596 convert (ssizetype,
4597 TYPE_SIZE_UNIT (elttype)));
4599 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4600 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4601 xtarget = change_address (target, mode, addr);
4602 if (TREE_CODE (value) == CONSTRUCTOR)
4603 store_constructor (value, xtarget, align, cleared,
4604 bitsize / BITS_PER_UNIT);
4605 else
4606 store_expr (value, xtarget, 0);
4608 expand_exit_loop_if_false (loop,
4609 build (LT_EXPR, integer_type_node,
4610 index, hi_index));
4612 expand_increment (build (PREINCREMENT_EXPR,
4613 TREE_TYPE (index),
4614 index, integer_one_node), 0, 0);
4615 expand_end_loop ();
4616 emit_label (loop_end);
4619 else if ((index != 0 && ! host_integerp (index, 0))
4620 || ! host_integerp (TYPE_SIZE (elttype), 1))
4622 rtx pos_rtx, addr;
4623 tree position;
4625 if (index == 0)
4626 index = ssize_int (i);
4628 if (minelt)
4629 index = convert (ssizetype,
4630 fold (build (MINUS_EXPR, index,
4631 TYPE_MIN_VALUE (domain))));
4633 position = size_binop (MULT_EXPR, index,
4634 convert (ssizetype,
4635 TYPE_SIZE_UNIT (elttype)));
4636 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4637 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4638 xtarget = change_address (target, mode, addr);
4639 store_expr (value, xtarget, 0);
4641 else
4643 if (index != 0)
4644 bitpos = ((tree_low_cst (index, 0) - minelt)
4645 * tree_low_cst (TYPE_SIZE (elttype), 1));
4646 else
4647 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4649 store_constructor_field (target, bitsize, bitpos, mode, value,
4650 type, align, cleared,
4651 TYPE_NONALIASED_COMPONENT (type)
4652 ? MEM_ALIAS_SET (target) :
4653 get_alias_set (elttype));
4659 /* Set constructor assignments. */
4660 else if (TREE_CODE (type) == SET_TYPE)
4662 tree elt = CONSTRUCTOR_ELTS (exp);
4663 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4664 tree domain = TYPE_DOMAIN (type);
4665 tree domain_min, domain_max, bitlength;
4667 /* The default implementation strategy is to extract the constant
4668 parts of the constructor, use that to initialize the target,
4669 and then "or" in whatever non-constant ranges we need in addition.
4671 If a large set is all zero or all ones, it is
4672 probably better to set it using memset (if available) or bzero.
4673 Also, if a large set has just a single range, it may also be
4674 better to first clear the set (using bzero/memset), and then
4675 set the bits we want. */
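/* Illustrative example: a 64-bit set whose constructor lists only
   constant members fits within 2 * BITS_PER_WORD words on a 32-bit
   host, so its constant part is built below by moving precomputed
   words; any non-constant ranges are then set one range at a time by
   the loop further down, using memset or __setbits.  */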
4677 /* Check for all zeros. */
4678 if (elt == NULL_TREE && size > 0)
4680 if (!cleared)
4681 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4682 return;
4685 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4686 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4687 bitlength = size_binop (PLUS_EXPR,
4688 size_diffop (domain_max, domain_min),
4689 ssize_int (1));
4691 nbits = tree_low_cst (bitlength, 1);
4693 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4694 are "complicated" (more than one range), initialize (the
4695 constant parts) by copying from a constant. */
4696 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4697 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4699 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4700 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4701 char *bit_buffer = (char *) alloca (nbits);
4702 HOST_WIDE_INT word = 0;
4703 unsigned int bit_pos = 0;
4704 unsigned int ibit = 0;
4705 unsigned int offset = 0; /* In bytes from beginning of set. */
4707 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4708 for (;;)
4710 if (bit_buffer[ibit])
4712 if (BYTES_BIG_ENDIAN)
4713 word |= (1 << (set_word_size - 1 - bit_pos));
4714 else
4715 word |= 1 << bit_pos;
4718 bit_pos++; ibit++;
4719 if (bit_pos >= set_word_size || ibit == nbits)
4721 if (word != 0 || ! cleared)
4723 rtx datum = GEN_INT (word);
4724 rtx to_rtx;
4726 /* The assumption here is that it is safe to use
4727 XEXP if the set is multi-word, but not if
4728 it's single-word. */
4729 if (GET_CODE (target) == MEM)
4731 to_rtx = plus_constant (XEXP (target, 0), offset);
4732 to_rtx = change_address (target, mode, to_rtx);
4734 else if (offset == 0)
4735 to_rtx = target;
4736 else
4737 abort ();
4738 emit_move_insn (to_rtx, datum);
4741 if (ibit == nbits)
4742 break;
4743 word = 0;
4744 bit_pos = 0;
4745 offset += set_word_size / BITS_PER_UNIT;
4749 else if (!cleared)
4750 /* Don't bother clearing storage if the set is all ones. */
4751 if (TREE_CHAIN (elt) != NULL_TREE
4752 || (TREE_PURPOSE (elt) == NULL_TREE
4753 ? nbits != 1
4754 : ( ! host_integerp (TREE_VALUE (elt), 0)
4755 || ! host_integerp (TREE_PURPOSE (elt), 0)
4756 || (tree_low_cst (TREE_VALUE (elt), 0)
4757 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4758 != (HOST_WIDE_INT) nbits))))
4759 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4761 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4763 /* Start of range of element or NULL. */
4764 tree startbit = TREE_PURPOSE (elt);
4765 /* End of range of element, or element value. */
4766 tree endbit = TREE_VALUE (elt);
4767 #ifdef TARGET_MEM_FUNCTIONS
4768 HOST_WIDE_INT startb, endb;
4769 #endif
4770 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4772 bitlength_rtx = expand_expr (bitlength,
4773 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4775 /* Handle non-range tuple element like [ expr ]. */
4776 if (startbit == NULL_TREE)
4778 startbit = save_expr (endbit);
4779 endbit = startbit;
4782 startbit = convert (sizetype, startbit);
4783 endbit = convert (sizetype, endbit);
4784 if (! integer_zerop (domain_min))
4786 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4787 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4789 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4790 EXPAND_CONST_ADDRESS);
4791 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4792 EXPAND_CONST_ADDRESS);
4794 if (REG_P (target))
4796 targetx = assign_stack_temp (GET_MODE (target),
4797 GET_MODE_SIZE (GET_MODE (target)),
4798 0);
4799 emit_move_insn (targetx, target);
4802 else if (GET_CODE (target) == MEM)
4803 targetx = target;
4804 else
4805 abort ();
4807 #ifdef TARGET_MEM_FUNCTIONS
4808 /* Optimization: If startbit and endbit are
4809 constants divisible by BITS_PER_UNIT,
4810 call memset instead. */
4811 if (TREE_CODE (startbit) == INTEGER_CST
4812 && TREE_CODE (endbit) == INTEGER_CST
4813 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4814 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4816 emit_library_call (memset_libfunc, LCT_NORMAL,
4817 VOIDmode, 3,
4818 plus_constant (XEXP (targetx, 0),
4819 startb / BITS_PER_UNIT),
4820 Pmode,
4821 constm1_rtx, TYPE_MODE (integer_type_node),
4822 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4823 TYPE_MODE (sizetype));
4825 else
4826 #endif
4827 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4828 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
4829 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
4830 startbit_rtx, TYPE_MODE (sizetype),
4831 endbit_rtx, TYPE_MODE (sizetype));
4833 if (REG_P (target))
4834 emit_move_insn (target, targetx);
4838 else
4839 abort ();
4842 /* Store the value of EXP (an expression tree)
4843 into a subfield of TARGET which has mode MODE and occupies
4844 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4845 If MODE is VOIDmode, it means that we are storing into a bit-field.
4847 If VALUE_MODE is VOIDmode, return nothing in particular.
4848 UNSIGNEDP is not used in this case.
4850 Otherwise, return an rtx for the value stored. This rtx
4851 has mode VALUE_MODE if that is convenient to do.
4852 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4854 ALIGN is the alignment that TARGET is known to have.
4855 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4857 ALIAS_SET is the alias set for the destination. This value will
4858 (in general) be different from that for TARGET, since TARGET is a
4859 reference to the containing structure. */
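/* A minimal call sketch, with made-up operand values: storing EXP into
   a 3-bit bit-field that starts 37 bits into a structure whose rtx is
   TO_RTX might look like

       store_field (to_rtx, 3, 37, VOIDmode, exp, VOIDmode, 0,
                    align, total_size, alias_set);

   VOIDmode for MODE marks the destination as a bit-field, and VOIDmode
   for VALUE_MODE says the caller does not need the stored value.  */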
4861 static rtx
4862 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4863 unsignedp, align, total_size, alias_set)
4864 rtx target;
4865 HOST_WIDE_INT bitsize;
4866 HOST_WIDE_INT bitpos;
4867 enum machine_mode mode;
4868 tree exp;
4869 enum machine_mode value_mode;
4870 int unsignedp;
4871 unsigned int align;
4872 HOST_WIDE_INT total_size;
4873 int alias_set;
4875 HOST_WIDE_INT width_mask = 0;
4877 if (TREE_CODE (exp) == ERROR_MARK)
4878 return const0_rtx;
4880 if (bitsize < HOST_BITS_PER_WIDE_INT)
4881 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4883 /* If we are storing into an unaligned field of an aligned union that is
4884 in a register, we may have the mode of TARGET being an integer mode but
4885 MODE == BLKmode. In that case, get an aligned object whose size and
4886 alignment are the same as TARGET and store TARGET into it (we can avoid
4887 the store if the field being stored is the entire width of TARGET). Then
4888 call ourselves recursively to store the field into a BLKmode version of
4889 that object. Finally, load from the object into TARGET. This is not
4890 very efficient in general, but should only be slightly more expensive
4891 than the otherwise-required unaligned accesses. Perhaps this can be
4892 cleaned up later. */
4894 if (mode == BLKmode
4895 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4897 rtx object = assign_stack_temp (GET_MODE (target),
4898 GET_MODE_SIZE (GET_MODE (target)), 0);
4899 rtx blk_object = copy_rtx (object);
4901 MEM_SET_IN_STRUCT_P (object, 1);
4902 MEM_SET_IN_STRUCT_P (blk_object, 1);
4903 PUT_MODE (blk_object, BLKmode);
4905 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4906 emit_move_insn (object, target);
4908 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4909 align, total_size, alias_set);
4911 /* Even though we aren't returning target, we need to
4912 give it the updated value. */
4913 emit_move_insn (target, object);
4915 return blk_object;
4918 if (GET_CODE (target) == CONCAT)
4920 /* We're storing into a struct containing a single __complex. */
4922 if (bitpos != 0)
4923 abort ();
4924 return store_expr (exp, target, 0);
4927 /* If the structure is in a register or if the component
4928 is a bit field, we cannot use addressing to access it.
4929 Use bit-field techniques or SUBREG to store in it. */
4931 if (mode == VOIDmode
4932 || (mode != BLKmode && ! direct_store[(int) mode]
4933 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4934 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4935 || GET_CODE (target) == REG
4936 || GET_CODE (target) == SUBREG
4937 /* If the field isn't aligned enough to store as an ordinary memref,
4938 store it as a bit field. */
4939 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4940 && (align < GET_MODE_ALIGNMENT (mode)
4941 || bitpos % GET_MODE_ALIGNMENT (mode)))
4942 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4943 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
4944 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
4945 /* If the RHS and field are a constant size and the size of the
4946 RHS isn't the same size as the bitfield, we must use bitfield
4947 operations. */
4948 || (bitsize >= 0
4949 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
4950 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
4952 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4954 /* If BITSIZE is narrower than the size of the type of EXP
4955 we will be narrowing TEMP. Normally, what's wanted are the
4956 low-order bits. However, if EXP's type is a record and this is a
4957 big-endian machine, we want the upper BITSIZE bits. */
4958 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4959 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4960 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4961 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4962 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4963 - bitsize),
4964 temp, 1);
4966 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4967 MODE. */
4968 if (mode != VOIDmode && mode != BLKmode
4969 && mode != TYPE_MODE (TREE_TYPE (exp)))
4970 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4972 /* If the modes of TARGET and TEMP are both BLKmode, both
4973 must be in memory and BITPOS must be aligned on a byte
4974 boundary. If so, we simply do a block copy. */
4975 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4977 unsigned int exp_align = expr_align (exp);
4979 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4980 || bitpos % BITS_PER_UNIT != 0)
4981 abort ();
4983 target = change_address (target, VOIDmode,
4984 plus_constant (XEXP (target, 0),
4985 bitpos / BITS_PER_UNIT));
4987 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
4988 align = MIN (exp_align, align);
4990 /* Find an alignment that is consistent with the bit position. */
4991 while ((bitpos % align) != 0)
4992 align >>= 1;
4994 emit_block_move (target, temp,
4995 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4996 / BITS_PER_UNIT),
4997 align);
4999 return value_mode == VOIDmode ? const0_rtx : target;
5002 /* Store the value in the bitfield. */
5003 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5004 if (value_mode != VOIDmode)
5006 /* The caller wants an rtx for the value. */
5007 /* If possible, avoid refetching from the bitfield itself. */
5008 if (width_mask != 0
5009 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5011 tree count;
5012 enum machine_mode tmode;
5014 if (unsignedp)
5015 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
5016 tmode = GET_MODE (temp);
5017 if (tmode == VOIDmode)
5018 tmode = value_mode;
5019 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5020 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5021 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5023 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5024 NULL_RTX, value_mode, 0, align,
5025 total_size);
5027 return const0_rtx;
5029 else
5031 rtx addr = XEXP (target, 0);
5032 rtx to_rtx;
5034 /* If a value is wanted, it must be the lhs;
5035 so make the address stable for multiple use. */
5037 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5038 && ! CONSTANT_ADDRESS_P (addr)
5039 /* A frame-pointer reference is already stable. */
5040 && ! (GET_CODE (addr) == PLUS
5041 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5042 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5043 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5044 addr = copy_to_reg (addr);
5046 /* Now build a reference to just the desired component. */
5048 to_rtx = copy_rtx (change_address (target, mode,
5049 plus_constant (addr,
5050 (bitpos
5051 / BITS_PER_UNIT))));
5052 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5053 MEM_ALIAS_SET (to_rtx) = alias_set;
5055 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5059 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5060 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
5061 ARRAY_REFs and find the ultimate containing object, which we return.
5063 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5064 bit position, and *PUNSIGNEDP to the signedness of the field.
5065 If the position of the field is variable, we store a tree
5066 giving the variable offset (in units) in *POFFSET.
5067 This offset is in addition to the bit position.
5068 If the position is not variable, we store 0 in *POFFSET.
5069 We set *PALIGNMENT to the alignment of the address that will be
5070 computed. This is the alignment of the thing we return if *POFFSET
5071 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5073 If any of the extraction expressions is volatile,
5074 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5076 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5077 is a mode that can be used to access the field. In that case, *PBITSIZE
5078 is redundant.
5080 If the field describes a variable-sized object, *PMODE is set to
5081 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5082 this case, but the address of the object can be found. */
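/* Worked example: for a COMPONENT_REF `s.f' where F is a 3-bit
   bit-field placed 37 bits from the start of S, this returns the tree
   for S with *PBITSIZE = 3, *PBITPOS = 37, *POFFSET = 0, and
   *PMODE = VOIDmode (VOIDmode because F is a bit-field).  */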
5084 tree
5085 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5086 punsignedp, pvolatilep, palignment)
5087 tree exp;
5088 HOST_WIDE_INT *pbitsize;
5089 HOST_WIDE_INT *pbitpos;
5090 tree *poffset;
5091 enum machine_mode *pmode;
5092 int *punsignedp;
5093 int *pvolatilep;
5094 unsigned int *palignment;
5096 tree size_tree = 0;
5097 enum machine_mode mode = VOIDmode;
5098 tree offset = size_zero_node;
5099 tree bit_offset = bitsize_zero_node;
5100 unsigned int alignment = BIGGEST_ALIGNMENT;
5101 tree tem;
5103 /* First get the mode, signedness, and size. We do this from just the
5104 outermost expression. */
5105 if (TREE_CODE (exp) == COMPONENT_REF)
5107 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5108 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5109 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5111 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5113 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5115 size_tree = TREE_OPERAND (exp, 1);
5116 *punsignedp = TREE_UNSIGNED (exp);
5118 else
5120 mode = TYPE_MODE (TREE_TYPE (exp));
5121 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5123 if (mode == BLKmode)
5124 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5125 else
5126 *pbitsize = GET_MODE_BITSIZE (mode);
5129 if (size_tree != 0)
5131 if (! host_integerp (size_tree, 1))
5132 mode = BLKmode, *pbitsize = -1;
5133 else
5134 *pbitsize = tree_low_cst (size_tree, 1);
5137 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5138 and find the ultimate containing object. */
5139 while (1)
5141 if (TREE_CODE (exp) == BIT_FIELD_REF)
5142 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5143 else if (TREE_CODE (exp) == COMPONENT_REF)
5145 tree field = TREE_OPERAND (exp, 1);
5146 tree this_offset = DECL_FIELD_OFFSET (field);
5148 /* If this field hasn't been filled in yet, don't go
5149 past it. This should only happen when folding expressions
5150 made during type construction. */
5151 if (this_offset == 0)
5152 break;
5153 else if (! TREE_CONSTANT (this_offset)
5154 && contains_placeholder_p (this_offset))
5155 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5157 offset = size_binop (PLUS_EXPR, offset, this_offset);
5158 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5159 DECL_FIELD_BIT_OFFSET (field));
5161 if (! host_integerp (offset, 0))
5162 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5165 else if (TREE_CODE (exp) == ARRAY_REF)
5167 tree index = TREE_OPERAND (exp, 1);
5168 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5169 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5170 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (exp));
5172 /* We assume all arrays have sizes that are a multiple of a byte.
5173 First subtract the lower bound, if any, in the type of the
5174 index, then convert to sizetype and multiply by the size of the
5175 array element. */
5176 if (low_bound != 0 && ! integer_zerop (low_bound))
5177 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5178 index, low_bound));
5180 /* If the index has a self-referential type, pass it to a
5181 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5182 component to one. */
5183 if (! TREE_CONSTANT (index)
5184 && contains_placeholder_p (index))
5185 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5186 if (! TREE_CONSTANT (unit_size)
5187 && contains_placeholder_p (unit_size))
5188 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size,
5189 TREE_OPERAND (exp, 0));
5191 offset = size_binop (PLUS_EXPR, offset,
5192 size_binop (MULT_EXPR,
5193 convert (sizetype, index),
5194 unit_size));
5197 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5198 && ! ((TREE_CODE (exp) == NOP_EXPR
5199 || TREE_CODE (exp) == CONVERT_EXPR)
5200 && (TYPE_MODE (TREE_TYPE (exp))
5201 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5202 break;
5204 /* If any reference in the chain is volatile, the effect is volatile. */
5205 if (TREE_THIS_VOLATILE (exp))
5206 *pvolatilep = 1;
5208 /* If the offset is non-constant already, then we can't assume any
5209 alignment more than the alignment here. */
5210 if (! TREE_CONSTANT (offset))
5211 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5213 exp = TREE_OPERAND (exp, 0);
5216 if (DECL_P (exp))
5217 alignment = MIN (alignment, DECL_ALIGN (exp));
5218 else if (TREE_TYPE (exp) != 0)
5219 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5221 /* If OFFSET is constant, see if we can return the whole thing as a
5222 constant bit position. Otherwise, split it up. */
5223 if (host_integerp (offset, 0)
5224 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5225 bitsize_unit_node))
5226 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5227 && host_integerp (tem, 0))
5228 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5229 else
5230 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5232 *pmode = mode;
5233 *palignment = alignment;
5234 return exp;
5237 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5239 static enum memory_use_mode
5240 get_memory_usage_from_modifier (modifier)
5241 enum expand_modifier modifier;
5243 switch (modifier)
5245 case EXPAND_NORMAL:
5246 case EXPAND_SUM:
5247 return MEMORY_USE_RO;
5248 break;
5249 case EXPAND_MEMORY_USE_WO:
5250 return MEMORY_USE_WO;
5251 break;
5252 case EXPAND_MEMORY_USE_RW:
5253 return MEMORY_USE_RW;
5254 break;
5255 case EXPAND_MEMORY_USE_DONT:
5256 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5257 MEMORY_USE_DONT, because they are modifiers to a call of
5258 expand_expr in the ADDR_EXPR case of expand_expr. */
5259 case EXPAND_CONST_ADDRESS:
5260 case EXPAND_INITIALIZER:
5261 return MEMORY_USE_DONT;
5262 case EXPAND_MEMORY_USE_BAD:
5263 default:
5264 abort ();
5268 /* Given an rtx VALUE that may contain additions and multiplications,
5269 return an equivalent value that just refers to a register or memory.
5270 This is done by generating instructions to perform the arithmetic
5271 and returning a pseudo-register containing the value.
5273 The returned value may be a REG, SUBREG, MEM or constant. */
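/* For instance: given VALUE = (plus (reg 100) (const_int 4)),
   force_operand emits an add of register 100 and the constant 4 and
   returns the register (a pseudo, or TARGET if suitable) holding the
   sum, so the caller is left with a plain register rather than an
   arithmetic rtx.  */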
5275 rtx
5276 force_operand (value, target)
5277 rtx value, target;
5279 register optab binoptab = 0;
5280 /* Use a temporary to force order of execution of calls to
5281 `force_operand'. */
5282 rtx tmp;
5283 register rtx op2;
5284 /* Use subtarget as the target for operand 0 of a binary operation. */
5285 register rtx subtarget = get_subtarget (target);
5287 /* Check for a PIC address load. */
5288 if (flag_pic
5289 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5290 && XEXP (value, 0) == pic_offset_table_rtx
5291 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5292 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5293 || GET_CODE (XEXP (value, 1)) == CONST))
5295 if (!subtarget)
5296 subtarget = gen_reg_rtx (GET_MODE (value));
5297 emit_move_insn (subtarget, value);
5298 return subtarget;
5301 if (GET_CODE (value) == PLUS)
5302 binoptab = add_optab;
5303 else if (GET_CODE (value) == MINUS)
5304 binoptab = sub_optab;
5305 else if (GET_CODE (value) == MULT)
5307 op2 = XEXP (value, 1);
5308 if (!CONSTANT_P (op2)
5309 && !(GET_CODE (op2) == REG && op2 != subtarget))
5310 subtarget = 0;
5311 tmp = force_operand (XEXP (value, 0), subtarget);
5312 return expand_mult (GET_MODE (value), tmp,
5313 force_operand (op2, NULL_RTX),
5314 target, 0);
5317 if (binoptab)
5319 op2 = XEXP (value, 1);
5320 if (!CONSTANT_P (op2)
5321 && !(GET_CODE (op2) == REG && op2 != subtarget))
5322 subtarget = 0;
5323 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5325 binoptab = add_optab;
5326 op2 = negate_rtx (GET_MODE (value), op2);
5329 /* Check for an addition with OP2 a constant integer and our first
5330 operand a PLUS of a virtual register and something else. In that
5331 case, we want to emit the sum of the virtual register and the
5332 constant first and then add the other value. This allows virtual
5333 register instantiation to simply modify the constant rather than
5334 creating another one around this addition. */
5335 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5336 && GET_CODE (XEXP (value, 0)) == PLUS
5337 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5338 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5339 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5341 rtx temp = expand_binop (GET_MODE (value), binoptab,
5342 XEXP (XEXP (value, 0), 0), op2,
5343 subtarget, 0, OPTAB_LIB_WIDEN);
5344 return expand_binop (GET_MODE (value), binoptab, temp,
5345 force_operand (XEXP (XEXP (value, 0), 1), 0),
5346 target, 0, OPTAB_LIB_WIDEN);
5349 tmp = force_operand (XEXP (value, 0), subtarget);
5350 return expand_binop (GET_MODE (value), binoptab, tmp,
5351 force_operand (op2, NULL_RTX),
5352 target, 0, OPTAB_LIB_WIDEN);
5353 /* We give UNSIGNEDP = 0 to expand_binop
5354 because the only operations we are expanding here are signed ones. */
5356 return value;
5359 /* Subroutine of expand_expr:
5360 save the non-copied parts (LIST) of an expr (LHS), and return a list
5361 which can restore these values to their previous values,
5362 should something modify their storage. */
5364 static tree
5365 save_noncopied_parts (lhs, list)
5366 tree lhs;
5367 tree list;
5369 tree tail;
5370 tree parts = 0;
5372 for (tail = list; tail; tail = TREE_CHAIN (tail))
5373 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5374 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5375 else
5377 tree part = TREE_VALUE (tail);
5378 tree part_type = TREE_TYPE (part);
5379 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5380 rtx target = assign_temp (part_type, 0, 1, 1);
5381 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5382 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5383 parts = tree_cons (to_be_saved,
5384 build (RTL_EXPR, part_type, NULL_TREE,
5385 (tree) target),
5386 parts);
5387 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5389 return parts;
5392 /* Subroutine of expand_expr:
5393 record the non-copied parts (LIST) of an expr (LHS), and return a list
5394 which specifies the initial values of these parts. */
5396 static tree
5397 init_noncopied_parts (lhs, list)
5398 tree lhs;
5399 tree list;
5401 tree tail;
5402 tree parts = 0;
5404 for (tail = list; tail; tail = TREE_CHAIN (tail))
5405 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5406 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5407 else if (TREE_PURPOSE (tail))
5409 tree part = TREE_VALUE (tail);
5410 tree part_type = TREE_TYPE (part);
5411 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5412 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5414 return parts;
5417 /* Subroutine of expand_expr: return nonzero iff there is no way that
5418 EXP can reference X, which is being modified. TOP_P is nonzero if this
5419 call is going to be used to determine whether we need a temporary
5420 for EXP, as opposed to a recursive call to this function.
5422 It is always safe for this routine to return zero since it merely
5423 searches for optimization opportunities. */
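/* For instance: if EXP is an INDIRECT_REF and X is a MEM, the two might
   overlap, so the result is 0; if EXP is a constant (class 'c'), it can
   never reference X and the result is 1.  */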
5425 int
5426 safe_from_p (x, exp, top_p)
5427 rtx x;
5428 tree exp;
5429 int top_p;
5431 rtx exp_rtl = 0;
5432 int i, nops;
5433 static int save_expr_count;
5434 static int save_expr_size = 0;
5435 static tree *save_expr_rewritten;
5436 static tree save_expr_trees[256];
5438 if (x == 0
5439 /* If EXP has varying size, we MUST use a target since we currently
5440 have no way of allocating temporaries of variable size
5441 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5442 So we assume here that something at a higher level has prevented a
5443 clash. This is somewhat bogus, but the best we can do. Only
5444 do this when X is BLKmode and when we are at the top level. */
5445 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5446 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5447 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5448 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5449 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5450 != INTEGER_CST)
5451 && GET_MODE (x) == BLKmode))
5452 return 1;
5454 if (top_p && save_expr_size == 0)
5456 int rtn;
5458 save_expr_count = 0;
5459 save_expr_size = ARRAY_SIZE (save_expr_trees);
5460 save_expr_rewritten = &save_expr_trees[0];
5462 rtn = safe_from_p (x, exp, 1);
5464 for (i = 0; i < save_expr_count; ++i)
5466 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5467 abort ();
5468 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5471 save_expr_size = 0;
5473 return rtn;
5476 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5477 find the underlying pseudo. */
5478 if (GET_CODE (x) == SUBREG)
5480 x = SUBREG_REG (x);
5481 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5482 return 0;
5485 /* If X is a location in the outgoing argument area, it is always safe. */
5486 if (GET_CODE (x) == MEM
5487 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5488 || (GET_CODE (XEXP (x, 0)) == PLUS
5489 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5490 return 1;
5492 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5494 case 'd':
5495 exp_rtl = DECL_RTL (exp);
5496 break;
5498 case 'c':
5499 return 1;
5501 case 'x':
5502 if (TREE_CODE (exp) == TREE_LIST)
5503 return ((TREE_VALUE (exp) == 0
5504 || safe_from_p (x, TREE_VALUE (exp), 0))
5505 && (TREE_CHAIN (exp) == 0
5506 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5507 else if (TREE_CODE (exp) == ERROR_MARK)
5508 return 1; /* An already-visited SAVE_EXPR? */
5509 else
5510 return 0;
5512 case '1':
5513 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5515 case '2':
5516 case '<':
5517 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5518 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5520 case 'e':
5521 case 'r':
5522 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5523 the expression. If it is set, we conflict iff we are that rtx or
5524 both are in memory. Otherwise, we check all operands of the
5525 expression recursively. */
5527 switch (TREE_CODE (exp))
5529 case ADDR_EXPR:
5530 return (staticp (TREE_OPERAND (exp, 0))
5531 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5532 || TREE_STATIC (exp));
5534 case INDIRECT_REF:
5535 if (GET_CODE (x) == MEM)
5536 return 0;
5537 break;
5539 case CALL_EXPR:
5540 exp_rtl = CALL_EXPR_RTL (exp);
5541 if (exp_rtl == 0)
5543 /* Assume that the call will clobber all hard registers and
5544 all of memory. */
5545 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5546 || GET_CODE (x) == MEM)
5547 return 0;
5550 break;
5552 case RTL_EXPR:
5553 /* If a sequence exists, we would have to scan every instruction
5554 in the sequence to see if it was safe. This is probably not
5555 worthwhile. */
5556 if (RTL_EXPR_SEQUENCE (exp))
5557 return 0;
5559 exp_rtl = RTL_EXPR_RTL (exp);
5560 break;
5562 case WITH_CLEANUP_EXPR:
5563 exp_rtl = RTL_EXPR_RTL (exp);
5564 break;
5566 case CLEANUP_POINT_EXPR:
5567 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5569 case SAVE_EXPR:
5570 exp_rtl = SAVE_EXPR_RTL (exp);
5571 if (exp_rtl)
5572 break;
5574 /* This SAVE_EXPR might appear many times in the top-level
5575 safe_from_p() expression, and if it has a complex
5576 subexpression, examining it multiple times could result
5577 in a combinatorial explosion. E.g. on an Alpha
5578 running at least 200MHz, a Fortran test case compiled with
5579 optimization took about 28 minutes to compile -- even though
5580 it was only a few lines long, and the complicated line causing
5581 so much time to be spent in the earlier version of safe_from_p()
5582 had only 293 or so unique nodes.
5584 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5585 where it is so we can turn it back into a SAVE_EXPR in the
5586 top-level safe_from_p() when we're done. */
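 /* Illustrative scenario (an assumption, not taken from the original
    comments): if the same SAVE_EXPR is shared by both arms of a large
    COND_EXPR, the first visit records it in save_expr_rewritten and
    leaves it marked ERROR_MARK; later visits then return 1 immediately
    through the 'x' class case above instead of re-walking the shared
    subtree, and the top-level call restores the SAVE_EXPR codes when
    it finishes.  */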
5588 /* For now, don't bother re-sizing the array. */
5589 if (save_expr_count >= save_expr_size)
5590 return 0;
5591 save_expr_rewritten[save_expr_count++] = exp;
5593 nops = TREE_CODE_LENGTH (SAVE_EXPR);
5594 for (i = 0; i < nops; i++)
5596 tree operand = TREE_OPERAND (exp, i);
5597 if (operand == NULL_TREE)
5598 continue;
5599 TREE_SET_CODE (exp, ERROR_MARK);
5600 if (!safe_from_p (x, operand, 0))
5601 return 0;
5602 TREE_SET_CODE (exp, SAVE_EXPR);
5604 TREE_SET_CODE (exp, ERROR_MARK);
5605 return 1;
5607 case BIND_EXPR:
5608 /* The only operand we look at is operand 1. The rest aren't
5609 part of the expression. */
5610 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5612 case METHOD_CALL_EXPR:
5613 /* This takes a rtx argument, but shouldn't appear here. */
5614 abort ();
5616 default:
5617 break;
5620 /* If we have an rtx, we do not need to scan our operands. */
5621 if (exp_rtl)
5622 break;
5624 nops = first_rtl_op (TREE_CODE (exp));
5625 for (i = 0; i < nops; i++)
5626 if (TREE_OPERAND (exp, i) != 0
5627 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5628 return 0;
5630 /* If this is a language-specific tree code, it may require
5631 special handling. */
5632 if (TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE
5633 && lang_safe_from_p
5634 && !(*lang_safe_from_p) (x, exp))
5635 return 0;
5638 /* If we have an rtl, find any enclosed object. Then see if we conflict
5639 with it. */
5640 if (exp_rtl)
5642 if (GET_CODE (exp_rtl) == SUBREG)
5644 exp_rtl = SUBREG_REG (exp_rtl);
5645 if (GET_CODE (exp_rtl) == REG
5646 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5647 return 0;
5650 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5651 are memory and EXP is not readonly. */
5652 return ! (rtx_equal_p (x, exp_rtl)
5653 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5654 && ! TREE_READONLY (exp)));
5657 /* If we reach here, it is safe. */
5658 return 1;
5661 /* Subroutine of expand_expr: return nonzero iff EXP is an
5662 expression whose type is statically determinable. */
5664 static int
5665 fixed_type_p (exp)
5666 tree exp;
5668 if (TREE_CODE (exp) == PARM_DECL
5669 || TREE_CODE (exp) == VAR_DECL
5670 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5671 || TREE_CODE (exp) == COMPONENT_REF
5672 || TREE_CODE (exp) == ARRAY_REF)
5673 return 1;
5674 return 0;
5677 /* Subroutine of expand_expr: return rtx if EXP is a
5678 variable or parameter; else return 0. */
5680 static rtx
5681 var_rtx (exp)
5682 tree exp;
5684 STRIP_NOPS (exp);
5685 switch (TREE_CODE (exp))
5687 case PARM_DECL:
5688 case VAR_DECL:
5689 return DECL_RTL (exp);
5690 default:
5691 return 0;
5695 #ifdef MAX_INTEGER_COMPUTATION_MODE
5696 void
5697 check_max_integer_computation_mode (exp)
5698 tree exp;
5700 enum tree_code code;
5701 enum machine_mode mode;
5703 /* Strip any NOPs that don't change the mode. */
5704 STRIP_NOPS (exp);
5705 code = TREE_CODE (exp);
5707 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5708 if (code == NOP_EXPR
5709 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5710 return;
5712 /* First check the type of the overall operation. We need only look at
5713 unary, binary and relational operations. */
5714 if (TREE_CODE_CLASS (code) == '1'
5715 || TREE_CODE_CLASS (code) == '2'
5716 || TREE_CODE_CLASS (code) == '<')
5718 mode = TYPE_MODE (TREE_TYPE (exp));
5719 if (GET_MODE_CLASS (mode) == MODE_INT
5720 && mode > MAX_INTEGER_COMPUTATION_MODE)
5721 fatal ("unsupported wide integer operation");
5724 /* Check operand of a unary op. */
5725 if (TREE_CODE_CLASS (code) == '1')
5727 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5728 if (GET_MODE_CLASS (mode) == MODE_INT
5729 && mode > MAX_INTEGER_COMPUTATION_MODE)
5730 fatal ("unsupported wide integer operation");
5733 /* Check operands of a binary/comparison op. */
5734 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5736 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5737 if (GET_MODE_CLASS (mode) == MODE_INT
5738 && mode > MAX_INTEGER_COMPUTATION_MODE)
5739 fatal ("unsupported wide integer operation");
5741 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5742 if (GET_MODE_CLASS (mode) == MODE_INT
5743 && mode > MAX_INTEGER_COMPUTATION_MODE)
5744 fatal ("unsupported wide integer operation");
5747 #endif
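/* Illustrative example (assumption): on a target that defines
   MAX_INTEGER_COMPUTATION_MODE as SImode, an addition whose operands or
   result have DImode would trip one of the fatal () calls above with
   "unsupported wide integer operation".  */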
5749 /* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5750 has any readonly fields. If any of the fields have types that
5751 contain readonly fields, return true as well. */
5753 static int
5754 readonly_fields_p (type)
5755 tree type;
5757 tree field;
5759 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
5760 if (TREE_CODE (field) == FIELD_DECL
5761 && (TREE_READONLY (field)
5762 || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
5763 && readonly_fields_p (TREE_TYPE (field)))))
5764 return 1;
5766 return 0;
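/* Illustrative example (assumption, not from the original source): for a
   C declaration such as

       struct inner { const int b; };
       struct outer { const int a; struct inner nested; };

   readonly_fields_p returns 1 for `struct outer': the readonly FIELD_DECL
   for A already suffices, and the recursive RECORD_TYPE check would
   likewise catch NESTED.B.  */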
5769 /* expand_expr: generate code for computing expression EXP.
5770 An rtx for the computed value is returned. The value is never null.
5771 In the case of a void EXP, const0_rtx is returned.
5773 The value may be stored in TARGET if TARGET is nonzero.
5774 TARGET is just a suggestion; callers must assume that
5775 the rtx returned may not be the same as TARGET.
5777 If TARGET is CONST0_RTX, it means that the value will be ignored.
5779 If TMODE is not VOIDmode, it suggests generating the
5780 result in mode TMODE. But this is done only when convenient.
5781 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5782 TMODE is just a suggestion; callers must assume that
5783 the rtx returned may not have mode TMODE.
5785 Note that TARGET may have neither TMODE nor MODE. In that case, it
5786 probably will not be used.
5788 If MODIFIER is EXPAND_SUM then when EXP is an addition
5789 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5790 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5791 products as above, or REG or MEM, or constant.
5792 Ordinarily in such cases we would output mul or add instructions
5793 and then return a pseudo reg containing the sum.
5795 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5796 it also marks a label as absolutely required (it can't be dead).
5797 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5798 This is used for outputting expressions used in initializers.
5800 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5801 with a constant address even if that address is not normally legitimate.
5802 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
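   /* Minimal usage sketch under the conventions above (assumed, not copied
      from a real call site): a caller that only wants the side effects of
      EXP might do roughly

          rtx val = expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);

      passing const0_rtx as TARGET to say the value is ignored, VOIDmode to
      leave the result in its natural mode, and EXPAND_NORMAL as the default
      modifier.  Real call sites live in stmt.c and the language front ends;
      the exact arguments there may differ.  */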
5805 expand_expr (exp, target, tmode, modifier)
5806 register tree exp;
5807 rtx target;
5808 enum machine_mode tmode;
5809 enum expand_modifier modifier;
5811 register rtx op0, op1, temp;
5812 tree type = TREE_TYPE (exp);
5813 int unsignedp = TREE_UNSIGNED (type);
5814 register enum machine_mode mode;
5815 register enum tree_code code = TREE_CODE (exp);
5816 optab this_optab;
5817 rtx subtarget, original_target;
5818 int ignore;
5819 tree context;
5820 /* Used by check-memory-usage to make modifier read only. */
5821 enum expand_modifier ro_modifier;
5823 /* Handle ERROR_MARK before anybody tries to access its type. */
5824 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
5826 op0 = CONST0_RTX (tmode);
5827 if (op0 != 0)
5828 return op0;
5829 return const0_rtx;
5832 mode = TYPE_MODE (type);
5833 /* Use subtarget as the target for operand 0 of a binary operation. */
5834 subtarget = get_subtarget (target);
5835 original_target = target;
5836 ignore = (target == const0_rtx
5837 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5838 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5839 || code == COND_EXPR)
5840 && TREE_CODE (type) == VOID_TYPE));
5842 /* Make a read-only version of the modifier. */
5843 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5844 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5845 ro_modifier = modifier;
5846 else
5847 ro_modifier = EXPAND_NORMAL;
5849 /* If we are going to ignore this result, we need only do something
5850 if there is a side-effect somewhere in the expression. If there
5851 is, short-circuit the most common cases here. Note that we must
5852 not call expand_expr with anything but const0_rtx in case this
5853 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5855 if (ignore)
5857 if (! TREE_SIDE_EFFECTS (exp))
5858 return const0_rtx;
5860 /* Ensure we reference a volatile object even if value is ignored, but
5861 don't do this if all we are doing is taking its address. */
5862 if (TREE_THIS_VOLATILE (exp)
5863 && TREE_CODE (exp) != FUNCTION_DECL
5864 && mode != VOIDmode && mode != BLKmode
5865 && modifier != EXPAND_CONST_ADDRESS)
5867 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5868 if (GET_CODE (temp) == MEM)
5869 temp = copy_to_reg (temp);
5870 return const0_rtx;
5873 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5874 || code == INDIRECT_REF || code == BUFFER_REF)
5875 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5876 VOIDmode, ro_modifier);
5877 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5878 || code == ARRAY_REF)
5880 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5881 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5882 return const0_rtx;
5884 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5885 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5886 /* If the second operand has no side effects, just evaluate
5887 the first. */
5888 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5889 VOIDmode, ro_modifier);
5890 else if (code == BIT_FIELD_REF)
5892 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5893 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5894 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
5895 return const0_rtx;
5898 target = 0;
5901 #ifdef MAX_INTEGER_COMPUTATION_MODE
5902 /* Only check stuff here if the mode we want is different from the mode
5903 of the expression; if it's the same, check_max_integer_computation_mode
5904 will handle it. Do we really need to check this stuff at all? */
5906 if (target
5907 && GET_MODE (target) != mode
5908 && TREE_CODE (exp) != INTEGER_CST
5909 && TREE_CODE (exp) != PARM_DECL
5910 && TREE_CODE (exp) != ARRAY_REF
5911 && TREE_CODE (exp) != COMPONENT_REF
5912 && TREE_CODE (exp) != BIT_FIELD_REF
5913 && TREE_CODE (exp) != INDIRECT_REF
5914 && TREE_CODE (exp) != CALL_EXPR
5915 && TREE_CODE (exp) != VAR_DECL
5916 && TREE_CODE (exp) != RTL_EXPR)
5918 enum machine_mode mode = GET_MODE (target);
5920 if (GET_MODE_CLASS (mode) == MODE_INT
5921 && mode > MAX_INTEGER_COMPUTATION_MODE)
5922 fatal ("unsupported wide integer operation");
5925 if (tmode != mode
5926 && TREE_CODE (exp) != INTEGER_CST
5927 && TREE_CODE (exp) != PARM_DECL
5928 && TREE_CODE (exp) != ARRAY_REF
5929 && TREE_CODE (exp) != COMPONENT_REF
5930 && TREE_CODE (exp) != BIT_FIELD_REF
5931 && TREE_CODE (exp) != INDIRECT_REF
5932 && TREE_CODE (exp) != VAR_DECL
5933 && TREE_CODE (exp) != CALL_EXPR
5934 && TREE_CODE (exp) != RTL_EXPR
5935 && GET_MODE_CLASS (tmode) == MODE_INT
5936 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5937 fatal ("unsupported wide integer operation");
5939 check_max_integer_computation_mode (exp);
5940 #endif
5942 /* If will do cse, generate all results into pseudo registers
5943 since 1) that allows cse to find more things
5944 and 2) otherwise cse could produce an insn the machine
5945 cannot support. */
5947 if (! cse_not_expected && mode != BLKmode && target
5948 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5949 target = subtarget;
5951 switch (code)
5953 case LABEL_DECL:
5955 tree function = decl_function_context (exp);
5956 /* Handle using a label in a containing function. */
5957 if (function != current_function_decl
5958 && function != inline_function_decl && function != 0)
5960 struct function *p = find_function_data (function);
5961 /* Allocate in the memory associated with the function
5962 that the label is in. */
5963 push_obstacks (p->function_obstack,
5964 p->function_maybepermanent_obstack);
5966 p->expr->x_forced_labels
5967 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5968 p->expr->x_forced_labels);
5969 pop_obstacks ();
5971 else
5973 if (modifier == EXPAND_INITIALIZER)
5974 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5975 label_rtx (exp),
5976 forced_labels);
5979 temp = gen_rtx_MEM (FUNCTION_MODE,
5980 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5981 if (function != current_function_decl
5982 && function != inline_function_decl && function != 0)
5983 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5984 return temp;
5987 case PARM_DECL:
5988 if (DECL_RTL (exp) == 0)
5990 error_with_decl (exp, "prior parameter's size depends on `%s'");
5991 return CONST0_RTX (mode);
5994 /* ... fall through ... */
5996 case VAR_DECL:
5997 /* If a static var's type was incomplete when the decl was written,
5998 but the type is complete now, lay out the decl now. */
5999 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6000 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6002 push_obstacks_nochange ();
6003 end_temporary_allocation ();
6004 layout_decl (exp, 0);
6005 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6006 pop_obstacks ();
6009 /* Although static-storage variables start off initialized, according to
6010 ANSI C, a memcpy could overwrite them with uninitialized values. So
6011 we check them too. This also lets us check for read-only variables
6012 accessed via a non-const declaration, in case it won't be detected
6013 any other way (e.g., in an embedded system or OS kernel without
6014 memory protection).
6016 Aggregates are not checked here; they're handled elsewhere. */
6017 if (cfun && current_function_check_memory_usage
6018 && code == VAR_DECL
6019 && GET_CODE (DECL_RTL (exp)) == MEM
6020 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6022 enum memory_use_mode memory_usage;
6023 memory_usage = get_memory_usage_from_modifier (modifier);
6025 in_check_memory_usage = 1;
6026 if (memory_usage != MEMORY_USE_DONT)
6027 emit_library_call (chkr_check_addr_libfunc,
6028 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6029 XEXP (DECL_RTL (exp), 0), Pmode,
6030 GEN_INT (int_size_in_bytes (type)),
6031 TYPE_MODE (sizetype),
6032 GEN_INT (memory_usage),
6033 TYPE_MODE (integer_type_node));
6034 in_check_memory_usage = 0;
6037 /* ... fall through ... */
6039 case FUNCTION_DECL:
6040 case RESULT_DECL:
6041 if (DECL_RTL (exp) == 0)
6042 abort ();
6044 /* Ensure variable marked as used even if it doesn't go through
6045 a parser. If it hasn't been used yet, write out an external
6046 definition. */
6047 if (! TREE_USED (exp))
6049 assemble_external (exp);
6050 TREE_USED (exp) = 1;
6053 /* Show we haven't gotten RTL for this yet. */
6054 temp = 0;
6056 /* Handle variables inherited from containing functions. */
6057 context = decl_function_context (exp);
6059 /* We treat inline_function_decl as an alias for the current function
6060 because that is the inline function whose vars, types, etc.
6061 are being merged into the current function.
6062 See expand_inline_function. */
6064 if (context != 0 && context != current_function_decl
6065 && context != inline_function_decl
6066 /* If var is static, we don't need a static chain to access it. */
6067 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6068 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6070 rtx addr;
6072 /* Mark as non-local and addressable. */
6073 DECL_NONLOCAL (exp) = 1;
6074 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6075 abort ();
6076 mark_addressable (exp);
6077 if (GET_CODE (DECL_RTL (exp)) != MEM)
6078 abort ();
6079 addr = XEXP (DECL_RTL (exp), 0);
6080 if (GET_CODE (addr) == MEM)
6081 addr = change_address (addr, Pmode,
6082 fix_lexical_addr (XEXP (addr, 0), exp));
6083 else
6084 addr = fix_lexical_addr (addr, exp);
6086 temp = change_address (DECL_RTL (exp), mode, addr);
6089 /* This is the case of an array whose size is to be determined
6090 from its initializer, while the initializer is still being parsed.
6091 See expand_decl. */
6093 else if (GET_CODE (DECL_RTL (exp)) == MEM
6094 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6095 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
6096 XEXP (DECL_RTL (exp), 0));
6098 /* If DECL_RTL is memory, we are in the normal case and either
6099 the address is not valid or it is not a register and -fforce-addr
6100 is specified, get the address into a register. */
6102 else if (GET_CODE (DECL_RTL (exp)) == MEM
6103 && modifier != EXPAND_CONST_ADDRESS
6104 && modifier != EXPAND_SUM
6105 && modifier != EXPAND_INITIALIZER
6106 && (! memory_address_p (DECL_MODE (exp),
6107 XEXP (DECL_RTL (exp), 0))
6108 || (flag_force_addr
6109 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6110 temp = change_address (DECL_RTL (exp), VOIDmode,
6111 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6113 /* If we got something, return it. But first, set the alignment
6114 if the address is a register. */
6115 if (temp != 0)
6117 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6118 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6120 return temp;
6123 /* If the mode of DECL_RTL does not match that of the decl, it
6124 must be a promoted value. We return a SUBREG of the wanted mode,
6125 but mark it so that we know that it was already extended. */
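      /* Illustrative example (assumption): on a target whose PROMOTE_MODE
	 widens QImode variables into SImode pseudos, DECL_RTL here is an
	 SImode REG while the decl's type has QImode.  The code below then
	 returns (subreg:QI (reg:SI n) 0) with SUBREG_PROMOTED_VAR_P set, so
	 later users know the register already holds a properly extended
	 value.  */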
6127 if (GET_CODE (DECL_RTL (exp)) == REG
6128 && GET_MODE (DECL_RTL (exp)) != mode)
6130 /* Get the signedness used for this variable. Ensure we get the
6131 same mode we got when the variable was declared. */
6132 if (GET_MODE (DECL_RTL (exp))
6133 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6134 abort ();
6136 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
6137 SUBREG_PROMOTED_VAR_P (temp) = 1;
6138 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6139 return temp;
6142 return DECL_RTL (exp);
6144 case INTEGER_CST:
6145 return immed_double_const (TREE_INT_CST_LOW (exp),
6146 TREE_INT_CST_HIGH (exp), mode);
6148 case CONST_DECL:
6149 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6150 EXPAND_MEMORY_USE_BAD);
6152 case REAL_CST:
6153 /* If optimized, generate immediate CONST_DOUBLE
6154 which will be turned into memory by reload if necessary.
6156 We used to force a register so that loop.c could see it. But
6157 this does not allow gen_* patterns to perform optimizations with
6158 the constants. It also produces two insns in cases like "x = 1.0;".
6159 On most machines, floating-point constants are not permitted in
6160 many insns, so we'd end up copying it to a register in any case.
6162 Now, we do the copying in expand_binop, if appropriate. */
6163 return immed_real_const (exp);
6165 case COMPLEX_CST:
6166 case STRING_CST:
6167 if (! TREE_CST_RTL (exp))
6168 output_constant_def (exp);
6170 /* TREE_CST_RTL probably contains a constant address.
6171 On RISC machines where a constant address isn't valid,
6172 make some insns to get that address into a register. */
6173 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6174 && modifier != EXPAND_CONST_ADDRESS
6175 && modifier != EXPAND_INITIALIZER
6176 && modifier != EXPAND_SUM
6177 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6178 || (flag_force_addr
6179 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6180 return change_address (TREE_CST_RTL (exp), VOIDmode,
6181 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6182 return TREE_CST_RTL (exp);
6184 case EXPR_WITH_FILE_LOCATION:
6186 rtx to_return;
6187 const char *saved_input_filename = input_filename;
6188 int saved_lineno = lineno;
6189 input_filename = EXPR_WFL_FILENAME (exp);
6190 lineno = EXPR_WFL_LINENO (exp);
6191 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6192 emit_line_note (input_filename, lineno);
6193 /* Possibly avoid switching back and forth here. */
6194 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6195 input_filename = saved_input_filename;
6196 lineno = saved_lineno;
6197 return to_return;
6200 case SAVE_EXPR:
6201 context = decl_function_context (exp);
6203 /* If this SAVE_EXPR was at global context, assume we are an
6204 initialization function and move it into our context. */
6205 if (context == 0)
6206 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6208 /* We treat inline_function_decl as an alias for the current function
6209 because that is the inline function whose vars, types, etc.
6210 are being merged into the current function.
6211 See expand_inline_function. */
6212 if (context == current_function_decl || context == inline_function_decl)
6213 context = 0;
6215 /* If this is non-local, handle it. */
6216 if (context)
6218 /* The following call just exists to abort if the context is
6219 not of a containing function. */
6220 find_function_data (context);
6222 temp = SAVE_EXPR_RTL (exp);
6223 if (temp && GET_CODE (temp) == REG)
6225 put_var_into_stack (exp);
6226 temp = SAVE_EXPR_RTL (exp);
6228 if (temp == 0 || GET_CODE (temp) != MEM)
6229 abort ();
6230 return change_address (temp, mode,
6231 fix_lexical_addr (XEXP (temp, 0), exp));
6233 if (SAVE_EXPR_RTL (exp) == 0)
6235 if (mode == VOIDmode)
6236 temp = const0_rtx;
6237 else
6238 temp = assign_temp (type, 3, 0, 0);
6240 SAVE_EXPR_RTL (exp) = temp;
6241 if (!optimize && GET_CODE (temp) == REG)
6242 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6243 save_expr_regs);
6245 /* If the mode of TEMP does not match that of the expression, it
6246 must be a promoted value. We pass store_expr a SUBREG of the
6247 wanted mode but mark it so that we know that it was already
6248 extended. Note that `unsignedp' was modified above in
6249 this case. */
6251 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6253 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6254 SUBREG_PROMOTED_VAR_P (temp) = 1;
6255 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6258 if (temp == const0_rtx)
6259 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6260 EXPAND_MEMORY_USE_BAD);
6261 else
6262 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6264 TREE_USED (exp) = 1;
6267 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6268 must be a promoted value. We return a SUBREG of the wanted mode,
6269 but mark it so that we know that it was already extended. */
6271 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6272 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6274 /* Compute the signedness and make the proper SUBREG. */
6275 promote_mode (type, mode, &unsignedp, 0);
6276 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6277 SUBREG_PROMOTED_VAR_P (temp) = 1;
6278 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6279 return temp;
6282 return SAVE_EXPR_RTL (exp);
6284 case UNSAVE_EXPR:
6286 rtx temp;
6287 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6288 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6289 return temp;
6292 case PLACEHOLDER_EXPR:
6294 tree placeholder_expr;
6296 /* If there is an object on the head of the placeholder list,
6297 see if some object in it is of type TYPE or a pointer to it. For
6298 further information, see tree.def. */
6299 for (placeholder_expr = placeholder_list;
6300 placeholder_expr != 0;
6301 placeholder_expr = TREE_CHAIN (placeholder_expr))
6303 tree need_type = TYPE_MAIN_VARIANT (type);
6304 tree object = 0;
6305 tree old_list = placeholder_list;
6306 tree elt;
6308 /* Find the outermost reference that is of the type we want.
6309 If none, see if any object has a type that is a pointer to
6310 the type we want. */
6311 for (elt = TREE_PURPOSE (placeholder_expr);
6312 elt != 0 && object == 0;
6314 = ((TREE_CODE (elt) == COMPOUND_EXPR
6315 || TREE_CODE (elt) == COND_EXPR)
6316 ? TREE_OPERAND (elt, 1)
6317 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6318 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6319 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6320 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6321 ? TREE_OPERAND (elt, 0) : 0))
6322 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6323 object = elt;
6325 for (elt = TREE_PURPOSE (placeholder_expr);
6326 elt != 0 && object == 0;
6328 = ((TREE_CODE (elt) == COMPOUND_EXPR
6329 || TREE_CODE (elt) == COND_EXPR)
6330 ? TREE_OPERAND (elt, 1)
6331 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6332 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6333 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6334 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6335 ? TREE_OPERAND (elt, 0) : 0))
6336 if (POINTER_TYPE_P (TREE_TYPE (elt))
6337 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6338 == need_type))
6339 object = build1 (INDIRECT_REF, need_type, elt);
6341 if (object != 0)
6343 /* Expand this object skipping the list entries before
6344 it was found in case it is also a PLACEHOLDER_EXPR.
6345 In that case, we want to translate it using subsequent
6346 entries. */
6347 placeholder_list = TREE_CHAIN (placeholder_expr);
6348 temp = expand_expr (object, original_target, tmode,
6349 ro_modifier);
6350 placeholder_list = old_list;
6351 return temp;
6356 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6357 abort ();
6359 case WITH_RECORD_EXPR:
6360 /* Put the object on the placeholder list, expand our first operand,
6361 and pop the list. */
6362 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6363 placeholder_list);
6364 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6365 tmode, ro_modifier);
6366 placeholder_list = TREE_CHAIN (placeholder_list);
6367 return target;
6369 case GOTO_EXPR:
6370 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6371 expand_goto (TREE_OPERAND (exp, 0));
6372 else
6373 expand_computed_goto (TREE_OPERAND (exp, 0));
6374 return const0_rtx;
6376 case EXIT_EXPR:
6377 expand_exit_loop_if_false (NULL_PTR,
6378 invert_truthvalue (TREE_OPERAND (exp, 0)));
6379 return const0_rtx;
6381 case LABELED_BLOCK_EXPR:
6382 if (LABELED_BLOCK_BODY (exp))
6383 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6384 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6385 return const0_rtx;
6387 case EXIT_BLOCK_EXPR:
6388 if (EXIT_BLOCK_RETURN (exp))
6389 sorry ("returned value in block_exit_expr");
6390 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6391 return const0_rtx;
6393 case LOOP_EXPR:
6394 push_temp_slots ();
6395 expand_start_loop (1);
6396 expand_expr_stmt (TREE_OPERAND (exp, 0));
6397 expand_end_loop ();
6398 pop_temp_slots ();
6400 return const0_rtx;
6402 case BIND_EXPR:
6404 tree vars = TREE_OPERAND (exp, 0);
6405 int vars_need_expansion = 0;
6407 /* Need to open a binding contour here because
6408 if there are any cleanups, they must be contained here. */
6409 expand_start_bindings (2);
6411 /* Mark the corresponding BLOCK for output in its proper place. */
6412 if (TREE_OPERAND (exp, 2) != 0
6413 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6414 insert_block (TREE_OPERAND (exp, 2));
6416 /* If VARS have not yet been expanded, expand them now. */
6417 while (vars)
6419 if (DECL_RTL (vars) == 0)
6421 vars_need_expansion = 1;
6422 expand_decl (vars);
6424 expand_decl_init (vars);
6425 vars = TREE_CHAIN (vars);
6428 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6430 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6432 return temp;
6435 case RTL_EXPR:
6436 if (RTL_EXPR_SEQUENCE (exp))
6438 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6439 abort ();
6440 emit_insns (RTL_EXPR_SEQUENCE (exp));
6441 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6443 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6444 free_temps_for_rtl_expr (exp);
6445 return RTL_EXPR_RTL (exp);
6447 case CONSTRUCTOR:
6448 /* If we don't need the result, just ensure we evaluate any
6449 subexpressions. */
6450 if (ignore)
6452 tree elt;
6453 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6454 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6455 EXPAND_MEMORY_USE_BAD);
6456 return const0_rtx;
6459 /* All elts simple constants => refer to a constant in memory. But
6460 if this is a non-BLKmode mode, let it store a field at a time
6461 since that should make a CONST_INT or CONST_DOUBLE when we
6462 fold. Likewise, if we have a target we can use, it is best to
6463 store directly into the target unless the type is large enough
6464 that memcpy will be used. If we are making an initializer and
6465 all operands are constant, put it in memory as well. */
6466 else if ((TREE_STATIC (exp)
6467 && ((mode == BLKmode
6468 && ! (target != 0 && safe_from_p (target, exp, 1)))
6469 || TREE_ADDRESSABLE (exp)
6470 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6471 && (! MOVE_BY_PIECES_P
6472 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6473 TYPE_ALIGN (type)))
6474 && ! mostly_zeros_p (exp))))
6475 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6477 rtx constructor = output_constant_def (exp);
6479 if (modifier != EXPAND_CONST_ADDRESS
6480 && modifier != EXPAND_INITIALIZER
6481 && modifier != EXPAND_SUM
6482 && (! memory_address_p (GET_MODE (constructor),
6483 XEXP (constructor, 0))
6484 || (flag_force_addr
6485 && GET_CODE (XEXP (constructor, 0)) != REG)))
6486 constructor = change_address (constructor, VOIDmode,
6487 XEXP (constructor, 0));
6488 return constructor;
6491 else
6493 /* Handle calls that pass values in multiple non-contiguous
6494 locations. The Irix 6 ABI has examples of this. */
6495 if (target == 0 || ! safe_from_p (target, exp, 1)
6496 || GET_CODE (target) == PARALLEL)
6498 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6499 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6500 else
6501 target = assign_temp (type, 0, 1, 1);
6504 if (TREE_READONLY (exp))
6506 if (GET_CODE (target) == MEM)
6507 target = copy_rtx (target);
6509 RTX_UNCHANGING_P (target) = 1;
6512 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6513 int_size_in_bytes (TREE_TYPE (exp)));
6514 return target;
6517 case INDIRECT_REF:
6519 tree exp1 = TREE_OPERAND (exp, 0);
6520 tree index;
6521 tree string = string_constant (exp1, &index);
6523 /* Try to optimize reads from const strings. */
6524 if (string
6525 && TREE_CODE (string) == STRING_CST
6526 && TREE_CODE (index) == INTEGER_CST
6527 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6528 && GET_MODE_CLASS (mode) == MODE_INT
6529 && GET_MODE_SIZE (mode) == 1
6530 && modifier != EXPAND_MEMORY_USE_WO)
6531 return
6532 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6534 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6535 op0 = memory_address (mode, op0);
6537 if (cfun && current_function_check_memory_usage
6538 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6540 enum memory_use_mode memory_usage;
6541 memory_usage = get_memory_usage_from_modifier (modifier);
6543 if (memory_usage != MEMORY_USE_DONT)
6545 in_check_memory_usage = 1;
6546 emit_library_call (chkr_check_addr_libfunc,
6547 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6548 Pmode, GEN_INT (int_size_in_bytes (type)),
6549 TYPE_MODE (sizetype),
6550 GEN_INT (memory_usage),
6551 TYPE_MODE (integer_type_node));
6552 in_check_memory_usage = 0;
6556 temp = gen_rtx_MEM (mode, op0);
6557 set_mem_attributes (temp, exp, 0);
6559 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6560 here, because, in C and C++, the fact that a location is accessed
6561 through a pointer to const does not mean that the value there can
6562 never change. Languages where it can never change should
6563 also set TREE_STATIC. */
6564 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6566 /* If we are writing to this object and its type is a record with
6567 readonly fields, we must mark it as readonly so it will
6568 conflict with readonly references to those fields. */
6569 if (modifier == EXPAND_MEMORY_USE_WO
6570 && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
6571 RTX_UNCHANGING_P (temp) = 1;
6573 return temp;
6576 case ARRAY_REF:
6577 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6578 abort ();
6581 tree array = TREE_OPERAND (exp, 0);
6582 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6583 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6584 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6585 HOST_WIDE_INT i;
6587 /* Optimize the special-case of a zero lower bound.
6589 We convert the low_bound to sizetype to avoid some problems
6590 with constant folding. (E.g. suppose the lower bound is 1,
6591 and its mode is QI. Without the conversion, (ARRAY
6592 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6593 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6595 if (! integer_zerop (low_bound))
6596 index = size_diffop (index, convert (sizetype, low_bound));
6598 /* Fold an expression like: "foo"[2].
6599 This is not done in fold so it won't happen inside &.
6600 Don't fold if this is for wide characters since it's too
6601 difficult to do correctly and this is a very rare case. */
6603 if (TREE_CODE (array) == STRING_CST
6604 && TREE_CODE (index) == INTEGER_CST
6605 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6606 && GET_MODE_CLASS (mode) == MODE_INT
6607 && GET_MODE_SIZE (mode) == 1)
6608 return
6609 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6611 /* If this is a constant index into a constant array,
6612 just get the value from the array. Handle both the cases when
6613 we have an explicit constructor and when our operand is a variable
6614 that was declared const. */
6616 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6617 && TREE_CODE (index) == INTEGER_CST
6618 && 0 > compare_tree_int (index,
6619 list_length (CONSTRUCTOR_ELTS
6620 (TREE_OPERAND (exp, 0)))))
6622 tree elem;
6624 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6625 i = TREE_INT_CST_LOW (index);
6626 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6629 if (elem)
6630 return expand_expr (fold (TREE_VALUE (elem)), target,
6631 tmode, ro_modifier);
6634 else if (optimize >= 1
6635 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6636 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6637 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6639 if (TREE_CODE (index) == INTEGER_CST)
6641 tree init = DECL_INITIAL (array);
6643 if (TREE_CODE (init) == CONSTRUCTOR)
6645 tree elem;
6647 for (elem = CONSTRUCTOR_ELTS (init);
6648 (elem
6649 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6650 elem = TREE_CHAIN (elem))
6653 if (elem)
6654 return expand_expr (fold (TREE_VALUE (elem)), target,
6655 tmode, ro_modifier);
6657 else if (TREE_CODE (init) == STRING_CST
6658 && 0 > compare_tree_int (index,
6659 TREE_STRING_LENGTH (init)))
6661 tree type = TREE_TYPE (TREE_TYPE (init));
6662 enum machine_mode mode = TYPE_MODE (type);
6664 if (GET_MODE_CLASS (mode) == MODE_INT
6665 && GET_MODE_SIZE (mode) == 1)
6666 return (GEN_INT
6667 (TREE_STRING_POINTER
6668 (init)[TREE_INT_CST_LOW (index)]));
6673 /* Fall through. */
6675 case COMPONENT_REF:
6676 case BIT_FIELD_REF:
6677 /* If the operand is a CONSTRUCTOR, we can just extract the
6678 appropriate field if it is present. Don't do this if we have
6679 already written the data since we want to refer to that copy
6680 and varasm.c assumes that's what we'll do. */
6681 if (code != ARRAY_REF
6682 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6683 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6685 tree elt;
6687 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6688 elt = TREE_CHAIN (elt))
6689 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6690 /* We can normally use the value of the field in the
6691 CONSTRUCTOR. However, if this is a bitfield in
6692 an integral mode that we can fit in a HOST_WIDE_INT,
6693 we must mask only the number of bits in the bitfield,
6694 since this is done implicitly by the constructor. If
6695 the bitfield does not meet either of those conditions,
6696 we can't do this optimization. */
6697 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6698 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6699 == MODE_INT)
6700 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6701 <= HOST_BITS_PER_WIDE_INT))))
6703 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6704 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6706 HOST_WIDE_INT bitsize
6707 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6709 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6711 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6712 op0 = expand_and (op0, op1, target);
6714 else
6716 enum machine_mode imode
6717 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6718 tree count
6719 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6722 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6723 target, 0);
6724 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6725 target, 0);
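		  /* Illustrative example (assumption): for a signed 3-bit
		     field whose declared type has SImode (e.g. `int x : 3'
		     on a 32-bit target), COUNT is 32 - 3 = 29, so the pair
		     of shifts above moves the field to the top of the word
		     and back down arithmetically, sign-extending it into
		     OP0.  */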
6729 return op0;
6734 enum machine_mode mode1;
6735 HOST_WIDE_INT bitsize, bitpos;
6736 tree offset;
6737 int volatilep = 0;
6738 unsigned int alignment;
6739 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6740 &mode1, &unsignedp, &volatilep,
6741 &alignment);
6743 /* If we got back the original object, something is wrong. Perhaps
6744 we are evaluating an expression too early. In any event, don't
6745 infinitely recurse. */
6746 if (tem == exp)
6747 abort ();
6749 /* If TEM's type is a union of variable size, pass TARGET to the inner
6750 computation, since it will need a temporary and TARGET is known
6751 to be adequate. This occurs in unchecked conversion in Ada. */
6753 op0 = expand_expr (tem,
6754 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6755 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6756 != INTEGER_CST)
6757 ? target : NULL_RTX),
6758 VOIDmode,
6759 (modifier == EXPAND_INITIALIZER
6760 || modifier == EXPAND_CONST_ADDRESS)
6761 ? modifier : EXPAND_NORMAL);
6763 /* If this is a constant, put it into a register if it is a
6764 legitimate constant and OFFSET is 0 and memory if it isn't. */
6765 if (CONSTANT_P (op0))
6767 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6768 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6769 && offset == 0)
6770 op0 = force_reg (mode, op0);
6771 else
6772 op0 = validize_mem (force_const_mem (mode, op0));
6775 if (offset != 0)
6777 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6779 /* If this object is in memory, put it into a register.
6780 This case can't occur in C, but can in Ada if we have
6781 unchecked conversion of an expression from a scalar type to
6782 an array or record type. */
6783 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6784 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6786 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
6788 mark_temp_addr_taken (memloc);
6789 emit_move_insn (memloc, op0);
6790 op0 = memloc;
6793 if (GET_CODE (op0) != MEM)
6794 abort ();
6796 if (GET_MODE (offset_rtx) != ptr_mode)
6798 #ifdef POINTERS_EXTEND_UNSIGNED
6799 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6800 #else
6801 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6802 #endif
6805 /* A constant address in OP0 can have VOIDmode; we must not try
6806 to call force_reg in that case, so avoid it. */
6807 if (GET_CODE (op0) == MEM
6808 && GET_MODE (op0) == BLKmode
6809 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6810 && bitsize != 0
6811 && (bitpos % bitsize) == 0
6812 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6813 && alignment == GET_MODE_ALIGNMENT (mode1))
6815 rtx temp = change_address (op0, mode1,
6816 plus_constant (XEXP (op0, 0),
6817 (bitpos /
6818 BITS_PER_UNIT)));
6819 if (GET_CODE (XEXP (temp, 0)) == REG)
6820 op0 = temp;
6821 else
6822 op0 = change_address (op0, mode1,
6823 force_reg (GET_MODE (XEXP (temp, 0)),
6824 XEXP (temp, 0)));
6825 bitpos = 0;
6828 op0 = change_address (op0, VOIDmode,
6829 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6830 force_reg (ptr_mode,
6831 offset_rtx)));
6834 /* Don't forget about volatility even if this is a bitfield. */
6835 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6837 op0 = copy_rtx (op0);
6838 MEM_VOLATILE_P (op0) = 1;
6841 /* Check the access. */
6842 if (cfun != 0 && current_function_check_memory_usage
6843 && GET_CODE (op0) == MEM)
6845 enum memory_use_mode memory_usage;
6846 memory_usage = get_memory_usage_from_modifier (modifier);
6848 if (memory_usage != MEMORY_USE_DONT)
6850 rtx to;
6851 int size;
6853 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6854 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6856 /* Check the access right of the pointer. */
6857 in_check_memory_usage = 1;
6858 if (size > BITS_PER_UNIT)
6859 emit_library_call (chkr_check_addr_libfunc,
6860 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
6861 Pmode, GEN_INT (size / BITS_PER_UNIT),
6862 TYPE_MODE (sizetype),
6863 GEN_INT (memory_usage),
6864 TYPE_MODE (integer_type_node));
6865 in_check_memory_usage = 0;
6869 /* In cases where an aligned union has an unaligned object
6870 as a field, we might be extracting a BLKmode value from
6871 an integer-mode (e.g., SImode) object. Handle this case
6872 by doing the extract into an object as wide as the field
6873 (which we know to be the width of a basic mode), then
6874 storing into memory, and changing the mode to BLKmode.
6875 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6876 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6877 if (mode1 == VOIDmode
6878 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6879 || (modifier != EXPAND_CONST_ADDRESS
6880 && modifier != EXPAND_INITIALIZER
6881 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6882 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6883 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6884 /* If the field isn't aligned enough to fetch as a memref,
6885 fetch it as a bit field. */
6886 || (mode1 != BLKmode
6887 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
6888 && ((TYPE_ALIGN (TREE_TYPE (tem))
6889 < GET_MODE_ALIGNMENT (mode))
6890 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6891 /* If the type and the field are a constant size and the
6892 size of the type isn't the same size as the bitfield,
6893 we must use bitfield operations. */
6894 || ((bitsize >= 0
6895 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6896 == INTEGER_CST)
6897 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6898 bitsize)))))
6899 || (modifier != EXPAND_CONST_ADDRESS
6900 && modifier != EXPAND_INITIALIZER
6901 && mode == BLKmode
6902 && SLOW_UNALIGNED_ACCESS (mode, alignment)
6903 && (TYPE_ALIGN (type) > alignment
6904 || bitpos % TYPE_ALIGN (type) != 0)))
6906 enum machine_mode ext_mode = mode;
6908 if (ext_mode == BLKmode
6909 && ! (target != 0 && GET_CODE (op0) == MEM
6910 && GET_CODE (target) == MEM
6911 && bitpos % BITS_PER_UNIT == 0))
6912 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6914 if (ext_mode == BLKmode)
6916 /* In this case, BITPOS must start at a byte boundary and
6917 TARGET, if specified, must be a MEM. */
6918 if (GET_CODE (op0) != MEM
6919 || (target != 0 && GET_CODE (target) != MEM)
6920 || bitpos % BITS_PER_UNIT != 0)
6921 abort ();
6923 op0 = change_address (op0, VOIDmode,
6924 plus_constant (XEXP (op0, 0),
6925 bitpos / BITS_PER_UNIT));
6926 if (target == 0)
6927 target = assign_temp (type, 0, 1, 1);
6929 emit_block_move (target, op0,
6930 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6931 / BITS_PER_UNIT),
6932 BITS_PER_UNIT);
6934 return target;
6937 op0 = validize_mem (op0);
6939 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6940 mark_reg_pointer (XEXP (op0, 0), alignment);
6942 op0 = extract_bit_field (op0, bitsize, bitpos,
6943 unsignedp, target, ext_mode, ext_mode,
6944 alignment,
6945 int_size_in_bytes (TREE_TYPE (tem)));
6947 /* If the result is a record type and BITSIZE is narrower than
6948 the mode of OP0, an integral mode, and this is a big endian
6949 machine, we must put the field into the high-order bits. */
6950 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6951 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6952 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6953 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6954 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6955 - bitsize),
6956 op0, 1);
6958 if (mode == BLKmode)
6960 rtx new = assign_stack_temp (ext_mode,
6961 bitsize / BITS_PER_UNIT, 0);
6963 emit_move_insn (new, op0);
6964 op0 = copy_rtx (new);
6965 PUT_MODE (op0, BLKmode);
6966 MEM_SET_IN_STRUCT_P (op0, 1);
6969 return op0;
6972 /* If the result is BLKmode, use that to access the object
6973 now as well. */
6974 if (mode == BLKmode)
6975 mode1 = BLKmode;
6977 /* Get a reference to just this component. */
6978 if (modifier == EXPAND_CONST_ADDRESS
6979 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6981 rtx new = gen_rtx_MEM (mode1,
6982 plus_constant (XEXP (op0, 0),
6983 (bitpos / BITS_PER_UNIT)));
6985 MEM_COPY_ATTRIBUTES (new, op0);
6986 op0 = new;
6988 else
6989 op0 = change_address (op0, mode1,
6990 plus_constant (XEXP (op0, 0),
6991 (bitpos / BITS_PER_UNIT)));
6993 set_mem_attributes (op0, exp, 0);
6994 if (GET_CODE (XEXP (op0, 0)) == REG)
6995 mark_reg_pointer (XEXP (op0, 0), alignment);
6997 MEM_VOLATILE_P (op0) |= volatilep;
6998 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6999 || modifier == EXPAND_CONST_ADDRESS
7000 || modifier == EXPAND_INITIALIZER)
7001 return op0;
7002 else if (target == 0)
7003 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7005 convert_move (target, op0, unsignedp);
7006 return target;
7009 /* Intended for a reference to a buffer of a file-object in Pascal.
7010 But it's not certain that a special tree code will really be
7011 necessary for these. INDIRECT_REF might work for them. */
7012 case BUFFER_REF:
7013 abort ();
7015 case IN_EXPR:
7017 /* Pascal set IN expression.
7019 Algorithm:
7020 rlo = set_low - (set_low%bits_per_word);
7021 the_word = set [ (index - rlo)/bits_per_word ];
7022 bit_index = index % bits_per_word;
7023 bitmask = 1 << bit_index;
7024 return !!(the_word & bitmask); */
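	/* Worked example for the algorithm above (assumption): taking
	   bits_per_word as 8, with set_low == 3 and index == 13 we get
	   rlo = 0, the_word = set[1], bit_index = 5 and bitmask = 0x20,
	   so the result tests bit 5 of the second byte of the set.  */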
7026 tree set = TREE_OPERAND (exp, 0);
7027 tree index = TREE_OPERAND (exp, 1);
7028 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7029 tree set_type = TREE_TYPE (set);
7030 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7031 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7032 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7033 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7034 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7035 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7036 rtx setaddr = XEXP (setval, 0);
7037 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7038 rtx rlow;
7039 rtx diff, quo, rem, addr, bit, result;
7041 preexpand_calls (exp);
7043 /* If domain is empty, answer is no. Likewise if index is constant
7044 and out of bounds. */
7045 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7046 && TREE_CODE (set_low_bound) == INTEGER_CST
7047 && tree_int_cst_lt (set_high_bound, set_low_bound))
7048 || (TREE_CODE (index) == INTEGER_CST
7049 && TREE_CODE (set_low_bound) == INTEGER_CST
7050 && tree_int_cst_lt (index, set_low_bound))
7051 || (TREE_CODE (set_high_bound) == INTEGER_CST
7052 && TREE_CODE (index) == INTEGER_CST
7053 && tree_int_cst_lt (set_high_bound, index))))
7054 return const0_rtx;
7056 if (target == 0)
7057 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7059 /* If we get here, we have to generate the code for both cases
7060 (in range and out of range). */
7062 op0 = gen_label_rtx ();
7063 op1 = gen_label_rtx ();
7065 if (! (GET_CODE (index_val) == CONST_INT
7066 && GET_CODE (lo_r) == CONST_INT))
7068 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7069 GET_MODE (index_val), iunsignedp, 0, op1);
7072 if (! (GET_CODE (index_val) == CONST_INT
7073 && GET_CODE (hi_r) == CONST_INT))
7075 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7076 GET_MODE (index_val), iunsignedp, 0, op1);
7079 /* Calculate the element number of bit zero in the first word
7080 of the set. */
7081 if (GET_CODE (lo_r) == CONST_INT)
7082 rlow = GEN_INT (INTVAL (lo_r)
7083 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7084 else
7085 rlow = expand_binop (index_mode, and_optab, lo_r,
7086 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7087 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7089 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7090 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7092 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7093 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7094 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7095 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7097 addr = memory_address (byte_mode,
7098 expand_binop (index_mode, add_optab, diff,
7099 setaddr, NULL_RTX, iunsignedp,
7100 OPTAB_LIB_WIDEN));
7102 /* Extract the bit we want to examine. */
7103 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7104 gen_rtx_MEM (byte_mode, addr),
7105 make_tree (TREE_TYPE (index), rem),
7106 NULL_RTX, 1);
7107 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7108 GET_MODE (target) == byte_mode ? target : 0,
7109 1, OPTAB_LIB_WIDEN);
7111 if (result != target)
7112 convert_move (target, result, 1);
7114 /* Output the code to handle the out-of-range case. */
7115 emit_jump (op0);
7116 emit_label (op1);
7117 emit_move_insn (target, const0_rtx);
7118 emit_label (op0);
7119 return target;
7122 case WITH_CLEANUP_EXPR:
7123 if (RTL_EXPR_RTL (exp) == 0)
7125 RTL_EXPR_RTL (exp)
7126 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7127 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7129 /* That's it for this cleanup. */
7130 TREE_OPERAND (exp, 2) = 0;
7132 return RTL_EXPR_RTL (exp);
7134 case CLEANUP_POINT_EXPR:
7136 /* Start a new binding layer that will keep track of all cleanup
7137 actions to be performed. */
7138 expand_start_bindings (2);
7140 target_temp_slot_level = temp_slot_level;
7142 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7143 /* If we're going to use this value, load it up now. */
7144 if (! ignore)
7145 op0 = force_not_mem (op0);
7146 preserve_temp_slots (op0);
7147 expand_end_bindings (NULL_TREE, 0, 0);
7149 return op0;
7151 case CALL_EXPR:
7152 /* Check for a built-in function. */
7153 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7154 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7155 == FUNCTION_DECL)
7156 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7157 return expand_builtin (exp, target, subtarget, tmode, ignore);
7159 /* If this call was expanded already by preexpand_calls,
7160 just return the result we got. */
7161 if (CALL_EXPR_RTL (exp) != 0)
7162 return CALL_EXPR_RTL (exp);
7164 return expand_call (exp, target, ignore);
7166 case NON_LVALUE_EXPR:
7167 case NOP_EXPR:
7168 case CONVERT_EXPR:
7169 case REFERENCE_EXPR:
7170 if (TREE_OPERAND (exp, 0) == error_mark_node)
7171 return const0_rtx;
7173 if (TREE_CODE (type) == UNION_TYPE)
7175 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7177 /* If both input and output are BLKmode, this conversion
7178 isn't actually doing anything unless we need to make the
7179 alignment stricter. */
7180 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7181 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7182 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7183 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7184 modifier);
7186 if (target == 0)
7188 if (mode != BLKmode)
7189 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7190 else
7191 target = assign_temp (type, 0, 1, 1);
7194 if (GET_CODE (target) == MEM)
7195 /* Store data into beginning of memory target. */
7196 store_expr (TREE_OPERAND (exp, 0),
7197 change_address (target, TYPE_MODE (valtype), 0), 0);
7199 else if (GET_CODE (target) == REG)
7200 /* Store this field into a union of the proper type. */
7201 store_field (target,
7202 MIN ((int_size_in_bytes (TREE_TYPE
7203 (TREE_OPERAND (exp, 0)))
7204 * BITS_PER_UNIT),
7205 GET_MODE_BITSIZE (mode)),
7206 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7207 VOIDmode, 0, BITS_PER_UNIT,
7208 int_size_in_bytes (type), 0);
7209 else
7210 abort ();
7212 /* Return the entire union. */
7213 return target;
7216 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7218 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7219 ro_modifier);
7221 /* If the signedness of the conversion differs and OP0 is
7222 a promoted SUBREG, clear that indication since we now
7223 have to do the proper extension. */
7224 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7225 && GET_CODE (op0) == SUBREG)
7226 SUBREG_PROMOTED_VAR_P (op0) = 0;
7228 return op0;
7231 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7232 if (GET_MODE (op0) == mode)
7233 return op0;
7235 /* If OP0 is a constant, just convert it into the proper mode. */
7236 if (CONSTANT_P (op0))
7237 return
7238 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7239 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7241 if (modifier == EXPAND_INITIALIZER)
7242 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7244 if (target == 0)
7245 return
7246 convert_to_mode (mode, op0,
7247 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7248 else
7249 convert_move (target, op0,
7250 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7251 return target;
7253 case PLUS_EXPR:
7254 /* We come here from MINUS_EXPR when the second operand is a
7255 constant. */
7256 plus_expr:
7257 this_optab = add_optab;
7259 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7260 something else, make sure we add the register to the constant and
7261 then to the other thing. This case can occur during strength
7262 reduction and doing it this way will produce better code if the
7263 frame pointer or argument pointer is eliminated.
7265 fold-const.c will ensure that the constant is always in the inner
7266 PLUS_EXPR, so the only case we need to do anything about is if
7267 sp, ap, or fp is our second argument, in which case we must swap
7268 the innermost first argument and our second argument. */
7270 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7271 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7272 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7273 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7274 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7275 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7277 tree t = TREE_OPERAND (exp, 1);
7279 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7280 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7283 /* If the result is to be ptr_mode and we are adding an integer to
7284 something, we might be forming a constant. So try to use
7285 plus_constant. If it produces a sum and we can't accept it,
7286 use force_operand. This allows P = &ARR[const] to generate
7287 efficient code on machines where a SYMBOL_REF is not a valid
7288 address.
7290 If this is an EXPAND_SUM call, always return the sum. */
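      /* Illustrative example (assumption): for C source like

	     static int arr[10];
	     int *p = &arr[3];

	 the paths below let the constant offset fold in with plus_constant,
	 producing something of the form (plus (symbol_ref "arr")
	 (const_int 12)) -- assuming 4-byte ints -- rather than emitting a
	 run-time add.  */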
7291 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7292 || mode == ptr_mode)
7294 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7295 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7296 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7298 rtx constant_part;
7300 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7301 EXPAND_SUM);
7302 /* Use immed_double_const to ensure that the constant is
7303 truncated according to the mode of OP1, then sign extended
7304 to a HOST_WIDE_INT. Using the constant directly can result
7305 in non-canonical RTL in a 64x32 cross compile. */
7306 constant_part
7307 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7308 (HOST_WIDE_INT) 0,
7309 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7310 op1 = plus_constant (op1, INTVAL (constant_part));
7311 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7312 op1 = force_operand (op1, target);
7313 return op1;
7316 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7317 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7318 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7320 rtx constant_part;
7322 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7323 EXPAND_SUM);
7324 if (! CONSTANT_P (op0))
7326 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7327 VOIDmode, modifier);
7328 /* Don't go to both_summands if modifier
7329 says it's not right to return a PLUS. */
7330 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7331 goto binop2;
7332 goto both_summands;
7334 /* Use immed_double_const to ensure that the constant is
7335 truncated according to the mode of OP0, then sign extended
7336 to a HOST_WIDE_INT. Using the constant directly can result
7337 in non-canonical RTL in a 64x32 cross compile. */
7338 constant_part
7339 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7340 (HOST_WIDE_INT) 0,
7341 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7342 op0 = plus_constant (op0, INTVAL (constant_part));
7343 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7344 op0 = force_operand (op0, target);
7345 return op0;
7349 /* No sense saving up arithmetic to be done
7350 if it's all in the wrong mode to form part of an address.
7351 And force_operand won't know whether to sign-extend or
7352 zero-extend. */
7353 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7354 || mode != ptr_mode)
7355 goto binop;
7357 preexpand_calls (exp);
7358 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7359 subtarget = 0;
7361 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7362 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7364 both_summands:
7365 /* Make sure any term that's a sum with a constant comes last. */
7366 if (GET_CODE (op0) == PLUS
7367 && CONSTANT_P (XEXP (op0, 1)))
7369 temp = op0;
7370 op0 = op1;
7371 op1 = temp;
7373 /* If adding to a sum including a constant,
7374 associate it to put the constant outside. */
7375 if (GET_CODE (op1) == PLUS
7376 && CONSTANT_P (XEXP (op1, 1)))
7378 rtx constant_term = const0_rtx;
7380 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7381 if (temp != 0)
7382 op0 = temp;
7383 /* Ensure that MULT comes first if there is one. */
7384 else if (GET_CODE (op0) == MULT)
7385 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7386 else
7387 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7389 /* Let's also eliminate constants from op0 if possible. */
7390 op0 = eliminate_constant_term (op0, &constant_term);
7392 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7393 their sum should be a constant. Form it into OP1, since the
7394 result we want will then be OP0 + OP1. */
7396 temp = simplify_binary_operation (PLUS, mode, constant_term,
7397 XEXP (op1, 1));
7398 if (temp != 0)
7399 op1 = temp;
7400 else
7401 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7404 /* Put a constant term last and put a multiplication first. */
7405 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7406 temp = op1, op1 = op0, op0 = temp;
7408 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7409 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7411 case MINUS_EXPR:
7412 /* For initializers, we are allowed to return a MINUS of two
7413 symbolic constants. Here we handle all cases when both operands
7414 are constant. */
7415 /* Handle difference of two symbolic constants,
7416 for the sake of an initializer. */
7417 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7418 && really_constant_p (TREE_OPERAND (exp, 0))
7419 && really_constant_p (TREE_OPERAND (exp, 1)))
7421 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7422 VOIDmode, ro_modifier);
7423 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7424 VOIDmode, ro_modifier);
7426 /* If the last operand is a CONST_INT, use plus_constant of
7427 the negated constant. Else make the MINUS. */
7428 if (GET_CODE (op1) == CONST_INT)
7429 return plus_constant (op0, - INTVAL (op1));
7430 else
7431 return gen_rtx_MINUS (mode, op0, op1);
7433 /* Convert A - const to A + (-const). */
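/* Small worked example (values assumed): "a - 5" is rewritten as
   "a + (-5)" so the PLUS_EXPR machinery above, including plus_constant,
   can be reused.  The check just below skips the rewrite for unsigned
   types or when the negation would overflow; in that case expand_binop
   negates the constant itself.  */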
7434 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7436 tree negated = fold (build1 (NEGATE_EXPR, type,
7437 TREE_OPERAND (exp, 1)));
7439 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7440 /* If we can't negate the constant in TYPE, leave it alone and
7441 expand_binop will negate it for us. We used to try to do it
7442 here in the signed version of TYPE, but that doesn't work
7443 on POINTER_TYPEs. */;
7444 else
7446 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7447 goto plus_expr;
7450 this_optab = sub_optab;
7451 goto binop;
7453 case MULT_EXPR:
7454 preexpand_calls (exp);
7455 /* If first operand is constant, swap them.
7456 Thus the following special case checks need only
7457 check the second operand. */
7458 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7460 register tree t1 = TREE_OPERAND (exp, 0);
7461 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7462 TREE_OPERAND (exp, 1) = t1;
7465 /* Attempt to return something suitable for generating an
7466 indexed address, for machines that support that. */
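/* Hedged sketch of what the EXPAND_SUM handling below can produce
   (example values assumed): if OP0 expands to (plus x (const_int 4))
   and the multiplier is 8, the distributive law gives

       (x + 4) * 8  ==>  (plus (mult x (const_int 8)) (const_int 32))

   which is the shape an indexed addressing mode of the form
   base + index*scale + displacement wants to see.  */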
7468 if (modifier == EXPAND_SUM && mode == ptr_mode
7469 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7470 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7472 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7473 EXPAND_SUM);
7475 /* Apply distributive law if OP0 is x+c. */
7476 if (GET_CODE (op0) == PLUS
7477 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7478 return
7479 gen_rtx_PLUS
7480 (mode,
7481 gen_rtx_MULT
7482 (mode, XEXP (op0, 0),
7483 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7484 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7485 * INTVAL (XEXP (op0, 1))));
7487 if (GET_CODE (op0) != REG)
7488 op0 = force_operand (op0, NULL_RTX);
7489 if (GET_CODE (op0) != REG)
7490 op0 = copy_to_mode_reg (mode, op0);
7492 return
7493 gen_rtx_MULT (mode, op0,
7494 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7497 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7498 subtarget = 0;
7500 /* Check for multiplying things that have been extended
7501 from a narrower type. If this machine supports multiplying
7502 in that narrower type with a result in the desired type,
7503 do it that way, and avoid the explicit type-conversion. */
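/* Illustrative source-level example (assumed, not from the original
   comments): for

       short a, b;
       int prod = (int) a * (int) b;

   both operands are NOP_EXPR extensions from a narrower mode, so if the
   target provides a widening multiply handler for the result mode
   (smul_widen_optab here, since the operands are signed), the narrow
   operands are multiplied directly into the wide result, avoiding two
   explicit extensions followed by a full-width multiply.  */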
7504 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7505 && TREE_CODE (type) == INTEGER_TYPE
7506 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7507 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7508 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7509 && int_fits_type_p (TREE_OPERAND (exp, 1),
7510 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7511 /* Don't use a widening multiply if a shift will do. */
7512 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7513 > HOST_BITS_PER_WIDE_INT)
7514 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7516 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7517 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7519 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7520 /* If both operands are extended, they must either both
7521 be zero-extended or both be sign-extended. */
7522 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7524 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7526 enum machine_mode innermode
7527 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7528 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7529 ? smul_widen_optab : umul_widen_optab);
7530 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7531 ? umul_widen_optab : smul_widen_optab);
7532 if (mode == GET_MODE_WIDER_MODE (innermode))
7534 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7536 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7537 NULL_RTX, VOIDmode, 0);
7538 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7539 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7540 VOIDmode, 0);
7541 else
7542 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7543 NULL_RTX, VOIDmode, 0);
7544 goto binop2;
7546 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7547 && innermode == word_mode)
7549 rtx htem;
7550 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7551 NULL_RTX, VOIDmode, 0);
7552 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7553 op1 = convert_modes (innermode, mode,
7554 expand_expr (TREE_OPERAND (exp, 1),
7555 NULL_RTX, VOIDmode, 0),
7556 unsignedp);
7557 else
7558 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7559 NULL_RTX, VOIDmode, 0);
7560 temp = expand_binop (mode, other_optab, op0, op1, target,
7561 unsignedp, OPTAB_LIB_WIDEN);
7562 htem = expand_mult_highpart_adjust (innermode,
7563 gen_highpart (innermode, temp),
7564 op0, op1,
7565 gen_highpart (innermode, temp),
7566 unsignedp);
7567 emit_move_insn (gen_highpart (innermode, temp), htem);
7568 return temp;
7572 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7573 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7574 return expand_mult (mode, op0, op1, target, unsignedp);
7576 case TRUNC_DIV_EXPR:
7577 case FLOOR_DIV_EXPR:
7578 case CEIL_DIV_EXPR:
7579 case ROUND_DIV_EXPR:
7580 case EXACT_DIV_EXPR:
7581 preexpand_calls (exp);
7582 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7583 subtarget = 0;
7584 /* Possible optimization: compute the dividend with EXPAND_SUM;
7585 then, if the divisor is constant, we can optimize the case
7586 where some terms of the dividend have coefficients divisible by it. */
7587 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7588 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7589 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7591 case RDIV_EXPR:
7592 this_optab = flodiv_optab;
7593 goto binop;
7595 case TRUNC_MOD_EXPR:
7596 case FLOOR_MOD_EXPR:
7597 case CEIL_MOD_EXPR:
7598 case ROUND_MOD_EXPR:
7599 preexpand_calls (exp);
7600 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7601 subtarget = 0;
7602 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7603 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7604 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7606 case FIX_ROUND_EXPR:
7607 case FIX_FLOOR_EXPR:
7608 case FIX_CEIL_EXPR:
7609 abort (); /* Not used for C. */
7611 case FIX_TRUNC_EXPR:
7612 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7613 if (target == 0)
7614 target = gen_reg_rtx (mode);
7615 expand_fix (target, op0, unsignedp);
7616 return target;
7618 case FLOAT_EXPR:
7619 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7620 if (target == 0)
7621 target = gen_reg_rtx (mode);
7622 /* expand_float can't figure out what to do if FROM has VOIDmode.
7623 So give it the correct mode. With -O, cse will optimize this. */
7624 if (GET_MODE (op0) == VOIDmode)
7625 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7626 op0);
7627 expand_float (target, op0,
7628 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7629 return target;
7631 case NEGATE_EXPR:
7632 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7633 temp = expand_unop (mode, neg_optab, op0, target, 0);
7634 if (temp == 0)
7635 abort ();
7636 return temp;
7638 case ABS_EXPR:
7639 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7641 /* Handle complex values specially. */
7642 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7643 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7644 return expand_complex_abs (mode, op0, target, unsignedp);
7646 /* Unsigned abs is simply the operand. Testing here means we don't
7647 risk generating incorrect code below. */
7648 if (TREE_UNSIGNED (type))
7649 return op0;
7651 return expand_abs (mode, op0, target,
7652 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7654 case MAX_EXPR:
7655 case MIN_EXPR:
7656 target = original_target;
7657 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7658 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7659 || GET_MODE (target) != mode
7660 || (GET_CODE (target) == REG
7661 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7662 target = gen_reg_rtx (mode);
7663 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7664 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7666 /* First try to do it with a special MIN or MAX instruction.
7667 If that does not win, use a conditional jump to select the proper
7668 value. */
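/* Rough shape of the conditional-jump fallback for MAX_EXPR (a sketch,
   not the exact RTL emitted): with OP0 already copied into TARGET,

       if (TARGET >= OP1) goto done;
       TARGET = OP1;
     done:

   MIN_EXPR uses LE instead of GE, and modes too wide for a single
   compare go through do_jump_by_parts_greater_rtx below.  */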
7669 this_optab = (TREE_UNSIGNED (type)
7670 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7671 : (code == MIN_EXPR ? smin_optab : smax_optab));
7673 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7674 OPTAB_WIDEN);
7675 if (temp != 0)
7676 return temp;
7678 /* At this point, a MEM target is no longer useful; we will get better
7679 code without it. */
7681 if (GET_CODE (target) == MEM)
7682 target = gen_reg_rtx (mode);
7684 if (target != op0)
7685 emit_move_insn (target, op0);
7687 op0 = gen_label_rtx ();
7689 /* If this mode is an integer too wide to compare properly,
7690 compare word by word. Rely on cse to optimize constant cases. */
7691 if (GET_MODE_CLASS (mode) == MODE_INT
7692 && ! can_compare_p (GE, mode, ccp_jump))
7694 if (code == MAX_EXPR)
7695 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7696 target, op1, NULL_RTX, op0);
7697 else
7698 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7699 op1, target, NULL_RTX, op0);
7701 else
7703 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7704 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7705 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7706 op0);
7708 emit_move_insn (target, op1);
7709 emit_label (op0);
7710 return target;
7712 case BIT_NOT_EXPR:
7713 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7714 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7715 if (temp == 0)
7716 abort ();
7717 return temp;
7719 case FFS_EXPR:
7720 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7721 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7722 if (temp == 0)
7723 abort ();
7724 return temp;
7726 /* ??? Can optimize bitwise operations with one arg constant.
7727 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7728 and (a bitwise1 b) bitwise2 b (etc)
7729 but that is probably not worthwhile. */
7731 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7732 boolean values when we want in all cases to compute both of them. In
7733 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7734 as actual zero-or-1 values and then bitwise anding. In cases where
7735 there cannot be any side effects, better code would be made by
7736 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7737 how to recognize those cases. */
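/* Example of the distinction (hypothetical source code): in

       r = f () && g ();
       s = (a != 0) & (b != 0);

   the first line is a TRUTH_ANDIF_EXPR, where g () may be skipped; the
   second is the kind of expression handled here: each operand is
   reduced to 0 or 1 and the results are bitwise ANDed, which is why
   this case can share the and_optab path with BIT_AND_EXPR.  */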
7739 case TRUTH_AND_EXPR:
7740 case BIT_AND_EXPR:
7741 this_optab = and_optab;
7742 goto binop;
7744 case TRUTH_OR_EXPR:
7745 case BIT_IOR_EXPR:
7746 this_optab = ior_optab;
7747 goto binop;
7749 case TRUTH_XOR_EXPR:
7750 case BIT_XOR_EXPR:
7751 this_optab = xor_optab;
7752 goto binop;
7754 case LSHIFT_EXPR:
7755 case RSHIFT_EXPR:
7756 case LROTATE_EXPR:
7757 case RROTATE_EXPR:
7758 preexpand_calls (exp);
7759 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7760 subtarget = 0;
7761 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7762 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7763 unsignedp);
7765 /* Could determine the answer when only additive constants differ. Also,
7766 the addition of one can be handled by changing the condition. */
7767 case LT_EXPR:
7768 case LE_EXPR:
7769 case GT_EXPR:
7770 case GE_EXPR:
7771 case EQ_EXPR:
7772 case NE_EXPR:
7773 case UNORDERED_EXPR:
7774 case ORDERED_EXPR:
7775 case UNLT_EXPR:
7776 case UNLE_EXPR:
7777 case UNGT_EXPR:
7778 case UNGE_EXPR:
7779 case UNEQ_EXPR:
7780 preexpand_calls (exp);
7781 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7782 if (temp != 0)
7783 return temp;
7785 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
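/* Sketch of the sequence emitted below (pseudo-code, label name
   assumed):

       temp = foo;
       if (temp == 0) goto L;
       temp = 1;
     L:

   so TEMP ends up holding 0 or 1 without needing a store-flag
   instruction.  */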
7786 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7787 && original_target
7788 && GET_CODE (original_target) == REG
7789 && (GET_MODE (original_target)
7790 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7792 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7793 VOIDmode, 0);
7795 if (temp != original_target)
7796 temp = copy_to_reg (temp);
7798 op1 = gen_label_rtx ();
7799 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7800 GET_MODE (temp), unsignedp, 0, op1);
7801 emit_move_insn (temp, const1_rtx);
7802 emit_label (op1);
7803 return temp;
7806 /* If no set-flag instruction, must generate a conditional
7807 store into a temporary variable. Drop through
7808 and handle this like && and ||. */
7810 case TRUTH_ANDIF_EXPR:
7811 case TRUTH_ORIF_EXPR:
7812 if (! ignore
7813 && (target == 0 || ! safe_from_p (target, exp, 1)
7814 /* Make sure we don't have a hard reg (such as function's return
7815 value) live across basic blocks, if not optimizing. */
7816 || (!optimize && GET_CODE (target) == REG
7817 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7818 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7820 if (target)
7821 emit_clr_insn (target);
7823 op1 = gen_label_rtx ();
7824 jumpifnot (exp, op1);
7826 if (target)
7827 emit_0_to_1_insn (target);
7829 emit_label (op1);
7830 return ignore ? const0_rtx : target;
7832 case TRUTH_NOT_EXPR:
7833 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7834 /* The parser is careful to generate TRUTH_NOT_EXPR
7835 only with operands that are always zero or one. */
7836 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7837 target, 1, OPTAB_LIB_WIDEN);
7838 if (temp == 0)
7839 abort ();
7840 return temp;
7842 case COMPOUND_EXPR:
7843 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7844 emit_queue ();
7845 return expand_expr (TREE_OPERAND (exp, 1),
7846 (ignore ? const0_rtx : target),
7847 VOIDmode, 0);
7849 case COND_EXPR:
7850 /* If we would have a "singleton" (see below) were it not for a
7851 conversion in each arm, bring that conversion back out. */
7852 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7853 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7854 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7855 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7857 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7858 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7860 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7861 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7862 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7863 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7864 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7865 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7866 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7867 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7868 return expand_expr (build1 (NOP_EXPR, type,
7869 build (COND_EXPR, TREE_TYPE (true),
7870 TREE_OPERAND (exp, 0),
7871 true, false)),
7872 target, tmode, modifier);
7876 /* Note that COND_EXPRs whose type is a structure or union
7877 are required to be constructed to contain assignments of
7878 a temporary variable, so that we can evaluate them here
7879 for side effect only. If type is void, we must do likewise. */
7881 /* If an arm of the branch requires a cleanup,
7882 only that cleanup is performed. */
7884 tree singleton = 0;
7885 tree binary_op = 0, unary_op = 0;
7887 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7888 convert it to our mode, if necessary. */
7889 if (integer_onep (TREE_OPERAND (exp, 1))
7890 && integer_zerop (TREE_OPERAND (exp, 2))
7891 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7893 if (ignore)
7895 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7896 ro_modifier);
7897 return const0_rtx;
7900 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7901 if (GET_MODE (op0) == mode)
7902 return op0;
7904 if (target == 0)
7905 target = gen_reg_rtx (mode);
7906 convert_move (target, op0, unsignedp);
7907 return target;
7910 /* Check for X ? A + B : A. If we have this, we can copy A to the
7911 output and conditionally add B. Similarly for unary operations.
7912 Don't do this if X has side-effects because those side effects
7913 might affect A or B and the "?" operation is a sequence point in
7914 ANSI. (operand_equal_p tests for side effects.) */
7916 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7917 && operand_equal_p (TREE_OPERAND (exp, 2),
7918 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7919 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7920 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7921 && operand_equal_p (TREE_OPERAND (exp, 1),
7922 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7923 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7924 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7925 && operand_equal_p (TREE_OPERAND (exp, 2),
7926 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7927 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7928 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7929 && operand_equal_p (TREE_OPERAND (exp, 1),
7930 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7931 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7933 /* If we are not to produce a result, we have no target. Otherwise,
7934 if a target was specified use it; it will not be used as an
7935 intermediate target unless it is safe. If no target, use a
7936 temporary. */
7938 if (ignore)
7939 temp = 0;
7940 else if (original_target
7941 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7942 || (singleton && GET_CODE (original_target) == REG
7943 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7944 && original_target == var_rtx (singleton)))
7945 && GET_MODE (original_target) == mode
7946 #ifdef HAVE_conditional_move
7947 && (! can_conditionally_move_p (mode)
7948 || GET_CODE (original_target) == REG
7949 || TREE_ADDRESSABLE (type))
7950 #endif
7951 && ! (GET_CODE (original_target) == MEM
7952 && MEM_VOLATILE_P (original_target)))
7953 temp = original_target;
7954 else if (TREE_ADDRESSABLE (type))
7955 abort ();
7956 else
7957 temp = assign_temp (type, 0, 0, 1);
7959 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7960 do the test of X as a store-flag operation, do this as
7961 A + ((X != 0) << log C). Similarly for other simple binary
7962 operators. Only do for C == 1 if BRANCH_COST is low. */
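/* Worked example (values assumed): for "x ? a + 4 : a" the test of X is
   done as a store-flag and then shifted and added:

       a + ((x != 0) << 2)

   which yields a + 4 when x is nonzero and a + 0 otherwise, with no
   branch.  For "x ? a : a + 4" the condition is inverted first, as
   described below.  */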
7963 if (temp && singleton && binary_op
7964 && (TREE_CODE (binary_op) == PLUS_EXPR
7965 || TREE_CODE (binary_op) == MINUS_EXPR
7966 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7967 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7968 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7969 : integer_onep (TREE_OPERAND (binary_op, 1)))
7970 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7972 rtx result;
7973 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7974 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7975 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7976 : xor_optab);
7978 /* If we had X ? A : A + 1, do this as A + (X == 0).
7980 We have to invert the truth value here and then put it
7981 back later if do_store_flag fails. We cannot simply copy
7982 TREE_OPERAND (exp, 0) to another variable and modify that
7983 because invert_truthvalue can modify the tree pointed to
7984 by its argument. */
7985 if (singleton == TREE_OPERAND (exp, 1))
7986 TREE_OPERAND (exp, 0)
7987 = invert_truthvalue (TREE_OPERAND (exp, 0));
7989 result = do_store_flag (TREE_OPERAND (exp, 0),
7990 (safe_from_p (temp, singleton, 1)
7991 ? temp : NULL_RTX),
7992 mode, BRANCH_COST <= 1);
7994 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7995 result = expand_shift (LSHIFT_EXPR, mode, result,
7996 build_int_2 (tree_log2
7997 (TREE_OPERAND
7998 (binary_op, 1)),
8000 (safe_from_p (temp, singleton, 1)
8001 ? temp : NULL_RTX), 0);
8003 if (result)
8005 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8006 return expand_binop (mode, boptab, op1, result, temp,
8007 unsignedp, OPTAB_LIB_WIDEN);
8009 else if (singleton == TREE_OPERAND (exp, 1))
8010 TREE_OPERAND (exp, 0)
8011 = invert_truthvalue (TREE_OPERAND (exp, 0));
8014 do_pending_stack_adjust ();
8015 NO_DEFER_POP;
8016 op0 = gen_label_rtx ();
8018 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8020 if (temp != 0)
8022 /* If the target conflicts with the other operand of the
8023 binary op, we can't use it. Also, we can't use the target
8024 if it is a hard register, because evaluating the condition
8025 might clobber it. */
8026 if ((binary_op
8027 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8028 || (GET_CODE (temp) == REG
8029 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8030 temp = gen_reg_rtx (mode);
8031 store_expr (singleton, temp, 0);
8033 else
8034 expand_expr (singleton,
8035 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8036 if (singleton == TREE_OPERAND (exp, 1))
8037 jumpif (TREE_OPERAND (exp, 0), op0);
8038 else
8039 jumpifnot (TREE_OPERAND (exp, 0), op0);
8041 start_cleanup_deferral ();
8042 if (binary_op && temp == 0)
8043 /* Just touch the other operand. */
8044 expand_expr (TREE_OPERAND (binary_op, 1),
8045 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8046 else if (binary_op)
8047 store_expr (build (TREE_CODE (binary_op), type,
8048 make_tree (type, temp),
8049 TREE_OPERAND (binary_op, 1)),
8050 temp, 0);
8051 else
8052 store_expr (build1 (TREE_CODE (unary_op), type,
8053 make_tree (type, temp)),
8054 temp, 0);
8055 op1 = op0;
8057 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8058 comparison operator. If we have one of these cases, set the
8059 output to A, branch on A (cse will merge these two references),
8060 then set the output to FOO. */
8061 else if (temp
8062 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8063 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8064 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8065 TREE_OPERAND (exp, 1), 0)
8066 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8067 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8068 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8070 if (GET_CODE (temp) == REG
8071 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8072 temp = gen_reg_rtx (mode);
8073 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8074 jumpif (TREE_OPERAND (exp, 0), op0);
8076 start_cleanup_deferral ();
8077 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8078 op1 = op0;
8080 else if (temp
8081 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8082 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8083 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8084 TREE_OPERAND (exp, 2), 0)
8085 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8086 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8087 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8089 if (GET_CODE (temp) == REG
8090 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8091 temp = gen_reg_rtx (mode);
8092 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8093 jumpifnot (TREE_OPERAND (exp, 0), op0);
8095 start_cleanup_deferral ();
8096 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8097 op1 = op0;
8099 else
8101 op1 = gen_label_rtx ();
8102 jumpifnot (TREE_OPERAND (exp, 0), op0);
8104 start_cleanup_deferral ();
8106 /* One branch of the cond can be void, if it never returns. For
8107 example, A ? throw : E. */
8108 if (temp != 0
8109 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8110 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8111 else
8112 expand_expr (TREE_OPERAND (exp, 1),
8113 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8114 end_cleanup_deferral ();
8115 emit_queue ();
8116 emit_jump_insn (gen_jump (op1));
8117 emit_barrier ();
8118 emit_label (op0);
8119 start_cleanup_deferral ();
8120 if (temp != 0
8121 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8122 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8123 else
8124 expand_expr (TREE_OPERAND (exp, 2),
8125 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8128 end_cleanup_deferral ();
8130 emit_queue ();
8131 emit_label (op1);
8132 OK_DEFER_POP;
8134 return temp;
8137 case TARGET_EXPR:
8139 /* Something needs to be initialized, but we didn't know
8140 where that thing was when building the tree. For example,
8141 it could be the return value of a function, or a parameter
8142 to a function which lays down in the stack, or a temporary
8143 variable which must be passed by reference.
8145 We guarantee that the expression will either be constructed
8146 or copied into our original target. */
8148 tree slot = TREE_OPERAND (exp, 0);
8149 tree cleanups = NULL_TREE;
8150 tree exp1;
8152 if (TREE_CODE (slot) != VAR_DECL)
8153 abort ();
8155 if (! ignore)
8156 target = original_target;
8158 /* Set this here so that if we get a target that refers to a
8159 register variable that's already been used, put_reg_into_stack
8160 knows that it should fix up those uses. */
8161 TREE_USED (slot) = 1;
8163 if (target == 0)
8165 if (DECL_RTL (slot) != 0)
8167 target = DECL_RTL (slot);
8168 /* If we have already expanded the slot, don't do
8169 it again. (mrs) */
8170 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8171 return target;
8173 else
8175 target = assign_temp (type, 2, 0, 1);
8176 /* All temp slots at this level must not conflict. */
8177 preserve_temp_slots (target);
8178 DECL_RTL (slot) = target;
8179 if (TREE_ADDRESSABLE (slot))
8180 put_var_into_stack (slot);
8182 /* Since SLOT is not known to the called function
8183 to belong to its stack frame, we must build an explicit
8184 cleanup. This case occurs when we must build up a reference
8185 to pass the reference as an argument. In this case,
8186 it is very likely that such a reference need not be
8187 built here. */
8189 if (TREE_OPERAND (exp, 2) == 0)
8190 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8191 cleanups = TREE_OPERAND (exp, 2);
8194 else
8196 /* This case does occur when expanding a parameter which
8197 needs to be constructed on the stack. The target
8198 is the actual stack address that we want to initialize.
8199 The function we call will perform the cleanup in this case. */
8201 /* If we have already assigned it space, use that space,
8202 not target that we were passed in, as our target
8203 parameter is only a hint. */
8204 if (DECL_RTL (slot) != 0)
8206 target = DECL_RTL (slot);
8207 /* If we have already expanded the slot, don't do
8208 it again. (mrs) */
8209 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8210 return target;
8212 else
8214 DECL_RTL (slot) = target;
8215 /* If we must have an addressable slot, then make sure that
8216 the RTL that we just stored in slot is OK. */
8217 if (TREE_ADDRESSABLE (slot))
8218 put_var_into_stack (slot);
8222 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8223 /* Mark it as expanded. */
8224 TREE_OPERAND (exp, 1) = NULL_TREE;
8226 store_expr (exp1, target, 0);
8228 expand_decl_cleanup (NULL_TREE, cleanups);
8230 return target;
8233 case INIT_EXPR:
8235 tree lhs = TREE_OPERAND (exp, 0);
8236 tree rhs = TREE_OPERAND (exp, 1);
8237 tree noncopied_parts = 0;
8238 tree lhs_type = TREE_TYPE (lhs);
8240 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8241 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8242 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8243 TYPE_NONCOPIED_PARTS (lhs_type));
8244 while (noncopied_parts != 0)
8246 expand_assignment (TREE_VALUE (noncopied_parts),
8247 TREE_PURPOSE (noncopied_parts), 0, 0);
8248 noncopied_parts = TREE_CHAIN (noncopied_parts);
8250 return temp;
8253 case MODIFY_EXPR:
8255 /* If lhs is complex, expand calls in rhs before computing it.
8256 That's so we don't compute a pointer and save it over a call.
8257 If lhs is simple, compute it first so we can give it as a
8258 target if the rhs is just a call. This avoids an extra temp and copy
8259 and that prevents a partial-subsumption which makes bad code.
8260 Actually we could treat component_ref's of vars like vars. */
8262 tree lhs = TREE_OPERAND (exp, 0);
8263 tree rhs = TREE_OPERAND (exp, 1);
8264 tree noncopied_parts = 0;
8265 tree lhs_type = TREE_TYPE (lhs);
8267 temp = 0;
8269 if (TREE_CODE (lhs) != VAR_DECL
8270 && TREE_CODE (lhs) != RESULT_DECL
8271 && TREE_CODE (lhs) != PARM_DECL
8272 && ! (TREE_CODE (lhs) == INDIRECT_REF
8273 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8274 preexpand_calls (exp);
8276 /* Check for |= or &= of a bitfield of size one into another bitfield
8277 of size 1. In this case, (unless we need the result of the
8278 assignment) we can do this more efficiently with a
8279 test followed by an assignment, if necessary.
8281 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8282 things change so we do, this code should be enhanced to
8283 support it. */
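/* Hypothetical source-level example of the transformation below:

       struct s { unsigned a : 1, b : 1; } x;
       x.a |= x.b;

   when the result of the assignment is not needed, this is emitted as
   "if (x.b) x.a = 1;" (and the BIT_AND_EXPR form as "if (! x.b) x.a = 0;"),
   avoiding a read-modify-write of the destination bitfield.  */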
8284 if (ignore
8285 && TREE_CODE (lhs) == COMPONENT_REF
8286 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8287 || TREE_CODE (rhs) == BIT_AND_EXPR)
8288 && TREE_OPERAND (rhs, 0) == lhs
8289 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8290 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8291 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8293 rtx label = gen_label_rtx ();
8295 do_jump (TREE_OPERAND (rhs, 1),
8296 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8297 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8298 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8299 (TREE_CODE (rhs) == BIT_IOR_EXPR
8300 ? integer_one_node
8301 : integer_zero_node)),
8302 0, 0);
8303 do_pending_stack_adjust ();
8304 emit_label (label);
8305 return const0_rtx;
8308 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8309 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8310 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8311 TYPE_NONCOPIED_PARTS (lhs_type));
8313 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8314 while (noncopied_parts != 0)
8316 expand_assignment (TREE_PURPOSE (noncopied_parts),
8317 TREE_VALUE (noncopied_parts), 0, 0);
8318 noncopied_parts = TREE_CHAIN (noncopied_parts);
8320 return temp;
8323 case RETURN_EXPR:
8324 if (!TREE_OPERAND (exp, 0))
8325 expand_null_return ();
8326 else
8327 expand_return (TREE_OPERAND (exp, 0));
8328 return const0_rtx;
8330 case PREINCREMENT_EXPR:
8331 case PREDECREMENT_EXPR:
8332 return expand_increment (exp, 0, ignore);
8334 case POSTINCREMENT_EXPR:
8335 case POSTDECREMENT_EXPR:
8336 /* Faster to treat as pre-increment if result is not used. */
8337 return expand_increment (exp, ! ignore, ignore);
8339 case ADDR_EXPR:
8340 /* If nonzero, TEMP will be set to the address of something that might
8341 be a MEM corresponding to a stack slot. */
8342 temp = 0;
8344 /* Are we taking the address of a nested function? */
8345 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8346 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8347 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8348 && ! TREE_STATIC (exp))
8350 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8351 op0 = force_operand (op0, target);
8353 /* If we are taking the address of something erroneous, just
8354 return a zero. */
8355 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8356 return const0_rtx;
8357 else
8359 /* We make sure to pass const0_rtx down if we came in with
8360 ignore set, to avoid doing the cleanups twice for something. */
8361 op0 = expand_expr (TREE_OPERAND (exp, 0),
8362 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8363 (modifier == EXPAND_INITIALIZER
8364 ? modifier : EXPAND_CONST_ADDRESS));
8366 /* If we are going to ignore the result, OP0 will have been set
8367 to const0_rtx, so just return it. Don't get confused and
8368 think we are taking the address of the constant. */
8369 if (ignore)
8370 return op0;
8372 op0 = protect_from_queue (op0, 0);
8374 /* We would like the object in memory. If it is a constant, we can
8375 have it be statically allocated into memory. For a non-constant,
8376 we need to allocate some memory and store the value into it. */
8378 if (CONSTANT_P (op0))
8379 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8380 op0);
8381 else if (GET_CODE (op0) == MEM)
8383 mark_temp_addr_taken (op0);
8384 temp = XEXP (op0, 0);
8387 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8388 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8390 /* If this object is in a register, it must not
8391 be BLKmode. */
8392 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8393 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8395 mark_temp_addr_taken (memloc);
8396 emit_move_insn (memloc, op0);
8397 op0 = memloc;
8400 if (GET_CODE (op0) != MEM)
8401 abort ();
8403 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8405 temp = XEXP (op0, 0);
8406 #ifdef POINTERS_EXTEND_UNSIGNED
8407 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8408 && mode == ptr_mode)
8409 temp = convert_memory_address (ptr_mode, temp);
8410 #endif
8411 return temp;
8414 op0 = force_operand (XEXP (op0, 0), target);
8417 if (flag_force_addr && GET_CODE (op0) != REG)
8418 op0 = force_reg (Pmode, op0);
8420 if (GET_CODE (op0) == REG
8421 && ! REG_USERVAR_P (op0))
8422 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8424 /* If we might have had a temp slot, add an equivalent address
8425 for it. */
8426 if (temp != 0)
8427 update_temp_slot_address (temp, op0);
8429 #ifdef POINTERS_EXTEND_UNSIGNED
8430 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8431 && mode == ptr_mode)
8432 op0 = convert_memory_address (ptr_mode, op0);
8433 #endif
8435 return op0;
8437 case ENTRY_VALUE_EXPR:
8438 abort ();
8440 /* COMPLEX type for Extended Pascal & Fortran */
8441 case COMPLEX_EXPR:
8443 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8444 rtx insns;
8446 /* Get the rtx code of the operands. */
8447 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8448 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8450 if (! target)
8451 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8453 start_sequence ();
8455 /* Move the real (op0) and imaginary (op1) parts to their location. */
8456 emit_move_insn (gen_realpart (mode, target), op0);
8457 emit_move_insn (gen_imagpart (mode, target), op1);
8459 insns = get_insns ();
8460 end_sequence ();
8462 /* Complex construction should appear as a single unit. */
8463 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8464 each with a separate pseudo as destination.
8465 It's not correct for flow to treat them as a unit. */
8466 if (GET_CODE (target) != CONCAT)
8467 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8468 else
8469 emit_insns (insns);
8471 return target;
8474 case REALPART_EXPR:
8475 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8476 return gen_realpart (mode, op0);
8478 case IMAGPART_EXPR:
8479 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8480 return gen_imagpart (mode, op0);
8482 case CONJ_EXPR:
8484 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8485 rtx imag_t;
8486 rtx insns;
8488 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8490 if (! target)
8491 target = gen_reg_rtx (mode);
8493 start_sequence ();
8495 /* Store the realpart and the negated imagpart to target. */
8496 emit_move_insn (gen_realpart (partmode, target),
8497 gen_realpart (partmode, op0));
8499 imag_t = gen_imagpart (partmode, target);
8500 temp = expand_unop (partmode, neg_optab,
8501 gen_imagpart (partmode, op0), imag_t, 0);
8502 if (temp != imag_t)
8503 emit_move_insn (imag_t, temp);
8505 insns = get_insns ();
8506 end_sequence ();
8508 /* Conjugate should appear as a single unit
8509 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8510 each with a separate pseudo as destination.
8511 It's not correct for flow to treat them as a unit. */
8512 if (GET_CODE (target) != CONCAT)
8513 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8514 else
8515 emit_insns (insns);
8517 return target;
8520 case TRY_CATCH_EXPR:
8522 tree handler = TREE_OPERAND (exp, 1);
8524 expand_eh_region_start ();
8526 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8528 expand_eh_region_end (handler);
8530 return op0;
8533 case TRY_FINALLY_EXPR:
8535 tree try_block = TREE_OPERAND (exp, 0);
8536 tree finally_block = TREE_OPERAND (exp, 1);
8537 rtx finally_label = gen_label_rtx ();
8538 rtx done_label = gen_label_rtx ();
8539 rtx return_link = gen_reg_rtx (Pmode);
8540 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8541 (tree) finally_label, (tree) return_link);
8542 TREE_SIDE_EFFECTS (cleanup) = 1;
8544 /* Start a new binding layer that will keep track of all cleanup
8545 actions to be performed. */
8546 expand_start_bindings (2);
8548 target_temp_slot_level = temp_slot_level;
8550 expand_decl_cleanup (NULL_TREE, cleanup);
8551 op0 = expand_expr (try_block, target, tmode, modifier);
8553 preserve_temp_slots (op0);
8554 expand_end_bindings (NULL_TREE, 0, 0);
8555 emit_jump (done_label);
8556 emit_label (finally_label);
8557 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8558 emit_indirect_jump (return_link);
8559 emit_label (done_label);
8560 return op0;
8563 case GOTO_SUBROUTINE_EXPR:
8565 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8566 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8567 rtx return_address = gen_label_rtx ();
8568 emit_move_insn (return_link,
8569 gen_rtx_LABEL_REF (Pmode, return_address));
8570 emit_jump (subr);
8571 emit_label (return_address);
8572 return const0_rtx;
8575 case POPDCC_EXPR:
8577 rtx dcc = get_dynamic_cleanup_chain ();
8578 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8579 return const0_rtx;
8582 case POPDHC_EXPR:
8584 rtx dhc = get_dynamic_handler_chain ();
8585 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8586 return const0_rtx;
8589 case VA_ARG_EXPR:
8590 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8592 default:
8593 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8596 /* Here to do an ordinary binary operator, generating an instruction
8597 from the optab already placed in `this_optab'. */
8598 binop:
8599 preexpand_calls (exp);
8600 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8601 subtarget = 0;
8602 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8603 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8604 binop2:
8605 temp = expand_binop (mode, this_optab, op0, op1, target,
8606 unsignedp, OPTAB_LIB_WIDEN);
8607 if (temp == 0)
8608 abort ();
8609 return temp;
8612 /* Similar to expand_expr, except that we don't specify a target, target
8613 mode, or modifier and we return the alignment of the inner type. This is
8614 used in cases where it is not necessary to align the result to the
8615 alignment of its type as long as we know the alignment of the result, for
8616 example for comparisons of BLKmode values. */
8618 static rtx
8619 expand_expr_unaligned (exp, palign)
8620 register tree exp;
8621 unsigned int *palign;
8623 register rtx op0;
8624 tree type = TREE_TYPE (exp);
8625 register enum machine_mode mode = TYPE_MODE (type);
8627 /* Default the alignment we return to that of the type. */
8628 *palign = TYPE_ALIGN (type);
8630 /* The only cases in which we do anything special is if the resulting mode
8631 is BLKmode. */
8632 if (mode != BLKmode)
8633 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8635 switch (TREE_CODE (exp))
8637 case CONVERT_EXPR:
8638 case NOP_EXPR:
8639 case NON_LVALUE_EXPR:
8640 /* Conversions between BLKmode values don't change the underlying
8641 alignment or value. */
8642 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8643 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8644 break;
8646 case ARRAY_REF:
8647 /* Much of the code for this case is copied directly from expand_expr.
8648 We need to duplicate it here because we will do something different
8649 in the fall-through case, so we need to handle the same exceptions
8650 it does. */
8652 tree array = TREE_OPERAND (exp, 0);
8653 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8654 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8655 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8656 HOST_WIDE_INT i;
8658 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8659 abort ();
8661 /* Optimize the special case of a zero lower bound.
8663 We convert the low_bound to sizetype to avoid some problems
8664 with constant folding. (E.g. suppose the lower bound is 1,
8665 and its mode is QI. Without the conversion, (ARRAY
8666 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8667 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8669 if (! integer_zerop (low_bound))
8670 index = size_diffop (index, convert (sizetype, low_bound));
8672 /* If this is a constant index into a constant array,
8673 just get the value from the array. Handle both the cases when
8674 we have an explicit constructor and when our operand is a variable
8675 that was declared const. */
8677 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8678 && 0 > compare_tree_int (index,
8679 list_length (CONSTRUCTOR_ELTS
8680 (TREE_OPERAND (exp, 0)))))
8682 tree elem;
8684 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8685 i = TREE_INT_CST_LOW (index);
8686 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8689 if (elem)
8690 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8693 else if (optimize >= 1
8694 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8695 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8696 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8698 if (TREE_CODE (index) == INTEGER_CST)
8700 tree init = DECL_INITIAL (array);
8702 if (TREE_CODE (init) == CONSTRUCTOR)
8704 tree elem;
8706 for (elem = CONSTRUCTOR_ELTS (init);
8707 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8708 elem = TREE_CHAIN (elem))
8711 if (elem)
8712 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8713 palign);
8718 /* Fall through. */
8720 case COMPONENT_REF:
8721 case BIT_FIELD_REF:
8722 /* If the operand is a CONSTRUCTOR, we can just extract the
8723 appropriate field if it is present. Don't do this if we have
8724 already written the data since we want to refer to that copy
8725 and varasm.c assumes that's what we'll do. */
8726 if (TREE_CODE (exp) != ARRAY_REF
8727 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8728 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8730 tree elt;
8732 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8733 elt = TREE_CHAIN (elt))
8734 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8735 /* Note that unlike the case in expand_expr, we know this is
8736 BLKmode and hence not an integer. */
8737 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8741 enum machine_mode mode1;
8742 HOST_WIDE_INT bitsize, bitpos;
8743 tree offset;
8744 int volatilep = 0;
8745 unsigned int alignment;
8746 int unsignedp;
8747 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8748 &mode1, &unsignedp, &volatilep,
8749 &alignment);
8751 /* If we got back the original object, something is wrong. Perhaps
8752 we are evaluating an expression too early. In any event, don't
8753 infinitely recurse. */
8754 if (tem == exp)
8755 abort ();
8757 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8759 /* If this is a constant, put it into a register if it is a
8760 legitimate constant and OFFSET is 0 and memory if it isn't. */
8761 if (CONSTANT_P (op0))
8763 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8765 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8766 && offset == 0)
8767 op0 = force_reg (inner_mode, op0);
8768 else
8769 op0 = validize_mem (force_const_mem (inner_mode, op0));
8772 if (offset != 0)
8774 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8776 /* If this object is in a register, put it into memory.
8777 This case can't occur in C, but can in Ada if we have
8778 unchecked conversion of an expression from a scalar type to
8779 an array or record type. */
8780 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8781 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8783 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8785 mark_temp_addr_taken (memloc);
8786 emit_move_insn (memloc, op0);
8787 op0 = memloc;
8790 if (GET_CODE (op0) != MEM)
8791 abort ();
8793 if (GET_MODE (offset_rtx) != ptr_mode)
8795 #ifdef POINTERS_EXTEND_UNSIGNED
8796 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8797 #else
8798 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8799 #endif
8802 op0 = change_address (op0, VOIDmode,
8803 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8804 force_reg (ptr_mode,
8805 offset_rtx)));
8808 /* Don't forget about volatility even if this is a bitfield. */
8809 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8811 op0 = copy_rtx (op0);
8812 MEM_VOLATILE_P (op0) = 1;
8815 /* Check the access. */
8816 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8818 rtx to;
8819 int size;
8821 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8822 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8824 /* Check the access right of the pointer. */
8825 in_check_memory_usage = 1;
8826 if (size > BITS_PER_UNIT)
8827 emit_library_call (chkr_check_addr_libfunc,
8828 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
8829 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8830 TYPE_MODE (sizetype),
8831 GEN_INT (MEMORY_USE_RO),
8832 TYPE_MODE (integer_type_node));
8833 in_check_memory_usage = 0;
8836 /* In cases where an aligned union has an unaligned object
8837 as a field, we might be extracting a BLKmode value from
8838 an integer-mode (e.g., SImode) object. Handle this case
8839 by doing the extract into an object as wide as the field
8840 (which we know to be the width of a basic mode), then
8841 storing into memory, and changing the mode to BLKmode.
8842 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8843 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8844 if (mode1 == VOIDmode
8845 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8846 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8847 && (TYPE_ALIGN (type) > alignment
8848 || bitpos % TYPE_ALIGN (type) != 0)))
8850 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8852 if (ext_mode == BLKmode)
8854 /* In this case, BITPOS must start at a byte boundary. */
8855 if (GET_CODE (op0) != MEM
8856 || bitpos % BITS_PER_UNIT != 0)
8857 abort ();
8859 op0 = change_address (op0, VOIDmode,
8860 plus_constant (XEXP (op0, 0),
8861 bitpos / BITS_PER_UNIT));
8863 else
8865 rtx new = assign_stack_temp (ext_mode,
8866 bitsize / BITS_PER_UNIT, 0);
8868 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8869 unsignedp, NULL_RTX, ext_mode,
8870 ext_mode, alignment,
8871 int_size_in_bytes (TREE_TYPE (tem)));
8873 /* If the result is a record type and BITSIZE is narrower than
8874 the mode of OP0, an integral mode, and this is a big endian
8875 machine, we must put the field into the high-order bits. */
8876 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8877 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8878 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8879 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8880 size_int (GET_MODE_BITSIZE
8881 (GET_MODE (op0))
8882 - bitsize),
8883 op0, 1);
8885 emit_move_insn (new, op0);
8886 op0 = copy_rtx (new);
8887 PUT_MODE (op0, BLKmode);
8890 else
8891 /* Get a reference to just this component. */
8892 op0 = change_address (op0, mode1,
8893 plus_constant (XEXP (op0, 0),
8894 (bitpos / BITS_PER_UNIT)));
8896 MEM_ALIAS_SET (op0) = get_alias_set (exp);
8898 /* Adjust the alignment in case the bit position is not
8899 a multiple of the alignment of the inner object. */
8900 while (bitpos % alignment != 0)
8901 alignment >>= 1;
8903 if (GET_CODE (XEXP (op0, 0)) == REG)
8904 mark_reg_pointer (XEXP (op0, 0), alignment);
8906 MEM_IN_STRUCT_P (op0) = 1;
8907 MEM_VOLATILE_P (op0) |= volatilep;
8909 *palign = alignment;
8910 return op0;
8913 default:
8914 break;
8918 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8921 /* Return the tree node if ARG corresponds to a string constant, or zero
8922 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8923 in bytes within the string that ARG is accessing. The type of the
8924 offset will be `sizetype'. */
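/* Usage sketch (hypothetical caller): for a call such as
   strlen ("hello" + 2), the argument is a PLUS_EXPR of an ADDR_EXPR of
   a STRING_CST and the constant 2; string_constant returns the
   STRING_CST for "hello" and sets *PTR_OFFSET to a sizetype constant 2,
   which lets the builtin expanders fold the length to 3.  */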
8926 tree
8927 string_constant (arg, ptr_offset)
8928 tree arg;
8929 tree *ptr_offset;
8931 STRIP_NOPS (arg);
8933 if (TREE_CODE (arg) == ADDR_EXPR
8934 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8936 *ptr_offset = size_zero_node;
8937 return TREE_OPERAND (arg, 0);
8939 else if (TREE_CODE (arg) == PLUS_EXPR)
8941 tree arg0 = TREE_OPERAND (arg, 0);
8942 tree arg1 = TREE_OPERAND (arg, 1);
8944 STRIP_NOPS (arg0);
8945 STRIP_NOPS (arg1);
8947 if (TREE_CODE (arg0) == ADDR_EXPR
8948 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8950 *ptr_offset = convert (sizetype, arg1);
8951 return TREE_OPERAND (arg0, 0);
8953 else if (TREE_CODE (arg1) == ADDR_EXPR
8954 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8956 *ptr_offset = convert (sizetype, arg0);
8957 return TREE_OPERAND (arg1, 0);
8961 return 0;
8964 /* Expand code for a post- or pre- increment or decrement
8965 and return the RTX for the result.
8966 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8968 static rtx
8969 expand_increment (exp, post, ignore)
8970 register tree exp;
8971 int post, ignore;
8973 register rtx op0, op1;
8974 register rtx temp, value;
8975 register tree incremented = TREE_OPERAND (exp, 0);
8976 optab this_optab = add_optab;
8977 int icode;
8978 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8979 int op0_is_copy = 0;
8980 int single_insn = 0;
8981 /* 1 means we can't store into OP0 directly,
8982 because it is a subreg narrower than a word,
8983 and we don't dare clobber the rest of the word. */
8984 int bad_subreg = 0;
8986 /* Stabilize any component ref that might need to be
8987 evaluated more than once below. */
8988 if (!post
8989 || TREE_CODE (incremented) == BIT_FIELD_REF
8990 || (TREE_CODE (incremented) == COMPONENT_REF
8991 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8992 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8993 incremented = stabilize_reference (incremented);
8994 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8995 ones into save exprs so that they don't accidentally get evaluated
8996 more than once by the code below. */
8997 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8998 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8999 incremented = save_expr (incremented);
9001 /* Compute the operands as RTX.
9002 Note whether OP0 is the actual lvalue or a copy of it:
9003 I believe it is a copy iff it is a register or subreg
9004 and insns were generated in computing it. */
9006 temp = get_last_insn ();
9007 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9009 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9010 in place but instead must do sign- or zero-extension during assignment,
9011 so we copy it into a new register and let the code below use it as
9012 a copy.
9014 Note that we can safely modify this SUBREG since it is known not to be
9015 shared (it was made by the expand_expr call above). */
9017 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9019 if (post)
9020 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9021 else
9022 bad_subreg = 1;
9024 else if (GET_CODE (op0) == SUBREG
9025 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9027 /* We cannot increment this SUBREG in place. If we are
9028 post-incrementing, get a copy of the old value. Otherwise,
9029 just mark that we cannot increment in place. */
9030 if (post)
9031 op0 = copy_to_reg (op0);
9032 else
9033 bad_subreg = 1;
9036 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9037 && temp != get_last_insn ());
9038 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9039 EXPAND_MEMORY_USE_BAD);
9041 /* Decide whether incrementing or decrementing. */
9042 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9043 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9044 this_optab = sub_optab;
9046 /* Convert decrement by a constant into a negative increment. */
9047 if (this_optab == sub_optab
9048 && GET_CODE (op1) == CONST_INT)
9050 op1 = GEN_INT (-INTVAL (op1));
9051 this_optab = add_optab;
9054 /* For a preincrement, see if we can do this with a single instruction. */
9055 if (!post)
9057 icode = (int) this_optab->handlers[(int) mode].insn_code;
9058 if (icode != (int) CODE_FOR_nothing
9059 /* Make sure that OP0 is valid for operands 0 and 1
9060 of the insn we want to queue. */
9061 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9062 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9063 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9064 single_insn = 1;
9067 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9068 then we cannot just increment OP0. We must therefore contrive to
9069 increment the original value. Then, for postincrement, we can return
9070 OP0 since it is a copy of the old value. For preincrement, expand here
9071 unless we can do it with a single insn.
9073 Likewise if storing directly into OP0 would clobber high bits
9074 we need to preserve (bad_subreg). */
9075 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9077 /* This is the easiest way to increment the value wherever it is.
9078 Problems with multiple evaluation of INCREMENTED are prevented
9079 because either (1) it is a component_ref or preincrement,
9080 in which case it was stabilized above, or (2) it is an array_ref
9081 with constant index in an array in a register, which is
9082 safe to reevaluate. */
9083 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9084 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9085 ? MINUS_EXPR : PLUS_EXPR),
9086 TREE_TYPE (exp),
9087 incremented,
9088 TREE_OPERAND (exp, 1));
9090 while (TREE_CODE (incremented) == NOP_EXPR
9091 || TREE_CODE (incremented) == CONVERT_EXPR)
9093 newexp = convert (TREE_TYPE (incremented), newexp);
9094 incremented = TREE_OPERAND (incremented, 0);
9097 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9098 return post ? op0 : temp;
9101 if (post)
9103 /* We have a true reference to the value in OP0.
9104 If there is an insn to add or subtract in this mode, queue it.
9105 Queueing the increment insn avoids the register shuffling
9106 that often results if we must increment now and first save
9107 the old value for subsequent use. */
9109 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9110 op0 = stabilize (op0);
9111 #endif
9113 icode = (int) this_optab->handlers[(int) mode].insn_code;
9114 if (icode != (int) CODE_FOR_nothing
9115 /* Make sure that OP0 is valid for operands 0 and 1
9116 of the insn we want to queue. */
9117 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9118 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9120 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9121 op1 = force_reg (mode, op1);
9123 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9125 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9127 rtx addr = (general_operand (XEXP (op0, 0), mode)
9128 ? force_reg (Pmode, XEXP (op0, 0))
9129 : copy_to_reg (XEXP (op0, 0)));
9130 rtx temp, result;
9132 op0 = change_address (op0, VOIDmode, addr);
9133 temp = force_reg (GET_MODE (op0), op0);
9134 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9135 op1 = force_reg (mode, op1);
9137 /* The increment queue is LIFO, thus we have to `queue'
9138 the instructions in reverse order. */
9139 enqueue_insn (op0, gen_move_insn (op0, temp));
9140 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9141 return result;
9145 /* Preincrement, or we can't increment with one simple insn. */
9146 if (post)
9147 /* Save a copy of the value before inc or dec, to return it later. */
9148 temp = value = copy_to_reg (op0);
9149 else
9150 /* Arrange to return the incremented value. */
9151 /* Copy the rtx because expand_binop will protect from the queue,
9152 and the results of that would be invalid for us to return
9153 if our caller does emit_queue before using our result. */
9154 temp = copy_rtx (value = op0);
9156 /* Increment however we can. */
9157 op1 = expand_binop (mode, this_optab, value, op1,
9158 current_function_check_memory_usage ? NULL_RTX : op0,
9159 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9160 /* Make sure the value is stored into OP0. */
9161 if (op1 != op0)
9162 emit_move_insn (op0, op1);
9164 return temp;
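/* For illustration: for a postincrement such as `i++' whose value is used
   (POST == 1, IGNORE == 0), the function above returns a copy of the old
   value of `i' and arranges, possibly via the queue, for the store of the
   incremented value to happen afterwards; for `++i' (POST == 0) it returns
   the incremented value itself.  */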
9167 /* Expand all function calls contained within EXP, innermost ones first.
9168 But don't look within expressions that have sequence points.
9169 For each CALL_EXPR, record the rtx for its value
9170 in the CALL_EXPR_RTL field. */
9172 static void
9173 preexpand_calls (exp)
9174 tree exp;
9176 register int nops, i;
9177 int class = TREE_CODE_CLASS (TREE_CODE (exp));
9179 if (! do_preexpand_calls)
9180 return;
9182 /* Only expressions and references can contain calls. */
9184 if (! IS_EXPR_CODE_CLASS (class) && class != 'r')
9185 return;
9187 switch (TREE_CODE (exp))
9189 case CALL_EXPR:
9190 /* Do nothing if already expanded. */
9191 if (CALL_EXPR_RTL (exp) != 0
9192 /* Do nothing if the call returns a variable-sized object. */
9193 || (TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE
9194 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9195 /* Do nothing to built-in functions. */
9196 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9197 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9198 == FUNCTION_DECL)
9199 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9200 return;
9202 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9203 return;
9205 case COMPOUND_EXPR:
9206 case COND_EXPR:
9207 case TRUTH_ANDIF_EXPR:
9208 case TRUTH_ORIF_EXPR:
9209 /* If we find one of these, then we can be sure
9210 the adjust will be done for it (since it makes jumps).
9211 Do it now, so that if this is inside an argument
9212 of a function, we don't get the stack adjustment
9213 after some other args have already been pushed. */
9214 do_pending_stack_adjust ();
9215 return;
9217 case BLOCK:
9218 case RTL_EXPR:
9219 case WITH_CLEANUP_EXPR:
9220 case CLEANUP_POINT_EXPR:
9221 case TRY_CATCH_EXPR:
9222 return;
9224 case SAVE_EXPR:
9225 if (SAVE_EXPR_RTL (exp) != 0)
9226 return;
9228 default:
9229 break;
9232 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
9233 for (i = 0; i < nops; i++)
9234 if (TREE_OPERAND (exp, i) != 0)
9236 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
9237 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
9238 It doesn't happen before the call is made. */
9240 else
9242 class = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9243 if (IS_EXPR_CODE_CLASS (class) || class == 'r')
9244 preexpand_calls (TREE_OPERAND (exp, i));
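/* For illustration: given EXP = `a + f (b)' (and do_preexpand_calls
   nonzero), the recursion above reaches the CALL_EXPR for `f (b)',
   expands it, and records the result in CALL_EXPR_RTL, so that the later
   expansion of the PLUS_EXPR finds the call already expanded.  */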
9249 /* At the start of a function, record that we have no previously-pushed
9250 arguments waiting to be popped. */
9252 void
9253 init_pending_stack_adjust ()
9255 pending_stack_adjust = 0;
9258 /* When exiting from function, if safe, clear out any pending stack adjust
9259 so the adjustment won't get done.
9261 Note, if the current function calls alloca, then it must have a
9262 frame pointer regardless of the value of flag_omit_frame_pointer. */
9264 void
9265 clear_pending_stack_adjust ()
9267 #ifdef EXIT_IGNORE_STACK
9268 if (optimize > 0
9269 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9270 && EXIT_IGNORE_STACK
9271 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9272 && ! flag_inline_functions)
9274 stack_pointer_delta -= pending_stack_adjust,
9275 pending_stack_adjust = 0;
9277 #endif
9280 /* Pop any previously-pushed arguments that have not been popped yet. */
9282 void
9283 do_pending_stack_adjust ()
9285 if (inhibit_defer_pop == 0)
9287 if (pending_stack_adjust != 0)
9288 adjust_stack (GEN_INT (pending_stack_adjust));
9289 pending_stack_adjust = 0;
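/* For illustration: if earlier calls left pushed arguments on the stack,
   pending_stack_adjust holds the total number of bytes still to be popped;
   do_pending_stack_adjust pops them all with a single adjust_stack, unless
   inhibit_defer_pop is nonzero.  */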
9293 /* Expand conditional expressions. */
9295 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9296 LABEL is an rtx of code CODE_LABEL, in this function and all the
9297 functions here. */
9299 void
9300 jumpifnot (exp, label)
9301 tree exp;
9302 rtx label;
9304 do_jump (exp, label, NULL_RTX);
9307 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9309 void
9310 jumpif (exp, label)
9311 tree exp;
9312 rtx label;
9314 do_jump (exp, NULL_RTX, label);
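/* For illustration: a caller expanding `if (COND) ...' can call jumpifnot
   with the condition tree and a label placed after the THEN-clause;
   control branches to that label when COND evaluates to zero and falls
   through into the THEN-clause otherwise.  */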
9317 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9318 the result is zero, or IF_TRUE_LABEL if the result is one.
9319 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9320 meaning fall through in that case.
9322 do_jump always does any pending stack adjust except when it does not
9323 actually perform a jump. An example where there is no jump
9324 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9326 This function is responsible for optimizing cases such as
9327 &&, || and comparison operators in EXP. */
9329 void
9330 do_jump (exp, if_false_label, if_true_label)
9331 tree exp;
9332 rtx if_false_label, if_true_label;
9334 register enum tree_code code = TREE_CODE (exp);
9335 /* Some cases need to create a label to jump to
9336 in order to properly fall through.
9337 These cases set DROP_THROUGH_LABEL nonzero. */
9338 rtx drop_through_label = 0;
9339 rtx temp;
9340 int i;
9341 tree type;
9342 enum machine_mode mode;
9344 #ifdef MAX_INTEGER_COMPUTATION_MODE
9345 check_max_integer_computation_mode (exp);
9346 #endif
9348 emit_queue ();
9350 switch (code)
9352 case ERROR_MARK:
9353 break;
9355 case INTEGER_CST:
9356 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9357 if (temp)
9358 emit_jump (temp);
9359 break;
9361 #if 0
9362 /* This is not true with #pragma weak */
9363 case ADDR_EXPR:
9364 /* The address of something can never be zero. */
9365 if (if_true_label)
9366 emit_jump (if_true_label);
9367 break;
9368 #endif
9370 case NOP_EXPR:
9371 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9372 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9373 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9374 goto normal;
9375 case CONVERT_EXPR:
9376 /* If we are narrowing the operand, we have to do the compare in the
9377 narrower mode. */
9378 if ((TYPE_PRECISION (TREE_TYPE (exp))
9379 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9380 goto normal;
9381 case NON_LVALUE_EXPR:
9382 case REFERENCE_EXPR:
9383 case ABS_EXPR:
9384 case NEGATE_EXPR:
9385 case LROTATE_EXPR:
9386 case RROTATE_EXPR:
9387 /* These cannot change zero->non-zero or vice versa. */
9388 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9389 break;
9391 case WITH_RECORD_EXPR:
9392 /* Put the object on the placeholder list, recurse through our first
9393 operand, and pop the list. */
9394 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9395 placeholder_list);
9396 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9397 placeholder_list = TREE_CHAIN (placeholder_list);
9398 break;
9400 #if 0
9401 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9402 a test and can be longer if the test is eliminated. */
9403 case PLUS_EXPR:
9404 /* Reduce to minus. */
9405 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9406 TREE_OPERAND (exp, 0),
9407 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9408 TREE_OPERAND (exp, 1))));
9409 /* Process as MINUS. */
9410 #endif
9412 case MINUS_EXPR:
9413 /* Non-zero iff operands of minus differ. */
9414 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9415 TREE_OPERAND (exp, 0),
9416 TREE_OPERAND (exp, 1)),
9417 NE, NE, if_false_label, if_true_label);
9418 break;
9420 case BIT_AND_EXPR:
9421 /* If we are AND'ing with a small constant, do this comparison in the
9422 smallest type that fits. If the machine doesn't have comparisons
9423 that small, it will be converted back to the wider comparison.
9424 This helps if we are testing the sign bit of a narrower object.
9425 combine can't do this for us because it can't know whether a
9426 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9428 if (! SLOW_BYTE_ACCESS
9429 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9430 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9431 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9432 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9433 && (type = type_for_mode (mode, 1)) != 0
9434 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9435 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9436 != CODE_FOR_nothing))
9438 do_jump (convert (type, exp), if_false_label, if_true_label);
9439 break;
9441 goto normal;
9443 case TRUTH_NOT_EXPR:
9444 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9445 break;
9447 case TRUTH_ANDIF_EXPR:
9448 if (if_false_label == 0)
9449 if_false_label = drop_through_label = gen_label_rtx ();
9450 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9451 start_cleanup_deferral ();
9452 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9453 end_cleanup_deferral ();
9454 break;
9456 case TRUTH_ORIF_EXPR:
9457 if (if_true_label == 0)
9458 if_true_label = drop_through_label = gen_label_rtx ();
9459 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9460 start_cleanup_deferral ();
9461 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9462 end_cleanup_deferral ();
9463 break;
9465 case COMPOUND_EXPR:
9466 push_temp_slots ();
9467 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9468 preserve_temp_slots (NULL_RTX);
9469 free_temp_slots ();
9470 pop_temp_slots ();
9471 emit_queue ();
9472 do_pending_stack_adjust ();
9473 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9474 break;
9476 case COMPONENT_REF:
9477 case BIT_FIELD_REF:
9478 case ARRAY_REF:
9480 HOST_WIDE_INT bitsize, bitpos;
9481 int unsignedp;
9482 enum machine_mode mode;
9483 tree type;
9484 tree offset;
9485 int volatilep = 0;
9486 unsigned int alignment;
9488 /* Get description of this reference. We don't actually care
9489 about the underlying object here. */
9490 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9491 &unsignedp, &volatilep, &alignment);
9493 type = type_for_size (bitsize, unsignedp);
9494 if (! SLOW_BYTE_ACCESS
9495 && type != 0 && bitsize >= 0
9496 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9497 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9498 != CODE_FOR_nothing))
9500 do_jump (convert (type, exp), if_false_label, if_true_label);
9501 break;
9503 goto normal;
9506 case COND_EXPR:
9507 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9508 if (integer_onep (TREE_OPERAND (exp, 1))
9509 && integer_zerop (TREE_OPERAND (exp, 2)))
9510 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9512 else if (integer_zerop (TREE_OPERAND (exp, 1))
9513 && integer_onep (TREE_OPERAND (exp, 2)))
9514 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9516 else
9518 register rtx label1 = gen_label_rtx ();
9519 drop_through_label = gen_label_rtx ();
9521 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9523 start_cleanup_deferral ();
9524 /* Now the THEN-expression. */
9525 do_jump (TREE_OPERAND (exp, 1),
9526 if_false_label ? if_false_label : drop_through_label,
9527 if_true_label ? if_true_label : drop_through_label);
9528 /* In case the do_jump just above never jumps. */
9529 do_pending_stack_adjust ();
9530 emit_label (label1);
9532 /* Now the ELSE-expression. */
9533 do_jump (TREE_OPERAND (exp, 2),
9534 if_false_label ? if_false_label : drop_through_label,
9535 if_true_label ? if_true_label : drop_through_label);
9536 end_cleanup_deferral ();
9538 break;
9540 case EQ_EXPR:
9542 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9544 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9545 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9547 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9548 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9549 do_jump
9550 (fold
9551 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9552 fold (build (EQ_EXPR, TREE_TYPE (exp),
9553 fold (build1 (REALPART_EXPR,
9554 TREE_TYPE (inner_type),
9555 exp0)),
9556 fold (build1 (REALPART_EXPR,
9557 TREE_TYPE (inner_type),
9558 exp1)))),
9559 fold (build (EQ_EXPR, TREE_TYPE (exp),
9560 fold (build1 (IMAGPART_EXPR,
9561 TREE_TYPE (inner_type),
9562 exp0)),
9563 fold (build1 (IMAGPART_EXPR,
9564 TREE_TYPE (inner_type),
9565 exp1)))))),
9566 if_false_label, if_true_label);
9569 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9570 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9572 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9573 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9574 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9575 else
9576 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9577 break;
9580 case NE_EXPR:
9582 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9584 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9585 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9587 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9588 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9589 do_jump
9590 (fold
9591 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9592 fold (build (NE_EXPR, TREE_TYPE (exp),
9593 fold (build1 (REALPART_EXPR,
9594 TREE_TYPE (inner_type),
9595 exp0)),
9596 fold (build1 (REALPART_EXPR,
9597 TREE_TYPE (inner_type),
9598 exp1)))),
9599 fold (build (NE_EXPR, TREE_TYPE (exp),
9600 fold (build1 (IMAGPART_EXPR,
9601 TREE_TYPE (inner_type),
9602 exp0)),
9603 fold (build1 (IMAGPART_EXPR,
9604 TREE_TYPE (inner_type),
9605 exp1)))))),
9606 if_false_label, if_true_label);
9609 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9610 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9612 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9613 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9614 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9615 else
9616 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9617 break;
9620 case LT_EXPR:
9621 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9622 if (GET_MODE_CLASS (mode) == MODE_INT
9623 && ! can_compare_p (LT, mode, ccp_jump))
9624 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9625 else
9626 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9627 break;
9629 case LE_EXPR:
9630 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9631 if (GET_MODE_CLASS (mode) == MODE_INT
9632 && ! can_compare_p (LE, mode, ccp_jump))
9633 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9634 else
9635 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9636 break;
9638 case GT_EXPR:
9639 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9640 if (GET_MODE_CLASS (mode) == MODE_INT
9641 && ! can_compare_p (GT, mode, ccp_jump))
9642 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9643 else
9644 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9645 break;
9647 case GE_EXPR:
9648 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9649 if (GET_MODE_CLASS (mode) == MODE_INT
9650 && ! can_compare_p (GE, mode, ccp_jump))
9651 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9652 else
9653 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9654 break;
9656 case UNORDERED_EXPR:
9657 case ORDERED_EXPR:
9659 enum rtx_code cmp, rcmp;
9660 int do_rev;
9662 if (code == UNORDERED_EXPR)
9663 cmp = UNORDERED, rcmp = ORDERED;
9664 else
9665 cmp = ORDERED, rcmp = UNORDERED;
9666 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9668 do_rev = 0;
9669 if (! can_compare_p (cmp, mode, ccp_jump)
9670 && (can_compare_p (rcmp, mode, ccp_jump)
9671 /* If the target doesn't provide either UNORDERED or ORDERED
9672 comparisons, canonicalize on UNORDERED for the library. */
9673 || rcmp == UNORDERED))
9674 do_rev = 1;
9676 if (! do_rev)
9677 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9678 else
9679 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9681 break;
9684 enum rtx_code rcode1;
9685 enum tree_code tcode2;
9687 case UNLT_EXPR:
9688 rcode1 = UNLT;
9689 tcode2 = LT_EXPR;
9690 goto unordered_bcc;
9691 case UNLE_EXPR:
9692 rcode1 = UNLE;
9693 tcode2 = LE_EXPR;
9694 goto unordered_bcc;
9695 case UNGT_EXPR:
9696 rcode1 = UNGT;
9697 tcode2 = GT_EXPR;
9698 goto unordered_bcc;
9699 case UNGE_EXPR:
9700 rcode1 = UNGE;
9701 tcode2 = GE_EXPR;
9702 goto unordered_bcc;
9703 case UNEQ_EXPR:
9704 rcode1 = UNEQ;
9705 tcode2 = EQ_EXPR;
9706 goto unordered_bcc;
9708 unordered_bcc:
9709 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9710 if (can_compare_p (rcode1, mode, ccp_jump))
9711 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9712 if_true_label);
9713 else
9715 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9716 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9717 tree cmp0, cmp1;
9719 /* If the target doesn't support combined unordered
9720 compares, decompose into UNORDERED + comparison. */
9721 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9722 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9723 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9724 do_jump (exp, if_false_label, if_true_label);
9727 break;
9729 default:
9730 normal:
9731 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9732 #if 0
9733 /* This is not needed any more and causes poor code since it causes
9734 comparisons and tests from non-SI objects to have different code
9735 sequences. */
9736 /* Copy to register to avoid generating bad insns by cse
9737 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9738 if (!cse_not_expected && GET_CODE (temp) == MEM)
9739 temp = copy_to_reg (temp);
9740 #endif
9741 do_pending_stack_adjust ();
9742 /* Do any postincrements in the expression that was tested. */
9743 emit_queue ();
9745 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9747 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9748 if (target)
9749 emit_jump (target);
9751 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9752 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9753 /* Note swapping the labels gives us not-equal. */
9754 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9755 else if (GET_MODE (temp) != VOIDmode)
9756 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9757 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9758 GET_MODE (temp), NULL_RTX, 0,
9759 if_false_label, if_true_label);
9760 else
9761 abort ();
9764 if (drop_through_label)
9766 /* If do_jump produces code that might be jumped around,
9767 do any stack adjusts from that code, before the place
9768 where control merges in. */
9769 do_pending_stack_adjust ();
9770 emit_label (drop_through_label);
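/* For illustration: for EXP = `a && b' with both labels supplied, the
   TRUTH_ANDIF_EXPR case above first emits a jump to IF_FALSE_LABEL taken
   when `a' is zero, then tests `b' against both labels; no value for the
   whole expression is ever computed.  */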
9774 /* Given a comparison expression EXP for values too wide to be compared
9775 with one insn, test the comparison and jump to the appropriate label.
9776 The code of EXP is ignored; we always test GT if SWAP is 0,
9777 and LT if SWAP is 1. */
9779 static void
9780 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9781 tree exp;
9782 int swap;
9783 rtx if_false_label, if_true_label;
9785 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9786 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9787 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9788 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9790 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9793 /* Compare OP0 with OP1, word at a time, in mode MODE.
9794 UNSIGNEDP says to do unsigned comparison.
9795 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9797 void
9798 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9799 enum machine_mode mode;
9800 int unsignedp;
9801 rtx op0, op1;
9802 rtx if_false_label, if_true_label;
9804 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9805 rtx drop_through_label = 0;
9806 int i;
9808 if (! if_true_label || ! if_false_label)
9809 drop_through_label = gen_label_rtx ();
9810 if (! if_true_label)
9811 if_true_label = drop_through_label;
9812 if (! if_false_label)
9813 if_false_label = drop_through_label;
9815 /* Compare a word at a time, high order first. */
9816 for (i = 0; i < nwords; i++)
9818 rtx op0_word, op1_word;
9820 if (WORDS_BIG_ENDIAN)
9822 op0_word = operand_subword_force (op0, i, mode);
9823 op1_word = operand_subword_force (op1, i, mode);
9825 else
9827 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9828 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9831 /* All but high-order word must be compared as unsigned. */
9832 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9833 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9834 NULL_RTX, if_true_label);
9836 /* Consider lower words only if these are equal. */
9837 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9838 NULL_RTX, 0, NULL_RTX, if_false_label);
9841 if (if_false_label)
9842 emit_jump (if_false_label);
9843 if (drop_through_label)
9844 emit_label (drop_through_label);
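/* For illustration: comparing two DImode values on a 32-bit word machine,
   the loop above handles the more significant word first: it jumps to
   IF_TRUE_LABEL if that word of OP0 is greater, jumps to IF_FALSE_LABEL if
   the words otherwise differ, and only when they are equal goes on to the
   low word, which is always compared unsigned.  If every word is equal,
   the final jump goes to IF_FALSE_LABEL.  */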
9847 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9848 with one insn, test the comparison and jump to the appropriate label. */
9850 static void
9851 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9852 tree exp;
9853 rtx if_false_label, if_true_label;
9855 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9856 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9857 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9858 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9859 int i;
9860 rtx drop_through_label = 0;
9862 if (! if_false_label)
9863 drop_through_label = if_false_label = gen_label_rtx ();
9865 for (i = 0; i < nwords; i++)
9866 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9867 operand_subword_force (op1, i, mode),
9868 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9869 word_mode, NULL_RTX, 0, if_false_label,
9870 NULL_RTX);
9872 if (if_true_label)
9873 emit_jump (if_true_label);
9874 if (drop_through_label)
9875 emit_label (drop_through_label);
9878 /* Jump according to whether OP0 is 0.
9879 We assume that OP0 has an integer mode that is too wide
9880 for the available compare insns. */
9882 void
9883 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9884 rtx op0;
9885 rtx if_false_label, if_true_label;
9887 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9888 rtx part;
9889 int i;
9890 rtx drop_through_label = 0;
9892 /* The fastest way of doing this comparison on almost any machine is to
9893 "or" all the words and compare the result. If all have to be loaded
9894 from memory and this is a very wide item, it's possible this may
9895 be slower, but that's highly unlikely. */
9897 part = gen_reg_rtx (word_mode);
9898 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9899 for (i = 1; i < nwords && part != 0; i++)
9900 part = expand_binop (word_mode, ior_optab, part,
9901 operand_subword_force (op0, i, GET_MODE (op0)),
9902 part, 1, OPTAB_WIDEN);
9904 if (part != 0)
9906 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9907 NULL_RTX, 0, if_false_label, if_true_label);
9909 return;
9912 /* If we couldn't do the "or" simply, do this with a series of compares. */
9913 if (! if_false_label)
9914 drop_through_label = if_false_label = gen_label_rtx ();
9916 for (i = 0; i < nwords; i++)
9917 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9918 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9919 if_false_label, NULL_RTX);
9921 if (if_true_label)
9922 emit_jump (if_true_label);
9924 if (drop_through_label)
9925 emit_label (drop_through_label);
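/* For illustration: to test whether a DImode OP0 is zero on a 32-bit
   target, the code above IORs the low and high words into one word_mode
   register and emits a single comparison of that register against zero,
   falling back to one compare per word only if the IOR cannot be done.  */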
9928 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9929 (including code to compute the values to be compared)
9930 and set (CC0) according to the result.
9931 The decision as to signed or unsigned comparison must be made by the caller.
9933 We force a stack adjustment unless there are currently
9934 things pushed on the stack that aren't yet used.
9936 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9937 compared.
9939 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9940 size of MODE should be used. */
9942 rtx
9943 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9944 register rtx op0, op1;
9945 enum rtx_code code;
9946 int unsignedp;
9947 enum machine_mode mode;
9948 rtx size;
9949 unsigned int align;
9951 rtx tem;
9953 /* If one operand is constant, make it the second one. Only do this
9954 if the other operand is not constant as well. */
9956 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9957 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9959 tem = op0;
9960 op0 = op1;
9961 op1 = tem;
9962 code = swap_condition (code);
9965 if (flag_force_mem)
9967 op0 = force_not_mem (op0);
9968 op1 = force_not_mem (op1);
9971 do_pending_stack_adjust ();
9973 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9974 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9975 return tem;
9977 #if 0
9978 /* There's no need to do this now that combine.c can eliminate lots of
9979 sign extensions. This can be less efficient in certain cases on other
9980 machines. */
9982 /* If this is a signed equality comparison, we can do it as an
9983 unsigned comparison since zero-extension is cheaper than sign
9984 extension and comparisons with zero are done as unsigned. This is
9985 the case even on machines that can do fast sign extension, since
9986 zero-extension is easier to combine with other operations than
9987 sign-extension is. If we are comparing against a constant, we must
9988 convert it to what it would look like unsigned. */
9989 if ((code == EQ || code == NE) && ! unsignedp
9990 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9992 if (GET_CODE (op1) == CONST_INT
9993 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9994 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9995 unsignedp = 1;
9997 #endif
9999 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10001 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
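/* For illustration: a successful call to compare_from_rtx emits the
   compare insn and hands back an rtx such as (gt (cc0) (const_int 0))
   that the caller can use as the condition of a jump insn; when both
   operands are CONST_INTs the comparison may instead fold directly to
   const_true_rtx or const0_rtx.  */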
10004 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10005 The decision as to signed or unsigned comparison must be made by the caller.
10007 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10008 compared.
10010 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10011 size of MODE should be used. */
10013 void
10014 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
10015 if_false_label, if_true_label)
10016 register rtx op0, op1;
10017 enum rtx_code code;
10018 int unsignedp;
10019 enum machine_mode mode;
10020 rtx size;
10021 unsigned int align;
10022 rtx if_false_label, if_true_label;
10024 rtx tem;
10025 int dummy_true_label = 0;
10027 /* Reverse the comparison if that is safe and we want to jump if it is
10028 false. */
10029 if (! if_true_label && ! FLOAT_MODE_P (mode))
10031 if_true_label = if_false_label;
10032 if_false_label = 0;
10033 code = reverse_condition (code);
10036 /* If one operand is constant, make it the second one. Only do this
10037 if the other operand is not constant as well. */
10039 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10040 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10042 tem = op0;
10043 op0 = op1;
10044 op1 = tem;
10045 code = swap_condition (code);
10048 if (flag_force_mem)
10050 op0 = force_not_mem (op0);
10051 op1 = force_not_mem (op1);
10054 do_pending_stack_adjust ();
10056 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10057 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10059 if (tem == const_true_rtx)
10061 if (if_true_label)
10062 emit_jump (if_true_label);
10064 else
10066 if (if_false_label)
10067 emit_jump (if_false_label);
10069 return;
10072 #if 0
10073 /* There's no need to do this now that combine.c can eliminate lots of
10074 sign extensions. This can be less efficient in certain cases on other
10075 machines. */
10077 /* If this is a signed equality comparison, we can do it as an
10078 unsigned comparison since zero-extension is cheaper than sign
10079 extension and comparisons with zero are done as unsigned. This is
10080 the case even on machines that can do fast sign extension, since
10081 zero-extension is easier to combine with other operations than
10082 sign-extension is. If we are comparing against a constant, we must
10083 convert it to what it would look like unsigned. */
10084 if ((code == EQ || code == NE) && ! unsignedp
10085 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10087 if (GET_CODE (op1) == CONST_INT
10088 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10089 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10090 unsignedp = 1;
10092 #endif
10094 if (! if_true_label)
10096 dummy_true_label = 1;
10097 if_true_label = gen_label_rtx ();
10100 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10101 if_true_label);
10103 if (if_false_label)
10104 emit_jump (if_false_label);
10105 if (dummy_true_label)
10106 emit_label (if_true_label);
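/* For illustration: when only IF_FALSE_LABEL is supplied and MODE is not
   a floating-point mode, the code above reverses the condition so that a
   single branch to IF_FALSE_LABEL suffices; and when both operands are
   CONST_INTs the comparison folds away into at most one unconditional
   jump.  */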
10109 /* Generate code for a comparison expression EXP (including code to compute
10110 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10111 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10112 generated code will drop through.
10113 SIGNED_CODE should be the rtx operation for this comparison for
10114 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10116 We force a stack adjustment unless there are currently
10117 things pushed on the stack that aren't yet used. */
10119 static void
10120 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10121 if_true_label)
10122 register tree exp;
10123 enum rtx_code signed_code, unsigned_code;
10124 rtx if_false_label, if_true_label;
10126 unsigned int align0, align1;
10127 register rtx op0, op1;
10128 register tree type;
10129 register enum machine_mode mode;
10130 int unsignedp;
10131 enum rtx_code code;
10133 /* Don't crash if the comparison was erroneous. */
10134 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10135 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10136 return;
10138 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10139 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10140 mode = TYPE_MODE (type);
10141 unsignedp = TREE_UNSIGNED (type);
10142 code = unsignedp ? unsigned_code : signed_code;
10144 #ifdef HAVE_canonicalize_funcptr_for_compare
10145 /* If function pointers need to be "canonicalized" before they can
10146 be reliably compared, then canonicalize them. */
10147 if (HAVE_canonicalize_funcptr_for_compare
10148 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10149 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10150 == FUNCTION_TYPE))
10152 rtx new_op0 = gen_reg_rtx (mode);
10154 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10155 op0 = new_op0;
10158 if (HAVE_canonicalize_funcptr_for_compare
10159 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10160 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10161 == FUNCTION_TYPE))
10163 rtx new_op1 = gen_reg_rtx (mode);
10165 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10166 op1 = new_op1;
10168 #endif
10170 /* Do any postincrements in the expression that was tested. */
10171 emit_queue ();
10173 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10174 ((mode == BLKmode)
10175 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10176 MIN (align0, align1),
10177 if_false_label, if_true_label);
10180 /* Generate code to calculate EXP using a store-flag instruction
10181 and return an rtx for the result. EXP is either a comparison
10182 or a TRUTH_NOT_EXPR whose operand is a comparison.
10184 If TARGET is nonzero, store the result there if convenient.
10186 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10187 cheap.
10189 Return zero if there is no suitable set-flag instruction
10190 available on this machine.
10192 Once expand_expr has been called on the arguments of the comparison,
10193 we are committed to doing the store flag, since it is not safe to
10194 re-evaluate the expression. We emit the store-flag insn by calling
10195 emit_store_flag, but only expand the arguments if we have a reason
10196 to believe that emit_store_flag will be successful. If we think that
10197 it will, but it isn't, we have to simulate the store-flag with a
10198 set/jump/set sequence. */
10200 static rtx
10201 do_store_flag (exp, target, mode, only_cheap)
10202 tree exp;
10203 rtx target;
10204 enum machine_mode mode;
10205 int only_cheap;
10207 enum rtx_code code;
10208 tree arg0, arg1, type;
10209 tree tem;
10210 enum machine_mode operand_mode;
10211 int invert = 0;
10212 int unsignedp;
10213 rtx op0, op1;
10214 enum insn_code icode;
10215 rtx subtarget = target;
10216 rtx result, label;
10218 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10219 result at the end. We can't simply invert the test since it would
10220 have already been inverted if it were valid. This case occurs for
10221 some floating-point comparisons. */
10223 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10224 invert = 1, exp = TREE_OPERAND (exp, 0);
10226 arg0 = TREE_OPERAND (exp, 0);
10227 arg1 = TREE_OPERAND (exp, 1);
10228 type = TREE_TYPE (arg0);
10229 operand_mode = TYPE_MODE (type);
10230 unsignedp = TREE_UNSIGNED (type);
10232 /* We won't bother with BLKmode store-flag operations because it would mean
10233 passing a lot of information to emit_store_flag. */
10234 if (operand_mode == BLKmode)
10235 return 0;
10237 /* We won't bother with store-flag operations involving function pointers
10238 when function pointers must be canonicalized before comparisons. */
10239 #ifdef HAVE_canonicalize_funcptr_for_compare
10240 if (HAVE_canonicalize_funcptr_for_compare
10241 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10242 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10243 == FUNCTION_TYPE))
10244 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10245 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10246 == FUNCTION_TYPE))))
10247 return 0;
10248 #endif
10250 STRIP_NOPS (arg0);
10251 STRIP_NOPS (arg1);
10253 /* Get the rtx comparison code to use. We know that EXP is a comparison
10254 operation of some type. Some comparisons against 1 and -1 can be
10255 converted to comparisons with zero. Do so here so that the tests
10256 below will be aware that we have a comparison with zero. These
10257 tests will not catch constants in the first operand, but constants
10258 are rarely passed as the first operand. */
10260 switch (TREE_CODE (exp))
10262 case EQ_EXPR:
10263 code = EQ;
10264 break;
10265 case NE_EXPR:
10266 code = NE;
10267 break;
10268 case LT_EXPR:
10269 if (integer_onep (arg1))
10270 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10271 else
10272 code = unsignedp ? LTU : LT;
10273 break;
10274 case LE_EXPR:
10275 if (! unsignedp && integer_all_onesp (arg1))
10276 arg1 = integer_zero_node, code = LT;
10277 else
10278 code = unsignedp ? LEU : LE;
10279 break;
10280 case GT_EXPR:
10281 if (! unsignedp && integer_all_onesp (arg1))
10282 arg1 = integer_zero_node, code = GE;
10283 else
10284 code = unsignedp ? GTU : GT;
10285 break;
10286 case GE_EXPR:
10287 if (integer_onep (arg1))
10288 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10289 else
10290 code = unsignedp ? GEU : GE;
10291 break;
10293 case UNORDERED_EXPR:
10294 code = UNORDERED;
10295 break;
10296 case ORDERED_EXPR:
10297 code = ORDERED;
10298 break;
10299 case UNLT_EXPR:
10300 code = UNLT;
10301 break;
10302 case UNLE_EXPR:
10303 code = UNLE;
10304 break;
10305 case UNGT_EXPR:
10306 code = UNGT;
10307 break;
10308 case UNGE_EXPR:
10309 code = UNGE;
10310 break;
10311 case UNEQ_EXPR:
10312 code = UNEQ;
10313 break;
10315 default:
10316 abort ();
10319 /* Put a constant second. */
10320 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10322 tem = arg0; arg0 = arg1; arg1 = tem;
10323 code = swap_condition (code);
10326 /* If this is an equality or inequality test of a single bit, we can
10327 do this by shifting the bit being tested to the low-order bit and
10328 masking the result with the constant 1. If the condition was EQ,
10329 we xor it with 1. This does not require an scc insn and is faster
10330 than an scc insn even if we have it. */
10332 if ((code == NE || code == EQ)
10333 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10334 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10336 tree inner = TREE_OPERAND (arg0, 0);
10337 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10338 int ops_unsignedp;
10340 /* If INNER is a right shift of a constant and it plus BITNUM does
10341 not overflow, adjust BITNUM and INNER. */
10343 if (TREE_CODE (inner) == RSHIFT_EXPR
10344 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10345 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10346 && bitnum < TYPE_PRECISION (type)
10347 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10348 bitnum - TYPE_PRECISION (type)))
10350 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10351 inner = TREE_OPERAND (inner, 0);
10354 /* If we are going to be able to omit the AND below, we must do our
10355 operations as unsigned. If we must use the AND, we have a choice.
10356 Normally unsigned is faster, but for some machines signed is. */
10357 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10358 #ifdef LOAD_EXTEND_OP
10359 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10360 #else
10362 #endif
10365 if (! get_subtarget (subtarget)
10366 || GET_MODE (subtarget) != operand_mode
10367 || ! safe_from_p (subtarget, inner, 1))
10368 subtarget = 0;
10370 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10372 if (bitnum != 0)
10373 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10374 size_int (bitnum), subtarget, ops_unsignedp);
10376 if (GET_MODE (op0) != mode)
10377 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10379 if ((code == EQ && ! invert) || (code == NE && invert))
10380 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10381 ops_unsignedp, OPTAB_LIB_WIDEN);
10383 /* Put the AND last so it can combine with more things. */
10384 if (bitnum != TYPE_PRECISION (type) - 1)
10385 op0 = expand_and (op0, const1_rtx, subtarget);
10387 return op0;
10390 /* Now see if we are likely to be able to do this. Return if not. */
10391 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10392 return 0;
10394 icode = setcc_gen_code[(int) code];
10395 if (icode == CODE_FOR_nothing
10396 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10398 /* We can only do this if it is one of the special cases that
10399 can be handled without an scc insn. */
10400 if ((code == LT && integer_zerop (arg1))
10401 || (! only_cheap && code == GE && integer_zerop (arg1)))
10403 else if (BRANCH_COST >= 0
10404 && ! only_cheap && (code == NE || code == EQ)
10405 && TREE_CODE (type) != REAL_TYPE
10406 && ((abs_optab->handlers[(int) operand_mode].insn_code
10407 != CODE_FOR_nothing)
10408 || (ffs_optab->handlers[(int) operand_mode].insn_code
10409 != CODE_FOR_nothing)))
10411 else
10412 return 0;
10415 preexpand_calls (exp);
10416 if (! get_subtarget (target)
10417 || GET_MODE (subtarget) != operand_mode
10418 || ! safe_from_p (subtarget, arg1, 1))
10419 subtarget = 0;
10421 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10422 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10424 if (target == 0)
10425 target = gen_reg_rtx (mode);
10427 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10428 because, if emit_store_flag does anything, it will succeed and
10429 OP0 and OP1 will not be used subsequently. */
10431 result = emit_store_flag (target, code,
10432 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10433 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10434 operand_mode, unsignedp, 1);
10436 if (result)
10438 if (invert)
10439 result = expand_binop (mode, xor_optab, result, const1_rtx,
10440 result, 0, OPTAB_LIB_WIDEN);
10441 return result;
10444 /* If this failed, we have to do this with set/compare/jump/set code. */
10445 if (GET_CODE (target) != REG
10446 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10447 target = gen_reg_rtx (GET_MODE (target));
10449 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10450 result = compare_from_rtx (op0, op1, code, unsignedp,
10451 operand_mode, NULL_RTX, 0);
10452 if (GET_CODE (result) == CONST_INT)
10453 return (((result == const0_rtx && ! invert)
10454 || (result != const0_rtx && invert))
10455 ? const0_rtx : const1_rtx);
10457 label = gen_label_rtx ();
10458 if (bcc_gen_fctn[(int) code] == 0)
10459 abort ();
10461 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10462 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10463 emit_label (label);
10465 return target;
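/* For illustration: for EXP = `(x & 8) != 0' the single-bit case above
   computes roughly `(x >> 3) & 1' with shifts and masks instead of a
   store-flag insn; the EQ form adds an XOR with 1 before the final
   mask.  */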
10468 /* Generate a tablejump instruction (used for switch statements). */
10470 #ifdef HAVE_tablejump
10472 /* INDEX is the value being switched on, with the lowest value
10473 in the table already subtracted.
10474 MODE is its expected mode (needed if INDEX is constant).
10475 RANGE is the length of the jump table.
10476 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10478 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10479 index value is out of range. */
10481 void
10482 do_tablejump (index, mode, range, table_label, default_label)
10483 rtx index, range, table_label, default_label;
10484 enum machine_mode mode;
10486 register rtx temp, vector;
10488 /* Do an unsigned comparison (in the proper mode) between the index
10489 expression and the value which represents the length of the range.
10490 Since we just finished subtracting the lower bound of the range
10491 from the index expression, this comparison allows us to simultaneously
10492 check that the original index expression value is both greater than
10493 or equal to the minimum value of the range and less than or equal to
10494 the maximum value of the range. */
10496 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10497 0, default_label);
10499 /* If index is in range, it must fit in Pmode.
10500 Convert to Pmode so we can index with it. */
10501 if (mode != Pmode)
10502 index = convert_to_mode (Pmode, index, 1);
10504 /* Don't let a MEM slip thru, because then INDEX that comes
10505 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10506 and break_out_memory_refs will go to work on it and mess it up. */
10507 #ifdef PIC_CASE_VECTOR_ADDRESS
10508 if (flag_pic && GET_CODE (index) != REG)
10509 index = copy_to_mode_reg (Pmode, index);
10510 #endif
10512 /* If flag_force_addr were to affect this address
10513 it could interfere with the tricky assumptions made
10514 about addresses that contain label-refs,
10515 which may be valid only very near the tablejump itself. */
10516 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10517 GET_MODE_SIZE, because this indicates how large insns are. The other
10518 uses should all be Pmode, because they are addresses. This code
10519 could fail if addresses and insns are not the same size. */
10520 index = gen_rtx_PLUS (Pmode,
10521 gen_rtx_MULT (Pmode, index,
10522 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10523 gen_rtx_LABEL_REF (Pmode, table_label));
10524 #ifdef PIC_CASE_VECTOR_ADDRESS
10525 if (flag_pic)
10526 index = PIC_CASE_VECTOR_ADDRESS (index);
10527 else
10528 #endif
10529 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10530 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10531 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10532 RTX_UNCHANGING_P (vector) = 1;
10533 convert_move (temp, vector, 0);
10535 emit_jump_insn (gen_tablejump (temp, table_label));
10537 /* If we are generating PIC code or if the table is PC-relative, the
10538 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10539 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10540 emit_barrier ();
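/* For illustration: with a 4-byte CASE_VECTOR_MODE and an in-range index
   of 2, the code above loads the dispatch address from the case vector at
   TABLE_LABEL plus 8; indices above RANGE (including originally negative
   ones, thanks to the unsigned GTU test) branch to DEFAULT_LABEL
   instead.  */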
10543 #endif /* HAVE_tablejump */