[official-gcc.git] / gcc / expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-config.h"
34 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
35 #include "expr.h"
36 #include "recog.h"
37 #include "reload.h"
38 #include "output.h"
39 #include "typeclass.h"
40 #include "toplev.h"
41 #include "ggc.h"
42 #include "intl.h"
43 #include "tm_p.h"
45 /* Decide whether a function's arguments should be processed
46 from first to last or from last to first.
48 They should if the stack and args grow in opposite directions, but
49 only if we have push insns. */
51 #ifdef PUSH_ROUNDING
53 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
54 #define PUSH_ARGS_REVERSED /* If it's last to first. */
55 #endif
57 #endif
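/* For example, when the stack grows downward but argument addresses grow
   upward, each push lands below the previous one; pushing the last
   argument first therefore leaves the first argument at the lowest
   address, which is the layout the callee expects.  */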
59 #ifndef STACK_PUSH_CODE
60 #ifdef STACK_GROWS_DOWNWARD
61 #define STACK_PUSH_CODE PRE_DEC
62 #else
63 #define STACK_PUSH_CODE PRE_INC
64 #endif
65 #endif
67 /* Assume that case vectors are not pc-relative. */
68 #ifndef CASE_VECTOR_PC_RELATIVE
69 #define CASE_VECTOR_PC_RELATIVE 0
70 #endif
72 /* Hook called by safe_from_p for language-specific tree codes. It is
73 up to the language front-end to install a hook if it has any such
  74 codes that safe_from_p needs to know about.  Since safe_from_p will
75 recursively explore the TREE_OPERANDs of an expression, this hook
76 should not reexamine those pieces. This routine may recursively
77 call safe_from_p; it should always pass `0' as the TOP_P
78 parameter. */
79 int (*lang_safe_from_p) PARAMS ((rtx, tree));
81 /* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
86 the same indirect address eventually. */
87 int cse_not_expected;
89 /* Don't check memory usage, since code is being emitted to check a memory
90 usage. Used when current_function_check_memory_usage is true, to avoid
91 infinite recursion. */
92 static int in_check_memory_usage;
94 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
95 static tree placeholder_list = 0;
97 /* This structure is used by move_by_pieces to describe the move to
98 be performed. */
99 struct move_by_pieces
101 rtx to;
102 rtx to_addr;
103 int autinc_to;
104 int explicit_inc_to;
105 rtx from;
106 rtx from_addr;
107 int autinc_from;
108 int explicit_inc_from;
109 unsigned HOST_WIDE_INT len;
110 HOST_WIDE_INT offset;
111 int reverse;
114 /* This structure is used by store_by_pieces to describe the clear to
115 be performed. */
117 struct store_by_pieces
119 rtx to;
120 rtx to_addr;
121 int autinc_to;
122 int explicit_inc_to;
123 unsigned HOST_WIDE_INT len;
124 HOST_WIDE_INT offset;
125 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
126 PTR constfundata;
127 int reverse;
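/* In the two structures above, TO_ADDR (and FROM_ADDR for moves) hold the
   possibly register-copied addresses being advanced through the block;
   AUTINC_* is nonzero when the corresponding address already uses an
   auto-increment or auto-decrement addressing mode; EXPLICIT_INC_* is -1
   or +1 when an explicit add of the piece size must be emitted before or
   after each piece; LEN and OFFSET track the bytes remaining and the
   current position; and REVERSE means the block is processed from the
   highest address downward.  For store_by_pieces, CONSTFUN is called with
   CONSTFUNDATA, a byte offset and a mode, and returns the constant rtx to
   store at that offset.  */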
130 extern struct obstack permanent_obstack;
132 static rtx get_push_address PARAMS ((int));
134 static rtx enqueue_insn PARAMS ((rtx, rtx));
135 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
136 PARAMS ((unsigned HOST_WIDE_INT,
137 unsigned int));
138 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
139 struct move_by_pieces *));
140 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
141 enum machine_mode));
142 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
143 unsigned int));
144 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
145 unsigned int));
146 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
147 enum machine_mode,
148 struct store_by_pieces *));
149 static rtx get_subtarget PARAMS ((rtx));
150 static int is_zeros_p PARAMS ((tree));
151 static int mostly_zeros_p PARAMS ((tree));
152 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
153 HOST_WIDE_INT, enum machine_mode,
154 tree, tree, unsigned int, int,
155 int));
156 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
157 HOST_WIDE_INT));
158 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
159 HOST_WIDE_INT, enum machine_mode,
160 tree, enum machine_mode, int,
161 unsigned int, HOST_WIDE_INT, int));
162 static enum memory_use_mode
163 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
164 static tree save_noncopied_parts PARAMS ((tree, tree));
165 static tree init_noncopied_parts PARAMS ((tree, tree));
166 static int fixed_type_p PARAMS ((tree));
167 static rtx var_rtx PARAMS ((tree));
168 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
169 static rtx expand_increment PARAMS ((tree, int, int));
170 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
171 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
172 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
173 rtx, rtx));
174 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
175 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
177 /* Record for each mode whether we can move a register directly to or
178 from an object of that mode in memory. If we can't, we won't try
179 to use that mode directly when accessing a field of that mode. */
181 static char direct_load[NUM_MACHINE_MODES];
182 static char direct_store[NUM_MACHINE_MODES];
184 /* If a memory-to-memory move would take MOVE_RATIO or more simple
185 move-instruction sequences, we will do a movstr or libcall instead. */
187 #ifndef MOVE_RATIO
188 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
189 #define MOVE_RATIO 2
190 #else
191 /* If we are optimizing for space (-Os), cut down the default move ratio. */
192 #define MOVE_RATIO (optimize_size ? 3 : 15)
193 #endif
194 #endif
196 /* This macro is used to determine whether move_by_pieces should be called
197 to perform a structure copy. */
198 #ifndef MOVE_BY_PIECES_P
199 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
200 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
201 #endif
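/* As a concrete example: on a 32-bit target without a movstr pattern and
   not optimizing for size, MOVE_RATIO defaults to 15, a 32-byte aligned
   copy needs 8 word-sized moves, and MOVE_BY_PIECES_P therefore accepts
   it; a larger or badly aligned copy falls back to the library call path
   in emit_block_move.  On targets that do provide movstr patterns,
   MOVE_RATIO defaults to 2 and most copies use those patterns instead.  */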
203 /* This array records the insn_code of insns to perform block moves. */
204 enum insn_code movstr_optab[NUM_MACHINE_MODES];
206 /* This array records the insn_code of insns to perform block clears. */
207 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
209 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
211 #ifndef SLOW_UNALIGNED_ACCESS
212 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
213 #endif
215 /* This is run once per compilation to set up which modes can be used
216 directly in memory and to initialize the block move optab. */
218 void
219 init_expr_once ()
221 rtx insn, pat;
222 enum machine_mode mode;
223 int num_clobbers;
224 rtx mem, mem1;
226 start_sequence ();
228 /* Try indexing by frame ptr and try by stack ptr.
229 It is known that on the Convex the stack ptr isn't a valid index.
230 With luck, one or the other is valid on any machine. */
231 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
232 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
234 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
235 pat = PATTERN (insn);
237 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
238 mode = (enum machine_mode) ((int) mode + 1))
240 int regno;
241 rtx reg;
243 direct_load[(int) mode] = direct_store[(int) mode] = 0;
244 PUT_MODE (mem, mode);
245 PUT_MODE (mem1, mode);
247 /* See if there is some register that can be used in this mode and
248 directly loaded or stored from memory. */
250 if (mode != VOIDmode && mode != BLKmode)
251 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
252 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
253 regno++)
255 if (! HARD_REGNO_MODE_OK (regno, mode))
256 continue;
258 reg = gen_rtx_REG (mode, regno);
260 SET_SRC (pat) = mem;
261 SET_DEST (pat) = reg;
262 if (recog (pat, insn, &num_clobbers) >= 0)
263 direct_load[(int) mode] = 1;
265 SET_SRC (pat) = mem1;
266 SET_DEST (pat) = reg;
267 if (recog (pat, insn, &num_clobbers) >= 0)
268 direct_load[(int) mode] = 1;
270 SET_SRC (pat) = reg;
271 SET_DEST (pat) = mem;
272 if (recog (pat, insn, &num_clobbers) >= 0)
273 direct_store[(int) mode] = 1;
275 SET_SRC (pat) = reg;
276 SET_DEST (pat) = mem1;
277 if (recog (pat, insn, &num_clobbers) >= 0)
278 direct_store[(int) mode] = 1;
282 end_sequence ();
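/* The direct_load and direct_store tables filled in above are consulted
   by convert_move and convert_modes below to decide whether a MEM operand
   may be referenced directly in a narrower mode or must first be copied
   into a register.  */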
285 /* This is run at the start of compiling a function. */
287 void
288 init_expr ()
290 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
292 pending_chain = 0;
293 pending_stack_adjust = 0;
294 stack_pointer_delta = 0;
295 inhibit_defer_pop = 0;
296 saveregs_value = 0;
297 apply_args_value = 0;
298 forced_labels = 0;
301 void
302 mark_expr_status (p)
303 struct expr_status *p;
305 if (p == NULL)
306 return;
308 ggc_mark_rtx (p->x_saveregs_value);
309 ggc_mark_rtx (p->x_apply_args_value);
310 ggc_mark_rtx (p->x_forced_labels);
313 void
314 free_expr_status (f)
315 struct function *f;
317 free (f->expr);
318 f->expr = NULL;
321 /* Small sanity check that the queue is empty at the end of a function. */
323 void
324 finish_expr_for_function ()
326 if (pending_chain)
327 abort ();
330 /* Manage the queue of increment instructions to be output
331 for POSTINCREMENT_EXPR expressions, etc. */
333 /* Queue up to increment (or change) VAR later. BODY says how:
334 BODY should be the same thing you would pass to emit_insn
335 to increment right away. It will go to emit_insn later on.
337 The value is a QUEUED expression to be used in place of VAR
338 where you want to guarantee the pre-incrementation value of VAR. */
340 static rtx
341 enqueue_insn (var, body)
342 rtx var, body;
344 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
345 body, pending_chain);
346 return pending_chain;
349 /* Use protect_from_queue to convert a QUEUED expression
350 into something that you can put immediately into an instruction.
351 If the queued incrementation has not happened yet,
352 protect_from_queue returns the variable itself.
353 If the incrementation has happened, protect_from_queue returns a temp
354 that contains a copy of the old value of the variable.
356 Any time an rtx which might possibly be a QUEUED is to be put
357 into an instruction, it must be passed through protect_from_queue first.
358 QUEUED expressions are not meaningful in instructions.
360 Do not pass a value through protect_from_queue and then hold
361 on to it for a while before putting it in an instruction!
362 If the queue is flushed in between, incorrect code will result. */
 364 rtx
 365 protect_from_queue (x, modify)
366 register rtx x;
367 int modify;
369 register RTX_CODE code = GET_CODE (x);
371 #if 0 /* A QUEUED can hang around after the queue is forced out. */
372 /* Shortcut for most common case. */
373 if (pending_chain == 0)
374 return x;
375 #endif
377 if (code != QUEUED)
379 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
380 use of autoincrement. Make a copy of the contents of the memory
381 location rather than a copy of the address, but not if the value is
382 of mode BLKmode. Don't modify X in place since it might be
383 shared. */
384 if (code == MEM && GET_MODE (x) != BLKmode
385 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
387 rtx y = XEXP (x, 0);
388 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
390 if (QUEUED_INSN (y))
392 rtx temp = gen_reg_rtx (GET_MODE (x));
394 emit_insn_before (gen_move_insn (temp, new),
395 QUEUED_INSN (y));
396 return temp;
399 /* Copy the address into a pseudo, so that the returned value
400 remains correct across calls to emit_queue. */
401 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
404 /* Otherwise, recursively protect the subexpressions of all
405 the kinds of rtx's that can contain a QUEUED. */
406 if (code == MEM)
408 rtx tem = protect_from_queue (XEXP (x, 0), 0);
409 if (tem != XEXP (x, 0))
411 x = copy_rtx (x);
412 XEXP (x, 0) = tem;
415 else if (code == PLUS || code == MULT)
417 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
418 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
419 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
421 x = copy_rtx (x);
422 XEXP (x, 0) = new0;
423 XEXP (x, 1) = new1;
426 return x;
428 /* If the increment has not happened, use the variable itself. Copy it
429 into a new pseudo so that the value remains correct across calls to
430 emit_queue. */
431 if (QUEUED_INSN (x) == 0)
432 return copy_to_reg (QUEUED_VAR (x));
433 /* If the increment has happened and a pre-increment copy exists,
434 use that copy. */
435 if (QUEUED_COPY (x) != 0)
436 return QUEUED_COPY (x);
437 /* The increment has happened but we haven't set up a pre-increment copy.
438 Set one up now, and use it. */
439 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
440 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
441 QUEUED_INSN (x));
442 return QUEUED_COPY (x);
445 /* Return nonzero if X contains a QUEUED expression:
446 if it contains anything that will be altered by a queued increment.
447 We handle only combinations of MEM, PLUS, MINUS and MULT operators
448 since memory addresses generally contain only those. */
 450 int
 451 queued_subexp_p (x)
452 rtx x;
454 register enum rtx_code code = GET_CODE (x);
455 switch (code)
457 case QUEUED:
458 return 1;
459 case MEM:
460 return queued_subexp_p (XEXP (x, 0));
461 case MULT:
462 case PLUS:
463 case MINUS:
464 return (queued_subexp_p (XEXP (x, 0))
465 || queued_subexp_p (XEXP (x, 1)));
466 default:
467 return 0;
471 /* Perform all the pending incrementations. */
473 void
474 emit_queue ()
476 register rtx p;
477 while ((p = pending_chain))
479 rtx body = QUEUED_BODY (p);
481 if (GET_CODE (body) == SEQUENCE)
483 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
484 emit_insn (QUEUED_BODY (p));
486 else
487 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
488 pending_chain = QUEUED_NEXT (p);
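/* An illustrative use of this machinery, for something like `*p++ = x':
   the expansion reads through p, calls enqueue_insn to defer the
   increment of p, and any rtx that might contain the resulting QUEUED is
   passed through protect_from_queue before being placed in an insn; at
   the end of the statement emit_queue flushes pending_chain so the
   deferred increment is emitted exactly once.  */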
492 /* Copy data from FROM to TO, where the machine modes are not the same.
493 Both modes may be integer, or both may be floating.
494 UNSIGNEDP should be nonzero if FROM is an unsigned type.
495 This causes zero-extension instead of sign-extension. */
497 void
498 convert_move (to, from, unsignedp)
499 register rtx to, from;
500 int unsignedp;
502 enum machine_mode to_mode = GET_MODE (to);
503 enum machine_mode from_mode = GET_MODE (from);
504 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
505 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
506 enum insn_code code;
507 rtx libcall;
509 /* rtx code for making an equivalent value. */
510 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
512 to = protect_from_queue (to, 1);
513 from = protect_from_queue (from, 0);
515 if (to_real != from_real)
516 abort ();
518 /* If FROM is a SUBREG that indicates that we have already done at least
519 the required extension, strip it. We don't handle such SUBREGs as
520 TO here. */
522 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
523 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
524 >= GET_MODE_SIZE (to_mode))
525 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
526 from = gen_lowpart (to_mode, from), from_mode = to_mode;
528 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
529 abort ();
531 if (to_mode == from_mode
532 || (from_mode == VOIDmode && CONSTANT_P (from)))
534 emit_move_insn (to, from);
535 return;
538 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
540 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
541 abort ();
543 if (VECTOR_MODE_P (to_mode))
544 from = gen_rtx_SUBREG (to_mode, from, 0);
545 else
546 to = gen_rtx_SUBREG (from_mode, to, 0);
548 emit_move_insn (to, from);
549 return;
552 if (to_real != from_real)
553 abort ();
555 if (to_real)
557 rtx value, insns;
559 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
561 /* Try converting directly if the insn is supported. */
562 if ((code = can_extend_p (to_mode, from_mode, 0))
563 != CODE_FOR_nothing)
565 emit_unop_insn (code, to, from, UNKNOWN);
566 return;
570 #ifdef HAVE_trunchfqf2
571 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
573 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
574 return;
576 #endif
577 #ifdef HAVE_trunctqfqf2
578 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
580 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
581 return;
583 #endif
584 #ifdef HAVE_truncsfqf2
585 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
587 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
588 return;
590 #endif
591 #ifdef HAVE_truncdfqf2
592 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
594 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
595 return;
597 #endif
598 #ifdef HAVE_truncxfqf2
599 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
601 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
602 return;
604 #endif
605 #ifdef HAVE_trunctfqf2
606 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
608 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
609 return;
611 #endif
613 #ifdef HAVE_trunctqfhf2
614 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
616 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
617 return;
619 #endif
620 #ifdef HAVE_truncsfhf2
621 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
623 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
624 return;
626 #endif
627 #ifdef HAVE_truncdfhf2
628 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
630 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
631 return;
633 #endif
634 #ifdef HAVE_truncxfhf2
635 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
637 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
638 return;
640 #endif
641 #ifdef HAVE_trunctfhf2
642 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
644 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
645 return;
647 #endif
649 #ifdef HAVE_truncsftqf2
650 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
652 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
653 return;
655 #endif
656 #ifdef HAVE_truncdftqf2
657 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
659 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
660 return;
662 #endif
663 #ifdef HAVE_truncxftqf2
664 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
666 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
667 return;
669 #endif
670 #ifdef HAVE_trunctftqf2
671 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
673 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
674 return;
676 #endif
678 #ifdef HAVE_truncdfsf2
679 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
681 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
682 return;
684 #endif
685 #ifdef HAVE_truncxfsf2
686 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
688 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
689 return;
691 #endif
692 #ifdef HAVE_trunctfsf2
693 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
695 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
696 return;
698 #endif
699 #ifdef HAVE_truncxfdf2
700 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
702 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
703 return;
705 #endif
706 #ifdef HAVE_trunctfdf2
707 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
709 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
710 return;
712 #endif
714 libcall = (rtx) 0;
715 switch (from_mode)
717 case SFmode:
718 switch (to_mode)
720 case DFmode:
721 libcall = extendsfdf2_libfunc;
722 break;
724 case XFmode:
725 libcall = extendsfxf2_libfunc;
726 break;
728 case TFmode:
729 libcall = extendsftf2_libfunc;
730 break;
732 default:
733 break;
735 break;
737 case DFmode:
738 switch (to_mode)
740 case SFmode:
741 libcall = truncdfsf2_libfunc;
742 break;
744 case XFmode:
745 libcall = extenddfxf2_libfunc;
746 break;
748 case TFmode:
749 libcall = extenddftf2_libfunc;
750 break;
752 default:
753 break;
755 break;
757 case XFmode:
758 switch (to_mode)
760 case SFmode:
761 libcall = truncxfsf2_libfunc;
762 break;
764 case DFmode:
765 libcall = truncxfdf2_libfunc;
766 break;
768 default:
769 break;
771 break;
773 case TFmode:
774 switch (to_mode)
776 case SFmode:
777 libcall = trunctfsf2_libfunc;
778 break;
780 case DFmode:
781 libcall = trunctfdf2_libfunc;
782 break;
784 default:
785 break;
787 break;
789 default:
790 break;
793 if (libcall == (rtx) 0)
794 /* This conversion is not implemented yet. */
795 abort ();
797 start_sequence ();
798 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
799 1, from, from_mode);
800 insns = get_insns ();
801 end_sequence ();
802 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
803 from));
804 return;
807 /* Now both modes are integers. */
809 /* Handle expanding beyond a word. */
810 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
811 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
813 rtx insns;
814 rtx lowpart;
815 rtx fill_value;
816 rtx lowfrom;
817 int i;
818 enum machine_mode lowpart_mode;
819 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
821 /* Try converting directly if the insn is supported. */
822 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
823 != CODE_FOR_nothing)
825 /* If FROM is a SUBREG, put it into a register. Do this
826 so that we always generate the same set of insns for
827 better cse'ing; if an intermediate assignment occurred,
828 we won't be doing the operation directly on the SUBREG. */
829 if (optimize > 0 && GET_CODE (from) == SUBREG)
830 from = force_reg (from_mode, from);
831 emit_unop_insn (code, to, from, equiv_code);
832 return;
834 /* Next, try converting via full word. */
835 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
836 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
837 != CODE_FOR_nothing))
839 if (GET_CODE (to) == REG)
840 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
841 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
842 emit_unop_insn (code, to,
843 gen_lowpart (word_mode, to), equiv_code);
844 return;
847 /* No special multiword conversion insn; do it by hand. */
848 start_sequence ();
850 /* Since we will turn this into a no conflict block, we must ensure
851 that the source does not overlap the target. */
853 if (reg_overlap_mentioned_p (to, from))
854 from = force_reg (from_mode, from);
856 /* Get a copy of FROM widened to a word, if necessary. */
857 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
858 lowpart_mode = word_mode;
859 else
860 lowpart_mode = from_mode;
862 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
864 lowpart = gen_lowpart (lowpart_mode, to);
865 emit_move_insn (lowpart, lowfrom);
867 /* Compute the value to put in each remaining word. */
868 if (unsignedp)
869 fill_value = const0_rtx;
870 else
872 #ifdef HAVE_slt
873 if (HAVE_slt
874 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
875 && STORE_FLAG_VALUE == -1)
877 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
878 lowpart_mode, 0, 0);
879 fill_value = gen_reg_rtx (word_mode);
880 emit_insn (gen_slt (fill_value));
882 else
883 #endif
885 fill_value
886 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
887 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
888 NULL_RTX, 0);
889 fill_value = convert_to_mode (word_mode, fill_value, 1);
893 /* Fill the remaining words. */
894 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
896 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
897 rtx subword = operand_subword (to, index, 1, to_mode);
899 if (subword == 0)
900 abort ();
902 if (fill_value != subword)
903 emit_move_insn (subword, fill_value);
906 insns = get_insns ();
907 end_sequence ();
909 emit_no_conflict_block (insns, to, from, NULL_RTX,
910 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
911 return;
914 /* Truncating multi-word to a word or less. */
915 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
916 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
918 if (!((GET_CODE (from) == MEM
919 && ! MEM_VOLATILE_P (from)
920 && direct_load[(int) to_mode]
921 && ! mode_dependent_address_p (XEXP (from, 0)))
922 || GET_CODE (from) == REG
923 || GET_CODE (from) == SUBREG))
924 from = force_reg (from_mode, from);
925 convert_move (to, gen_lowpart (word_mode, from), 0);
926 return;
929 /* Handle pointer conversion. */ /* SPEE 900220. */
930 if (to_mode == PQImode)
932 if (from_mode != QImode)
933 from = convert_to_mode (QImode, from, unsignedp);
935 #ifdef HAVE_truncqipqi2
936 if (HAVE_truncqipqi2)
938 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
939 return;
941 #endif /* HAVE_truncqipqi2 */
942 abort ();
945 if (from_mode == PQImode)
947 if (to_mode != QImode)
949 from = convert_to_mode (QImode, from, unsignedp);
950 from_mode = QImode;
952 else
954 #ifdef HAVE_extendpqiqi2
955 if (HAVE_extendpqiqi2)
957 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
958 return;
960 #endif /* HAVE_extendpqiqi2 */
961 abort ();
965 if (to_mode == PSImode)
967 if (from_mode != SImode)
968 from = convert_to_mode (SImode, from, unsignedp);
970 #ifdef HAVE_truncsipsi2
971 if (HAVE_truncsipsi2)
973 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
974 return;
976 #endif /* HAVE_truncsipsi2 */
977 abort ();
980 if (from_mode == PSImode)
982 if (to_mode != SImode)
984 from = convert_to_mode (SImode, from, unsignedp);
985 from_mode = SImode;
987 else
989 #ifdef HAVE_extendpsisi2
990 if (! unsignedp && HAVE_extendpsisi2)
992 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
993 return;
995 #endif /* HAVE_extendpsisi2 */
996 #ifdef HAVE_zero_extendpsisi2
997 if (unsignedp && HAVE_zero_extendpsisi2)
999 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1000 return;
1002 #endif /* HAVE_zero_extendpsisi2 */
1003 abort ();
1007 if (to_mode == PDImode)
1009 if (from_mode != DImode)
1010 from = convert_to_mode (DImode, from, unsignedp);
1012 #ifdef HAVE_truncdipdi2
1013 if (HAVE_truncdipdi2)
1015 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1016 return;
1018 #endif /* HAVE_truncdipdi2 */
1019 abort ();
1022 if (from_mode == PDImode)
1024 if (to_mode != DImode)
1026 from = convert_to_mode (DImode, from, unsignedp);
1027 from_mode = DImode;
1029 else
1031 #ifdef HAVE_extendpdidi2
1032 if (HAVE_extendpdidi2)
1034 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1035 return;
1037 #endif /* HAVE_extendpdidi2 */
1038 abort ();
1042 /* Now follow all the conversions between integers
1043 no more than a word long. */
1045 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1046 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1047 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1048 GET_MODE_BITSIZE (from_mode)))
1050 if (!((GET_CODE (from) == MEM
1051 && ! MEM_VOLATILE_P (from)
1052 && direct_load[(int) to_mode]
1053 && ! mode_dependent_address_p (XEXP (from, 0)))
1054 || GET_CODE (from) == REG
1055 || GET_CODE (from) == SUBREG))
1056 from = force_reg (from_mode, from);
1057 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1058 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1059 from = copy_to_reg (from);
1060 emit_move_insn (to, gen_lowpart (to_mode, from));
1061 return;
1064 /* Handle extension. */
1065 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1067 /* Convert directly if that works. */
1068 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1069 != CODE_FOR_nothing)
1071 emit_unop_insn (code, to, from, equiv_code);
1072 return;
1074 else
1076 enum machine_mode intermediate;
1077 rtx tmp;
1078 tree shift_amount;
1080 /* Search for a mode to convert via. */
1081 for (intermediate = from_mode; intermediate != VOIDmode;
1082 intermediate = GET_MODE_WIDER_MODE (intermediate))
1083 if (((can_extend_p (to_mode, intermediate, unsignedp)
1084 != CODE_FOR_nothing)
1085 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1086 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1087 GET_MODE_BITSIZE (intermediate))))
1088 && (can_extend_p (intermediate, from_mode, unsignedp)
1089 != CODE_FOR_nothing))
1091 convert_move (to, convert_to_mode (intermediate, from,
1092 unsignedp), unsignedp);
1093 return;
1096 /* No suitable intermediate mode.
1097 Generate what we need with shifts. */
1098 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1099 - GET_MODE_BITSIZE (from_mode), 0);
1100 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1101 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1102 to, unsignedp);
1103 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1104 to, unsignedp);
1105 if (tmp != to)
1106 emit_move_insn (to, tmp);
1107 return;
1111 /* Support special truncate insns for certain modes. */
1113 if (from_mode == DImode && to_mode == SImode)
1115 #ifdef HAVE_truncdisi2
1116 if (HAVE_truncdisi2)
1118 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1119 return;
1121 #endif
1122 convert_move (to, force_reg (from_mode, from), unsignedp);
1123 return;
1126 if (from_mode == DImode && to_mode == HImode)
1128 #ifdef HAVE_truncdihi2
1129 if (HAVE_truncdihi2)
1131 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1132 return;
1134 #endif
1135 convert_move (to, force_reg (from_mode, from), unsignedp);
1136 return;
1139 if (from_mode == DImode && to_mode == QImode)
1141 #ifdef HAVE_truncdiqi2
1142 if (HAVE_truncdiqi2)
1144 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1145 return;
1147 #endif
1148 convert_move (to, force_reg (from_mode, from), unsignedp);
1149 return;
1152 if (from_mode == SImode && to_mode == HImode)
1154 #ifdef HAVE_truncsihi2
1155 if (HAVE_truncsihi2)
1157 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1158 return;
1160 #endif
1161 convert_move (to, force_reg (from_mode, from), unsignedp);
1162 return;
1165 if (from_mode == SImode && to_mode == QImode)
1167 #ifdef HAVE_truncsiqi2
1168 if (HAVE_truncsiqi2)
1170 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1171 return;
1173 #endif
1174 convert_move (to, force_reg (from_mode, from), unsignedp);
1175 return;
1178 if (from_mode == HImode && to_mode == QImode)
1180 #ifdef HAVE_trunchiqi2
1181 if (HAVE_trunchiqi2)
1183 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1184 return;
1186 #endif
1187 convert_move (to, force_reg (from_mode, from), unsignedp);
1188 return;
1191 if (from_mode == TImode && to_mode == DImode)
1193 #ifdef HAVE_trunctidi2
1194 if (HAVE_trunctidi2)
1196 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1197 return;
1199 #endif
1200 convert_move (to, force_reg (from_mode, from), unsignedp);
1201 return;
1204 if (from_mode == TImode && to_mode == SImode)
1206 #ifdef HAVE_trunctisi2
1207 if (HAVE_trunctisi2)
1209 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1210 return;
1212 #endif
1213 convert_move (to, force_reg (from_mode, from), unsignedp);
1214 return;
1217 if (from_mode == TImode && to_mode == HImode)
1219 #ifdef HAVE_trunctihi2
1220 if (HAVE_trunctihi2)
1222 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1223 return;
1225 #endif
1226 convert_move (to, force_reg (from_mode, from), unsignedp);
1227 return;
1230 if (from_mode == TImode && to_mode == QImode)
1232 #ifdef HAVE_trunctiqi2
1233 if (HAVE_trunctiqi2)
1235 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1236 return;
1238 #endif
1239 convert_move (to, force_reg (from_mode, from), unsignedp);
1240 return;
1243 /* Handle truncation of volatile memrefs, and so on;
1244 the things that couldn't be truncated directly,
1245 and for which there was no special instruction. */
1246 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1248 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1249 emit_move_insn (to, temp);
1250 return;
1253 /* Mode combination is not recognized. */
1254 abort ();
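/* To summarize the strategy above: convert_move first strips SUBREGs that
   already record a sufficient promotion, handles equal modes and vector
   modes with a plain move, handles floating-point conversions with a
   truncation/extension insn or a library call, widens integers past a
   word by converting the low part and filling the remaining words with
   zeros or sign copies, narrows multiword values through word_mode,
   handles the partial-integer pointer modes specially, and otherwise uses
   gen_lowpart, can_extend_p, an intermediate mode, or a pair of shifts;
   any mode combination left over is unsupported and aborts.  */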
1257 /* Return an rtx for a value that would result
1258 from converting X to mode MODE.
1259 Both X and MODE may be floating, or both integer.
1260 UNSIGNEDP is nonzero if X is an unsigned value.
1261 This can be done by referring to a part of X in place
1262 or by copying to a new temporary with conversion.
1264 This function *must not* call protect_from_queue
1265 except when putting X into an insn (in which case convert_move does it). */
1267 rtx
1268 convert_to_mode (mode, x, unsignedp)
1269 enum machine_mode mode;
1270 rtx x;
1271 int unsignedp;
1273 return convert_modes (mode, VOIDmode, x, unsignedp);
1276 /* Return an rtx for a value that would result
1277 from converting X from mode OLDMODE to mode MODE.
1278 Both modes may be floating, or both integer.
1279 UNSIGNEDP is nonzero if X is an unsigned value.
1281 This can be done by referring to a part of X in place
1282 or by copying to a new temporary with conversion.
1284 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1286 This function *must not* call protect_from_queue
1287 except when putting X into an insn (in which case convert_move does it). */
1289 rtx
1290 convert_modes (mode, oldmode, x, unsignedp)
1291 enum machine_mode mode, oldmode;
1292 rtx x;
1293 int unsignedp;
1295 register rtx temp;
1297 /* If FROM is a SUBREG that indicates that we have already done at least
1298 the required extension, strip it. */
1300 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1301 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1302 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1303 x = gen_lowpart (mode, x);
1305 if (GET_MODE (x) != VOIDmode)
1306 oldmode = GET_MODE (x);
1308 if (mode == oldmode)
1309 return x;
1311 /* There is one case that we must handle specially: If we are converting
1312 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1313 we are to interpret the constant as unsigned, gen_lowpart will do
1314    the wrong thing if the constant appears negative.  What we want to do is
1315 make the high-order word of the constant zero, not all ones. */
1317 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1318 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1319 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1321 HOST_WIDE_INT val = INTVAL (x);
1323 if (oldmode != VOIDmode
1324 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1326 int width = GET_MODE_BITSIZE (oldmode);
1328 /* We need to zero extend VAL. */
1329 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1332 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1335 /* We can do this with a gen_lowpart if both desired and current modes
1336 are integer, and this is either a constant integer, a register, or a
1337 non-volatile MEM. Except for the constant case where MODE is no
1338 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1340 if ((GET_CODE (x) == CONST_INT
1341 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1342 || (GET_MODE_CLASS (mode) == MODE_INT
1343 && GET_MODE_CLASS (oldmode) == MODE_INT
1344 && (GET_CODE (x) == CONST_DOUBLE
1345 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1346 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1347 && direct_load[(int) mode])
1348 || (GET_CODE (x) == REG
1349 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1350 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1352 /* ?? If we don't know OLDMODE, we have to assume here that
1353 X does not need sign- or zero-extension. This may not be
1354 the case, but it's the best we can do. */
1355 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1356 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1358 HOST_WIDE_INT val = INTVAL (x);
1359 int width = GET_MODE_BITSIZE (oldmode);
1361 /* We must sign or zero-extend in this case. Start by
1362 zero-extending, then sign extend if we need to. */
1363 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1364 if (! unsignedp
1365 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1366 val |= (HOST_WIDE_INT) (-1) << width;
1368 return GEN_INT (trunc_int_for_mode (val, mode));
1371 return gen_lowpart (mode, x);
1374 temp = gen_reg_rtx (mode);
1375 convert_move (temp, x, unsignedp);
1376 return temp;
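/* For example, widening the QImode constant 0xff to SImode through the
   CONST_INT case above first masks the value to the old width and then,
   for a signed conversion, ors in the sign bits: the result is
   (const_int -1) when UNSIGNEDP is zero and (const_int 255) when it is
   nonzero.  */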
1379 /* This macro is used to determine what the largest unit size that
1380 move_by_pieces can use is. */
1382 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1383 move efficiently, as opposed to MOVE_MAX which is the maximum
1384 number of bytes we can move with a single instruction. */
1386 #ifndef MOVE_MAX_PIECES
1387 #define MOVE_MAX_PIECES MOVE_MAX
1388 #endif
1390 /* Generate several move instructions to copy LEN bytes
1391 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1392 The caller must pass FROM and TO
1393 through protect_from_queue before calling.
1395 When TO is NULL, the emit_single_push_insn is used to push the
1396 FROM to stack.
1398 ALIGN is maximum alignment we can assume. */
1400 void
1401 move_by_pieces (to, from, len, align)
1402 rtx to, from;
1403 unsigned HOST_WIDE_INT len;
1404 unsigned int align;
1406 struct move_by_pieces data;
1407 rtx to_addr, from_addr = XEXP (from, 0);
1408 unsigned int max_size = MOVE_MAX_PIECES + 1;
1409 enum machine_mode mode = VOIDmode, tmode;
1410 enum insn_code icode;
1412 data.offset = 0;
1413 data.from_addr = from_addr;
1414 if (to)
1416 to_addr = XEXP (to, 0);
1417 data.to = to;
1418 data.autinc_to
1419 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1420 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1421 data.reverse
1422 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1424 else
1426 to_addr = NULL_RTX;
1427 data.to = NULL_RTX;
1428 data.autinc_to = 1;
1429 #ifdef STACK_GROWS_DOWNWARD
1430 data.reverse = 1;
1431 #else
1432 data.reverse = 0;
1433 #endif
1435 data.to_addr = to_addr;
1436 data.from = from;
1437 data.autinc_from
1438 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1439 || GET_CODE (from_addr) == POST_INC
1440 || GET_CODE (from_addr) == POST_DEC);
1442 data.explicit_inc_from = 0;
1443 data.explicit_inc_to = 0;
1444 if (data.reverse) data.offset = len;
1445 data.len = len;
1447 /* If copying requires more than two move insns,
1448 copy addresses to registers (to make displacements shorter)
1449 and use post-increment if available. */
1450 if (!(data.autinc_from && data.autinc_to)
1451 && move_by_pieces_ninsns (len, align) > 2)
1453 /* Find the mode of the largest move... */
1454 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1455 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1456 if (GET_MODE_SIZE (tmode) < max_size)
1457 mode = tmode;
1459 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1461 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1462 data.autinc_from = 1;
1463 data.explicit_inc_from = -1;
1465 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1467 data.from_addr = copy_addr_to_reg (from_addr);
1468 data.autinc_from = 1;
1469 data.explicit_inc_from = 1;
1471 if (!data.autinc_from && CONSTANT_P (from_addr))
1472 data.from_addr = copy_addr_to_reg (from_addr);
1473 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1475 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1476 data.autinc_to = 1;
1477 data.explicit_inc_to = -1;
1479 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1481 data.to_addr = copy_addr_to_reg (to_addr);
1482 data.autinc_to = 1;
1483 data.explicit_inc_to = 1;
1485 if (!data.autinc_to && CONSTANT_P (to_addr))
1486 data.to_addr = copy_addr_to_reg (to_addr);
1489 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1490 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1491 align = MOVE_MAX * BITS_PER_UNIT;
1493 /* First move what we can in the largest integer mode, then go to
1494 successively smaller modes. */
1496 while (max_size > 1)
1498 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1499 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1500 if (GET_MODE_SIZE (tmode) < max_size)
1501 mode = tmode;
1503 if (mode == VOIDmode)
1504 break;
1506 icode = mov_optab->handlers[(int) mode].insn_code;
1507 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1508 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1510 max_size = GET_MODE_SIZE (mode);
1513 /* The code above should have handled everything. */
1514 if (data.len > 0)
1515 abort ();
1518 /* Return number of insns required to move L bytes by pieces.
1519 ALIGN (in bits) is maximum alignment we can assume. */
1521 static unsigned HOST_WIDE_INT
1522 move_by_pieces_ninsns (l, align)
1523 unsigned HOST_WIDE_INT l;
1524 unsigned int align;
1526 unsigned HOST_WIDE_INT n_insns = 0;
1527 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1529 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1530 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1531 align = MOVE_MAX * BITS_PER_UNIT;
1533 while (max_size > 1)
1535 enum machine_mode mode = VOIDmode, tmode;
1536 enum insn_code icode;
1538 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1539 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1540 if (GET_MODE_SIZE (tmode) < max_size)
1541 mode = tmode;
1543 if (mode == VOIDmode)
1544 break;
1546 icode = mov_optab->handlers[(int) mode].insn_code;
1547 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1548 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1550 max_size = GET_MODE_SIZE (mode);
1553 if (l)
1554 abort ();
1555 return n_insns;
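/* For example, with word-sized alignment on a 32-bit target, an 11-byte
   block costs 2 SImode moves, 1 HImode move and 1 QImode move, so
   move_by_pieces_ninsns returns 4.  */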
1558 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1559 with move instructions for mode MODE. GENFUN is the gen_... function
1560 to make a move insn for that mode. DATA has all the other info. */
1562 static void
1563 move_by_pieces_1 (genfun, mode, data)
1564 rtx (*genfun) PARAMS ((rtx, ...));
1565 enum machine_mode mode;
1566 struct move_by_pieces *data;
1568 unsigned int size = GET_MODE_SIZE (mode);
1569 rtx to1 = NULL_RTX, from1;
1571 while (data->len >= size)
1573 if (data->reverse)
1574 data->offset -= size;
1576 if (data->to)
1578 if (data->autinc_to)
1580 to1 = replace_equiv_address (data->to, data->to_addr);
1581 to1 = adjust_address (to1, mode, 0);
1583 else
1584 to1 = adjust_address (data->to, mode, data->offset);
1587 if (data->autinc_from)
1589 from1 = replace_equiv_address (data->from, data->from_addr);
1590 from1 = adjust_address (from1, mode, 0);
1592 else
1593 from1 = adjust_address (data->from, mode, data->offset);
1595 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1596 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1597 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1598 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1600 if (data->to)
1601 emit_insn ((*genfun) (to1, from1));
1602 else
1603 emit_single_push_insn (mode, from1, NULL);
1605 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1606 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1607 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1608 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1610 if (! data->reverse)
1611 data->offset += size;
1613 data->len -= size;
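/* Note the ordering in the loop above: when EXPLICIT_INC_* is -1 the
   address register is decremented by the piece size before the move
   (mimicking a pre-decrement), and when it is +1 it is incremented
   afterwards (mimicking a post-increment), so the offsets seen by the
   moves match the auto-increment case.  */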
1617 /* Emit code to move a block Y to a block X.
1618 This may be done with string-move instructions,
1619 with multiple scalar move instructions, or with a library call.
1621 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1622 with mode BLKmode.
1623 SIZE is an rtx that says how long they are.
1624 ALIGN is the maximum alignment we can assume they have.
1626 Return the address of the new block, if memcpy is called and returns it,
1627 0 otherwise. */
1629 rtx
1630 emit_block_move (x, y, size, align)
1631 rtx x, y;
1632 rtx size;
1633 unsigned int align;
1635 rtx retval = 0;
1636 #ifdef TARGET_MEM_FUNCTIONS
1637 static tree fn;
1638 tree call_expr, arg_list;
1639 #endif
1641 if (GET_MODE (x) != BLKmode)
1642 abort ();
1644 if (GET_MODE (y) != BLKmode)
1645 abort ();
1647 x = protect_from_queue (x, 1);
1648 y = protect_from_queue (y, 0);
1649 size = protect_from_queue (size, 0);
1651 if (GET_CODE (x) != MEM)
1652 abort ();
1653 if (GET_CODE (y) != MEM)
1654 abort ();
1655 if (size == 0)
1656 abort ();
1658 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1659 move_by_pieces (x, y, INTVAL (size), align);
1660 else
1662 /* Try the most limited insn first, because there's no point
1663 including more than one in the machine description unless
1664 the more limited one has some advantage. */
1666 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1667 enum machine_mode mode;
1669 /* Since this is a move insn, we don't care about volatility. */
1670 volatile_ok = 1;
1672 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1673 mode = GET_MODE_WIDER_MODE (mode))
1675 enum insn_code code = movstr_optab[(int) mode];
1676 insn_operand_predicate_fn pred;
1678 if (code != CODE_FOR_nothing
1679          /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1680 here because if SIZE is less than the mode mask, as it is
1681 returned by the macro, it will definitely be less than the
1682 actual mode mask. */
1683 && ((GET_CODE (size) == CONST_INT
1684 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1685 <= (GET_MODE_MASK (mode) >> 1)))
1686 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1687 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1688 || (*pred) (x, BLKmode))
1689 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1690 || (*pred) (y, BLKmode))
1691 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1692 || (*pred) (opalign, VOIDmode)))
1694 rtx op2;
1695 rtx last = get_last_insn ();
1696 rtx pat;
1698 op2 = convert_to_mode (mode, size, 1);
1699 pred = insn_data[(int) code].operand[2].predicate;
1700 if (pred != 0 && ! (*pred) (op2, mode))
1701 op2 = copy_to_mode_reg (mode, op2);
1703 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1704 if (pat)
1706 emit_insn (pat);
1707 volatile_ok = 0;
1708 return 0;
1710 else
1711 delete_insns_since (last);
1715 volatile_ok = 0;
1717 /* X, Y, or SIZE may have been passed through protect_from_queue.
1719 It is unsafe to save the value generated by protect_from_queue
1720 and reuse it later. Consider what happens if emit_queue is
1721 called before the return value from protect_from_queue is used.
1723 Expansion of the CALL_EXPR below will call emit_queue before
1724 we are finished emitting RTL for argument setup. So if we are
1725 not careful we could get the wrong value for an argument.
1727 To avoid this problem we go ahead and emit code to copy X, Y &
1728 SIZE into new pseudos. We can then place those new pseudos
1729 into an RTL_EXPR and use them later, even after a call to
1730 emit_queue.
1732 Note this is not strictly needed for library calls since they
1733 do not call emit_queue before loading their arguments. However,
1734 we may need to have library calls call emit_queue in the future
1735 since failing to do so could cause problems for targets which
1736 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1737 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1738 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1740 #ifdef TARGET_MEM_FUNCTIONS
1741 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1742 #else
1743 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1744 TREE_UNSIGNED (integer_type_node));
1745 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1746 #endif
1748 #ifdef TARGET_MEM_FUNCTIONS
1749 /* It is incorrect to use the libcall calling conventions to call
1750 memcpy in this context.
1752 This could be a user call to memcpy and the user may wish to
1753 examine the return value from memcpy.
1755 For targets where libcalls and normal calls have different conventions
1756 for returning pointers, we could end up generating incorrect code.
1758 So instead of using a libcall sequence we build up a suitable
1759 CALL_EXPR and expand the call in the normal fashion. */
1760 if (fn == NULL_TREE)
1762 tree fntype;
1764 /* This was copied from except.c, I don't know if all this is
1765 necessary in this context or not. */
1766 fn = get_identifier ("memcpy");
1767 fntype = build_pointer_type (void_type_node);
1768 fntype = build_function_type (fntype, NULL_TREE);
1769 fn = build_decl (FUNCTION_DECL, fn, fntype);
1770 ggc_add_tree_root (&fn, 1);
1771 DECL_EXTERNAL (fn) = 1;
1772 TREE_PUBLIC (fn) = 1;
1773 DECL_ARTIFICIAL (fn) = 1;
1774 TREE_NOTHROW (fn) = 1;
1775 make_decl_rtl (fn, NULL);
1776 assemble_external (fn);
1779 /* We need to make an argument list for the function call.
1781 memcpy has three arguments, the first two are void * addresses and
1782 the last is a size_t byte count for the copy. */
1783 arg_list
1784 = build_tree_list (NULL_TREE,
1785 make_tree (build_pointer_type (void_type_node), x));
1786 TREE_CHAIN (arg_list)
1787 = build_tree_list (NULL_TREE,
1788 make_tree (build_pointer_type (void_type_node), y));
1789 TREE_CHAIN (TREE_CHAIN (arg_list))
1790 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1791 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1793 /* Now we have to build up the CALL_EXPR itself. */
1794 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1795 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1796 call_expr, arg_list, NULL_TREE);
1797 TREE_SIDE_EFFECTS (call_expr) = 1;
1799 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1800 #else
1801 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1802 VOIDmode, 3, y, Pmode, x, Pmode,
1803 convert_to_mode (TYPE_MODE (integer_type_node), size,
1804 TREE_UNSIGNED (integer_type_node)),
1805 TYPE_MODE (integer_type_node));
1806 #endif
1809 return retval;
1812 /* Copy all or part of a value X into registers starting at REGNO.
1813 The number of registers to be filled is NREGS. */
1815 void
1816 move_block_to_reg (regno, x, nregs, mode)
1817 int regno;
1818 rtx x;
1819 int nregs;
1820 enum machine_mode mode;
1822 int i;
1823 #ifdef HAVE_load_multiple
1824 rtx pat;
1825 rtx last;
1826 #endif
1828 if (nregs == 0)
1829 return;
1831 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1832 x = validize_mem (force_const_mem (mode, x));
1834 /* See if the machine can do this with a load multiple insn. */
1835 #ifdef HAVE_load_multiple
1836 if (HAVE_load_multiple)
1838 last = get_last_insn ();
1839 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1840 GEN_INT (nregs));
1841 if (pat)
1843 emit_insn (pat);
1844 return;
1846 else
1847 delete_insns_since (last);
1849 #endif
1851 for (i = 0; i < nregs; i++)
1852 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1853 operand_subword_force (x, i, mode));
1856 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1857 The number of registers to be filled is NREGS. SIZE indicates the number
1858 of bytes in the object X. */
1860 void
1861 move_block_from_reg (regno, x, nregs, size)
1862 int regno;
1863 rtx x;
1864 int nregs;
1865 int size;
1867 int i;
1868 #ifdef HAVE_store_multiple
1869 rtx pat;
1870 rtx last;
1871 #endif
1872 enum machine_mode mode;
1874 if (nregs == 0)
1875 return;
1877 /* If SIZE is that of a mode no bigger than a word, just use that
1878 mode's store operation. */
1879 if (size <= UNITS_PER_WORD
1880 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1882 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
1883 return;
1886 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1887 to the left before storing to memory. Note that the previous test
1888 doesn't handle all cases (e.g. SIZE == 3). */
1889 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1891 rtx tem = operand_subword (x, 0, 1, BLKmode);
1892 rtx shift;
1894 if (tem == 0)
1895 abort ();
1897 shift = expand_shift (LSHIFT_EXPR, word_mode,
1898 gen_rtx_REG (word_mode, regno),
1899 build_int_2 ((UNITS_PER_WORD - size)
1900 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1901 emit_move_insn (tem, shift);
1902 return;
1905 /* See if the machine can do this with a store multiple insn. */
1906 #ifdef HAVE_store_multiple
1907 if (HAVE_store_multiple)
1909 last = get_last_insn ();
1910 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1911 GEN_INT (nregs));
1912 if (pat)
1914 emit_insn (pat);
1915 return;
1917 else
1918 delete_insns_since (last);
1920 #endif
1922 for (i = 0; i < nregs; i++)
1924 rtx tem = operand_subword (x, i, 1, BLKmode);
1926 if (tem == 0)
1927 abort ();
1929 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1933 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1934 registers represented by a PARALLEL. SSIZE represents the total size of
1935 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1936 SRC in bits. */
1937 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1938 the balance will be in what would be the low-order memory addresses, i.e.
1939 left justified for big endian, right justified for little endian. This
1940 happens to be true for the targets currently using this support. If this
1941 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1942 would be needed. */
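/* In the PARALLEL used by emit_group_load and emit_group_store, each
   element is an EXPR_LIST whose first operand is the register holding one
   piece and whose second operand is a CONST_INT byte offset into the
   block; a null register in element zero indicates that part of the value
   is also passed on the stack, and that element is skipped.  */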
1944 void
1945 emit_group_load (dst, orig_src, ssize, align)
1946 rtx dst, orig_src;
1947 unsigned int align;
1948 int ssize;
1950 rtx *tmps, src;
1951 int start, i;
1953 if (GET_CODE (dst) != PARALLEL)
1954 abort ();
1956 /* Check for a NULL entry, used to indicate that the parameter goes
1957 both on the stack and in registers. */
1958 if (XEXP (XVECEXP (dst, 0, 0), 0))
1959 start = 0;
1960 else
1961 start = 1;
1963 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1965 /* Process the pieces. */
1966 for (i = start; i < XVECLEN (dst, 0); i++)
1968 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1969 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1970 unsigned int bytelen = GET_MODE_SIZE (mode);
1971 int shift = 0;
1973 /* Handle trailing fragments that run over the size of the struct. */
1974 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1976 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1977 bytelen = ssize - bytepos;
1978 if (bytelen <= 0)
1979 abort ();
1982 /* If we won't be loading directly from memory, protect the real source
1983 from strange tricks we might play; but make sure that the source can
1984 be loaded directly into the destination. */
1985 src = orig_src;
1986 if (GET_CODE (orig_src) != MEM
1987 && (!CONSTANT_P (orig_src)
1988 || (GET_MODE (orig_src) != mode
1989 && GET_MODE (orig_src) != VOIDmode)))
1991 if (GET_MODE (orig_src) == VOIDmode)
1992 src = gen_reg_rtx (mode);
1993 else
1994 src = gen_reg_rtx (GET_MODE (orig_src));
1995 emit_move_insn (src, orig_src);
1998 /* Optimize the access just a bit. */
1999 if (GET_CODE (src) == MEM
2000 && align >= GET_MODE_ALIGNMENT (mode)
2001 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2002 && bytelen == GET_MODE_SIZE (mode))
2004 tmps[i] = gen_reg_rtx (mode);
2005 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2007 else if (GET_CODE (src) == CONCAT)
2009 if (bytepos == 0
2010 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2011 tmps[i] = XEXP (src, 0);
2012 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2013 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
2014 tmps[i] = XEXP (src, 1);
2015 else
2016 abort ();
2018 else if (CONSTANT_P (src)
2019 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2020 tmps[i] = src;
2021 else
2022 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2023 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2024 mode, mode, align, ssize);
2026 if (BYTES_BIG_ENDIAN && shift)
2027 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2028 tmps[i], 0, OPTAB_WIDEN);
2031 emit_queue ();
2033 /* Copy the extracted pieces into the proper (probable) hard regs. */
2034 for (i = start; i < XVECLEN (dst, 0); i++)
2035 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2038 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2039 registers represented by a PARALLEL. SSIZE represents the total size of
2040 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
2042 void
2043 emit_group_store (orig_dst, src, ssize, align)
2044 rtx orig_dst, src;
2045 int ssize;
2046 unsigned int align;
2048 rtx *tmps, dst;
2049 int start, i;
2051 if (GET_CODE (src) != PARALLEL)
2052 abort ();
2054 /* Check for a NULL entry, used to indicate that the parameter goes
2055 both on the stack and in registers. */
2056 if (XEXP (XVECEXP (src, 0, 0), 0))
2057 start = 0;
2058 else
2059 start = 1;
2061 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2063 /* Copy the (probable) hard regs into pseudos. */
2064 for (i = start; i < XVECLEN (src, 0); i++)
2066 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2067 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2068 emit_move_insn (tmps[i], reg);
2070 emit_queue ();
2072 /* If we won't be storing directly into memory, protect the real destination
2073 from strange tricks we might play. */
2074 dst = orig_dst;
2075 if (GET_CODE (dst) == PARALLEL)
2077 rtx temp;
2079 /* We can get a PARALLEL dst if there is a conditional expression in
2080 a return statement. In that case, the dst and src are the same,
2081 so no action is necessary. */
2082 if (rtx_equal_p (dst, src))
2083 return;
2085 /* It is unclear if we can ever reach here, but we may as well handle
2086 it. Allocate a temporary, and split this into a store/load to/from
2087 the temporary. */
2089 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2090 emit_group_store (temp, src, ssize, align);
2091 emit_group_load (dst, temp, ssize, align);
2092 return;
2094 else if (GET_CODE (dst) != MEM)
2096 dst = gen_reg_rtx (GET_MODE (orig_dst));
2097 /* Make life a bit easier for combine. */
2098 emit_move_insn (dst, const0_rtx);
2101 /* Process the pieces. */
2102 for (i = start; i < XVECLEN (src, 0); i++)
2104 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2105 enum machine_mode mode = GET_MODE (tmps[i]);
2106 unsigned int bytelen = GET_MODE_SIZE (mode);
2108 /* Handle trailing fragments that run over the size of the struct. */
2109 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2111 if (BYTES_BIG_ENDIAN)
2113 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2114 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2115 tmps[i], 0, OPTAB_WIDEN);
2117 bytelen = ssize - bytepos;
2120 /* Optimize the access just a bit. */
2121 if (GET_CODE (dst) == MEM
2122 && align >= GET_MODE_ALIGNMENT (mode)
2123 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2124 && bytelen == GET_MODE_SIZE (mode))
2125 emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
2126 else
2127 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2128 mode, tmps[i], align, ssize);
2131 emit_queue ();
2133 /* Copy from the pseudo into the (probable) hard reg. */
2134 if (GET_CODE (dst) == REG)
2135 emit_move_insn (orig_dst, dst);
2138 /* Generate code to copy a BLKmode object of TYPE out of a
2139 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2140 is null, a stack temporary is created. TGTBLK is returned.
2142 The primary purpose of this routine is to handle functions
2143 that return BLKmode structures in registers. Some machines
2144 (the PA for example) want to return all small structures
2145 in registers regardless of the structure's alignment. */
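/* A worked example of the big-endian correction below (hypothetical
   32-bit big-endian target, 6-byte structure): bytes % UNITS_PER_WORD
   is 2, so big_endian_correction is 32 - 2 * 8 = 16.  Extraction from
   the source registers therefore starts 16 bits in, skipping the
   unused high-order bytes, while stores into TGTBLK still start at
   bit 0.  */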
2148 copy_blkmode_from_reg (tgtblk, srcreg, type)
2149 rtx tgtblk;
2150 rtx srcreg;
2151 tree type;
2153 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2154 rtx src = NULL, dst = NULL;
2155 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2156 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2158 if (tgtblk == 0)
2160 tgtblk = assign_temp (build_qualified_type (type,
2161 (TYPE_QUALS (type)
2162 | TYPE_QUAL_CONST)),
2163 0, 1, 1);
2164 preserve_temp_slots (tgtblk);
2167 /* This code assumes srcreg is at least a full word. If it isn't,
2168 copy it into a new pseudo which is a full word. */
2169 if (GET_MODE (srcreg) != BLKmode
2170 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2171 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2173 /* Structures whose size is not a multiple of a word are aligned
2174 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2175 machine, this means we must skip the empty high order bytes when
2176 calculating the bit offset. */
2177 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2178 big_endian_correction
2179 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2181 /* Copy the structure BITSIZE bits at a time.
2183 We could probably emit more efficient code for machines which do not use
2184 strict alignment, but it doesn't seem worth the effort at the current
2185 time. */
2186 for (bitpos = 0, xbitpos = big_endian_correction;
2187 bitpos < bytes * BITS_PER_UNIT;
2188 bitpos += bitsize, xbitpos += bitsize)
2190 /* We need a new source operand each time xbitpos is on a
2191 word boundary and when xbitpos == big_endian_correction
2192 (the first time through). */
2193 if (xbitpos % BITS_PER_WORD == 0
2194 || xbitpos == big_endian_correction)
2195 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2196 GET_MODE (srcreg));
2198 /* We need a new destination operand each time bitpos is on
2199 a word boundary. */
2200 if (bitpos % BITS_PER_WORD == 0)
2201 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2203 /* Use xbitpos for the source extraction (right justified) and
2204 bitpos for the destination store (left justified). */
2205 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2206 extract_bit_field (src, bitsize,
2207 xbitpos % BITS_PER_WORD, 1,
2208 NULL_RTX, word_mode, word_mode,
2209 bitsize, BITS_PER_WORD),
2210 bitsize, BITS_PER_WORD);
2213 return tgtblk;
2216 /* Add a USE expression for REG to the (possibly empty) list pointed
2217 to by CALL_FUSAGE. REG must denote a hard register. */
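/* Sketch of a typical use (the register number is hypothetical):

	rtx call_fusage = NULL_RTX;
	use_reg (&call_fusage, gen_rtx_REG (Pmode, 4));

   leaves CALL_FUSAGE pointing to an EXPR_LIST whose head is a
   (use (reg ...)) expression, ready to be attached to a call insn's
   CALL_INSN_FUNCTION_USAGE by the caller.  */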
2219 void
2220 use_reg (call_fusage, reg)
2221 rtx *call_fusage, reg;
2223 if (GET_CODE (reg) != REG
2224 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2225 abort ();
2227 *call_fusage
2228 = gen_rtx_EXPR_LIST (VOIDmode,
2229 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2232 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2233 starting at REGNO. All of these registers must be hard registers. */
2235 void
2236 use_regs (call_fusage, regno, nregs)
2237 rtx *call_fusage;
2238 int regno;
2239 int nregs;
2241 int i;
2243 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2244 abort ();
2246 for (i = 0; i < nregs; i++)
2247 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2250 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2251 PARALLEL REGS. This is for calls that pass values in multiple
2252 non-contiguous locations. The Irix 6 ABI has examples of this. */
2254 void
2255 use_group_regs (call_fusage, regs)
2256 rtx *call_fusage;
2257 rtx regs;
2259 int i;
2261 for (i = 0; i < XVECLEN (regs, 0); i++)
2263 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2265 /* A NULL entry means the parameter goes both on the stack and in
2266 registers. This can also be a MEM for targets that pass values
2267 partially on the stack and partially in registers. */
2268 if (reg != 0 && GET_CODE (reg) == REG)
2269 use_reg (call_fusage, reg);
2275 can_store_by_pieces (len, constfun, constfundata, align)
2276 unsigned HOST_WIDE_INT len;
2277 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2278 PTR constfundata;
2279 unsigned int align;
2281 unsigned HOST_WIDE_INT max_size, l;
2282 HOST_WIDE_INT offset = 0;
2283 enum machine_mode mode, tmode;
2284 enum insn_code icode;
2285 int reverse;
2286 rtx cst;
2288 if (! MOVE_BY_PIECES_P (len, align))
2289 return 0;
2291 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2292 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2293 align = MOVE_MAX * BITS_PER_UNIT;
2295 /* We would first store what we can in the largest integer mode, then go to
2296 successively smaller modes. */
2298 for (reverse = 0;
2299 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2300 reverse++)
2302 l = len;
2303 mode = VOIDmode;
2304 max_size = MOVE_MAX_PIECES + 1;
2305 while (max_size > 1)
2307 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2308 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2309 if (GET_MODE_SIZE (tmode) < max_size)
2310 mode = tmode;
2312 if (mode == VOIDmode)
2313 break;
2315 icode = mov_optab->handlers[(int) mode].insn_code;
2316 if (icode != CODE_FOR_nothing
2317 && align >= GET_MODE_ALIGNMENT (mode))
2319 unsigned int size = GET_MODE_SIZE (mode);
2321 while (l >= size)
2323 if (reverse)
2324 offset -= size;
2326 cst = (*constfun) (constfundata, offset, mode);
2327 if (!LEGITIMATE_CONSTANT_P (cst))
2328 return 0;
2330 if (!reverse)
2331 offset += size;
2333 l -= size;
2337 max_size = GET_MODE_SIZE (mode);
2340 /* The code above should have handled everything. */
2341 if (l != 0)
2342 abort ();
2345 return 1;
2348 /* Generate several move instructions to store LEN bytes generated by
2349 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2350 pointer which will be passed as an argument in every CONSTFUN call.
2351 ALIGN is maximum alignment we can assume. */
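/* Sketch of the intended calling pattern (MY_CONSTFUN and MY_DATA are
   hypothetical caller-supplied names): callers are expected to check
   can_store_by_pieces first, since store_by_pieces aborts when
   MOVE_BY_PIECES_P rejects the copy.

	if (can_store_by_pieces (len, my_constfun, my_data, align))
	  store_by_pieces (dest_mem, len, my_constfun, my_data, align);

   clear_by_pieces_1 below shows the shape of such a callback.  */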
2353 void
2354 store_by_pieces (to, len, constfun, constfundata, align)
2355 rtx to;
2356 unsigned HOST_WIDE_INT len;
2357 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2358 PTR constfundata;
2359 unsigned int align;
2361 struct store_by_pieces data;
2363 if (! MOVE_BY_PIECES_P (len, align))
2364 abort ();
2365 to = protect_from_queue (to, 1);
2366 data.constfun = constfun;
2367 data.constfundata = constfundata;
2368 data.len = len;
2369 data.to = to;
2370 store_by_pieces_1 (&data, align);
2373 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2374 rtx with BLKmode). The caller must pass TO through protect_from_queue
2375 before calling. ALIGN is maximum alignment we can assume. */
2377 static void
2378 clear_by_pieces (to, len, align)
2379 rtx to;
2380 unsigned HOST_WIDE_INT len;
2381 unsigned int align;
2383 struct store_by_pieces data;
2385 data.constfun = clear_by_pieces_1;
2386 data.constfundata = NULL;
2387 data.len = len;
2388 data.to = to;
2389 store_by_pieces_1 (&data, align);
2392 /* Callback routine for clear_by_pieces.
2393 Return const0_rtx unconditionally. */
2395 static rtx
2396 clear_by_pieces_1 (data, offset, mode)
2397 PTR data ATTRIBUTE_UNUSED;
2398 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2399 enum machine_mode mode ATTRIBUTE_UNUSED;
2401 return const0_rtx;
2404 /* Subroutine of clear_by_pieces and store_by_pieces.
2405 Generate several move instructions to store LEN bytes of block TO. (A MEM
2406 rtx with BLKmode). The caller must pass TO through protect_from_queue
2407 before calling. ALIGN is maximum alignment we can assume. */
2409 static void
2410 store_by_pieces_1 (data, align)
2411 struct store_by_pieces *data;
2412 unsigned int align;
2414 rtx to_addr = XEXP (data->to, 0);
2415 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2416 enum machine_mode mode = VOIDmode, tmode;
2417 enum insn_code icode;
2419 data->offset = 0;
2420 data->to_addr = to_addr;
2421 data->autinc_to
2422 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2423 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2425 data->explicit_inc_to = 0;
2426 data->reverse
2427 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2428 if (data->reverse)
2429 data->offset = data->len;
2431 /* If storing requires more than two move insns,
2432 copy addresses to registers (to make displacements shorter)
2433 and use post-increment if available. */
2434 if (!data->autinc_to
2435 && move_by_pieces_ninsns (data->len, align) > 2)
2437 /* Determine the main mode we'll be using. */
2438 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2439 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2440 if (GET_MODE_SIZE (tmode) < max_size)
2441 mode = tmode;
2443 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2445 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2446 data->autinc_to = 1;
2447 data->explicit_inc_to = -1;
2450 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2451 && ! data->autinc_to)
2453 data->to_addr = copy_addr_to_reg (to_addr);
2454 data->autinc_to = 1;
2455 data->explicit_inc_to = 1;
2458 if ( !data->autinc_to && CONSTANT_P (to_addr))
2459 data->to_addr = copy_addr_to_reg (to_addr);
2462 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2463 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2464 align = MOVE_MAX * BITS_PER_UNIT;
2466 /* First store what we can in the largest integer mode, then go to
2467 successively smaller modes. */
2469 while (max_size > 1)
2471 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2472 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2473 if (GET_MODE_SIZE (tmode) < max_size)
2474 mode = tmode;
2476 if (mode == VOIDmode)
2477 break;
2479 icode = mov_optab->handlers[(int) mode].insn_code;
2480 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2481 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2483 max_size = GET_MODE_SIZE (mode);
2486 /* The code above should have handled everything. */
2487 if (data->len != 0)
2488 abort ();
2491 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2492 with move instructions for mode MODE. GENFUN is the gen_... function
2493 to make a move insn for that mode. DATA has all the other info. */
2495 static void
2496 store_by_pieces_2 (genfun, mode, data)
2497 rtx (*genfun) PARAMS ((rtx, ...));
2498 enum machine_mode mode;
2499 struct store_by_pieces *data;
2501 unsigned int size = GET_MODE_SIZE (mode);
2502 rtx to1, cst;
2504 while (data->len >= size)
2506 if (data->reverse)
2507 data->offset -= size;
2509 if (data->autinc_to)
2511 to1 = replace_equiv_address (data->to, data->to_addr);
2512 to1 = adjust_address (to1, mode, 0);
2514 else
2515 to1 = adjust_address (data->to, mode, data->offset);
2517 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2518 emit_insn (gen_add2_insn (data->to_addr,
2519 GEN_INT (-(HOST_WIDE_INT) size)));
2521 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2522 emit_insn ((*genfun) (to1, cst));
2524 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2525 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2527 if (! data->reverse)
2528 data->offset += size;
2530 data->len -= size;
2534 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2535 its length in bytes and ALIGN is the maximum alignment we can assume.
2537 If we call a function that returns the length of the block, return it. */
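/* Illustrative call (the size and alignment are hypothetical): to zero
   a 16-byte BLKmode object whose alignment is known to be 32 bits,

	clear_storage (blk_mem, GEN_INT (16), 32);

   where BLK_MEM is a (mem:BLK ...) rtx and ALIGN is given in bits.  */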
2540 clear_storage (object, size, align)
2541 rtx object;
2542 rtx size;
2543 unsigned int align;
2545 #ifdef TARGET_MEM_FUNCTIONS
2546 static tree fn;
2547 tree call_expr, arg_list;
2548 #endif
2549 rtx retval = 0;
2551 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2552 just move a zero. Otherwise, do this a piece at a time. */
2553 if (GET_MODE (object) != BLKmode
2554 && GET_CODE (size) == CONST_INT
2555 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2556 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2557 else
2559 object = protect_from_queue (object, 1);
2560 size = protect_from_queue (size, 0);
2562 if (GET_CODE (size) == CONST_INT
2563 && MOVE_BY_PIECES_P (INTVAL (size), align))
2564 clear_by_pieces (object, INTVAL (size), align);
2565 else
2567 /* Try the most limited insn first, because there's no point
2568 including more than one in the machine description unless
2569 the more limited one has some advantage. */
2571 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2572 enum machine_mode mode;
2574 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2575 mode = GET_MODE_WIDER_MODE (mode))
2577 enum insn_code code = clrstr_optab[(int) mode];
2578 insn_operand_predicate_fn pred;
2580 if (code != CODE_FOR_nothing
2581 /* We don't need MODE to be narrower than
2582 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2583 the mode mask, as it is returned by the macro, it will
2584 definitely be less than the actual mode mask. */
2585 && ((GET_CODE (size) == CONST_INT
2586 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2587 <= (GET_MODE_MASK (mode) >> 1)))
2588 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2589 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2590 || (*pred) (object, BLKmode))
2591 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2592 || (*pred) (opalign, VOIDmode)))
2594 rtx op1;
2595 rtx last = get_last_insn ();
2596 rtx pat;
2598 op1 = convert_to_mode (mode, size, 1);
2599 pred = insn_data[(int) code].operand[1].predicate;
2600 if (pred != 0 && ! (*pred) (op1, mode))
2601 op1 = copy_to_mode_reg (mode, op1);
2603 pat = GEN_FCN ((int) code) (object, op1, opalign);
2604 if (pat)
2606 emit_insn (pat);
2607 return 0;
2609 else
2610 delete_insns_since (last);
2614 /* OBJECT or SIZE may have been passed through protect_from_queue.
2616 It is unsafe to save the value generated by protect_from_queue
2617 and reuse it later. Consider what happens if emit_queue is
2618 called before the return value from protect_from_queue is used.
2620 Expansion of the CALL_EXPR below will call emit_queue before
2621 we are finished emitting RTL for argument setup. So if we are
2622 not careful we could get the wrong value for an argument.
2624 To avoid this problem we go ahead and emit code to copy OBJECT
2625 and SIZE into new pseudos. We can then place those new pseudos
2626 into an RTL_EXPR and use them later, even after a call to
2627 emit_queue.
2629 Note this is not strictly needed for library calls since they
2630 do not call emit_queue before loading their arguments. However,
2631 we may need to have library calls call emit_queue in the future
2632 since failing to do so could cause problems for targets which
2633 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2634 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2636 #ifdef TARGET_MEM_FUNCTIONS
2637 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2638 #else
2639 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2640 TREE_UNSIGNED (integer_type_node));
2641 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2642 #endif
2644 #ifdef TARGET_MEM_FUNCTIONS
2645 /* It is incorrect to use the libcall calling conventions to call
2646 memset in this context.
2648 This could be a user call to memset and the user may wish to
2649 examine the return value from memset.
2651 For targets where libcalls and normal calls have different
2652 conventions for returning pointers, we could end up generating
2653 incorrect code.
2655 So instead of using a libcall sequence we build up a suitable
2656 CALL_EXPR and expand the call in the normal fashion. */
2657 if (fn == NULL_TREE)
2659 tree fntype;
2661 /* This was copied from except.c; I don't know whether all of this is
2662 necessary in this context or not. */
2663 fn = get_identifier ("memset");
2664 fntype = build_pointer_type (void_type_node);
2665 fntype = build_function_type (fntype, NULL_TREE);
2666 fn = build_decl (FUNCTION_DECL, fn, fntype);
2667 ggc_add_tree_root (&fn, 1);
2668 DECL_EXTERNAL (fn) = 1;
2669 TREE_PUBLIC (fn) = 1;
2670 DECL_ARTIFICIAL (fn) = 1;
2671 TREE_NOTHROW (fn) = 1;
2672 make_decl_rtl (fn, NULL);
2673 assemble_external (fn);
2676 /* We need to make an argument list for the function call.
2678 memset has three arguments: the first is a void * address, the
2679 second an integer with the initialization value, and the last is a
2680 size_t byte count for the copy. */
2681 arg_list
2682 = build_tree_list (NULL_TREE,
2683 make_tree (build_pointer_type (void_type_node),
2684 object));
2685 TREE_CHAIN (arg_list)
2686 = build_tree_list (NULL_TREE,
2687 make_tree (integer_type_node, const0_rtx));
2688 TREE_CHAIN (TREE_CHAIN (arg_list))
2689 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2690 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2692 /* Now we have to build up the CALL_EXPR itself. */
2693 call_expr = build1 (ADDR_EXPR,
2694 build_pointer_type (TREE_TYPE (fn)), fn);
2695 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2696 call_expr, arg_list, NULL_TREE);
2697 TREE_SIDE_EFFECTS (call_expr) = 1;
2699 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2700 #else
2701 emit_library_call (bzero_libfunc, LCT_NORMAL,
2702 VOIDmode, 2, object, Pmode, size,
2703 TYPE_MODE (integer_type_node));
2704 #endif
2708 return retval;
2711 /* Generate code to copy Y into X.
2712 Both Y and X must have the same mode, except that
2713 Y can be a constant with VOIDmode.
2714 This mode cannot be BLKmode; use emit_block_move for that.
2716 Return the last instruction emitted. */
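/* Illustrative call (the pseudo and the constant are hypothetical):

	rtx tmp = gen_reg_rtx (SImode);
	emit_move_insn (tmp, GEN_INT (42));

   is valid even though (const_int 42) has VOIDmode, per the rule
   stated above; BLKmode copies must go through emit_block_move.  */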
2719 emit_move_insn (x, y)
2720 rtx x, y;
2722 enum machine_mode mode = GET_MODE (x);
2723 rtx y_cst = NULL_RTX;
2724 rtx last_insn;
2726 x = protect_from_queue (x, 1);
2727 y = protect_from_queue (y, 0);
2729 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2730 abort ();
2732 /* Never force constant_p_rtx to memory. */
2733 if (GET_CODE (y) == CONSTANT_P_RTX)
2735 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2737 y_cst = y;
2738 y = force_const_mem (mode, y);
2741 /* If X or Y are memory references, verify that their addresses are valid
2742 for the machine. */
2743 if (GET_CODE (x) == MEM
2744 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2745 && ! push_operand (x, GET_MODE (x)))
2746 || (flag_force_addr
2747 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2748 x = validize_mem (x);
2750 if (GET_CODE (y) == MEM
2751 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2752 || (flag_force_addr
2753 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2754 y = validize_mem (y);
2756 if (mode == BLKmode)
2757 abort ();
2759 last_insn = emit_move_insn_1 (x, y);
2761 if (y_cst && GET_CODE (x) == REG)
2762 REG_NOTES (last_insn)
2763 = gen_rtx_EXPR_LIST (REG_EQUAL, y_cst, REG_NOTES (last_insn));
2765 return last_insn;
2768 /* Low level part of emit_move_insn.
2769 Called just like emit_move_insn, but assumes X and Y
2770 are basically valid. */
2773 emit_move_insn_1 (x, y)
2774 rtx x, y;
2776 enum machine_mode mode = GET_MODE (x);
2777 enum machine_mode submode;
2778 enum mode_class class = GET_MODE_CLASS (mode);
2779 unsigned int i;
2781 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2782 abort ();
2784 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2785 return
2786 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2788 /* Expand complex moves by moving real part and imag part, if possible. */
2789 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2790 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2791 * BITS_PER_UNIT),
2792 (class == MODE_COMPLEX_INT
2793 ? MODE_INT : MODE_FLOAT),
2795 && (mov_optab->handlers[(int) submode].insn_code
2796 != CODE_FOR_nothing))
2798 /* Don't split destination if it is a stack push. */
2799 int stack = push_operand (x, GET_MODE (x));
2801 #ifdef PUSH_ROUNDING
2802 /* In case we output to the stack, but the size is smaller than the
2803 machine can push exactly, we need to use move instructions. */
2804 if (stack
2805 && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
2807 rtx temp;
2808 int offset1, offset2;
2810 /* Do not use anti_adjust_stack, since we don't want to update
2811 stack_pointer_delta. */
2812 temp = expand_binop (Pmode,
2813 #ifdef STACK_GROWS_DOWNWARD
2814 sub_optab,
2815 #else
2816 add_optab,
2817 #endif
2818 stack_pointer_rtx,
2819 GEN_INT
2820 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2821 stack_pointer_rtx,
2823 OPTAB_LIB_WIDEN);
2824 if (temp != stack_pointer_rtx)
2825 emit_move_insn (stack_pointer_rtx, temp);
2826 #ifdef STACK_GROWS_DOWNWARD
2827 offset1 = 0;
2828 offset2 = GET_MODE_SIZE (submode);
2829 #else
2830 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2831 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2832 + GET_MODE_SIZE (submode));
2833 #endif
2834 emit_move_insn (change_address (x, submode,
2835 gen_rtx_PLUS (Pmode,
2836 stack_pointer_rtx,
2837 GEN_INT (offset1))),
2838 gen_realpart (submode, y));
2839 emit_move_insn (change_address (x, submode,
2840 gen_rtx_PLUS (Pmode,
2841 stack_pointer_rtx,
2842 GEN_INT (offset2))),
2843 gen_imagpart (submode, y));
2845 else
2846 #endif
2847 /* If this is a stack push, push the highpart first, so it
2848 will be in the argument order.
2850 In that case, change_address is used only to convert
2851 the mode, not to change the address. */
2852 if (stack)
2854 /* Note that the real part always precedes the imag part in memory
2855 regardless of machine's endianness. */
2856 #ifdef STACK_GROWS_DOWNWARD
2857 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2858 (gen_rtx_MEM (submode, XEXP (x, 0)),
2859 gen_imagpart (submode, y)));
2860 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2861 (gen_rtx_MEM (submode, XEXP (x, 0)),
2862 gen_realpart (submode, y)));
2863 #else
2864 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2865 (gen_rtx_MEM (submode, XEXP (x, 0)),
2866 gen_realpart (submode, y)));
2867 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2868 (gen_rtx_MEM (submode, XEXP (x, 0)),
2869 gen_imagpart (submode, y)));
2870 #endif
2872 else
2874 rtx realpart_x, realpart_y;
2875 rtx imagpart_x, imagpart_y;
2877 /* If this is a complex value with each part being smaller than a
2878 word, the usual calling sequence will likely pack the pieces into
2879 a single register. Unfortunately, SUBREG of hard registers only
2880 deals in terms of words, so we have a problem converting input
2881 arguments to the CONCAT of two registers that is used elsewhere
2882 for complex values. If this is before reload, we can copy it into
2883 memory and reload. FIXME, we should see about using extract and
2884 insert on integer registers, but complex short and complex char
2885 variables should be rarely used. */
2886 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2887 && (reload_in_progress | reload_completed) == 0)
2889 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2890 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2892 if (packed_dest_p || packed_src_p)
2894 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2895 ? MODE_FLOAT : MODE_INT);
2897 enum machine_mode reg_mode
2898 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2900 if (reg_mode != BLKmode)
2902 rtx mem = assign_stack_temp (reg_mode,
2903 GET_MODE_SIZE (mode), 0);
2904 rtx cmem = adjust_address (mem, mode, 0);
2906 cfun->cannot_inline
2907 = N_("function using short complex types cannot be inline");
2909 if (packed_dest_p)
2911 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2912 emit_move_insn_1 (cmem, y);
2913 return emit_move_insn_1 (sreg, mem);
2915 else
2917 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2918 emit_move_insn_1 (mem, sreg);
2919 return emit_move_insn_1 (x, cmem);
2925 realpart_x = gen_realpart (submode, x);
2926 realpart_y = gen_realpart (submode, y);
2927 imagpart_x = gen_imagpart (submode, x);
2928 imagpart_y = gen_imagpart (submode, y);
2930 /* Show the output dies here. This is necessary for SUBREGs
2931 of pseudos since we cannot track their lifetimes correctly;
2932 hard regs shouldn't appear here except as return values.
2933 We never want to emit such a clobber after reload. */
2934 if (x != y
2935 && ! (reload_in_progress || reload_completed)
2936 && (GET_CODE (realpart_x) == SUBREG
2937 || GET_CODE (imagpart_x) == SUBREG))
2939 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2942 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2943 (realpart_x, realpart_y));
2944 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2945 (imagpart_x, imagpart_y));
2948 return get_last_insn ();
2951 /* This will handle any multi-word mode that lacks a move_insn pattern.
2952 However, you will get better code if you define such patterns,
2953 even if they must turn into multiple assembler instructions. */
2954 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2956 rtx last_insn = 0;
2957 rtx seq, inner;
2958 int need_clobber;
2960 #ifdef PUSH_ROUNDING
2962 /* If X is a push on the stack, do the push now and replace
2963 X with a reference to the stack pointer. */
2964 if (push_operand (x, GET_MODE (x)))
2966 rtx temp;
2967 enum rtx_code code;
2969 /* Do not use anti_adjust_stack, since we don't want to update
2970 stack_pointer_delta. */
2971 temp = expand_binop (Pmode,
2972 #ifdef STACK_GROWS_DOWNWARD
2973 sub_optab,
2974 #else
2975 add_optab,
2976 #endif
2977 stack_pointer_rtx,
2978 GEN_INT
2979 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2980 stack_pointer_rtx,
2982 OPTAB_LIB_WIDEN);
2983 if (temp != stack_pointer_rtx)
2984 emit_move_insn (stack_pointer_rtx, temp);
2986 code = GET_CODE (XEXP (x, 0));
2987 /* Just hope that small offsets off SP are OK. */
2988 if (code == POST_INC)
2989 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2990 GEN_INT (-(HOST_WIDE_INT)
2991 GET_MODE_SIZE (GET_MODE (x))));
2992 else if (code == POST_DEC)
2993 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2994 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2995 else
2996 temp = stack_pointer_rtx;
2998 x = change_address (x, VOIDmode, temp);
3000 #endif
3002 /* If we are in reload, see if either operand is a MEM whose address
3003 is scheduled for replacement. */
3004 if (reload_in_progress && GET_CODE (x) == MEM
3005 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3006 x = replace_equiv_address_nv (x, inner);
3007 if (reload_in_progress && GET_CODE (y) == MEM
3008 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3009 y = replace_equiv_address_nv (y, inner);
3011 start_sequence ();
3013 need_clobber = 0;
3014 for (i = 0;
3015 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3016 i++)
3018 rtx xpart = operand_subword (x, i, 1, mode);
3019 rtx ypart = operand_subword (y, i, 1, mode);
3021 /* If we can't get a part of Y, put Y into memory if it is a
3022 constant. Otherwise, force it into a register. If we still
3023 can't get a part of Y, abort. */
3024 if (ypart == 0 && CONSTANT_P (y))
3026 y = force_const_mem (mode, y);
3027 ypart = operand_subword (y, i, 1, mode);
3029 else if (ypart == 0)
3030 ypart = operand_subword_force (y, i, mode);
3032 if (xpart == 0 || ypart == 0)
3033 abort ();
3035 need_clobber |= (GET_CODE (xpart) == SUBREG);
3037 last_insn = emit_move_insn (xpart, ypart);
3040 seq = gen_sequence ();
3041 end_sequence ();
3043 /* Show the output dies here. This is necessary for SUBREGs
3044 of pseudos since we cannot track their lifetimes correctly;
3045 hard regs shouldn't appear here except as return values.
3046 We never want to emit such a clobber after reload. */
3047 if (x != y
3048 && ! (reload_in_progress || reload_completed)
3049 && need_clobber != 0)
3051 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3054 emit_insn (seq);
3056 return last_insn;
3058 else
3059 abort ();
3062 /* Pushing data onto the stack. */
3064 /* Push a block of length SIZE (perhaps variable)
3065 and return an rtx to address the beginning of the block.
3066 Note that it is not possible for the value returned to be a QUEUED.
3067 The value may be virtual_outgoing_args_rtx.
3069 EXTRA is the number of bytes of padding to push in addition to SIZE.
3070 BELOW nonzero means this padding comes at low addresses;
3071 otherwise, the padding comes at high addresses. */
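/* Illustrative call (the block size is hypothetical):

	rtx addr = push_block (GEN_INT (32), 0, 0);

   reserves 32 bytes of stack space and returns an rtx addressing the
   start of the block; EXTRA and BELOW are both zero here, so no
   padding is added.  */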
3074 push_block (size, extra, below)
3075 rtx size;
3076 int extra, below;
3078 register rtx temp;
3080 size = convert_modes (Pmode, ptr_mode, size, 1);
3081 if (CONSTANT_P (size))
3082 anti_adjust_stack (plus_constant (size, extra));
3083 else if (GET_CODE (size) == REG && extra == 0)
3084 anti_adjust_stack (size);
3085 else
3087 temp = copy_to_mode_reg (Pmode, size);
3088 if (extra != 0)
3089 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3090 temp, 0, OPTAB_LIB_WIDEN);
3091 anti_adjust_stack (temp);
3094 #ifndef STACK_GROWS_DOWNWARD
3095 #ifdef ARGS_GROW_DOWNWARD
3096 if (!ACCUMULATE_OUTGOING_ARGS)
3097 #else
3098 if (0)
3099 #endif
3100 #else
3101 if (1)
3102 #endif
3104 /* Return the lowest stack address when STACK or ARGS grow downward and
3105 we are not accumulating outgoing arguments (the c4x port uses such
3106 conventions). */
3107 temp = virtual_outgoing_args_rtx;
3108 if (extra != 0 && below)
3109 temp = plus_constant (temp, extra);
3111 else
3113 if (GET_CODE (size) == CONST_INT)
3114 temp = plus_constant (virtual_outgoing_args_rtx,
3115 -INTVAL (size) - (below ? 0 : extra));
3116 else if (extra != 0 && !below)
3117 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3118 negate_rtx (Pmode, plus_constant (size, extra)));
3119 else
3120 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3121 negate_rtx (Pmode, size));
3124 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3128 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3129 block of SIZE bytes. */
3131 static rtx
3132 get_push_address (size)
3133 int size;
3135 register rtx temp;
3137 if (STACK_PUSH_CODE == POST_DEC)
3138 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3139 else if (STACK_PUSH_CODE == POST_INC)
3140 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3141 else
3142 temp = stack_pointer_rtx;
3144 return copy_to_reg (temp);
3147 /* Emit single push insn. */
3148 static void
3149 emit_single_push_insn (mode, x, type)
3150 rtx x;
3151 enum machine_mode mode;
3152 tree type;
3154 #ifdef PUSH_ROUNDING
3155 rtx dest_addr;
3156 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3157 rtx dest;
3158 enum insn_code icode;
3159 insn_operand_predicate_fn pred;
3161 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3162 /* If there is a push pattern, use it. Otherwise try the old way of
3163 throwing a MEM representing the push operation to the move expander. */
3164 icode = push_optab->handlers[(int) mode].insn_code;
3165 if (icode != CODE_FOR_nothing)
3167 if (((pred = insn_data[(int) icode].operand[0].predicate)
3168 && !((*pred) (x, mode))))
3169 x = force_reg (mode, x);
3170 emit_insn (GEN_FCN (icode) (x));
3171 return;
3173 if (GET_MODE_SIZE (mode) == rounded_size)
3174 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3175 else
3177 #ifdef STACK_GROWS_DOWNWARD
3178 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3179 GEN_INT (-(HOST_WIDE_INT)rounded_size));
3180 #else
3181 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3182 GEN_INT (rounded_size));
3183 #endif
3184 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3187 dest = gen_rtx_MEM (mode, dest_addr);
3189 if (type != 0)
3191 set_mem_attributes (dest, type, 1);
3192 /* Function incoming arguments may overlap with sibling call
3193 outgoing arguments and we cannot allow reordering of reads
3194 from function arguments with stores to outgoing arguments
3195 of sibling calls. */
3196 set_mem_alias_set (dest, 0);
3198 emit_move_insn (dest, x);
3199 #else
3200 abort();
3201 #endif
3204 /* Generate code to push X onto the stack, assuming it has mode MODE and
3205 type TYPE.
3206 MODE is redundant except when X is a CONST_INT (since they don't
3207 carry mode info).
3208 SIZE is an rtx for the size of data to be copied (in bytes),
3209 needed only if X is BLKmode.
3211 ALIGN (in bits) is maximum alignment we can assume.
3213 If PARTIAL and REG are both nonzero, then copy that many of the first
3214 words of X into registers starting with REG, and push the rest of X.
3215 The amount of space pushed is decreased by PARTIAL words,
3216 rounded *down* to a multiple of PARM_BOUNDARY.
3217 REG must be a hard register in this case.
3218 If REG is zero but PARTIAL is not, take all other actions for an
3219 argument partially in registers, but do not actually load any
3220 registers.
3222 EXTRA is the amount in bytes of extra space to leave next to this arg.
3223 This is ignored if an argument block has already been allocated.
3225 On a machine that lacks real push insns, ARGS_ADDR is the address of
3226 the bottom of the argument block for this call. We use indexing off there
3227 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3228 argument block has not been preallocated.
3230 ARGS_SO_FAR is the size of args previously pushed for this call.
3232 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3233 for arguments passed in registers. If nonzero, it will be the number
3234 of bytes required. */
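/* A worked example of the PARTIAL rounding described above
   (hypothetical 32-bit target with PARM_BOUNDARY == 64): PARTIAL == 3
   means three words (12 bytes) of X travel in registers, but the stack
   space pushed for X shrinks by only 8 bytes, since 12 is rounded down
   to the 8-byte parameter boundary; the BLKmode case below computes
   this via USED -= USED % (PARM_BOUNDARY / BITS_PER_UNIT).  */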
3236 void
3237 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3238 args_addr, args_so_far, reg_parm_stack_space,
3239 alignment_pad)
3240 register rtx x;
3241 enum machine_mode mode;
3242 tree type;
3243 rtx size;
3244 unsigned int align;
3245 int partial;
3246 rtx reg;
3247 int extra;
3248 rtx args_addr;
3249 rtx args_so_far;
3250 int reg_parm_stack_space;
3251 rtx alignment_pad;
3253 rtx xinner;
3254 enum direction stack_direction
3255 #ifdef STACK_GROWS_DOWNWARD
3256 = downward;
3257 #else
3258 = upward;
3259 #endif
3261 /* Decide where to pad the argument: `downward' for below,
3262 `upward' for above, or `none' for don't pad it.
3263 Default is below for small data on big-endian machines; else above. */
3264 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3266 /* Invert direction if stack is post-update. */
3267 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
3268 if (where_pad != none)
3269 where_pad = (where_pad == downward ? upward : downward);
3271 xinner = x = protect_from_queue (x, 0);
3273 if (mode == BLKmode)
3275 /* Copy a block into the stack, entirely or partially. */
3277 register rtx temp;
3278 int used = partial * UNITS_PER_WORD;
3279 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3280 int skip;
3282 if (size == 0)
3283 abort ();
3285 used -= offset;
3287 /* USED is now the # of bytes we need not copy to the stack
3288 because registers will take care of them. */
3290 if (partial != 0)
3291 xinner = adjust_address (xinner, BLKmode, used);
3293 /* If the partial register-part of the arg counts in its stack size,
3294 skip the part of stack space corresponding to the registers.
3295 Otherwise, start copying to the beginning of the stack space,
3296 by setting SKIP to 0. */
3297 skip = (reg_parm_stack_space == 0) ? 0 : used;
3299 #ifdef PUSH_ROUNDING
3300 /* Do it with several push insns if that doesn't take lots of insns
3301 and if there is no difficulty with push insns that skip bytes
3302 on the stack for alignment purposes. */
3303 if (args_addr == 0
3304 && PUSH_ARGS
3305 && GET_CODE (size) == CONST_INT
3306 && skip == 0
3307 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3308 /* Here we avoid the case of a structure whose weak alignment
3309 forces many pushes of a small amount of data,
3310 and such small pushes do rounding that causes trouble. */
3311 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3312 || align >= BIGGEST_ALIGNMENT
3313 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3314 == (align / BITS_PER_UNIT)))
3315 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3317 /* Push padding now if padding above and stack grows down,
3318 or if padding below and stack grows up.
3319 But if space already allocated, this has already been done. */
3320 if (extra && args_addr == 0
3321 && where_pad != none && where_pad != stack_direction)
3322 anti_adjust_stack (GEN_INT (extra));
3324 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3326 if (current_function_check_memory_usage && ! in_check_memory_usage)
3328 rtx temp;
3330 in_check_memory_usage = 1;
3331 temp = get_push_address (INTVAL (size) - used);
3332 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3333 emit_library_call (chkr_copy_bitmap_libfunc,
3334 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3335 Pmode, XEXP (xinner, 0), Pmode,
3336 GEN_INT (INTVAL (size) - used),
3337 TYPE_MODE (sizetype));
3338 else
3339 emit_library_call (chkr_set_right_libfunc,
3340 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3341 Pmode, GEN_INT (INTVAL (size) - used),
3342 TYPE_MODE (sizetype),
3343 GEN_INT (MEMORY_USE_RW),
3344 TYPE_MODE (integer_type_node));
3345 in_check_memory_usage = 0;
3348 else
3349 #endif /* PUSH_ROUNDING */
3351 rtx target;
3353 /* Otherwise make space on the stack and copy the data
3354 to the address of that space. */
3356 /* Deduct words put into registers from the size we must copy. */
3357 if (partial != 0)
3359 if (GET_CODE (size) == CONST_INT)
3360 size = GEN_INT (INTVAL (size) - used);
3361 else
3362 size = expand_binop (GET_MODE (size), sub_optab, size,
3363 GEN_INT (used), NULL_RTX, 0,
3364 OPTAB_LIB_WIDEN);
3367 /* Get the address of the stack space.
3368 In this case, we do not deal with EXTRA separately.
3369 A single stack adjust will do. */
3370 if (! args_addr)
3372 temp = push_block (size, extra, where_pad == downward);
3373 extra = 0;
3375 else if (GET_CODE (args_so_far) == CONST_INT)
3376 temp = memory_address (BLKmode,
3377 plus_constant (args_addr,
3378 skip + INTVAL (args_so_far)));
3379 else
3380 temp = memory_address (BLKmode,
3381 plus_constant (gen_rtx_PLUS (Pmode,
3382 args_addr,
3383 args_so_far),
3384 skip));
3385 if (current_function_check_memory_usage && ! in_check_memory_usage)
3387 in_check_memory_usage = 1;
3388 target = copy_to_reg (temp);
3389 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3390 emit_library_call (chkr_copy_bitmap_libfunc,
3391 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3392 target, Pmode,
3393 XEXP (xinner, 0), Pmode,
3394 size, TYPE_MODE (sizetype));
3395 else
3396 emit_library_call (chkr_set_right_libfunc,
3397 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3398 target, Pmode,
3399 size, TYPE_MODE (sizetype),
3400 GEN_INT (MEMORY_USE_RW),
3401 TYPE_MODE (integer_type_node));
3402 in_check_memory_usage = 0;
3405 target = gen_rtx_MEM (BLKmode, temp);
3407 if (type != 0)
3409 set_mem_attributes (target, type, 1);
3410 /* Function incoming arguments may overlap with sibling call
3411 outgoing arguments and we cannot allow reordering of reads
3412 from function arguments with stores to outgoing arguments
3413 of sibling calls. */
3414 set_mem_alias_set (target, 0);
3417 /* TEMP is the address of the block. Copy the data there. */
3418 if (GET_CODE (size) == CONST_INT
3419 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3421 move_by_pieces (target, xinner, INTVAL (size), align);
3422 goto ret;
3424 else
3426 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3427 enum machine_mode mode;
3429 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3430 mode != VOIDmode;
3431 mode = GET_MODE_WIDER_MODE (mode))
3433 enum insn_code code = movstr_optab[(int) mode];
3434 insn_operand_predicate_fn pred;
3436 if (code != CODE_FOR_nothing
3437 && ((GET_CODE (size) == CONST_INT
3438 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3439 <= (GET_MODE_MASK (mode) >> 1)))
3440 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3441 && (!(pred = insn_data[(int) code].operand[0].predicate)
3442 || ((*pred) (target, BLKmode)))
3443 && (!(pred = insn_data[(int) code].operand[1].predicate)
3444 || ((*pred) (xinner, BLKmode)))
3445 && (!(pred = insn_data[(int) code].operand[3].predicate)
3446 || ((*pred) (opalign, VOIDmode))))
3448 rtx op2 = convert_to_mode (mode, size, 1);
3449 rtx last = get_last_insn ();
3450 rtx pat;
3452 pred = insn_data[(int) code].operand[2].predicate;
3453 if (pred != 0 && ! (*pred) (op2, mode))
3454 op2 = copy_to_mode_reg (mode, op2);
3456 pat = GEN_FCN ((int) code) (target, xinner,
3457 op2, opalign);
3458 if (pat)
3460 emit_insn (pat);
3461 goto ret;
3463 else
3464 delete_insns_since (last);
3469 if (!ACCUMULATE_OUTGOING_ARGS)
3471 /* If the source is referenced relative to the stack pointer,
3472 copy it to another register to stabilize it. We do not need
3473 to do this if we know that we won't be changing sp. */
3475 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3476 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3477 temp = copy_to_reg (temp);
3480 /* Make inhibit_defer_pop nonzero around the library call
3481 to force it to pop the bcopy-arguments right away. */
3482 NO_DEFER_POP;
3483 #ifdef TARGET_MEM_FUNCTIONS
3484 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3485 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3486 convert_to_mode (TYPE_MODE (sizetype),
3487 size, TREE_UNSIGNED (sizetype)),
3488 TYPE_MODE (sizetype));
3489 #else
3490 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3491 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3492 convert_to_mode (TYPE_MODE (integer_type_node),
3493 size,
3494 TREE_UNSIGNED (integer_type_node)),
3495 TYPE_MODE (integer_type_node));
3496 #endif
3497 OK_DEFER_POP;
3500 else if (partial > 0)
3502 /* Scalar partly in registers. */
3504 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3505 int i;
3506 int not_stack;
3507 /* # words of start of argument
3508 that we must make space for but need not store. */
3509 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3510 int args_offset = INTVAL (args_so_far);
3511 int skip;
3513 /* Push padding now if padding above and stack grows down,
3514 or if padding below and stack grows up.
3515 But if space already allocated, this has already been done. */
3516 if (extra && args_addr == 0
3517 && where_pad != none && where_pad != stack_direction)
3518 anti_adjust_stack (GEN_INT (extra));
3520 /* If we make space by pushing it, we might as well push
3521 the real data. Otherwise, we can leave OFFSET nonzero
3522 and leave the space uninitialized. */
3523 if (args_addr == 0)
3524 offset = 0;
3526 /* Now NOT_STACK gets the number of words that we don't need to
3527 allocate on the stack. */
3528 not_stack = partial - offset;
3530 /* If the partial register-part of the arg counts in its stack size,
3531 skip the part of stack space corresponding to the registers.
3532 Otherwise, start copying to the beginning of the stack space,
3533 by setting SKIP to 0. */
3534 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3536 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3537 x = validize_mem (force_const_mem (mode, x));
3539 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3540 SUBREGs of such registers are not allowed. */
3541 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3542 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3543 x = copy_to_reg (x);
3545 /* Loop over all the words allocated on the stack for this arg. */
3546 /* We can do it by words, because any scalar bigger than a word
3547 has a size a multiple of a word. */
3548 #ifndef PUSH_ARGS_REVERSED
3549 for (i = not_stack; i < size; i++)
3550 #else
3551 for (i = size - 1; i >= not_stack; i--)
3552 #endif
3553 if (i >= not_stack + offset)
3554 emit_push_insn (operand_subword_force (x, i, mode),
3555 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3556 0, args_addr,
3557 GEN_INT (args_offset + ((i - not_stack + skip)
3558 * UNITS_PER_WORD)),
3559 reg_parm_stack_space, alignment_pad);
3561 else
3563 rtx addr;
3564 rtx target = NULL_RTX;
3565 rtx dest;
3567 /* Push padding now if padding above and stack grows down,
3568 or if padding below and stack grows up.
3569 But if space already allocated, this has already been done. */
3570 if (extra && args_addr == 0
3571 && where_pad != none && where_pad != stack_direction)
3572 anti_adjust_stack (GEN_INT (extra));
3574 #ifdef PUSH_ROUNDING
3575 if (args_addr == 0 && PUSH_ARGS)
3576 emit_single_push_insn (mode, x, type);
3577 else
3578 #endif
3580 if (GET_CODE (args_so_far) == CONST_INT)
3581 addr
3582 = memory_address (mode,
3583 plus_constant (args_addr,
3584 INTVAL (args_so_far)));
3585 else
3586 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3587 args_so_far));
3588 target = addr;
3589 dest = gen_rtx_MEM (mode, addr);
3590 if (type != 0)
3592 set_mem_attributes (dest, type, 1);
3593 /* Function incoming arguments may overlap with sibling call
3594 outgoing arguments and we cannot allow reordering of reads
3595 from function arguments with stores to outgoing arguments
3596 of sibling calls. */
3597 set_mem_alias_set (dest, 0);
3600 emit_move_insn (dest, x);
3604 if (current_function_check_memory_usage && ! in_check_memory_usage)
3606 in_check_memory_usage = 1;
3607 if (target == 0)
3608 target = get_push_address (GET_MODE_SIZE (mode));
3610 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3611 emit_library_call (chkr_copy_bitmap_libfunc,
3612 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3613 Pmode, XEXP (x, 0), Pmode,
3614 GEN_INT (GET_MODE_SIZE (mode)),
3615 TYPE_MODE (sizetype));
3616 else
3617 emit_library_call (chkr_set_right_libfunc,
3618 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3619 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3620 TYPE_MODE (sizetype),
3621 GEN_INT (MEMORY_USE_RW),
3622 TYPE_MODE (integer_type_node));
3623 in_check_memory_usage = 0;
3627 ret:
3628 /* If part should go in registers, copy that part
3629 into the appropriate registers. Do this now, at the end,
3630 since mem-to-mem copies above may do function calls. */
3631 if (partial > 0 && reg != 0)
3633 /* Handle calls that pass values in multiple non-contiguous locations.
3634 The Irix 6 ABI has examples of this. */
3635 if (GET_CODE (reg) == PARALLEL)
3636 emit_group_load (reg, x, -1, align); /* ??? size? */
3637 else
3638 move_block_to_reg (REGNO (reg), x, partial, mode);
3641 if (extra && args_addr == 0 && where_pad == stack_direction)
3642 anti_adjust_stack (GEN_INT (extra));
3644 if (alignment_pad && args_addr == 0)
3645 anti_adjust_stack (alignment_pad);
3648 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3649 operations. */
3651 static rtx
3652 get_subtarget (x)
3653 rtx x;
3655 return ((x == 0
3656 /* Only registers can be subtargets. */
3657 || GET_CODE (x) != REG
3658 /* If the register is readonly, it can't be set more than once. */
3659 || RTX_UNCHANGING_P (x)
3660 /* Don't use hard regs to avoid extending their life. */
3661 || REGNO (x) < FIRST_PSEUDO_REGISTER
3662 /* Avoid subtargets inside loops,
3663 since they hide some invariant expressions. */
3664 || preserve_subexpressions_p ())
3665 ? 0 : x);
3668 /* Expand an assignment that stores the value of FROM into TO.
3669 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3670 (This may contain a QUEUED rtx;
3671 if the value is constant, this rtx is a constant.)
3672 Otherwise, the returned value is NULL_RTX.
3674 SUGGEST_REG is no longer actually used.
3675 It used to mean, copy the value through a register
3676 and return that register, if that is possible.
3677 We now use WANT_VALUE to decide whether to do this. */
3680 expand_assignment (to, from, want_value, suggest_reg)
3681 tree to, from;
3682 int want_value;
3683 int suggest_reg ATTRIBUTE_UNUSED;
3685 register rtx to_rtx = 0;
3686 rtx result;
3688 /* Don't crash if the lhs of the assignment was erroneous. */
3690 if (TREE_CODE (to) == ERROR_MARK)
3692 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3693 return want_value ? result : NULL_RTX;
3696 /* Assignment of a structure component needs special treatment
3697 if the structure component's rtx is not simply a MEM.
3698 Assignment of an array element at a constant index, and assignment of
3699 an array element in an unaligned packed structure field, has the same
3700 problem. */
3702 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3703 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3705 enum machine_mode mode1;
3706 HOST_WIDE_INT bitsize, bitpos;
3707 tree offset;
3708 int unsignedp;
3709 int volatilep = 0;
3710 tree tem;
3711 unsigned int alignment;
3713 push_temp_slots ();
3714 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3715 &unsignedp, &volatilep, &alignment);
3717 /* If we are going to use store_bit_field and extract_bit_field,
3718 make sure to_rtx will be safe for multiple use. */
3720 if (mode1 == VOIDmode && want_value)
3721 tem = stabilize_reference (tem);
3723 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3724 if (offset != 0)
3726 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3728 if (GET_CODE (to_rtx) != MEM)
3729 abort ();
3731 if (GET_MODE (offset_rtx) != ptr_mode)
3733 #ifdef POINTERS_EXTEND_UNSIGNED
3734 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3735 #else
3736 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3737 #endif
3740 /* A constant address in TO_RTX can have VOIDmode; we must not try
3741 to call force_reg for that case. Avoid that case. */
3742 if (GET_CODE (to_rtx) == MEM
3743 && GET_MODE (to_rtx) == BLKmode
3744 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3745 && bitsize
3746 && (bitpos % bitsize) == 0
3747 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3748 && alignment == GET_MODE_ALIGNMENT (mode1))
3750 rtx temp
3751 = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3753 if (GET_CODE (XEXP (temp, 0)) == REG)
3754 to_rtx = temp;
3755 else
3756 to_rtx = (replace_equiv_address
3757 (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)),
3758 XEXP (temp, 0))));
3759 bitpos = 0;
3762 to_rtx = change_address (to_rtx, VOIDmode,
3763 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3764 force_reg (ptr_mode,
3765 offset_rtx)));
3768 if (volatilep)
3770 if (GET_CODE (to_rtx) == MEM)
3772 /* When the offset is zero, to_rtx is the address of the
3773 structure we are storing into, and hence may be shared.
3774 We must make a new MEM before setting the volatile bit. */
3775 if (offset == 0)
3776 to_rtx = copy_rtx (to_rtx);
3778 MEM_VOLATILE_P (to_rtx) = 1;
3780 #if 0 /* This was turned off because, when a field is volatile
3781 in an object which is not volatile, the object may be in a register,
3782 and then we would abort over here. */
3783 else
3784 abort ();
3785 #endif
3788 if (TREE_CODE (to) == COMPONENT_REF
3789 && TREE_READONLY (TREE_OPERAND (to, 1)))
3791 if (offset == 0)
3792 to_rtx = copy_rtx (to_rtx);
3794 RTX_UNCHANGING_P (to_rtx) = 1;
3797 /* Check the access. */
3798 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3800 rtx to_addr;
3801 int size;
3802 int best_mode_size;
3803 enum machine_mode best_mode;
3805 best_mode = get_best_mode (bitsize, bitpos,
3806 TYPE_ALIGN (TREE_TYPE (tem)),
3807 mode1, volatilep);
3808 if (best_mode == VOIDmode)
3809 best_mode = QImode;
3811 best_mode_size = GET_MODE_BITSIZE (best_mode);
3812 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3813 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3814 size *= GET_MODE_SIZE (best_mode);
3816 /* Check the access right of the pointer. */
3817 in_check_memory_usage = 1;
3818 if (size)
3819 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3820 VOIDmode, 3, to_addr, Pmode,
3821 GEN_INT (size), TYPE_MODE (sizetype),
3822 GEN_INT (MEMORY_USE_WO),
3823 TYPE_MODE (integer_type_node));
3824 in_check_memory_usage = 0;
3827 /* If this is a varying-length object, we must get the address of
3828 the source and do an explicit block move. */
3829 if (bitsize < 0)
3831 unsigned int from_align;
3832 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3833 rtx inner_to_rtx
3834 = adjust_address (to_rtx, BLKmode, bitpos / BITS_PER_UNIT);
3836 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3837 MIN (alignment, from_align));
3838 free_temp_slots ();
3839 pop_temp_slots ();
3840 return to_rtx;
3842 else
3844 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3845 (want_value
3846 /* Spurious cast for HPUX compiler. */
3847 ? ((enum machine_mode)
3848 TYPE_MODE (TREE_TYPE (to)))
3849 : VOIDmode),
3850 unsignedp,
3851 alignment,
3852 int_size_in_bytes (TREE_TYPE (tem)),
3853 get_alias_set (to));
3855 preserve_temp_slots (result);
3856 free_temp_slots ();
3857 pop_temp_slots ();
3859 /* If the value is meaningful, convert RESULT to the proper mode.
3860 Otherwise, return nothing. */
3861 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3862 TYPE_MODE (TREE_TYPE (from)),
3863 result,
3864 TREE_UNSIGNED (TREE_TYPE (to)))
3865 : NULL_RTX);
3869 /* If the rhs is a function call and its value is not an aggregate,
3870 call the function before we start to compute the lhs.
3871 This is needed for correct code for cases such as
3872 val = setjmp (buf) on machines where reference to val
3873 requires loading up part of an address in a separate insn.
3875 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3876 since it might be a promoted variable where the zero- or sign-extension
3877 needs to be done. Handling this in the normal way is safe because no
3878 computation is done before the call. */
3879 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3880 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3881 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3882 && GET_CODE (DECL_RTL (to)) == REG))
3884 rtx value;
3886 push_temp_slots ();
3887 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3888 if (to_rtx == 0)
3889 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3891 /* Handle calls that return values in multiple non-contiguous locations.
3892 The Irix 6 ABI has examples of this. */
3893 if (GET_CODE (to_rtx) == PARALLEL)
3894 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3895 TYPE_ALIGN (TREE_TYPE (from)));
3896 else if (GET_MODE (to_rtx) == BLKmode)
3897 emit_block_move (to_rtx, value, expr_size (from),
3898 TYPE_ALIGN (TREE_TYPE (from)));
3899 else
3901 #ifdef POINTERS_EXTEND_UNSIGNED
3902 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3903 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3904 value = convert_memory_address (GET_MODE (to_rtx), value);
3905 #endif
3906 emit_move_insn (to_rtx, value);
3908 preserve_temp_slots (to_rtx);
3909 free_temp_slots ();
3910 pop_temp_slots ();
3911 return want_value ? to_rtx : NULL_RTX;
3914 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3915 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3917 if (to_rtx == 0)
3919 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3920 if (GET_CODE (to_rtx) == MEM)
3921 set_mem_alias_set (to_rtx, get_alias_set (to));
3924 /* Don't move directly into a return register. */
3925 if (TREE_CODE (to) == RESULT_DECL
3926 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3928 rtx temp;
3930 push_temp_slots ();
3931 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3933 if (GET_CODE (to_rtx) == PARALLEL)
3934 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3935 TYPE_ALIGN (TREE_TYPE (from)));
3936 else
3937 emit_move_insn (to_rtx, temp);
3939 preserve_temp_slots (to_rtx);
3940 free_temp_slots ();
3941 pop_temp_slots ();
3942 return want_value ? to_rtx : NULL_RTX;
3945 /* In case we are returning the contents of an object which overlaps
3946 the place the value is being stored, use a safe function when copying
3947 a value through a pointer into a structure value return block. */
3948 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3949 && current_function_returns_struct
3950 && !current_function_returns_pcc_struct)
3952 rtx from_rtx, size;
3954 push_temp_slots ();
3955 size = expr_size (from);
3956 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3957 EXPAND_MEMORY_USE_DONT);
3959 /* Copy the rights of the bitmap. */
3960 if (current_function_check_memory_usage)
3961 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3962 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3963 XEXP (from_rtx, 0), Pmode,
3964 convert_to_mode (TYPE_MODE (sizetype),
3965 size, TREE_UNSIGNED (sizetype)),
3966 TYPE_MODE (sizetype));
3968 #ifdef TARGET_MEM_FUNCTIONS
3969 emit_library_call (memmove_libfunc, LCT_NORMAL,
3970 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3971 XEXP (from_rtx, 0), Pmode,
3972 convert_to_mode (TYPE_MODE (sizetype),
3973 size, TREE_UNSIGNED (sizetype)),
3974 TYPE_MODE (sizetype));
3975 #else
3976 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3977 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3978 XEXP (to_rtx, 0), Pmode,
3979 convert_to_mode (TYPE_MODE (integer_type_node),
3980 size, TREE_UNSIGNED (integer_type_node)),
3981 TYPE_MODE (integer_type_node));
3982 #endif
3984 preserve_temp_slots (to_rtx);
3985 free_temp_slots ();
3986 pop_temp_slots ();
3987 return want_value ? to_rtx : NULL_RTX;
3990 /* Compute FROM and store the value in the rtx we got. */
3992 push_temp_slots ();
3993 result = store_expr (from, to_rtx, want_value);
3994 preserve_temp_slots (result);
3995 free_temp_slots ();
3996 pop_temp_slots ();
3997 return want_value ? result : NULL_RTX;
4000 /* Generate code for computing expression EXP,
4001 and storing the value into TARGET.
4002 TARGET may contain a QUEUED rtx.
4004 If WANT_VALUE is nonzero, return a copy of the value
4005 not in TARGET, so that we can be sure to use the proper
4006 value in a containing expression even if TARGET has something
4007 else stored in it. If possible, we copy the value through a pseudo
4008 and return that pseudo. Or, if the value is constant, we try to
4009 return the constant. In some cases, we return a pseudo
4010 copied *from* TARGET.
4012 If the mode is BLKmode then we may return TARGET itself.
4013 It turns out that in BLKmode it doesn't cause a problem,
4014 because C has no operators that could combine two different
4015 assignments into the same BLKmode object with different values
4016 with no sequence point. Will other languages need this to
4017 be more thorough?
4019 If WANT_VALUE is 0, we return NULL, to make sure
4020 to catch quickly any cases where the caller uses the value
4021 and fails to set WANT_VALUE. */
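/* A minimal usage sketch, based on calls elsewhere in this file: a
   caller that only needs the side effect of the store passes
   WANT_VALUE == 0 and ignores the result,

       store_expr (TREE_OPERAND (exp, 1), target, 0);

   while expand_assignment passes WANT_VALUE through and may use the
   returned rtx,

       result = store_expr (from, to_rtx, want_value);  */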
4024 store_expr (exp, target, want_value)
4025 register tree exp;
4026 register rtx target;
4027 int want_value;
4029 register rtx temp;
4030 int dont_return_target = 0;
4031 int dont_store_target = 0;
4033 if (TREE_CODE (exp) == COMPOUND_EXPR)
4035 /* Perform first part of compound expression, then assign from second
4036 part. */
4037 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4038 emit_queue ();
4039 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4041 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4043 /* For conditional expression, get safe form of the target. Then
4044 test the condition, doing the appropriate assignment on either
4045 side. This avoids the creation of unnecessary temporaries.
4046 For non-BLKmode, it is more efficient not to do this. */
4048 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4050 emit_queue ();
4051 target = protect_from_queue (target, 1);
4053 do_pending_stack_adjust ();
4054 NO_DEFER_POP;
4055 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4056 start_cleanup_deferral ();
4057 store_expr (TREE_OPERAND (exp, 1), target, 0);
4058 end_cleanup_deferral ();
4059 emit_queue ();
4060 emit_jump_insn (gen_jump (lab2));
4061 emit_barrier ();
4062 emit_label (lab1);
4063 start_cleanup_deferral ();
4064 store_expr (TREE_OPERAND (exp, 2), target, 0);
4065 end_cleanup_deferral ();
4066 emit_queue ();
4067 emit_label (lab2);
4068 OK_DEFER_POP;
4070 return want_value ? target : NULL_RTX;
4072 else if (queued_subexp_p (target))
4073 /* If target contains a postincrement, let's not risk
4074 using it as the place to generate the rhs. */
4076 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4078 /* Expand EXP into a new pseudo. */
4079 temp = gen_reg_rtx (GET_MODE (target));
4080 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4082 else
4083 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4085 /* If target is volatile, ANSI requires accessing the value
4086 *from* the target, if it is accessed. So make that happen.
4087 In no case return the target itself. */
4088 if (! MEM_VOLATILE_P (target) && want_value)
4089 dont_return_target = 1;
4091 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4092 && GET_MODE (target) != BLKmode)
4093 /* If target is in memory and caller wants value in a register instead,
4094 arrange that. Pass TARGET as target for expand_expr so that,
4095 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4096 We know expand_expr will not use the target in that case.
4097 Don't do this if TARGET is volatile because we are supposed
4098 to write it and then read it. */
4100 temp = expand_expr (exp, target, GET_MODE (target), 0);
4101 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4103 /* If TEMP is already in the desired TARGET, only copy it from
4104 memory and don't store it there again. */
4105 if (temp == target
4106 || (rtx_equal_p (temp, target)
4107 && ! side_effects_p (temp) && ! side_effects_p (target)))
4108 dont_store_target = 1;
4109 temp = copy_to_reg (temp);
4111 dont_return_target = 1;
4113 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4114 /* If this is a scalar in a register that is stored in a wider mode
4115 than the declared mode, compute the result into its declared mode
4116 and then convert to the wider mode. Our value is the computed
4117 expression. */
4119 /* If we don't want a value, we can do the conversion inside EXP,
4120 which will often result in some optimizations. Do the conversion
4121 in two steps: first change the signedness, if needed, then
4122 the extend. But don't do this if the type of EXP is a subtype
4123 of something else since then the conversion might involve
4124 more than just converting modes. */
4125 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4126 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4128 if (TREE_UNSIGNED (TREE_TYPE (exp))
4129 != SUBREG_PROMOTED_UNSIGNED_P (target))
4130 exp
4131 = convert
4132 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4133 TREE_TYPE (exp)),
4134 exp);
4136 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4137 SUBREG_PROMOTED_UNSIGNED_P (target)),
4138 exp);
4141 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4143 /* If TEMP is a volatile MEM and we want a result value, make
4144 the access now so it gets done only once. Likewise if
4145 it contains TARGET. */
4146 if (GET_CODE (temp) == MEM && want_value
4147 && (MEM_VOLATILE_P (temp)
4148 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4149 temp = copy_to_reg (temp);
4151 /* If TEMP is a VOIDmode constant, use convert_modes to make
4152 sure that we properly convert it. */
4153 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4154 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4155 TYPE_MODE (TREE_TYPE (exp)), temp,
4156 SUBREG_PROMOTED_UNSIGNED_P (target));
4158 convert_move (SUBREG_REG (target), temp,
4159 SUBREG_PROMOTED_UNSIGNED_P (target));
4161 /* If we promoted a constant, change the mode back down to match
4162 target. Otherwise, the caller might get confused by a result whose
4163 mode is larger than expected. */
4165 if (want_value && GET_MODE (temp) != GET_MODE (target)
4166 && GET_MODE (temp) != VOIDmode)
4168 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4169 SUBREG_PROMOTED_VAR_P (temp) = 1;
4170 SUBREG_PROMOTED_UNSIGNED_P (temp)
4171 = SUBREG_PROMOTED_UNSIGNED_P (target);
4174 return want_value ? temp : NULL_RTX;
4176 else
4178 temp = expand_expr (exp, target, GET_MODE (target), 0);
4179 /* Return TARGET if it's a specified hardware register.
4180 If TARGET is a volatile mem ref, either return TARGET
4181 or return a reg copied *from* TARGET; ANSI requires this.
4183 Otherwise, if TEMP is not TARGET, return TEMP
4184 if it is constant (for efficiency),
4185 or if we really want the correct value. */
4186 if (!(target && GET_CODE (target) == REG
4187 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4188 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4189 && ! rtx_equal_p (temp, target)
4190 && (CONSTANT_P (temp) || want_value))
4191 dont_return_target = 1;
4194 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4195 the same as that of TARGET, adjust the constant. This is needed, for
4196 example, in case it is a CONST_DOUBLE and we want only a word-sized
4197 value. */
4198 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4199 && TREE_CODE (exp) != ERROR_MARK
4200 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4201 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4202 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4204 if (current_function_check_memory_usage
4205 && GET_CODE (target) == MEM
4206 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4208 in_check_memory_usage = 1;
4209 if (GET_CODE (temp) == MEM)
4210 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4211 VOIDmode, 3, XEXP (target, 0), Pmode,
4212 XEXP (temp, 0), Pmode,
4213 expr_size (exp), TYPE_MODE (sizetype));
4214 else
4215 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4216 VOIDmode, 3, XEXP (target, 0), Pmode,
4217 expr_size (exp), TYPE_MODE (sizetype),
4218 GEN_INT (MEMORY_USE_WO),
4219 TYPE_MODE (integer_type_node));
4220 in_check_memory_usage = 0;
4223 /* If value was not generated in the target, store it there.
4224 Convert the value to TARGET's type first if necessary. */
4225 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4226 one or both of them are volatile memory refs, we have to distinguish
4227 two cases:
4228 - expand_expr has used TARGET. In this case, we must not generate
4229 another copy. This can be detected by TARGET being equal according
4230 to == .
4231 - expand_expr has not used TARGET - that means that the source just
4232 happens to have the same RTX form. Since temp will have been created
4233 by expand_expr, it will compare unequal according to == .
4234 We must generate a copy in this case, to reach the correct number
4235 of volatile memory references. */
4237 if ((! rtx_equal_p (temp, target)
4238 || (temp != target && (side_effects_p (temp)
4239 || side_effects_p (target))))
4240 && TREE_CODE (exp) != ERROR_MARK
4241 && ! dont_store_target)
4243 target = protect_from_queue (target, 1);
4244 if (GET_MODE (temp) != GET_MODE (target)
4245 && GET_MODE (temp) != VOIDmode)
4247 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4248 if (dont_return_target)
4250 /* In this case, we will return TEMP,
4251 so make sure it has the proper mode.
4252 But don't forget to store the value into TARGET. */
4253 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4254 emit_move_insn (target, temp);
4256 else
4257 convert_move (target, temp, unsignedp);
4260 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4262 /* Handle copying a string constant into an array.
4263 The string constant may be shorter than the array.
4264 So copy just the string's actual length, and clear the rest. */
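/* An illustrative case: an initialization such as

       char buf[10] = "abc";

   arrives here with a STRING_CST shorter than the array; the string's
   bytes are block-copied and the rest of the array is cleared by the
   clear_storage call below.  */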
4265 rtx size;
4266 rtx addr;
4268 /* Get the size of the data type of the string,
4269 which is actually the size of the target. */
4270 size = expr_size (exp);
4271 if (GET_CODE (size) == CONST_INT
4272 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4273 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
4274 else
4276 /* Compute the size of the data to copy from the string. */
4277 tree copy_size
4278 = size_binop (MIN_EXPR,
4279 make_tree (sizetype, size),
4280 size_int (TREE_STRING_LENGTH (exp)));
4281 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
4282 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4283 VOIDmode, 0);
4284 rtx label = 0;
4286 /* Copy that much. */
4287 emit_block_move (target, temp, copy_size_rtx,
4288 TYPE_ALIGN (TREE_TYPE (exp)));
4290 /* Figure out how much is left in TARGET that we have to clear.
4291 Do all calculations in ptr_mode. */
4293 addr = XEXP (target, 0);
4294 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4296 if (GET_CODE (copy_size_rtx) == CONST_INT)
4298 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4299 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4300 align = MIN (align,
4301 (unsigned int) (BITS_PER_UNIT
4302 * (INTVAL (copy_size_rtx)
4303 & - INTVAL (copy_size_rtx))));
4305 else
4307 addr = force_reg (ptr_mode, addr);
4308 addr = expand_binop (ptr_mode, add_optab, addr,
4309 copy_size_rtx, NULL_RTX, 0,
4310 OPTAB_LIB_WIDEN);
4312 size = expand_binop (ptr_mode, sub_optab, size,
4313 copy_size_rtx, NULL_RTX, 0,
4314 OPTAB_LIB_WIDEN);
4316 align = BITS_PER_UNIT;
4317 label = gen_label_rtx ();
4318 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4319 GET_MODE (size), 0, 0, label);
4321 align = MIN (align, expr_align (copy_size));
4323 if (size != const0_rtx)
4325 rtx dest = gen_rtx_MEM (BLKmode, addr);
4327 MEM_COPY_ATTRIBUTES (dest, target);
4329 /* Be sure we can write on ADDR. */
4330 in_check_memory_usage = 1;
4331 if (current_function_check_memory_usage)
4332 emit_library_call (chkr_check_addr_libfunc,
4333 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4334 addr, Pmode,
4335 size, TYPE_MODE (sizetype),
4336 GEN_INT (MEMORY_USE_WO),
4337 TYPE_MODE (integer_type_node));
4338 in_check_memory_usage = 0;
4339 clear_storage (dest, size, align);
4342 if (label)
4343 emit_label (label);
4346 /* Handle calls that return values in multiple non-contiguous locations.
4347 The Irix 6 ABI has examples of this. */
4348 else if (GET_CODE (target) == PARALLEL)
4349 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4350 TYPE_ALIGN (TREE_TYPE (exp)));
4351 else if (GET_MODE (temp) == BLKmode)
4352 emit_block_move (target, temp, expr_size (exp),
4353 TYPE_ALIGN (TREE_TYPE (exp)));
4354 else
4355 emit_move_insn (target, temp);
4358 /* If we don't want a value, return NULL_RTX. */
4359 if (! want_value)
4360 return NULL_RTX;
4362 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4363 ??? The latter test doesn't seem to make sense. */
4364 else if (dont_return_target && GET_CODE (temp) != MEM)
4365 return temp;
4367 /* Return TARGET itself if it is a hard register. */
4368 else if (want_value && GET_MODE (target) != BLKmode
4369 && ! (GET_CODE (target) == REG
4370 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4371 return copy_to_reg (target);
4373 else
4374 return target;
4377 /* Return 1 if EXP just contains zeros. */
4379 static int
4380 is_zeros_p (exp)
4381 tree exp;
4383 tree elt;
4385 switch (TREE_CODE (exp))
4387 case CONVERT_EXPR:
4388 case NOP_EXPR:
4389 case NON_LVALUE_EXPR:
4390 return is_zeros_p (TREE_OPERAND (exp, 0));
4392 case INTEGER_CST:
4393 return integer_zerop (exp);
4395 case COMPLEX_CST:
4396 return
4397 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4399 case REAL_CST:
4400 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4402 case CONSTRUCTOR:
4403 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4404 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4405 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4406 if (! is_zeros_p (TREE_VALUE (elt)))
4407 return 0;
4409 return 1;
4411 default:
4412 return 0;
4416 /* Return 1 if EXP contains mostly (3/4) zeros. */
4418 static int
4419 mostly_zeros_p (exp)
4420 tree exp;
4422 if (TREE_CODE (exp) == CONSTRUCTOR)
4424 int elts = 0, zeros = 0;
4425 tree elt = CONSTRUCTOR_ELTS (exp);
4426 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4428 /* If there are no ranges of true bits, it is all zero. */
4429 return elt == NULL_TREE;
4431 for (; elt; elt = TREE_CHAIN (elt))
4433 /* We do not handle the case where the index is a RANGE_EXPR,
4434 so the statistic will be somewhat inaccurate.
4435 We do make a more accurate count in store_constructor itself,
4436 and since this function is only used for nested array elements,
4437 this should be close enough. */
4438 if (mostly_zeros_p (TREE_VALUE (elt)))
4439 zeros++;
4440 elts++;
4443 return 4 * zeros >= 3 * elts;
4446 return is_zeros_p (exp);
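/* For example (illustrative only): a CONSTRUCTOR with three zero
   elements out of four satisfies 4 * zeros >= 3 * elts (12 >= 12), so
   an aggregate such as

       int v[4] = { 0, 0, 0, 5 };

   counts as mostly zeros, and store_constructor will prefer to clear
   the whole object first.  */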
4449 /* Helper function for store_constructor.
4450 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4451 TYPE is the type of the CONSTRUCTOR, not the element type.
4452 ALIGN and CLEARED are as for store_constructor.
4453 ALIAS_SET is the alias set to use for any stores.
4455 This provides a recursive shortcut back to store_constructor when it isn't
4456 necessary to go through store_field. This is so that we can pass through
4457 the cleared field to let store_constructor know that we may not have to
4458 clear a substructure if the outer structure has already been cleared. */
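/* A sketch of the shortcut (illustrative only): for a nested aggregate
   initializer such as

       struct inner { int a, b; };
       struct outer { struct inner in; int c; };

   the value stored into the IN field is itself a CONSTRUCTOR at a
   byte-aligned BITPOS, so instead of going through store_field we
   adjust the address and recurse into store_constructor, passing
   CLEARED along so an already-zeroed substructure is not cleared
   again.  */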
4460 static void
4461 store_constructor_field (target, bitsize, bitpos,
4462 mode, exp, type, align, cleared, alias_set)
4463 rtx target;
4464 unsigned HOST_WIDE_INT bitsize;
4465 HOST_WIDE_INT bitpos;
4466 enum machine_mode mode;
4467 tree exp, type;
4468 unsigned int align;
4469 int cleared;
4470 int alias_set;
4472 if (TREE_CODE (exp) == CONSTRUCTOR
4473 && bitpos % BITS_PER_UNIT == 0
4474 /* If we have a non-zero bitpos for a register target, then we just
4475 let store_field do the bitfield handling. This is unlikely to
4476 generate unnecessary clear instructions anyway. */
4477 && (bitpos == 0 || GET_CODE (target) == MEM))
4479 if (bitpos != 0)
4480 target
4481 = adjust_address (target,
4482 GET_MODE (target) == BLKmode
4483 || 0 != (bitpos
4484 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4485 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4488 /* Show the alignment may no longer be what it was and update the alias
4489 set, if required. */
4490 if (bitpos != 0)
4491 align = MIN (align, (unsigned int) bitpos & - bitpos);
4492 if (GET_CODE (target) == MEM)
4493 set_mem_alias_set (target, alias_set);
4495 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4497 else
4498 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4499 int_size_in_bytes (type), alias_set);
4502 /* Store the value of constructor EXP into the rtx TARGET.
4503 TARGET is either a REG or a MEM.
4504 ALIGN is the maximum known alignment for TARGET.
4505 CLEARED is true if TARGET is known to have been zero'd.
4506 SIZE is the number of bytes of TARGET we are allowed to modify: this
4507 may not be the same as the size of EXP if we are assigning to a field
4508 which has been packed to exclude padding bits. */
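/* An illustrative example: for an automatic aggregate with many
   implicit zeros, e.g.

       int a[100] = { 7 };

   the constructor has fewer elements than the array, so the code below
   clears the whole of TARGET once with clear_storage and then stores
   only the explicitly given element, skipping the zeros.  */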
4510 static void
4511 store_constructor (exp, target, align, cleared, size)
4512 tree exp;
4513 rtx target;
4514 unsigned int align;
4515 int cleared;
4516 HOST_WIDE_INT size;
4518 tree type = TREE_TYPE (exp);
4519 #ifdef WORD_REGISTER_OPERATIONS
4520 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4521 #endif
4523 /* We know our target cannot conflict, since safe_from_p has been called. */
4524 #if 0
4525 /* Don't try copying piece by piece into a hard register
4526 since that is vulnerable to being clobbered by EXP.
4527 Instead, construct in a pseudo register and then copy it all. */
4528 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4530 rtx temp = gen_reg_rtx (GET_MODE (target));
4531 store_constructor (exp, temp, align, cleared, size);
4532 emit_move_insn (target, temp);
4533 return;
4535 #endif
4537 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4538 || TREE_CODE (type) == QUAL_UNION_TYPE)
4540 register tree elt;
4542 /* Inform later passes that the whole union value is dead. */
4543 if ((TREE_CODE (type) == UNION_TYPE
4544 || TREE_CODE (type) == QUAL_UNION_TYPE)
4545 && ! cleared)
4547 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4549 /* If the constructor is empty, clear the union. */
4550 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4551 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4554 /* If we are building a static constructor into a register,
4555 set the initial value as zero so we can fold the value into
4556 a constant. But if more than one register is involved,
4557 this probably loses. */
4558 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4559 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4561 if (! cleared)
4562 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4564 cleared = 1;
4567 /* If the constructor has fewer fields than the structure
4568 or if we are initializing the structure to mostly zeros,
4569 clear the whole structure first. Don't do this if TARGET is a
4570 register whose mode size isn't equal to SIZE since clear_storage
4571 can't handle this case. */
4572 else if (size > 0
4573 && ((list_length (CONSTRUCTOR_ELTS (exp))
4574 != fields_length (type))
4575 || mostly_zeros_p (exp))
4576 && (GET_CODE (target) != REG
4577 || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
4579 if (! cleared)
4580 clear_storage (target, GEN_INT (size), align);
4582 cleared = 1;
4584 else if (! cleared)
4585 /* Inform later passes that the old value is dead. */
4586 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4588 /* Store each element of the constructor into
4589 the corresponding field of TARGET. */
4591 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4593 register tree field = TREE_PURPOSE (elt);
4594 #ifdef WORD_REGISTER_OPERATIONS
4595 tree value = TREE_VALUE (elt);
4596 #endif
4597 register enum machine_mode mode;
4598 HOST_WIDE_INT bitsize;
4599 HOST_WIDE_INT bitpos = 0;
4600 int unsignedp;
4601 tree offset;
4602 rtx to_rtx = target;
4604 /* Just ignore missing fields.
4605 We cleared the whole structure, above,
4606 if any fields are missing. */
4607 if (field == 0)
4608 continue;
4610 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4611 continue;
4613 if (host_integerp (DECL_SIZE (field), 1))
4614 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4615 else
4616 bitsize = -1;
4618 unsignedp = TREE_UNSIGNED (field);
4619 mode = DECL_MODE (field);
4620 if (DECL_BIT_FIELD (field))
4621 mode = VOIDmode;
4623 offset = DECL_FIELD_OFFSET (field);
4624 if (host_integerp (offset, 0)
4625 && host_integerp (bit_position (field), 0))
4627 bitpos = int_bit_position (field);
4628 offset = 0;
4630 else
4631 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4633 if (offset)
4635 rtx offset_rtx;
4637 if (contains_placeholder_p (offset))
4638 offset = build (WITH_RECORD_EXPR, sizetype,
4639 offset, make_tree (TREE_TYPE (exp), target));
4641 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4642 if (GET_CODE (to_rtx) != MEM)
4643 abort ();
4645 if (GET_MODE (offset_rtx) != ptr_mode)
4647 #ifdef POINTERS_EXTEND_UNSIGNED
4648 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4649 #else
4650 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4651 #endif
4654 to_rtx
4655 = change_address (to_rtx, VOIDmode,
4656 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4657 force_reg (ptr_mode,
4658 offset_rtx)));
4659 align = DECL_OFFSET_ALIGN (field);
4662 if (TREE_READONLY (field))
4664 if (GET_CODE (to_rtx) == MEM)
4665 to_rtx = copy_rtx (to_rtx);
4667 RTX_UNCHANGING_P (to_rtx) = 1;
4670 #ifdef WORD_REGISTER_OPERATIONS
4671 /* If this initializes a field that is smaller than a word, at the
4672 start of a word, try to widen it to a full word.
4673 This special case allows us to output C++ member function
4674 initializations in a form that the optimizers can understand. */
4675 if (GET_CODE (target) == REG
4676 && bitsize < BITS_PER_WORD
4677 && bitpos % BITS_PER_WORD == 0
4678 && GET_MODE_CLASS (mode) == MODE_INT
4679 && TREE_CODE (value) == INTEGER_CST
4680 && exp_size >= 0
4681 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4683 tree type = TREE_TYPE (value);
4684 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4686 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4687 value = convert (type, value);
4689 if (BYTES_BIG_ENDIAN)
4690 value
4691 = fold (build (LSHIFT_EXPR, type, value,
4692 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4693 bitsize = BITS_PER_WORD;
4694 mode = word_mode;
4696 #endif
4697 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4698 TREE_VALUE (elt), type, align, cleared,
4699 (DECL_NONADDRESSABLE_P (field)
4700 && GET_CODE (to_rtx) == MEM)
4701 ? MEM_ALIAS_SET (to_rtx)
4702 : get_alias_set (TREE_TYPE (field)));
4705 else if (TREE_CODE (type) == ARRAY_TYPE)
4707 register tree elt;
4708 register int i;
4709 int need_to_clear;
4710 tree domain = TYPE_DOMAIN (type);
4711 tree elttype = TREE_TYPE (type);
4712 int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0)
4713 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4714 HOST_WIDE_INT minelt = 0;
4715 HOST_WIDE_INT maxelt = 0;
4717 /* If we have constant bounds for the range of the type, get them. */
4718 if (const_bounds_p)
4720 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4721 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4724 /* If the constructor has fewer elements than the array,
4725 clear the whole array first. Similarly if this is
4726 a static constructor of a non-BLKmode object. */
4727 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4728 need_to_clear = 1;
4729 else
4731 HOST_WIDE_INT count = 0, zero_count = 0;
4732 need_to_clear = ! const_bounds_p;
4734 /* This loop is a more accurate version of the loop in
4735 mostly_zeros_p (it handles RANGE_EXPR in an index).
4736 It is also needed to check for missing elements. */
4737 for (elt = CONSTRUCTOR_ELTS (exp);
4738 elt != NULL_TREE && ! need_to_clear;
4739 elt = TREE_CHAIN (elt))
4741 tree index = TREE_PURPOSE (elt);
4742 HOST_WIDE_INT this_node_count;
4744 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4746 tree lo_index = TREE_OPERAND (index, 0);
4747 tree hi_index = TREE_OPERAND (index, 1);
4749 if (! host_integerp (lo_index, 1)
4750 || ! host_integerp (hi_index, 1))
4752 need_to_clear = 1;
4753 break;
4756 this_node_count = (tree_low_cst (hi_index, 1)
4757 - tree_low_cst (lo_index, 1) + 1);
4759 else
4760 this_node_count = 1;
4762 count += this_node_count;
4763 if (mostly_zeros_p (TREE_VALUE (elt)))
4764 zero_count += this_node_count;
4767 /* Clear the entire array first if there are any missing elements,
4768 or if the incidence of zero elements is >= 75%. */
4769 if (! need_to_clear
4770 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4771 need_to_clear = 1;
4774 if (need_to_clear && size > 0)
4776 if (! cleared)
4777 clear_storage (target, GEN_INT (size), align);
4778 cleared = 1;
4780 else
4781 /* Inform later passes that the old value is dead. */
4782 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4784 /* Store each element of the constructor into
4785 the corresponding element of TARGET, determined
4786 by counting the elements. */
4787 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4788 elt;
4789 elt = TREE_CHAIN (elt), i++)
4791 register enum machine_mode mode;
4792 HOST_WIDE_INT bitsize;
4793 HOST_WIDE_INT bitpos;
4794 int unsignedp;
4795 tree value = TREE_VALUE (elt);
4796 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4797 tree index = TREE_PURPOSE (elt);
4798 rtx xtarget = target;
4800 if (cleared && is_zeros_p (value))
4801 continue;
4803 unsignedp = TREE_UNSIGNED (elttype);
4804 mode = TYPE_MODE (elttype);
4805 if (mode == BLKmode)
4806 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4807 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4808 : -1);
4809 else
4810 bitsize = GET_MODE_BITSIZE (mode);
4812 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4814 tree lo_index = TREE_OPERAND (index, 0);
4815 tree hi_index = TREE_OPERAND (index, 1);
4816 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4817 struct nesting *loop;
4818 HOST_WIDE_INT lo, hi, count;
4819 tree position;
4821 /* If the range is constant and "small", unroll the loop. */
4822 if (const_bounds_p
4823 && host_integerp (lo_index, 0)
4824 && host_integerp (hi_index, 0)
4825 && (lo = tree_low_cst (lo_index, 0),
4826 hi = tree_low_cst (hi_index, 0),
4827 count = hi - lo + 1,
4828 (GET_CODE (target) != MEM
4829 || count <= 2
4830 || (host_integerp (TYPE_SIZE (elttype), 1)
4831 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4832 <= 40 * 8)))))
4834 lo -= minelt; hi -= minelt;
4835 for (; lo <= hi; lo++)
4837 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4838 store_constructor_field
4839 (target, bitsize, bitpos, mode, value, type, align,
4840 cleared,
4841 TYPE_NONALIASED_COMPONENT (type)
4842 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
4845 else
4847 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4848 loop_top = gen_label_rtx ();
4849 loop_end = gen_label_rtx ();
4851 unsignedp = TREE_UNSIGNED (domain);
4853 index = build_decl (VAR_DECL, NULL_TREE, domain);
4855 index_r
4856 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4857 &unsignedp, 0));
4858 SET_DECL_RTL (index, index_r);
4859 if (TREE_CODE (value) == SAVE_EXPR
4860 && SAVE_EXPR_RTL (value) == 0)
4862 /* Make sure value gets expanded once before the
4863 loop. */
4864 expand_expr (value, const0_rtx, VOIDmode, 0);
4865 emit_queue ();
4867 store_expr (lo_index, index_r, 0);
4868 loop = expand_start_loop (0);
4870 /* Assign value to element index. */
4871 position
4872 = convert (ssizetype,
4873 fold (build (MINUS_EXPR, TREE_TYPE (index),
4874 index, TYPE_MIN_VALUE (domain))));
4875 position = size_binop (MULT_EXPR, position,
4876 convert (ssizetype,
4877 TYPE_SIZE_UNIT (elttype)));
4879 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4880 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4881 xtarget = change_address (target, mode, addr);
4882 if (TREE_CODE (value) == CONSTRUCTOR)
4883 store_constructor (value, xtarget, align, cleared,
4884 bitsize / BITS_PER_UNIT);
4885 else
4886 store_expr (value, xtarget, 0);
4888 expand_exit_loop_if_false (loop,
4889 build (LT_EXPR, integer_type_node,
4890 index, hi_index));
4892 expand_increment (build (PREINCREMENT_EXPR,
4893 TREE_TYPE (index),
4894 index, integer_one_node), 0, 0);
4895 expand_end_loop ();
4896 emit_label (loop_end);
4899 else if ((index != 0 && ! host_integerp (index, 0))
4900 || ! host_integerp (TYPE_SIZE (elttype), 1))
4902 rtx pos_rtx, addr;
4903 tree position;
4905 if (index == 0)
4906 index = ssize_int (1);
4908 if (minelt)
4909 index = convert (ssizetype,
4910 fold (build (MINUS_EXPR, index,
4911 TYPE_MIN_VALUE (domain))));
4913 position = size_binop (MULT_EXPR, index,
4914 convert (ssizetype,
4915 TYPE_SIZE_UNIT (elttype)));
4916 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4917 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4918 xtarget = change_address (target, mode, addr);
4919 store_expr (value, xtarget, 0);
4921 else
4923 if (index != 0)
4924 bitpos = ((tree_low_cst (index, 0) - minelt)
4925 * tree_low_cst (TYPE_SIZE (elttype), 1));
4926 else
4927 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4929 store_constructor_field (target, bitsize, bitpos, mode, value,
4930 type, align, cleared,
4931 TYPE_NONALIASED_COMPONENT (type)
4932 && GET_CODE (target) == MEM
4933 ? MEM_ALIAS_SET (target) :
4934 get_alias_set (elttype));
4940 /* Set constructor assignments. */
4941 else if (TREE_CODE (type) == SET_TYPE)
4943 tree elt = CONSTRUCTOR_ELTS (exp);
4944 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4945 tree domain = TYPE_DOMAIN (type);
4946 tree domain_min, domain_max, bitlength;
4948 /* The default implementation strategy is to extract the constant
4949 parts of the constructor, use that to initialize the target,
4950 and then "or" in whatever non-constant ranges we need in addition.
4952 If a large set is all zero or all ones, it is
4953 probably better to set it using memset (if available) or bzero.
4954 Also, if a large set has just a single range, it may be
4955 better to first clear the whole set (using bzero/memset)
4956 and then set the bits we want. */
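/* For instance (illustrative, for languages with SET_TYPE such as
   Pascal or CHILL, and assuming a zero-based domain): a range whose
   endpoints are byte-aligned constants, say bits 8 through 23, can be
   filled by the memset call below ((24 - 8) / BITS_PER_UNIT bytes of
   all-ones) instead of going through the __setbits library routine.  */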
4958 /* Check for all zeros. */
4959 if (elt == NULL_TREE && size > 0)
4961 if (!cleared)
4962 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4963 return;
4966 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4967 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4968 bitlength = size_binop (PLUS_EXPR,
4969 size_diffop (domain_max, domain_min),
4970 ssize_int (1));
4972 nbits = tree_low_cst (bitlength, 1);
4974 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4975 are "complicated" (more than one range), initialize (the
4976 constant parts) by copying from a constant. */
4977 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4978 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4980 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4981 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4982 char *bit_buffer = (char *) alloca (nbits);
4983 HOST_WIDE_INT word = 0;
4984 unsigned int bit_pos = 0;
4985 unsigned int ibit = 0;
4986 unsigned int offset = 0; /* In bytes from beginning of set. */
4988 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4989 for (;;)
4991 if (bit_buffer[ibit])
4993 if (BYTES_BIG_ENDIAN)
4994 word |= (1 << (set_word_size - 1 - bit_pos));
4995 else
4996 word |= 1 << bit_pos;
4999 bit_pos++; ibit++;
5000 if (bit_pos >= set_word_size || ibit == nbits)
5002 if (word != 0 || ! cleared)
5004 rtx datum = GEN_INT (word);
5005 rtx to_rtx;
5007 /* The assumption here is that it is safe to use
5008 XEXP if the set is multi-word, but not if
5009 it's single-word. */
5010 if (GET_CODE (target) == MEM)
5011 to_rtx = adjust_address (target, mode, offset);
5012 else if (offset == 0)
5013 to_rtx = target;
5014 else
5015 abort ();
5016 emit_move_insn (to_rtx, datum);
5019 if (ibit == nbits)
5020 break;
5021 word = 0;
5022 bit_pos = 0;
5023 offset += set_word_size / BITS_PER_UNIT;
5027 else if (!cleared)
5028 /* Don't bother clearing storage if the set is all ones. */
5029 if (TREE_CHAIN (elt) != NULL_TREE
5030 || (TREE_PURPOSE (elt) == NULL_TREE
5031 ? nbits != 1
5032 : ( ! host_integerp (TREE_VALUE (elt), 0)
5033 || ! host_integerp (TREE_PURPOSE (elt), 0)
5034 || (tree_low_cst (TREE_VALUE (elt), 0)
5035 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5036 != (HOST_WIDE_INT) nbits))))
5037 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
5039 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5041 /* Start of range of element or NULL. */
5042 tree startbit = TREE_PURPOSE (elt);
5043 /* End of range of element, or element value. */
5044 tree endbit = TREE_VALUE (elt);
5045 #ifdef TARGET_MEM_FUNCTIONS
5046 HOST_WIDE_INT startb, endb;
5047 #endif
5048 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5050 bitlength_rtx = expand_expr (bitlength,
5051 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5053 /* Handle non-range tuple element like [ expr ]. */
5054 if (startbit == NULL_TREE)
5056 startbit = save_expr (endbit);
5057 endbit = startbit;
5060 startbit = convert (sizetype, startbit);
5061 endbit = convert (sizetype, endbit);
5062 if (! integer_zerop (domain_min))
5064 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5065 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5067 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5068 EXPAND_CONST_ADDRESS);
5069 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5070 EXPAND_CONST_ADDRESS);
5072 if (REG_P (target))
5074 targetx
5075 = assign_temp
5076 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
5077 TYPE_QUAL_CONST)),
5078 0, 1, 1);
5079 emit_move_insn (targetx, target);
5082 else if (GET_CODE (target) == MEM)
5083 targetx = target;
5084 else
5085 abort ();
5087 #ifdef TARGET_MEM_FUNCTIONS
5088 /* Optimization: If startbit and endbit are
5089 constants divisible by BITS_PER_UNIT,
5090 call memset instead. */
5091 if (TREE_CODE (startbit) == INTEGER_CST
5092 && TREE_CODE (endbit) == INTEGER_CST
5093 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5094 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5096 emit_library_call (memset_libfunc, LCT_NORMAL,
5097 VOIDmode, 3,
5098 plus_constant (XEXP (targetx, 0),
5099 startb / BITS_PER_UNIT),
5100 Pmode,
5101 constm1_rtx, TYPE_MODE (integer_type_node),
5102 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5103 TYPE_MODE (sizetype));
5105 else
5106 #endif
5107 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5108 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5109 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5110 startbit_rtx, TYPE_MODE (sizetype),
5111 endbit_rtx, TYPE_MODE (sizetype));
5113 if (REG_P (target))
5114 emit_move_insn (target, targetx);
5118 else
5119 abort ();
5122 /* Store the value of EXP (an expression tree)
5123 into a subfield of TARGET which has mode MODE and occupies
5124 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5125 If MODE is VOIDmode, it means that we are storing into a bit-field.
5127 If VALUE_MODE is VOIDmode, return nothing in particular.
5128 UNSIGNEDP is not used in this case.
5130 Otherwise, return an rtx for the value stored. This rtx
5131 has mode VALUE_MODE if that is convenient to do.
5132 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5134 ALIGN is the alignment that TARGET is known to have.
5135 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
5137 ALIAS_SET is the alias set for the destination. This value will
5138 (in general) be different from that for TARGET, since TARGET is a
5139 reference to the containing structure. */
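/* A sketch of the common bit-field case (illustrative only): for

       struct s { unsigned int f : 3; } x;
       x.f = v;

   expand_assignment reaches this function with MODE == VOIDmode and a
   small BITSIZE/BITPOS, so the value is expanded and inserted with
   store_bit_field rather than through an ordinary memory reference.  */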
5141 static rtx
5142 store_field (target, bitsize, bitpos, mode, exp, value_mode,
5143 unsignedp, align, total_size, alias_set)
5144 rtx target;
5145 HOST_WIDE_INT bitsize;
5146 HOST_WIDE_INT bitpos;
5147 enum machine_mode mode;
5148 tree exp;
5149 enum machine_mode value_mode;
5150 int unsignedp;
5151 unsigned int align;
5152 HOST_WIDE_INT total_size;
5153 int alias_set;
5155 HOST_WIDE_INT width_mask = 0;
5157 if (TREE_CODE (exp) == ERROR_MARK)
5158 return const0_rtx;
5160 /* If we have nothing to store, do nothing unless the expression has
5161 side-effects. */
5162 if (bitsize == 0)
5163 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5165 if (bitsize < HOST_BITS_PER_WIDE_INT)
5166 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5168 /* If we are storing into an unaligned field of an aligned union that is
5169 in a register, we may have the mode of TARGET being an integer mode but
5170 MODE == BLKmode. In that case, get an aligned object whose size and
5171 alignment are the same as TARGET and store TARGET into it (we can avoid
5172 the store if the field being stored is the entire width of TARGET). Then
5173 call ourselves recursively to store the field into a BLKmode version of
5174 that object. Finally, load from the object into TARGET. This is not
5175 very efficient in general, but should only be slightly more expensive
5176 than the otherwise-required unaligned accesses. Perhaps this can be
5177 cleaned up later. */
5179 if (mode == BLKmode
5180 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5182 rtx object
5183 = assign_temp
5184 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5185 TYPE_QUAL_CONST),
5186 0, 1, 1);
5187 rtx blk_object = copy_rtx (object);
5189 PUT_MODE (blk_object, BLKmode);
5191 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5192 emit_move_insn (object, target);
5194 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5195 align, total_size, alias_set);
5197 /* Even though we aren't returning target, we need to
5198 give it the updated value. */
5199 emit_move_insn (target, object);
5201 return blk_object;
5204 if (GET_CODE (target) == CONCAT)
5206 /* We're storing into a struct containing a single __complex. */
5208 if (bitpos != 0)
5209 abort ();
5210 return store_expr (exp, target, 0);
5213 /* If the structure is in a register or if the component
5214 is a bit field, we cannot use addressing to access it.
5215 Use bit-field techniques or SUBREG to store in it. */
5217 if (mode == VOIDmode
5218 || (mode != BLKmode && ! direct_store[(int) mode]
5219 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5220 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5221 || GET_CODE (target) == REG
5222 || GET_CODE (target) == SUBREG
5223 /* If the field isn't aligned enough to store as an ordinary memref,
5224 store it as a bit field. */
5225 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5226 && (align < GET_MODE_ALIGNMENT (mode)
5227 || bitpos % GET_MODE_ALIGNMENT (mode)))
5228 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5229 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
5230 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5231 /* If the RHS and field are a constant size and the size of the
5232 RHS isn't the same size as the bitfield, we must use bitfield
5233 operations. */
5234 || (bitsize >= 0
5235 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5236 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5238 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5240 /* If BITSIZE is narrower than the size of the type of EXP
5241 we will be narrowing TEMP. Normally, what's wanted are the
5242 low-order bits. However, if EXP's type is a record and this is
5243 a big-endian machine, we want the upper BITSIZE bits.
5244 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5245 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5246 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5247 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5248 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5249 - bitsize),
5250 temp, 1);
5252 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5253 MODE. */
5254 if (mode != VOIDmode && mode != BLKmode
5255 && mode != TYPE_MODE (TREE_TYPE (exp)))
5256 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5258 /* If the modes of TARGET and TEMP are both BLKmode, both
5259 must be in memory and BITPOS must be aligned on a byte
5260 boundary. If so, we simply do a block copy. */
5261 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5263 unsigned int exp_align = expr_align (exp);
5265 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5266 || bitpos % BITS_PER_UNIT != 0)
5267 abort ();
5269 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5271 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5272 align = MIN (exp_align, align);
5274 /* Find an alignment that is consistent with the bit position. */
5275 while ((bitpos % align) != 0)
5276 align >>= 1;
5278 emit_block_move (target, temp,
5279 bitsize == -1 ? expr_size (exp)
5280 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5281 / BITS_PER_UNIT),
5282 align);
5284 return value_mode == VOIDmode ? const0_rtx : target;
5287 /* Store the value in the bitfield. */
5288 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5289 if (value_mode != VOIDmode)
5291 /* The caller wants an rtx for the value. */
5292 /* If possible, avoid refetching from the bitfield itself. */
5293 if (width_mask != 0
5294 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5296 tree count;
5297 enum machine_mode tmode;
5299 if (unsignedp)
5300 return expand_and (temp,
5301 GEN_INT
5302 (trunc_int_for_mode
5303 (width_mask,
5304 GET_MODE (temp) == VOIDmode
5305 ? value_mode
5306 : GET_MODE (temp))), NULL_RTX);
5307 tmode = GET_MODE (temp);
5308 if (tmode == VOIDmode)
5309 tmode = value_mode;
5310 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5311 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5312 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5314 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5315 NULL_RTX, value_mode, 0, align,
5316 total_size);
5318 return const0_rtx;
5320 else
5322 rtx addr = XEXP (target, 0);
5323 rtx to_rtx;
5325 /* If a value is wanted, it must be the lhs;
5326 so make the address stable for multiple use. */
5328 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5329 && ! CONSTANT_ADDRESS_P (addr)
5330 /* A frame-pointer reference is already stable. */
5331 && ! (GET_CODE (addr) == PLUS
5332 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5333 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5334 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5335 target = replace_equiv_address (target, copy_to_reg (addr));
5337 /* Now build a reference to just the desired component. */
5339 to_rtx = copy_rtx (adjust_address (target, mode,
5340 bitpos / BITS_PER_UNIT));
5342 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5343 /* If the address of the structure varies, then it might be on
5344 the stack. And, stack slots may be shared across scopes.
5345 So, two different structures, of different types, can end up
5346 at the same location. We will give the structures alias set
5347 zero; here we must be careful not to give non-zero alias sets
5348 to their fields. */
5349 set_mem_alias_set (to_rtx,
5350 rtx_varies_p (addr, /*for_alias=*/0)
5351 ? 0 : alias_set);
5353 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5357 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5358 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5359 codes and find the ultimate containing object, which we return.
5361 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5362 bit position, and *PUNSIGNEDP to the signedness of the field.
5363 If the position of the field is variable, we store a tree
5364 giving the variable offset (in units) in *POFFSET.
5365 This offset is in addition to the bit position.
5366 If the position is not variable, we store 0 in *POFFSET.
5367 We set *PALIGNMENT to the alignment of the address that will be
5368 computed. This is the alignment of the thing we return if *POFFSET
5369 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5371 If any of the extraction expressions is volatile,
5372 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5374 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5375 is a mode that can be used to access the field. In that case, *PBITSIZE
5376 is redundant.
5378 If the field describes a variable-sized object, *PMODE is set to
5379 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5380 this case, but the address of the object can be found. */
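/* An illustrative example: for a reference such as

       x.a[i].b

   the loop below peels off the COMPONENT_REF and ARRAY_REF nodes and
   returns the innermost object X, with the constant part of the
   position accumulated in *PBITPOS and the I-dependent part left as a
   tree (scaled to units) in *POFFSET.  */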
5382 tree
5383 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5384 punsignedp, pvolatilep, palignment)
5385 tree exp;
5386 HOST_WIDE_INT *pbitsize;
5387 HOST_WIDE_INT *pbitpos;
5388 tree *poffset;
5389 enum machine_mode *pmode;
5390 int *punsignedp;
5391 int *pvolatilep;
5392 unsigned int *palignment;
5394 tree size_tree = 0;
5395 enum machine_mode mode = VOIDmode;
5396 tree offset = size_zero_node;
5397 tree bit_offset = bitsize_zero_node;
5398 unsigned int alignment = BIGGEST_ALIGNMENT;
5399 tree tem;
5401 /* First get the mode, signedness, and size. We do this from just the
5402 outermost expression. */
5403 if (TREE_CODE (exp) == COMPONENT_REF)
5405 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5406 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5407 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5409 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5411 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5413 size_tree = TREE_OPERAND (exp, 1);
5414 *punsignedp = TREE_UNSIGNED (exp);
5416 else
5418 mode = TYPE_MODE (TREE_TYPE (exp));
5419 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5421 if (mode == BLKmode)
5422 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5423 else
5424 *pbitsize = GET_MODE_BITSIZE (mode);
5427 if (size_tree != 0)
5429 if (! host_integerp (size_tree, 1))
5430 mode = BLKmode, *pbitsize = -1;
5431 else
5432 *pbitsize = tree_low_cst (size_tree, 1);
5435 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5436 and find the ultimate containing object. */
5437 while (1)
5439 if (TREE_CODE (exp) == BIT_FIELD_REF)
5440 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5441 else if (TREE_CODE (exp) == COMPONENT_REF)
5443 tree field = TREE_OPERAND (exp, 1);
5444 tree this_offset = DECL_FIELD_OFFSET (field);
5446 /* If this field hasn't been filled in yet, don't go
5447 past it. This should only happen when folding expressions
5448 made during type construction. */
5449 if (this_offset == 0)
5450 break;
5451 else if (! TREE_CONSTANT (this_offset)
5452 && contains_placeholder_p (this_offset))
5453 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5455 offset = size_binop (PLUS_EXPR, offset, this_offset);
5456 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5457 DECL_FIELD_BIT_OFFSET (field));
5459 if (! host_integerp (offset, 0))
5460 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5463 else if (TREE_CODE (exp) == ARRAY_REF
5464 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5466 tree index = TREE_OPERAND (exp, 1);
5467 tree array = TREE_OPERAND (exp, 0);
5468 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5469 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5470 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5472 /* We assume all arrays have sizes that are a multiple of a byte.
5473 First subtract the lower bound, if any, in the type of the
5474 index, then convert to sizetype and multiply by the size of the
5475 array element. */
5476 if (low_bound != 0 && ! integer_zerop (low_bound))
5477 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5478 index, low_bound));
5480 /* If the index has a self-referential type, pass it to a
5481 WITH_RECORD_EXPR; if the component size is self-referential,
5482 pass our component to one. */
5483 if (! TREE_CONSTANT (index)
5484 && contains_placeholder_p (index))
5485 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5486 if (! TREE_CONSTANT (unit_size)
5487 && contains_placeholder_p (unit_size))
5488 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5490 offset = size_binop (PLUS_EXPR, offset,
5491 size_binop (MULT_EXPR,
5492 convert (sizetype, index),
5493 unit_size));
5496 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5497 && ! ((TREE_CODE (exp) == NOP_EXPR
5498 || TREE_CODE (exp) == CONVERT_EXPR)
5499 && (TYPE_MODE (TREE_TYPE (exp))
5500 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5501 break;
5503 /* If any reference in the chain is volatile, the effect is volatile. */
5504 if (TREE_THIS_VOLATILE (exp))
5505 *pvolatilep = 1;
5507 /* If the offset is non-constant already, then we can't assume any
5508 alignment more than the alignment here. */
5509 if (! TREE_CONSTANT (offset))
5510 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5512 exp = TREE_OPERAND (exp, 0);
5515 if (DECL_P (exp))
5516 alignment = MIN (alignment, DECL_ALIGN (exp));
5517 else if (TREE_TYPE (exp) != 0)
5518 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5520 /* If OFFSET is constant, see if we can return the whole thing as a
5521 constant bit position. Otherwise, split it up. */
5522 if (host_integerp (offset, 0)
5523 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5524 bitsize_unit_node))
5525 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5526 && host_integerp (tem, 0))
5527 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5528 else
5529 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5531 *pmode = mode;
5532 *palignment = alignment;
5533 return exp;
5536 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5538 static enum memory_use_mode
5539 get_memory_usage_from_modifier (modifier)
5540 enum expand_modifier modifier;
5542 switch (modifier)
5544 case EXPAND_NORMAL:
5545 case EXPAND_SUM:
5546 return MEMORY_USE_RO;
5547 break;
5548 case EXPAND_MEMORY_USE_WO:
5549 return MEMORY_USE_WO;
5550 break;
5551 case EXPAND_MEMORY_USE_RW:
5552 return MEMORY_USE_RW;
5553 break;
5554 case EXPAND_MEMORY_USE_DONT:
5555 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5556 MEMORY_USE_DONT, because they are modifiers to a call of
5557 expand_expr in the ADDR_EXPR case of expand_expr. */
5558 case EXPAND_CONST_ADDRESS:
5559 case EXPAND_INITIALIZER:
5560 return MEMORY_USE_DONT;
5561 case EXPAND_MEMORY_USE_BAD:
5562 default:
5563 abort ();
5567 /* Given an rtx VALUE that may contain additions and multiplications, return
5568 an equivalent value that just refers to a register, memory, or constant.
5569 This is done by generating instructions to perform the arithmetic and
5570 returning a pseudo-register containing the value.
5572 The returned value may be a REG, SUBREG, MEM or constant. */
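/* A small usage sketch (illustrative only): given an address such as
   (plus (reg A) (mult (reg B) (const_int 4))), force_operand emits the
   multiplication and the addition as real insns and returns a pseudo
   holding the result, so the caller is left with a simple register
   operand.  */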
5575 force_operand (value, target)
5576 rtx value, target;
5578 register optab binoptab = 0;
5579 /* Use a temporary to force order of execution of calls to
5580 `force_operand'. */
5581 rtx tmp;
5582 register rtx op2;
5583 /* Use subtarget as the target for operand 0 of a binary operation. */
5584 register rtx subtarget = get_subtarget (target);
5586 /* Check for a PIC address load. */
5587 if (flag_pic
5588 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5589 && XEXP (value, 0) == pic_offset_table_rtx
5590 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5591 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5592 || GET_CODE (XEXP (value, 1)) == CONST))
5594 if (!subtarget)
5595 subtarget = gen_reg_rtx (GET_MODE (value));
5596 emit_move_insn (subtarget, value);
5597 return subtarget;
5600 if (GET_CODE (value) == PLUS)
5601 binoptab = add_optab;
5602 else if (GET_CODE (value) == MINUS)
5603 binoptab = sub_optab;
5604 else if (GET_CODE (value) == MULT)
5606 op2 = XEXP (value, 1);
5607 if (!CONSTANT_P (op2)
5608 && !(GET_CODE (op2) == REG && op2 != subtarget))
5609 subtarget = 0;
5610 tmp = force_operand (XEXP (value, 0), subtarget);
5611 return expand_mult (GET_MODE (value), tmp,
5612 force_operand (op2, NULL_RTX),
5613 target, 1);
5616 if (binoptab)
5618 op2 = XEXP (value, 1);
5619 if (!CONSTANT_P (op2)
5620 && !(GET_CODE (op2) == REG && op2 != subtarget))
5621 subtarget = 0;
5622 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5624 binoptab = add_optab;
5625 op2 = negate_rtx (GET_MODE (value), op2);
5628 /* Check for an addition with OP2 a constant integer and our first
5629 operand a PLUS of a virtual register and something else. In that
5630 case, we want to emit the sum of the virtual register and the
5631 constant first and then add the other value. This allows virtual
5632 register instantiation to simply modify the constant rather than
5633 creating another one around this addition. */
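/* Illustrative example (editorial): a VALUE such as

       (plus (plus (reg virtual-stack-vars) (reg 70)) (const_int 12))

   is expanded by first adding 12 to the virtual register and only then
   adding (reg 70), so that virtual register instantiation can fold the
   offset into a single constant instead of leaving two additions
   behind.  */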
5634 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5635 && GET_CODE (XEXP (value, 0)) == PLUS
5636 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5637 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5638 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5640 rtx temp = expand_binop (GET_MODE (value), binoptab,
5641 XEXP (XEXP (value, 0), 0), op2,
5642 subtarget, 0, OPTAB_LIB_WIDEN);
5643 return expand_binop (GET_MODE (value), binoptab, temp,
5644 force_operand (XEXP (XEXP (value, 0), 1), 0),
5645 target, 0, OPTAB_LIB_WIDEN);
5648 tmp = force_operand (XEXP (value, 0), subtarget);
5649 return expand_binop (GET_MODE (value), binoptab, tmp,
5650 force_operand (op2, NULL_RTX),
5651 target, 0, OPTAB_LIB_WIDEN);
5652 /* We give UNSIGNEDP = 0 to expand_binop
5653 because the only operations we are expanding here are signed ones. */
5655 return value;
5658 /* Subroutine of expand_expr:
5659 save the non-copied parts (LIST) of an expr (LHS), and return a list
5660 which can restore these values to their previous values,
5661 should something modify their storage. */
5663 static tree
5664 save_noncopied_parts (lhs, list)
5665 tree lhs;
5666 tree list;
5668 tree tail;
5669 tree parts = 0;
5671 for (tail = list; tail; tail = TREE_CHAIN (tail))
5672 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5673 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5674 else
5676 tree part = TREE_VALUE (tail);
5677 tree part_type = TREE_TYPE (part);
5678 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5679 rtx target
5680 = assign_temp (build_qualified_type (part_type,
5681 (TYPE_QUALS (part_type)
5682 | TYPE_QUAL_CONST)),
5683 0, 1, 1);
5685 parts = tree_cons (to_be_saved,
5686 build (RTL_EXPR, part_type, NULL_TREE,
5687 (tree) validize_mem (target)),
5688 parts);
5689 store_expr (TREE_PURPOSE (parts),
5690 RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5692 return parts;
5695 /* Subroutine of expand_expr:
5696 record the non-copied parts (LIST) of an expr (LHS), and return a list
5697 which specifies the initial values of these parts. */
5699 static tree
5700 init_noncopied_parts (lhs, list)
5701 tree lhs;
5702 tree list;
5704 tree tail;
5705 tree parts = 0;
5707 for (tail = list; tail; tail = TREE_CHAIN (tail))
5708 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5709 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5710 else if (TREE_PURPOSE (tail))
5712 tree part = TREE_VALUE (tail);
5713 tree part_type = TREE_TYPE (part);
5714 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5715 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5717 return parts;
5720 /* Subroutine of expand_expr: return nonzero iff there is no way that
5721 EXP can reference X, which is being modified. TOP_P is nonzero if this
5722 call is going to be used to determine whether we need a temporary
5723 for EXP, as opposed to a recursive call to this function.
5725 It is always safe for this routine to return zero since it merely
5726 searches for optimization opportunities. */
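/* Typical caller pattern (an illustrative sketch mirroring uses later
   in this file): before reusing TARGET while expanding EXP, do

       if (target == 0 || ! safe_from_p (target, exp, 1))
         target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

   so a zero answer only costs an extra temporary and never produces
   wrong code.  */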
5728 int
5729 safe_from_p (x, exp, top_p)
5730 rtx x;
5731 tree exp;
5732 int top_p;
5734 rtx exp_rtl = 0;
5735 int i, nops;
5736 static tree save_expr_list;
5738 if (x == 0
5739 /* If EXP has varying size, we MUST use a target since we currently
5740 have no way of allocating temporaries of variable size
5741 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5742 So we assume here that something at a higher level has prevented a
5743 clash. This is somewhat bogus, but the best we can do. Only
5744 do this when X is BLKmode and when we are at the top level. */
5745 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5746 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5747 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5748 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5749 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5750 != INTEGER_CST)
5751 && GET_MODE (x) == BLKmode)
5752 /* If X is in the outgoing argument area, it is always safe. */
5753 || (GET_CODE (x) == MEM
5754 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5755 || (GET_CODE (XEXP (x, 0)) == PLUS
5756 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5757 return 1;
5759 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5760 find the underlying pseudo. */
5761 if (GET_CODE (x) == SUBREG)
5763 x = SUBREG_REG (x);
5764 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5765 return 0;
5768 /* A SAVE_EXPR might appear many times in the expression passed to the
5769 top-level safe_from_p call, and if it has a complex subexpression,
5770 examining it multiple times could result in a combinatorial explosion.
5771 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5772 with optimization took about 28 minutes to compile -- even though it was
5773 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5774 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5775 we have processed. Note that the only test of top_p was above. */
5777 if (top_p)
5779 int rtn;
5780 tree t;
5782 save_expr_list = 0;
5784 rtn = safe_from_p (x, exp, 0);
5786 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5787 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5789 return rtn;
5792 /* Now look at our tree code and possibly recurse. */
5793 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5795 case 'd':
5796 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5797 break;
5799 case 'c':
5800 return 1;
5802 case 'x':
5803 if (TREE_CODE (exp) == TREE_LIST)
5804 return ((TREE_VALUE (exp) == 0
5805 || safe_from_p (x, TREE_VALUE (exp), 0))
5806 && (TREE_CHAIN (exp) == 0
5807 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5808 else if (TREE_CODE (exp) == ERROR_MARK)
5809 return 1; /* An already-visited SAVE_EXPR? */
5810 else
5811 return 0;
5813 case '1':
5814 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5816 case '2':
5817 case '<':
5818 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5819 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5821 case 'e':
5822 case 'r':
5823 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5824 the expression. If it is set, we conflict iff we are that rtx or
5825 both are in memory. Otherwise, we check all operands of the
5826 expression recursively. */
5828 switch (TREE_CODE (exp))
5830 case ADDR_EXPR:
5831 return (staticp (TREE_OPERAND (exp, 0))
5832 || TREE_STATIC (exp)
5833 || safe_from_p (x, TREE_OPERAND (exp, 0), 0));
5835 case INDIRECT_REF:
5836 if (GET_CODE (x) == MEM
5837 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5838 get_alias_set (exp)))
5839 return 0;
5840 break;
5842 case CALL_EXPR:
5843 /* Assume that the call will clobber all hard registers and
5844 all of memory. */
5845 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5846 || GET_CODE (x) == MEM)
5847 return 0;
5848 break;
5850 case RTL_EXPR:
5851 /* If a sequence exists, we would have to scan every instruction
5852 in the sequence to see if it was safe. This is probably not
5853 worthwhile. */
5854 if (RTL_EXPR_SEQUENCE (exp))
5855 return 0;
5857 exp_rtl = RTL_EXPR_RTL (exp);
5858 break;
5860 case WITH_CLEANUP_EXPR:
5861 exp_rtl = RTL_EXPR_RTL (exp);
5862 break;
5864 case CLEANUP_POINT_EXPR:
5865 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5867 case SAVE_EXPR:
5868 exp_rtl = SAVE_EXPR_RTL (exp);
5869 if (exp_rtl)
5870 break;
5872 /* If we've already scanned this, don't do it again. Otherwise,
5873 show we've scanned it and record for clearing the flag if we're
5874 going on. */
5875 if (TREE_PRIVATE (exp))
5876 return 1;
5878 TREE_PRIVATE (exp) = 1;
5879 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5881 TREE_PRIVATE (exp) = 0;
5882 return 0;
5885 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5886 return 1;
5888 case BIND_EXPR:
5889 /* The only operand we look at is operand 1. The rest aren't
5890 part of the expression. */
5891 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5893 case METHOD_CALL_EXPR:
5894 /* This takes an rtx argument, but shouldn't appear here. */
5895 abort ();
5897 default:
5898 break;
5901 /* If we have an rtx, we do not need to scan our operands. */
5902 if (exp_rtl)
5903 break;
5905 nops = first_rtl_op (TREE_CODE (exp));
5906 for (i = 0; i < nops; i++)
5907 if (TREE_OPERAND (exp, i) != 0
5908 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5909 return 0;
5911 /* If this is a language-specific tree code, it may require
5912 special handling. */
5913 if ((unsigned int) TREE_CODE (exp)
5914 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5915 && lang_safe_from_p
5916 && !(*lang_safe_from_p) (x, exp))
5917 return 0;
5920 /* If we have an rtl, find any enclosed object. Then see if we conflict
5921 with it. */
5922 if (exp_rtl)
5924 if (GET_CODE (exp_rtl) == SUBREG)
5926 exp_rtl = SUBREG_REG (exp_rtl);
5927 if (GET_CODE (exp_rtl) == REG
5928 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5929 return 0;
5932 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5933 are memory and they conflict. */
5934 return ! (rtx_equal_p (x, exp_rtl)
5935 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5936 && true_dependence (exp_rtl, GET_MODE (x), x,
5937 rtx_addr_varies_p)));
5940 /* If we reach here, it is safe. */
5941 return 1;
5944 /* Subroutine of expand_expr: return nonzero iff EXP is an
5945 expression whose type is statically determinable. */
5947 static int
5948 fixed_type_p (exp)
5949 tree exp;
5951 if (TREE_CODE (exp) == PARM_DECL
5952 || TREE_CODE (exp) == VAR_DECL
5953 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5954 || TREE_CODE (exp) == COMPONENT_REF
5955 || TREE_CODE (exp) == ARRAY_REF)
5956 return 1;
5957 return 0;
5960 /* Subroutine of expand_expr: return rtx if EXP is a
5961 variable or parameter; else return 0. */
5963 static rtx
5964 var_rtx (exp)
5965 tree exp;
5967 STRIP_NOPS (exp);
5968 switch (TREE_CODE (exp))
5970 case PARM_DECL:
5971 case VAR_DECL:
5972 return DECL_RTL (exp);
5973 default:
5974 return 0;
5978 #ifdef MAX_INTEGER_COMPUTATION_MODE
5980 void
5981 check_max_integer_computation_mode (exp)
5982 tree exp;
5984 enum tree_code code;
5985 enum machine_mode mode;
5987 /* Strip any NOPs that don't change the mode. */
5988 STRIP_NOPS (exp);
5989 code = TREE_CODE (exp);
5991 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5992 if (code == NOP_EXPR
5993 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5994 return;
5996 /* First check the type of the overall operation. We need only look at
5997 unary, binary and relational operations. */
5998 if (TREE_CODE_CLASS (code) == '1'
5999 || TREE_CODE_CLASS (code) == '2'
6000 || TREE_CODE_CLASS (code) == '<')
6002 mode = TYPE_MODE (TREE_TYPE (exp));
6003 if (GET_MODE_CLASS (mode) == MODE_INT
6004 && mode > MAX_INTEGER_COMPUTATION_MODE)
6005 internal_error ("unsupported wide integer operation");
6008 /* Check operand of a unary op. */
6009 if (TREE_CODE_CLASS (code) == '1')
6011 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6012 if (GET_MODE_CLASS (mode) == MODE_INT
6013 && mode > MAX_INTEGER_COMPUTATION_MODE)
6014 internal_error ("unsupported wide integer operation");
6017 /* Check operands of a binary/comparison op. */
6018 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6020 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6021 if (GET_MODE_CLASS (mode) == MODE_INT
6022 && mode > MAX_INTEGER_COMPUTATION_MODE)
6023 internal_error ("unsupported wide integer operation");
6025 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6026 if (GET_MODE_CLASS (mode) == MODE_INT
6027 && mode > MAX_INTEGER_COMPUTATION_MODE)
6028 internal_error ("unsupported wide integer operation");
6031 #endif
6033 /* expand_expr: generate code for computing expression EXP.
6034 An rtx for the computed value is returned. The value is never null.
6035 In the case of a void EXP, const0_rtx is returned.
6037 The value may be stored in TARGET if TARGET is nonzero.
6038 TARGET is just a suggestion; callers must assume that
6039 the rtx returned may not be the same as TARGET.
6041 If TARGET is CONST0_RTX, it means that the value will be ignored.
6043 If TMODE is not VOIDmode, it suggests generating the
6044 result in mode TMODE. But this is done only when convenient.
6045 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6046 TMODE is just a suggestion; callers must assume that
6047 the rtx returned may not have mode TMODE.
6049 Note that TARGET may have neither TMODE nor MODE. In that case, it
6050 probably will not be used.
6052 If MODIFIER is EXPAND_SUM then when EXP is an addition
6053 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6054 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6055 products as above, or REG or MEM, or constant.
6056 Ordinarily in such cases we would output mul or add instructions
6057 and then return a pseudo reg containing the sum.
6059 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6060 it also marks a label as absolutely required (it can't be dead).
6061 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6062 This is used for outputting expressions used in initializers.
6064 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6065 with a constant address even if that address is not normally legitimate.
6066 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
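/* Usage sketch (editorial illustration only): callers treat TARGET and
   TMODE as hints and copy the result if it did not land where they
   wanted it, e.g.

       temp = expand_expr (exp, target, mode, EXPAND_NORMAL);
       if (temp != target)
         emit_move_insn (target, temp);

   where EXP, TARGET and MODE stand for whatever the caller has in hand.  */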
6068 rtx
6069 expand_expr (exp, target, tmode, modifier)
6070 register tree exp;
6071 rtx target;
6072 enum machine_mode tmode;
6073 enum expand_modifier modifier;
6075 register rtx op0, op1, temp;
6076 tree type = TREE_TYPE (exp);
6077 int unsignedp = TREE_UNSIGNED (type);
6078 register enum machine_mode mode;
6079 register enum tree_code code = TREE_CODE (exp);
6080 optab this_optab;
6081 rtx subtarget, original_target;
6082 int ignore;
6083 tree context;
6084 /* Used by check-memory-usage to make modifier read only. */
6085 enum expand_modifier ro_modifier;
6087 /* Handle ERROR_MARK before anybody tries to access its type. */
6088 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6090 op0 = CONST0_RTX (tmode);
6091 if (op0 != 0)
6092 return op0;
6093 return const0_rtx;
6096 mode = TYPE_MODE (type);
6097 /* Use subtarget as the target for operand 0 of a binary operation. */
6098 subtarget = get_subtarget (target);
6099 original_target = target;
6100 ignore = (target == const0_rtx
6101 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6102 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6103 || code == COND_EXPR)
6104 && TREE_CODE (type) == VOID_TYPE));
6106 /* Make a read-only version of the modifier. */
6107 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
6108 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
6109 ro_modifier = modifier;
6110 else
6111 ro_modifier = EXPAND_NORMAL;
6113 /* If we are going to ignore this result, we need only do something
6114 if there is a side-effect somewhere in the expression. If there
6115 is, short-circuit the most common cases here. Note that we must
6116 not call expand_expr with anything but const0_rtx in case this
6117 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6119 if (ignore)
6121 if (! TREE_SIDE_EFFECTS (exp))
6122 return const0_rtx;
6124 /* Ensure we reference a volatile object even if value is ignored, but
6125 don't do this if all we are doing is taking its address. */
6126 if (TREE_THIS_VOLATILE (exp)
6127 && TREE_CODE (exp) != FUNCTION_DECL
6128 && mode != VOIDmode && mode != BLKmode
6129 && modifier != EXPAND_CONST_ADDRESS)
6131 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
6132 if (GET_CODE (temp) == MEM)
6133 temp = copy_to_reg (temp);
6134 return const0_rtx;
6137 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6138 || code == INDIRECT_REF || code == BUFFER_REF)
6139 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6140 VOIDmode, ro_modifier);
6141 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6142 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6144 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6145 ro_modifier);
6146 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6147 ro_modifier);
6148 return const0_rtx;
6150 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6151 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6152 /* If the second operand has no side effects, just evaluate
6153 the first. */
6154 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6155 VOIDmode, ro_modifier);
6156 else if (code == BIT_FIELD_REF)
6158 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6159 ro_modifier);
6160 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6161 ro_modifier);
6162 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode,
6163 ro_modifier);
6164 return const0_rtx;
6167 target = 0;
6170 #ifdef MAX_INTEGER_COMPUTATION_MODE
6171 /* Only check stuff here if the mode we want is different from the mode
6172 of the expression; if it's the same, check_max_integer_computation_mode
6173 will handle it. Do we really need to check this stuff at all? */
6175 if (target
6176 && GET_MODE (target) != mode
6177 && TREE_CODE (exp) != INTEGER_CST
6178 && TREE_CODE (exp) != PARM_DECL
6179 && TREE_CODE (exp) != ARRAY_REF
6180 && TREE_CODE (exp) != ARRAY_RANGE_REF
6181 && TREE_CODE (exp) != COMPONENT_REF
6182 && TREE_CODE (exp) != BIT_FIELD_REF
6183 && TREE_CODE (exp) != INDIRECT_REF
6184 && TREE_CODE (exp) != CALL_EXPR
6185 && TREE_CODE (exp) != VAR_DECL
6186 && TREE_CODE (exp) != RTL_EXPR)
6188 enum machine_mode mode = GET_MODE (target);
6190 if (GET_MODE_CLASS (mode) == MODE_INT
6191 && mode > MAX_INTEGER_COMPUTATION_MODE)
6192 internal_error ("unsupported wide integer operation");
6195 if (tmode != mode
6196 && TREE_CODE (exp) != INTEGER_CST
6197 && TREE_CODE (exp) != PARM_DECL
6198 && TREE_CODE (exp) != ARRAY_REF
6199 && TREE_CODE (exp) != ARRAY_RANGE_REF
6200 && TREE_CODE (exp) != COMPONENT_REF
6201 && TREE_CODE (exp) != BIT_FIELD_REF
6202 && TREE_CODE (exp) != INDIRECT_REF
6203 && TREE_CODE (exp) != VAR_DECL
6204 && TREE_CODE (exp) != CALL_EXPR
6205 && TREE_CODE (exp) != RTL_EXPR
6206 && GET_MODE_CLASS (tmode) == MODE_INT
6207 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6208 internal_error ("unsupported wide integer operation");
6210 check_max_integer_computation_mode (exp);
6211 #endif
6213 /* If will do cse, generate all results into pseudo registers
6214 since 1) that allows cse to find more things
6215 and 2) otherwise cse could produce an insn the machine
6216 cannot support. */
6218 if (! cse_not_expected && mode != BLKmode && target
6219 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6220 target = subtarget;
6222 switch (code)
6224 case LABEL_DECL:
6226 tree function = decl_function_context (exp);
6227 /* Handle using a label in a containing function. */
6228 if (function != current_function_decl
6229 && function != inline_function_decl && function != 0)
6231 struct function *p = find_function_data (function);
6232 p->expr->x_forced_labels
6233 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6234 p->expr->x_forced_labels);
6236 else
6238 if (modifier == EXPAND_INITIALIZER)
6239 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6240 label_rtx (exp),
6241 forced_labels);
6244 temp = gen_rtx_MEM (FUNCTION_MODE,
6245 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6246 if (function != current_function_decl
6247 && function != inline_function_decl && function != 0)
6248 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6249 return temp;
6252 case PARM_DECL:
6253 if (DECL_RTL (exp) == 0)
6255 error_with_decl (exp, "prior parameter's size depends on `%s'");
6256 return CONST0_RTX (mode);
6259 /* ... fall through ... */
6261 case VAR_DECL:
6262 /* If a static var's type was incomplete when the decl was written,
6263 but the type is complete now, lay out the decl now. */
6264 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6265 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6267 layout_decl (exp, 0);
6268 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6271 /* Although static-storage variables start off initialized, according to
6272 ANSI C, a memcpy could overwrite them with uninitialized values. So
6273 we check them too. This also lets us check for read-only variables
6274 accessed via a non-const declaration, in case it won't be detected
6275 any other way (e.g., in an embedded system or OS kernel without
6276 memory protection).
6278 Aggregates are not checked here; they're handled elsewhere. */
6279 if (cfun && current_function_check_memory_usage
6280 && code == VAR_DECL
6281 && GET_CODE (DECL_RTL (exp)) == MEM
6282 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6284 enum memory_use_mode memory_usage;
6285 memory_usage = get_memory_usage_from_modifier (modifier);
6287 in_check_memory_usage = 1;
6288 if (memory_usage != MEMORY_USE_DONT)
6289 emit_library_call (chkr_check_addr_libfunc,
6290 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6291 XEXP (DECL_RTL (exp), 0), Pmode,
6292 GEN_INT (int_size_in_bytes (type)),
6293 TYPE_MODE (sizetype),
6294 GEN_INT (memory_usage),
6295 TYPE_MODE (integer_type_node));
6296 in_check_memory_usage = 0;
6299 /* ... fall through ... */
6301 case FUNCTION_DECL:
6302 case RESULT_DECL:
6303 if (DECL_RTL (exp) == 0)
6304 abort ();
6306 /* Ensure variable marked as used even if it doesn't go through
6307 a parser. If it hasn't been used yet, write out an external
6308 definition. */
6309 if (! TREE_USED (exp))
6311 assemble_external (exp);
6312 TREE_USED (exp) = 1;
6315 /* Show we haven't gotten RTL for this yet. */
6316 temp = 0;
6318 /* Handle variables inherited from containing functions. */
6319 context = decl_function_context (exp);
6321 /* We treat inline_function_decl as an alias for the current function
6322 because that is the inline function whose vars, types, etc.
6323 are being merged into the current function.
6324 See expand_inline_function. */
6326 if (context != 0 && context != current_function_decl
6327 && context != inline_function_decl
6328 /* If var is static, we don't need a static chain to access it. */
6329 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6330 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6332 rtx addr;
6334 /* Mark as non-local and addressable. */
6335 DECL_NONLOCAL (exp) = 1;
6336 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6337 abort ();
6338 mark_addressable (exp);
6339 if (GET_CODE (DECL_RTL (exp)) != MEM)
6340 abort ();
6341 addr = XEXP (DECL_RTL (exp), 0);
6342 if (GET_CODE (addr) == MEM)
6343 addr
6344 = replace_equiv_address (addr,
6345 fix_lexical_addr (XEXP (addr, 0), exp));
6346 else
6347 addr = fix_lexical_addr (addr, exp);
6349 temp = replace_equiv_address (DECL_RTL (exp), addr);
6352 /* This is the case of an array whose size is to be determined
6353 from its initializer, while the initializer is still being parsed.
6354 See expand_decl. */
6356 else if (GET_CODE (DECL_RTL (exp)) == MEM
6357 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6358 temp = validize_mem (DECL_RTL (exp));
6360 /* If DECL_RTL is memory, we are in the normal case and either
6361 the address is not valid or it is not a register and -fforce-addr
6362 is specified, get the address into a register. */
6364 else if (GET_CODE (DECL_RTL (exp)) == MEM
6365 && modifier != EXPAND_CONST_ADDRESS
6366 && modifier != EXPAND_SUM
6367 && modifier != EXPAND_INITIALIZER
6368 && (! memory_address_p (DECL_MODE (exp),
6369 XEXP (DECL_RTL (exp), 0))
6370 || (flag_force_addr
6371 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6372 temp = replace_equiv_address (DECL_RTL (exp),
6373 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6375 /* If we got something, return it. But first, set the alignment
6376 if the address is a register. */
6377 if (temp != 0)
6379 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6380 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6382 return temp;
6385 /* If the mode of DECL_RTL does not match that of the decl, it
6386 must be a promoted value. We return a SUBREG of the wanted mode,
6387 but mark it so that we know that it was already extended. */
6389 if (GET_CODE (DECL_RTL (exp)) == REG
6390 && GET_MODE (DECL_RTL (exp)) != mode)
6392 /* Get the signedness used for this variable. Ensure we get the
6393 same mode we got when the variable was declared. */
6394 if (GET_MODE (DECL_RTL (exp))
6395 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6396 abort ();
6398 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6399 SUBREG_PROMOTED_VAR_P (temp) = 1;
6400 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6401 return temp;
6404 return DECL_RTL (exp);
6406 case INTEGER_CST:
6407 return immed_double_const (TREE_INT_CST_LOW (exp),
6408 TREE_INT_CST_HIGH (exp), mode);
6410 case CONST_DECL:
6411 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6412 EXPAND_MEMORY_USE_BAD);
6414 case REAL_CST:
6415 /* If optimized, generate immediate CONST_DOUBLE
6416 which will be turned into memory by reload if necessary.
6418 We used to force a register so that loop.c could see it. But
6419 this does not allow gen_* patterns to perform optimizations with
6420 the constants. It also produces two insns in cases like "x = 1.0;".
6421 On most machines, floating-point constants are not permitted in
6422 many insns, so we'd end up copying it to a register in any case.
6424 Now, we do the copying in expand_binop, if appropriate. */
6425 return immed_real_const (exp);
6427 case COMPLEX_CST:
6428 case STRING_CST:
6429 if (! TREE_CST_RTL (exp))
6430 output_constant_def (exp, 1);
6432 /* TREE_CST_RTL probably contains a constant address.
6433 On RISC machines where a constant address isn't valid,
6434 make some insns to get that address into a register. */
6435 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6436 && modifier != EXPAND_CONST_ADDRESS
6437 && modifier != EXPAND_INITIALIZER
6438 && modifier != EXPAND_SUM
6439 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6440 || (flag_force_addr
6441 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6442 return replace_equiv_address (TREE_CST_RTL (exp),
6443 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6444 return TREE_CST_RTL (exp);
6446 case EXPR_WITH_FILE_LOCATION:
6448 rtx to_return;
6449 const char *saved_input_filename = input_filename;
6450 int saved_lineno = lineno;
6451 input_filename = EXPR_WFL_FILENAME (exp);
6452 lineno = EXPR_WFL_LINENO (exp);
6453 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6454 emit_line_note (input_filename, lineno);
6455 /* Possibly avoid switching back and forth here. */
6456 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6457 input_filename = saved_input_filename;
6458 lineno = saved_lineno;
6459 return to_return;
6462 case SAVE_EXPR:
6463 context = decl_function_context (exp);
6465 /* If this SAVE_EXPR was at global context, assume we are an
6466 initialization function and move it into our context. */
6467 if (context == 0)
6468 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6470 /* We treat inline_function_decl as an alias for the current function
6471 because that is the inline function whose vars, types, etc.
6472 are being merged into the current function.
6473 See expand_inline_function. */
6474 if (context == current_function_decl || context == inline_function_decl)
6475 context = 0;
6477 /* If this is non-local, handle it. */
6478 if (context)
6480 /* The following call just exists to abort if the context is
6481 not of a containing function. */
6482 find_function_data (context);
6484 temp = SAVE_EXPR_RTL (exp);
6485 if (temp && GET_CODE (temp) == REG)
6487 put_var_into_stack (exp);
6488 temp = SAVE_EXPR_RTL (exp);
6490 if (temp == 0 || GET_CODE (temp) != MEM)
6491 abort ();
6492 return
6493 replace_equiv_address (temp,
6494 fix_lexical_addr (XEXP (temp, 0), exp));
6496 if (SAVE_EXPR_RTL (exp) == 0)
6498 if (mode == VOIDmode)
6499 temp = const0_rtx;
6500 else
6501 temp = assign_temp (build_qualified_type (type,
6502 (TYPE_QUALS (type)
6503 | TYPE_QUAL_CONST)),
6504 3, 0, 0);
6506 SAVE_EXPR_RTL (exp) = temp;
6507 if (!optimize && GET_CODE (temp) == REG)
6508 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6509 save_expr_regs);
6511 /* If the mode of TEMP does not match that of the expression, it
6512 must be a promoted value. We pass store_expr a SUBREG of the
6513 wanted mode but mark it so that we know that it was already
6514 extended. Note that `unsignedp' was modified above in
6515 this case. */
6517 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6519 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6520 SUBREG_PROMOTED_VAR_P (temp) = 1;
6521 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6524 if (temp == const0_rtx)
6525 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6526 EXPAND_MEMORY_USE_BAD);
6527 else
6528 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6530 TREE_USED (exp) = 1;
6533 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6534 must be a promoted value. We return a SUBREG of the wanted mode,
6535 but mark it so that we know that it was already extended. */
6537 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6538 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6540 /* Compute the signedness and make the proper SUBREG. */
6541 promote_mode (type, mode, &unsignedp, 0);
6542 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6543 SUBREG_PROMOTED_VAR_P (temp) = 1;
6544 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6545 return temp;
6548 return SAVE_EXPR_RTL (exp);
6550 case UNSAVE_EXPR:
6552 rtx temp;
6553 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6554 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6555 return temp;
6558 case PLACEHOLDER_EXPR:
6560 tree placeholder_expr;
6562 /* If there is an object on the head of the placeholder list,
6563 see if some object in it is of type TYPE or is a pointer to it. For
6564 further information, see tree.def. */
6565 for (placeholder_expr = placeholder_list;
6566 placeholder_expr != 0;
6567 placeholder_expr = TREE_CHAIN (placeholder_expr))
6569 tree need_type = TYPE_MAIN_VARIANT (type);
6570 tree object = 0;
6571 tree old_list = placeholder_list;
6572 tree elt;
6574 /* Find the outermost reference that is of the type we want.
6575 If none, see if any object has a type that is a pointer to
6576 the type we want. */
6577 for (elt = TREE_PURPOSE (placeholder_expr);
6578 elt != 0 && object == 0;
6579 elt
6580 = ((TREE_CODE (elt) == COMPOUND_EXPR
6581 || TREE_CODE (elt) == COND_EXPR)
6582 ? TREE_OPERAND (elt, 1)
6583 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6584 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6585 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6586 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6587 ? TREE_OPERAND (elt, 0) : 0))
6588 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6589 object = elt;
6591 for (elt = TREE_PURPOSE (placeholder_expr);
6592 elt != 0 && object == 0;
6593 elt
6594 = ((TREE_CODE (elt) == COMPOUND_EXPR
6595 || TREE_CODE (elt) == COND_EXPR)
6596 ? TREE_OPERAND (elt, 1)
6597 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6598 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6599 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6600 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6601 ? TREE_OPERAND (elt, 0) : 0))
6602 if (POINTER_TYPE_P (TREE_TYPE (elt))
6603 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6604 == need_type))
6605 object = build1 (INDIRECT_REF, need_type, elt);
6607 if (object != 0)
6609 /* Expand this object skipping the list entries before
6610 it was found in case it is also a PLACEHOLDER_EXPR.
6611 In that case, we want to translate it using subsequent
6612 entries. */
6613 placeholder_list = TREE_CHAIN (placeholder_expr);
6614 temp = expand_expr (object, original_target, tmode,
6615 ro_modifier);
6616 placeholder_list = old_list;
6617 return temp;
6622 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6623 abort ();
6625 case WITH_RECORD_EXPR:
6626 /* Put the object on the placeholder list, expand our first operand,
6627 and pop the list. */
6628 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6629 placeholder_list);
6630 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6631 tmode, ro_modifier);
6632 placeholder_list = TREE_CHAIN (placeholder_list);
6633 return target;
6635 case GOTO_EXPR:
6636 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6637 expand_goto (TREE_OPERAND (exp, 0));
6638 else
6639 expand_computed_goto (TREE_OPERAND (exp, 0));
6640 return const0_rtx;
6642 case EXIT_EXPR:
6643 expand_exit_loop_if_false (NULL,
6644 invert_truthvalue (TREE_OPERAND (exp, 0)));
6645 return const0_rtx;
6647 case LABELED_BLOCK_EXPR:
6648 if (LABELED_BLOCK_BODY (exp))
6649 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6650 /* Should perhaps use expand_label, but this is simpler and safer. */
6651 do_pending_stack_adjust ();
6652 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6653 return const0_rtx;
6655 case EXIT_BLOCK_EXPR:
6656 if (EXIT_BLOCK_RETURN (exp))
6657 sorry ("returned value in block_exit_expr");
6658 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6659 return const0_rtx;
6661 case LOOP_EXPR:
6662 push_temp_slots ();
6663 expand_start_loop (1);
6664 expand_expr_stmt (TREE_OPERAND (exp, 0));
6665 expand_end_loop ();
6666 pop_temp_slots ();
6668 return const0_rtx;
6670 case BIND_EXPR:
6672 tree vars = TREE_OPERAND (exp, 0);
6673 int vars_need_expansion = 0;
6675 /* Need to open a binding contour here because
6676 if there are any cleanups they must be contained here. */
6677 expand_start_bindings (2);
6679 /* Mark the corresponding BLOCK for output in its proper place. */
6680 if (TREE_OPERAND (exp, 2) != 0
6681 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6682 insert_block (TREE_OPERAND (exp, 2));
6684 /* If VARS have not yet been expanded, expand them now. */
6685 while (vars)
6687 if (!DECL_RTL_SET_P (vars))
6689 vars_need_expansion = 1;
6690 expand_decl (vars);
6692 expand_decl_init (vars);
6693 vars = TREE_CHAIN (vars);
6696 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6698 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6700 return temp;
6703 case RTL_EXPR:
6704 if (RTL_EXPR_SEQUENCE (exp))
6706 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6707 abort ();
6708 emit_insns (RTL_EXPR_SEQUENCE (exp));
6709 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6711 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6712 free_temps_for_rtl_expr (exp);
6713 return RTL_EXPR_RTL (exp);
6715 case CONSTRUCTOR:
6716 /* If we don't need the result, just ensure we evaluate any
6717 subexpressions. */
6718 if (ignore)
6720 tree elt;
6721 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6722 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6723 EXPAND_MEMORY_USE_BAD);
6724 return const0_rtx;
6727 /* All elts simple constants => refer to a constant in memory. But
6728 if this is a non-BLKmode mode, let it store a field at a time
6729 since that should make a CONST_INT or CONST_DOUBLE when we
6730 fold. Likewise, if we have a target we can use, it is best to
6731 store directly into the target unless the type is large enough
6732 that memcpy will be used. If we are making an initializer and
6733 all operands are constant, put it in memory as well. */
6734 else if ((TREE_STATIC (exp)
6735 && ((mode == BLKmode
6736 && ! (target != 0 && safe_from_p (target, exp, 1)))
6737 || TREE_ADDRESSABLE (exp)
6738 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6739 && (! MOVE_BY_PIECES_P
6740 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6741 TYPE_ALIGN (type)))
6742 && ! mostly_zeros_p (exp))))
6743 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6745 rtx constructor = output_constant_def (exp, 1);
6747 if (modifier != EXPAND_CONST_ADDRESS
6748 && modifier != EXPAND_INITIALIZER
6749 && modifier != EXPAND_SUM)
6750 constructor = validize_mem (constructor);
6752 return constructor;
6754 else
6756 /* Handle calls that pass values in multiple non-contiguous
6757 locations. The Irix 6 ABI has examples of this. */
6758 if (target == 0 || ! safe_from_p (target, exp, 1)
6759 || GET_CODE (target) == PARALLEL)
6760 target
6761 = assign_temp (build_qualified_type (type,
6762 (TYPE_QUALS (type)
6763 | (TREE_READONLY (exp)
6764 * TYPE_QUAL_CONST))),
6765 TREE_ADDRESSABLE (exp), 1, 1);
6767 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6768 int_size_in_bytes (TREE_TYPE (exp)));
6769 return target;
6772 case INDIRECT_REF:
6774 tree exp1 = TREE_OPERAND (exp, 0);
6775 tree index;
6776 tree string = string_constant (exp1, &index);
6778 /* Try to optimize reads from const strings. */
6779 if (string
6780 && TREE_CODE (string) == STRING_CST
6781 && TREE_CODE (index) == INTEGER_CST
6782 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6783 && GET_MODE_CLASS (mode) == MODE_INT
6784 && GET_MODE_SIZE (mode) == 1
6785 && modifier != EXPAND_MEMORY_USE_WO)
6786 return
6787 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6789 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6790 op0 = memory_address (mode, op0);
6792 if (cfun && current_function_check_memory_usage
6793 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6795 enum memory_use_mode memory_usage;
6796 memory_usage = get_memory_usage_from_modifier (modifier);
6798 if (memory_usage != MEMORY_USE_DONT)
6800 in_check_memory_usage = 1;
6801 emit_library_call (chkr_check_addr_libfunc,
6802 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6803 Pmode, GEN_INT (int_size_in_bytes (type)),
6804 TYPE_MODE (sizetype),
6805 GEN_INT (memory_usage),
6806 TYPE_MODE (integer_type_node));
6807 in_check_memory_usage = 0;
6811 temp = gen_rtx_MEM (mode, op0);
6812 set_mem_attributes (temp, exp, 0);
6814 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6815 here, because, in C and C++, the fact that a location is accessed
6816 through a pointer to const does not mean that the value there can
6817 never change. Languages where it can never change should
6818 also set TREE_STATIC. */
6819 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6821 /* If we are writing to this object and its type is a record with
6822 readonly fields, we must mark it as readonly so it will
6823 conflict with readonly references to those fields. */
6824 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
6825 RTX_UNCHANGING_P (temp) = 1;
6827 return temp;
6830 case ARRAY_REF:
6831 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6832 abort ();
6835 tree array = TREE_OPERAND (exp, 0);
6836 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6837 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6838 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6839 HOST_WIDE_INT i;
6841 /* Optimize the special-case of a zero lower bound.
6843 We convert the low_bound to sizetype to avoid some problems
6844 with constant folding. (E.g. suppose the lower bound is 1,
6845 and its mode is QI. Without the conversion, (ARRAY
6846 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6847 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6849 if (! integer_zerop (low_bound))
6850 index = size_diffop (index, convert (sizetype, low_bound));
6852 /* Fold an expression like: "foo"[2].
6853 This is not done in fold so it won't happen inside &.
6854 Don't fold if this is for wide characters since it's too
6855 difficult to do correctly and this is a very rare case. */
6857 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6858 && TREE_CODE (array) == STRING_CST
6859 && TREE_CODE (index) == INTEGER_CST
6860 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6861 && GET_MODE_CLASS (mode) == MODE_INT
6862 && GET_MODE_SIZE (mode) == 1)
6863 return
6864 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6866 /* If this is a constant index into a constant array,
6867 just get the value from the array. Handle both the cases when
6868 we have an explicit constructor and when our operand is a variable
6869 that was declared const. */
6871 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6872 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6873 && TREE_CODE (index) == INTEGER_CST
6874 && 0 > compare_tree_int (index,
6875 list_length (CONSTRUCTOR_ELTS
6876 (TREE_OPERAND (exp, 0)))))
6878 tree elem;
6880 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6881 i = TREE_INT_CST_LOW (index);
6882 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6885 if (elem)
6886 return expand_expr (fold (TREE_VALUE (elem)), target,
6887 tmode, ro_modifier);
6890 else if (optimize >= 1
6891 && modifier != EXPAND_CONST_ADDRESS
6892 && modifier != EXPAND_INITIALIZER
6893 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6894 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6895 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6897 if (TREE_CODE (index) == INTEGER_CST)
6899 tree init = DECL_INITIAL (array);
6901 if (TREE_CODE (init) == CONSTRUCTOR)
6903 tree elem;
6905 for (elem = CONSTRUCTOR_ELTS (init);
6906 (elem
6907 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6908 elem = TREE_CHAIN (elem))
6911 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6912 return expand_expr (fold (TREE_VALUE (elem)), target,
6913 tmode, ro_modifier);
6915 else if (TREE_CODE (init) == STRING_CST
6916 && 0 > compare_tree_int (index,
6917 TREE_STRING_LENGTH (init)))
6919 tree type = TREE_TYPE (TREE_TYPE (init));
6920 enum machine_mode mode = TYPE_MODE (type);
6922 if (GET_MODE_CLASS (mode) == MODE_INT
6923 && GET_MODE_SIZE (mode) == 1)
6924 return (GEN_INT
6925 (TREE_STRING_POINTER
6926 (init)[TREE_INT_CST_LOW (index)]));
6931 /* Fall through. */
6933 case COMPONENT_REF:
6934 case BIT_FIELD_REF:
6935 case ARRAY_RANGE_REF:
6936 /* If the operand is a CONSTRUCTOR, we can just extract the
6937 appropriate field if it is present. Don't do this if we have
6938 already written the data since we want to refer to that copy
6939 and varasm.c assumes that's what we'll do. */
6940 if (code == COMPONENT_REF
6941 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6942 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6944 tree elt;
6946 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6947 elt = TREE_CHAIN (elt))
6948 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6949 /* We can normally use the value of the field in the
6950 CONSTRUCTOR. However, if this is a bitfield in
6951 an integral mode that we can fit in a HOST_WIDE_INT,
6952 we must mask only the number of bits in the bitfield,
6953 since this is done implicitly by the constructor. If
6954 the bitfield does not meet either of those conditions,
6955 we can't do this optimization. */
6956 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6957 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6958 == MODE_INT)
6959 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6960 <= HOST_BITS_PER_WIDE_INT))))
6962 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6963 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6965 HOST_WIDE_INT bitsize
6966 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6968 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6970 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6971 op0 = expand_and (op0, op1, target);
6973 else
6975 enum machine_mode imode
6976 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6977 tree count
6978 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6979 0);
6981 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6982 target, 0);
6983 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6984 target, 0);
6988 return op0;
6993 enum machine_mode mode1;
6994 HOST_WIDE_INT bitsize, bitpos;
6995 tree offset;
6996 int volatilep = 0;
6997 unsigned int alignment;
6998 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6999 &mode1, &unsignedp, &volatilep,
7000 &alignment);
7002 /* If we got back the original object, something is wrong. Perhaps
7003 we are evaluating an expression too early. In any event, don't
7004 infinitely recurse. */
7005 if (tem == exp)
7006 abort ();
7008 /* If TEM's type is a union of variable size, pass TARGET to the inner
7009 computation, since it will need a temporary and TARGET is known
7010 to be usable for that. This occurs in unchecked conversion in Ada. */
7012 op0 = expand_expr (tem,
7013 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7014 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7015 != INTEGER_CST)
7016 ? target : NULL_RTX),
7017 VOIDmode,
7018 (modifier == EXPAND_INITIALIZER
7019 || modifier == EXPAND_CONST_ADDRESS)
7020 ? modifier : EXPAND_NORMAL);
7022 /* If this is a constant, put it into a register if it is a
7023 legitimate constant and OFFSET is 0 and memory if it isn't. */
7024 if (CONSTANT_P (op0))
7026 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7027 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7028 && offset == 0)
7029 op0 = force_reg (mode, op0);
7030 else
7031 op0 = validize_mem (force_const_mem (mode, op0));
7034 if (offset != 0)
7036 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7038 /* If this object is in a register, put it into memory.
7039 This case can't occur in C, but can in Ada if we have
7040 unchecked conversion of an expression from a scalar type to
7041 an array or record type. */
7042 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7043 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7045 /* If the operand is a SAVE_EXPR, we can deal with this by
7046 forcing the SAVE_EXPR into memory. */
7047 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7049 put_var_into_stack (TREE_OPERAND (exp, 0));
7050 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7052 else
7054 tree nt
7055 = build_qualified_type (TREE_TYPE (tem),
7056 (TYPE_QUALS (TREE_TYPE (tem))
7057 | TYPE_QUAL_CONST));
7058 rtx memloc = assign_temp (nt, 1, 1, 1);
7060 mark_temp_addr_taken (memloc);
7061 emit_move_insn (memloc, op0);
7062 op0 = memloc;
7066 if (GET_CODE (op0) != MEM)
7067 abort ();
7069 if (GET_MODE (offset_rtx) != ptr_mode)
7071 #ifdef POINTERS_EXTEND_UNSIGNED
7072 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
7073 #else
7074 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7075 #endif
7078 /* A constant address in OP0 can have VOIDmode; we must not try
7079 to call force_reg in that case, so avoid it. */
7080 if (GET_CODE (op0) == MEM
7081 && GET_MODE (op0) == BLKmode
7082 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7083 && bitsize != 0
7084 && (bitpos % bitsize) == 0
7085 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7086 && alignment == GET_MODE_ALIGNMENT (mode1))
7088 rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7090 if (GET_CODE (XEXP (temp, 0)) == REG)
7091 op0 = temp;
7092 else
7093 op0 = (replace_equiv_address
7094 (op0,
7095 force_reg (GET_MODE (XEXP (temp, 0)),
7096 XEXP (temp, 0))));
7097 bitpos = 0;
7100 op0 = change_address (op0, VOIDmode,
7101 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
7102 force_reg (ptr_mode,
7103 offset_rtx)));
7106 /* Don't forget about volatility even if this is a bitfield. */
7107 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7109 op0 = copy_rtx (op0);
7110 MEM_VOLATILE_P (op0) = 1;
7113 /* Check the access. */
7114 if (cfun != 0 && current_function_check_memory_usage
7115 && GET_CODE (op0) == MEM)
7117 enum memory_use_mode memory_usage;
7118 memory_usage = get_memory_usage_from_modifier (modifier);
7120 if (memory_usage != MEMORY_USE_DONT)
7122 rtx to;
7123 int size;
7125 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
7126 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
7128 /* Check the access right of the pointer. */
7129 in_check_memory_usage = 1;
7130 if (size > BITS_PER_UNIT)
7131 emit_library_call (chkr_check_addr_libfunc,
7132 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
7133 Pmode, GEN_INT (size / BITS_PER_UNIT),
7134 TYPE_MODE (sizetype),
7135 GEN_INT (memory_usage),
7136 TYPE_MODE (integer_type_node));
7137 in_check_memory_usage = 0;
7141 /* In cases where an aligned union has an unaligned object
7142 as a field, we might be extracting a BLKmode value from
7143 an integer-mode (e.g., SImode) object. Handle this case
7144 by doing the extract into an object as wide as the field
7145 (which we know to be the width of a basic mode), then
7146 storing into memory, and changing the mode to BLKmode. */
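/* Rough illustration (editorial): for, say, a 2-byte BLKmode field
   carried inside an SImode union member, the bits are fetched into an
   integer-mode temporary by extract_bit_field below, copied to a fresh
   stack slot, and that MEM is then relabelled BLKmode so callers see
   an ordinary BLKmode object.  */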
7147 if (mode1 == VOIDmode
7148 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7149 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7150 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7151 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7152 && modifier != EXPAND_CONST_ADDRESS
7153 && modifier != EXPAND_INITIALIZER)
7154 /* If the field isn't aligned enough to fetch as a memref,
7155 fetch it as a bit field. */
7156 || (mode1 != BLKmode
7157 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
7158 && ((TYPE_ALIGN (TREE_TYPE (tem))
7159 < GET_MODE_ALIGNMENT (mode))
7160 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7161 /* If the type and the field are a constant size and the
7162 size of the type isn't the same size as the bitfield,
7163 we must use bitfield operations. */
7164 || (bitsize >= 0
7165 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7166 == INTEGER_CST)
7167 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7168 bitsize))
7169 || (mode == BLKmode
7170 && SLOW_UNALIGNED_ACCESS (mode, alignment)
7171 && (TYPE_ALIGN (type) > alignment
7172 || bitpos % TYPE_ALIGN (type) != 0)))
7174 enum machine_mode ext_mode = mode;
7176 if (ext_mode == BLKmode
7177 && ! (target != 0 && GET_CODE (op0) == MEM
7178 && GET_CODE (target) == MEM
7179 && bitpos % BITS_PER_UNIT == 0))
7180 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7182 if (ext_mode == BLKmode)
7184 /* In this case, BITPOS must start at a byte boundary and
7185 TARGET, if specified, must be a MEM. */
7186 if (GET_CODE (op0) != MEM
7187 || (target != 0 && GET_CODE (target) != MEM)
7188 || bitpos % BITS_PER_UNIT != 0)
7189 abort ();
7191 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7192 if (target == 0)
7193 target = assign_temp (type, 0, 1, 1);
7195 emit_block_move (target, op0,
7196 bitsize == -1 ? expr_size (exp)
7197 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7198 / BITS_PER_UNIT),
7199 BITS_PER_UNIT);
7201 return target;
7204 op0 = validize_mem (op0);
7206 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7207 mark_reg_pointer (XEXP (op0, 0), alignment);
7209 op0 = extract_bit_field (op0, bitsize, bitpos,
7210 unsignedp, target, ext_mode, ext_mode,
7211 alignment,
7212 int_size_in_bytes (TREE_TYPE (tem)));
7214 /* If the result is a record type and BITSIZE is narrower than
7215 the mode of OP0, an integral mode, and this is a big endian
7216 machine, we must put the field into the high-order bits. */
7217 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7218 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7219 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7220 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7221 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7222 - bitsize),
7223 op0, 1);
7225 if (mode == BLKmode)
7227 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
7228 TYPE_QUAL_CONST);
7229 rtx new = assign_temp (nt, 0, 1, 1);
7231 emit_move_insn (new, op0);
7232 op0 = copy_rtx (new);
7233 PUT_MODE (op0, BLKmode);
7236 return op0;
7239 /* If the result is BLKmode, use that to access the object
7240 now as well. */
7241 if (mode == BLKmode)
7242 mode1 = BLKmode;
7244 /* Get a reference to just this component. */
7245 if (modifier == EXPAND_CONST_ADDRESS
7246 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7247 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7248 else
7249 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7251 set_mem_attributes (op0, exp, 0);
7252 if (GET_CODE (XEXP (op0, 0)) == REG)
7253 mark_reg_pointer (XEXP (op0, 0), alignment);
7255 MEM_VOLATILE_P (op0) |= volatilep;
7256 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7257 || modifier == EXPAND_CONST_ADDRESS
7258 || modifier == EXPAND_INITIALIZER)
7259 return op0;
7260 else if (target == 0)
7261 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7263 convert_move (target, op0, unsignedp);
7264 return target;
7267 /* Intended for a reference to a buffer of a file-object in Pascal.
7268 But it's not certain that a special tree code will really be
7269 necessary for these. INDIRECT_REF might work for them. */
7270 case BUFFER_REF:
7271 abort ();
7273 case IN_EXPR:
7275 /* Pascal set IN expression.
7277 Algorithm:
7278 rlo = set_low - (set_low%bits_per_word);
7279 the_word = set [ (index - rlo)/bits_per_word ];
7280 bit_index = index % bits_per_word;
7281 bitmask = 1 << bit_index;
7282 return !!(the_word & bitmask); */
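/* Worked example (editorial): for a set whose domain is 0..31 and an
   index of 11, using 8-bit chunks as the code below does
   (BITS_PER_UNIT), rlo is 0, the_word is set[1], bit_index is 3 and
   bitmask is 1 << 3; the test therefore reads bit 3 of the second byte
   of the set.  */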
7284 tree set = TREE_OPERAND (exp, 0);
7285 tree index = TREE_OPERAND (exp, 1);
7286 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7287 tree set_type = TREE_TYPE (set);
7288 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7289 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7290 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7291 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7292 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7293 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7294 rtx setaddr = XEXP (setval, 0);
7295 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7296 rtx rlow;
7297 rtx diff, quo, rem, addr, bit, result;
7299 /* If domain is empty, answer is no. Likewise if index is constant
7300 and out of bounds. */
7301 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7302 && TREE_CODE (set_low_bound) == INTEGER_CST
7303 && tree_int_cst_lt (set_high_bound, set_low_bound))
7304 || (TREE_CODE (index) == INTEGER_CST
7305 && TREE_CODE (set_low_bound) == INTEGER_CST
7306 && tree_int_cst_lt (index, set_low_bound))
7307 || (TREE_CODE (set_high_bound) == INTEGER_CST
7308 && TREE_CODE (index) == INTEGER_CST
7309 && tree_int_cst_lt (set_high_bound, index))))
7310 return const0_rtx;
7312 if (target == 0)
7313 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7315 /* If we get here, we have to generate the code for both cases
7316 (in range and out of range). */
7318 op0 = gen_label_rtx ();
7319 op1 = gen_label_rtx ();
7321 if (! (GET_CODE (index_val) == CONST_INT
7322 && GET_CODE (lo_r) == CONST_INT))
7324 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7325 GET_MODE (index_val), iunsignedp, 0, op1);
7328 if (! (GET_CODE (index_val) == CONST_INT
7329 && GET_CODE (hi_r) == CONST_INT))
7331 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7332 GET_MODE (index_val), iunsignedp, 0, op1);
7335 /* Calculate the element number of bit zero in the first word
7336 of the set. */
7337 if (GET_CODE (lo_r) == CONST_INT)
7338 rlow = GEN_INT (INTVAL (lo_r)
7339 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7340 else
7341 rlow = expand_binop (index_mode, and_optab, lo_r,
7342 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7343 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7345 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7346 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7348 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7349 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7350 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7351 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7353 addr = memory_address (byte_mode,
7354 expand_binop (index_mode, add_optab, diff,
7355 setaddr, NULL_RTX, iunsignedp,
7356 OPTAB_LIB_WIDEN));
7358 /* Extract the bit we want to examine. */
7359 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7360 gen_rtx_MEM (byte_mode, addr),
7361 make_tree (TREE_TYPE (index), rem),
7362 NULL_RTX, 1);
7363 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7364 GET_MODE (target) == byte_mode ? target : 0,
7365 1, OPTAB_LIB_WIDEN);
7367 if (result != target)
7368 convert_move (target, result, 1);
7370 /* Output the code to handle the out-of-range case. */
7371 emit_jump (op0);
7372 emit_label (op1);
7373 emit_move_insn (target, const0_rtx);
7374 emit_label (op0);
7375 return target;
7378 case WITH_CLEANUP_EXPR:
7379 if (RTL_EXPR_RTL (exp) == 0)
7381 RTL_EXPR_RTL (exp)
7382 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7383 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7385 /* That's it for this cleanup. */
7386 TREE_OPERAND (exp, 2) = 0;
7388 return RTL_EXPR_RTL (exp);
7390 case CLEANUP_POINT_EXPR:
7392 /* Start a new binding layer that will keep track of all cleanup
7393 actions to be performed. */
7394 expand_start_bindings (2);
7396 target_temp_slot_level = temp_slot_level;
7398 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7399 /* If we're going to use this value, load it up now. */
7400 if (! ignore)
7401 op0 = force_not_mem (op0);
7402 preserve_temp_slots (op0);
7403 expand_end_bindings (NULL_TREE, 0, 0);
7405 return op0;
7407 case CALL_EXPR:
7408 /* Check for a built-in function. */
7409 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7410 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7411 == FUNCTION_DECL)
7412 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7414 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7415 == BUILT_IN_FRONTEND)
7416 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7417 else
7418 return expand_builtin (exp, target, subtarget, tmode, ignore);
7421 return expand_call (exp, target, ignore);
7423 case NON_LVALUE_EXPR:
7424 case NOP_EXPR:
7425 case CONVERT_EXPR:
7426 case REFERENCE_EXPR:
7427 if (TREE_OPERAND (exp, 0) == error_mark_node)
7428 return const0_rtx;
7430 if (TREE_CODE (type) == UNION_TYPE)
7432 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7434 /* If both input and output are BLKmode, this conversion
7435 isn't actually doing anything unless we need to make the
7436 alignment stricter. */
7437 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7438 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7439 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7440 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7441 modifier);
7443 if (target == 0)
7444 target = assign_temp (type, 0, 1, 1);
7446 if (GET_CODE (target) == MEM)
7447 /* Store data into beginning of memory target. */
7448 store_expr (TREE_OPERAND (exp, 0),
7449 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7451 else if (GET_CODE (target) == REG)
7452 /* Store this field into a union of the proper type. */
7453 store_field (target,
7454 MIN ((int_size_in_bytes (TREE_TYPE
7455 (TREE_OPERAND (exp, 0)))
7456 * BITS_PER_UNIT),
7457 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7458 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7459 VOIDmode, 0, BITS_PER_UNIT,
7460 int_size_in_bytes (type), 0);
7461 else
7462 abort ();
7464 /* Return the entire union. */
7465 return target;
7468 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7470 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7471 ro_modifier);
7473 /* If the signedness of the conversion differs and OP0 is
7474 a promoted SUBREG, clear that indication since we now
7475 have to do the proper extension. */
7476 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7477 && GET_CODE (op0) == SUBREG)
7478 SUBREG_PROMOTED_VAR_P (op0) = 0;
7480 return op0;
7483 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7484 if (GET_MODE (op0) == mode)
7485 return op0;
7487 /* If OP0 is a constant, just convert it into the proper mode. */
7488 if (CONSTANT_P (op0))
7489 return
7490 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7491 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7493 if (modifier == EXPAND_INITIALIZER)
7494 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7496 if (target == 0)
7497 return
7498 convert_to_mode (mode, op0,
7499 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7500 else
7501 convert_move (target, op0,
7502 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7503 return target;
7505 case PLUS_EXPR:
7506 /* We come here from MINUS_EXPR when the second operand is a
7507 constant. */
7508 plus_expr:
7509 this_optab = ! unsignedp && flag_trapv
7510 && (GET_MODE_CLASS(mode) == MODE_INT)
7511 ? addv_optab : add_optab;
7513 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7514 something else, make sure we add the register to the constant and
7515 then to the other thing. This case can occur during strength
7516 reduction and doing it this way will produce better code if the
7517 frame pointer or argument pointer is eliminated.
7519 fold-const.c will ensure that the constant is always in the inner
7520 PLUS_EXPR, so the only case we need to do anything about is if
7521 sp, ap, or fp is our second argument, in which case we must swap
7522 the innermost first argument and our second argument. */
7524 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7525 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7526 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7527 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7528 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7529 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7531 tree t = TREE_OPERAND (exp, 1);
7533 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7534 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7537 /* If the result is to be ptr_mode and we are adding an integer to
7538 something, we might be forming a constant. So try to use
7539 plus_constant. If it produces a sum and we can't accept it,
7540 use force_operand. This allows P = &ARR[const] to generate
7541 efficient code on machines where a SYMBOL_REF is not a valid
7542 address.
7544 If this is an EXPAND_SUM call, always return the sum. */
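/* For instance (illustrative only), expanding "&arr[10]" for an initializer,
   with 4-byte elements, can yield (plus (symbol_ref "arr") (const_int 40))
   directly instead of forcing the symbol into a register and adding at
   run time.  */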
7545 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7546 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7548 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7549 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7550 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7552 rtx constant_part;
7554 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7555 EXPAND_SUM);
7556 /* Use immed_double_const to ensure that the constant is
7557 truncated according to the mode of OP1, then sign extended
7558 to a HOST_WIDE_INT. Using the constant directly can result
7559 in non-canonical RTL in a 64x32 cross compile. */
7560 constant_part
7561 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7562 (HOST_WIDE_INT) 0,
7563 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7564 op1 = plus_constant (op1, INTVAL (constant_part));
7565 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7566 op1 = force_operand (op1, target);
7567 return op1;
7570 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7571 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7572 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7574 rtx constant_part;
7576 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7577 EXPAND_SUM);
7578 if (! CONSTANT_P (op0))
7580 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7581 VOIDmode, modifier);
7582 /* Don't go to both_summands if modifier
7583 says it's not right to return a PLUS. */
7584 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7585 goto binop2;
7586 goto both_summands;
7588 /* Use immed_double_const to ensure that the constant is
7589 truncated according to the mode of OP0, then sign extended
7590 to a HOST_WIDE_INT. Using the constant directly can result
7591 in non-canonical RTL in a 64x32 cross compile. */
7592 constant_part
7593 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7594 (HOST_WIDE_INT) 0,
7595 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7596 op0 = plus_constant (op0, INTVAL (constant_part));
7597 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7598 op0 = force_operand (op0, target);
7599 return op0;
7603 /* No sense saving up arithmetic to be done
7604 if it's all in the wrong mode to form part of an address.
7605 And force_operand won't know whether to sign-extend or
7606 zero-extend. */
7607 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7608 || mode != ptr_mode)
7609 goto binop;
7611 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7612 subtarget = 0;
7614 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7615 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7617 both_summands:
7618 /* Make sure any term that's a sum with a constant comes last. */
7619 if (GET_CODE (op0) == PLUS
7620 && CONSTANT_P (XEXP (op0, 1)))
7622 temp = op0;
7623 op0 = op1;
7624 op1 = temp;
7626 /* If adding to a sum including a constant,
7627 associate it to put the constant outside. */
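/* E.g. (illustrative): OP0 + (X + 4) is rewritten as (OP0 + X) + 4, so the
   constant ends up outermost, where an addressing mode can absorb it.  */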
7628 if (GET_CODE (op1) == PLUS
7629 && CONSTANT_P (XEXP (op1, 1)))
7631 rtx constant_term = const0_rtx;
7633 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7634 if (temp != 0)
7635 op0 = temp;
7636 /* Ensure that MULT comes first if there is one. */
7637 else if (GET_CODE (op0) == MULT)
7638 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7639 else
7640 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7642 /* Let's also eliminate constants from op0 if possible. */
7643 op0 = eliminate_constant_term (op0, &constant_term);
7645 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7646 their sum should be a constant. Form it into OP1, since the
7647 result we want will then be OP0 + OP1. */
7649 temp = simplify_binary_operation (PLUS, mode, constant_term,
7650 XEXP (op1, 1));
7651 if (temp != 0)
7652 op1 = temp;
7653 else
7654 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7657 /* Put a constant term last and put a multiplication first. */
7658 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7659 temp = op1, op1 = op0, op0 = temp;
7661 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7662 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7664 case MINUS_EXPR:
7665 /* For initializers, we are allowed to return a MINUS of two
7666 symbolic constants. Here we handle all cases when both operands
7667 are constant. */
7668 /* Handle difference of two symbolic constants,
7669 for the sake of an initializer. */
7670 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7671 && really_constant_p (TREE_OPERAND (exp, 0))
7672 && really_constant_p (TREE_OPERAND (exp, 1)))
7674 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7675 VOIDmode, ro_modifier);
7676 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7677 VOIDmode, ro_modifier);
7679 /* If the last operand is a CONST_INT, use plus_constant of
7680 the negated constant. Else make the MINUS. */
7681 if (GET_CODE (op1) == CONST_INT)
7682 return plus_constant (op0, - INTVAL (op1));
7683 else
7684 return gen_rtx_MINUS (mode, op0, op1);
7686 /* Convert A - const to A + (-const). */
7687 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7689 tree negated = fold (build1 (NEGATE_EXPR, type,
7690 TREE_OPERAND (exp, 1)));
7692 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7693 /* If we can't negate the constant in TYPE, leave it alone and
7694 expand_binop will negate it for us. We used to try to do it
7695 here in the signed version of TYPE, but that doesn't work
7696 on POINTER_TYPEs. */;
7697 else
7699 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7700 goto plus_expr;
7703 this_optab = ! unsignedp && flag_trapv
7704 && (GET_MODE_CLASS(mode) == MODE_INT)
7705 ? subv_optab : sub_optab;
7706 goto binop;
7708 case MULT_EXPR:
7709 /* If first operand is constant, swap them.
7710 Thus the following special case checks need only
7711 check the second operand. */
7712 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7714 register tree t1 = TREE_OPERAND (exp, 0);
7715 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7716 TREE_OPERAND (exp, 1) = t1;
7719 /* Attempt to return something suitable for generating an
7720 indexed address, for machines that support that. */
7722 if (modifier == EXPAND_SUM && mode == ptr_mode
7723 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7724 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7726 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7727 EXPAND_SUM);
7729 /* Apply distributive law if OP0 is x+c. */
7730 if (GET_CODE (op0) == PLUS
7731 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7732 return
7733 gen_rtx_PLUS
7734 (mode,
7735 gen_rtx_MULT
7736 (mode, XEXP (op0, 0),
7737 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7738 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7739 * INTVAL (XEXP (op0, 1))));
7741 if (GET_CODE (op0) != REG)
7742 op0 = force_operand (op0, NULL_RTX);
7743 if (GET_CODE (op0) != REG)
7744 op0 = copy_to_mode_reg (mode, op0);
7746 return
7747 gen_rtx_MULT (mode, op0,
7748 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7751 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7752 subtarget = 0;
7754 /* Check for multiplying things that have been extended
7755 from a narrower type. If this machine supports multiplying
7756 in that narrower type with a result in the desired type,
7757 do it that way, and avoid the explicit type-conversion. */
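/* E.g. (illustrative): on a 32-bit target, (long long) a * (long long) b
   with 32-bit a and b can use a 32x32->64 widening multiply instead of
   extending both operands to 64 bits and doing a full 64-bit multiply.  */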
7758 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7759 && TREE_CODE (type) == INTEGER_TYPE
7760 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7761 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7762 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7763 && int_fits_type_p (TREE_OPERAND (exp, 1),
7764 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7765 /* Don't use a widening multiply if a shift will do. */
7766 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7767 > HOST_BITS_PER_WIDE_INT)
7768 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7769 ||
7770 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7771 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7772 ==
7773 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7774 /* If both operands are extended, they must either both
7775 be zero-extended or both be sign-extended. */
7776 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7777 ==
7778 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7780 enum machine_mode innermode
7781 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7782 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7783 ? smul_widen_optab : umul_widen_optab);
7784 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7785 ? umul_widen_optab : smul_widen_optab);
7786 if (mode == GET_MODE_WIDER_MODE (innermode))
7788 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7790 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7791 NULL_RTX, VOIDmode, 0);
7792 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7793 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7794 VOIDmode, 0);
7795 else
7796 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7797 NULL_RTX, VOIDmode, 0);
7798 goto binop2;
7800 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7801 && innermode == word_mode)
7803 rtx htem;
7804 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7805 NULL_RTX, VOIDmode, 0);
7806 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7807 op1 = convert_modes (innermode, mode,
7808 expand_expr (TREE_OPERAND (exp, 1),
7809 NULL_RTX, VOIDmode, 0),
7810 unsignedp);
7811 else
7812 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7813 NULL_RTX, VOIDmode, 0);
7814 temp = expand_binop (mode, other_optab, op0, op1, target,
7815 unsignedp, OPTAB_LIB_WIDEN);
7816 htem = expand_mult_highpart_adjust (innermode,
7817 gen_highpart (innermode, temp),
7818 op0, op1,
7819 gen_highpart (innermode, temp),
7820 unsignedp);
7821 emit_move_insn (gen_highpart (innermode, temp), htem);
7822 return temp;
7826 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7827 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7828 return expand_mult (mode, op0, op1, target, unsignedp);
7830 case TRUNC_DIV_EXPR:
7831 case FLOOR_DIV_EXPR:
7832 case CEIL_DIV_EXPR:
7833 case ROUND_DIV_EXPR:
7834 case EXACT_DIV_EXPR:
7835 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7836 subtarget = 0;
7837 /* Possible optimization: compute the dividend with EXPAND_SUM
7838 then if the divisor is constant can optimize the case
7839 where some terms of the dividend have coeffs divisible by it. */
7840 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7841 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7842 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7844 case RDIV_EXPR:
7845 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal,
7846 saving an expensive divide. If not, combine will rebuild the original
7847 computation. */
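/* This is guarded by flag_unsafe_math_optimizations because a*(1/b) can
   round differently from a/b under IEEE arithmetic.  */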
7848 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7849 && !real_onep (TREE_OPERAND (exp, 0)))
7850 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7851 build (RDIV_EXPR, type,
7852 build_real (type, dconst1),
7853 TREE_OPERAND (exp, 1))),
7854 target, tmode, unsignedp);
7855 this_optab = flodiv_optab;
7856 goto binop;
7858 case TRUNC_MOD_EXPR:
7859 case FLOOR_MOD_EXPR:
7860 case CEIL_MOD_EXPR:
7861 case ROUND_MOD_EXPR:
7862 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7863 subtarget = 0;
7864 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7865 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7866 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7868 case FIX_ROUND_EXPR:
7869 case FIX_FLOOR_EXPR:
7870 case FIX_CEIL_EXPR:
7871 abort (); /* Not used for C. */
7873 case FIX_TRUNC_EXPR:
7874 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7875 if (target == 0)
7876 target = gen_reg_rtx (mode);
7877 expand_fix (target, op0, unsignedp);
7878 return target;
7880 case FLOAT_EXPR:
7881 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7882 if (target == 0)
7883 target = gen_reg_rtx (mode);
7884 /* expand_float can't figure out what to do if FROM has VOIDmode.
7885 So give it the correct mode. With -O, cse will optimize this. */
7886 if (GET_MODE (op0) == VOIDmode)
7887 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7888 op0);
7889 expand_float (target, op0,
7890 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7891 return target;
7893 case NEGATE_EXPR:
7894 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7895 temp = expand_unop (mode,
7896 ! unsignedp && flag_trapv
7897 && (GET_MODE_CLASS(mode) == MODE_INT)
7898 ? negv_optab : neg_optab, op0, target, 0);
7899 if (temp == 0)
7900 abort ();
7901 return temp;
7903 case ABS_EXPR:
7904 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7906 /* Handle complex values specially. */
7907 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7908 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7909 return expand_complex_abs (mode, op0, target, unsignedp);
7911 /* Unsigned abs is simply the operand. Testing here means we don't
7912 risk generating incorrect code below. */
7913 if (TREE_UNSIGNED (type))
7914 return op0;
7916 return expand_abs (mode, op0, target, unsignedp,
7917 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7919 case MAX_EXPR:
7920 case MIN_EXPR:
7921 target = original_target;
7922 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7923 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7924 || GET_MODE (target) != mode
7925 || (GET_CODE (target) == REG
7926 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7927 target = gen_reg_rtx (mode);
7928 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7929 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7931 /* First try to do it with a special MIN or MAX instruction.
7932 If that does not win, use a conditional jump to select the proper
7933 value. */
7934 this_optab = (TREE_UNSIGNED (type)
7935 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7936 : (code == MIN_EXPR ? smin_optab : smax_optab));
7938 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7939 OPTAB_WIDEN);
7940 if (temp != 0)
7941 return temp;
7943 /* At this point, a MEM target is no longer useful; we will get better
7944 code without it. */
7946 if (GET_CODE (target) == MEM)
7947 target = gen_reg_rtx (mode);
7949 if (target != op0)
7950 emit_move_insn (target, op0);
7952 op0 = gen_label_rtx ();
7954 /* If this mode is an integer too wide to compare properly,
7955 compare word by word. Rely on cse to optimize constant cases. */
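/* E.g. (illustrative): a MAX_EXPR on a double-word integer mode is handled
   here by do_jump_by_parts_greater_rtx, comparing the high and low words
   separately.  */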
7956 if (GET_MODE_CLASS (mode) == MODE_INT
7957 && ! can_compare_p (GE, mode, ccp_jump))
7959 if (code == MAX_EXPR)
7960 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7961 target, op1, NULL_RTX, op0);
7962 else
7963 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7964 op1, target, NULL_RTX, op0);
7966 else
7968 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7969 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7970 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7971 op0);
7973 emit_move_insn (target, op1);
7974 emit_label (op0);
7975 return target;
7977 case BIT_NOT_EXPR:
7978 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7979 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7980 if (temp == 0)
7981 abort ();
7982 return temp;
7984 case FFS_EXPR:
7985 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7986 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7987 if (temp == 0)
7988 abort ();
7989 return temp;
7991 /* ??? Can optimize bitwise operations with one arg constant.
7992 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7993 and (a bitwise1 b) bitwise2 b (etc)
7994 but that is probably not worth while. */
7996 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7997 boolean values when we want in all cases to compute both of them. In
7998 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7999 as actual zero-or-1 values and then bitwise anding. In cases where
8000 there cannot be any side effects, better code would be made by
8001 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8002 how to recognize those cases. */
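/* E.g. (illustrative): "f() && g()" must be TRUTH_ANDIF_EXPR so that g() is
   skipped when f() returns zero; TRUTH_AND_EXPR as handled here computes
   both operands as 0/1 values and does a plain bitwise AND.  */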
8004 case TRUTH_AND_EXPR:
8005 case BIT_AND_EXPR:
8006 this_optab = and_optab;
8007 goto binop;
8009 case TRUTH_OR_EXPR:
8010 case BIT_IOR_EXPR:
8011 this_optab = ior_optab;
8012 goto binop;
8014 case TRUTH_XOR_EXPR:
8015 case BIT_XOR_EXPR:
8016 this_optab = xor_optab;
8017 goto binop;
8019 case LSHIFT_EXPR:
8020 case RSHIFT_EXPR:
8021 case LROTATE_EXPR:
8022 case RROTATE_EXPR:
8023 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8024 subtarget = 0;
8025 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8026 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8027 unsignedp);
8029 /* Could determine the answer when only additive constants differ. Also,
8030 the addition of one can be handled by changing the condition. */
8031 case LT_EXPR:
8032 case LE_EXPR:
8033 case GT_EXPR:
8034 case GE_EXPR:
8035 case EQ_EXPR:
8036 case NE_EXPR:
8037 case UNORDERED_EXPR:
8038 case ORDERED_EXPR:
8039 case UNLT_EXPR:
8040 case UNLE_EXPR:
8041 case UNGT_EXPR:
8042 case UNGE_EXPR:
8043 case UNEQ_EXPR:
8044 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8045 if (temp != 0)
8046 return temp;
8048 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8049 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8050 && original_target
8051 && GET_CODE (original_target) == REG
8052 && (GET_MODE (original_target)
8053 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8055 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8056 VOIDmode, 0);
8058 if (temp != original_target)
8059 temp = copy_to_reg (temp);
8061 op1 = gen_label_rtx ();
8062 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8063 GET_MODE (temp), unsignedp, 0, op1);
8064 emit_move_insn (temp, const1_rtx);
8065 emit_label (op1);
8066 return temp;
8069 /* If no set-flag instruction, must generate a conditional
8070 store into a temporary variable. Drop through
8071 and handle this like && and ||. */
8073 case TRUTH_ANDIF_EXPR:
8074 case TRUTH_ORIF_EXPR:
8075 if (! ignore
8076 && (target == 0 || ! safe_from_p (target, exp, 1)
8077 /* Make sure we don't have a hard reg (such as function's return
8078 value) live across basic blocks, if not optimizing. */
8079 || (!optimize && GET_CODE (target) == REG
8080 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8081 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8083 if (target)
8084 emit_clr_insn (target);
8086 op1 = gen_label_rtx ();
8087 jumpifnot (exp, op1);
8089 if (target)
8090 emit_0_to_1_insn (target);
8092 emit_label (op1);
8093 return ignore ? const0_rtx : target;
8095 case TRUTH_NOT_EXPR:
8096 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8097 /* The parser is careful to generate TRUTH_NOT_EXPR
8098 only with operands that are always zero or one. */
8099 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8100 target, 1, OPTAB_LIB_WIDEN);
8101 if (temp == 0)
8102 abort ();
8103 return temp;
8105 case COMPOUND_EXPR:
8106 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8107 emit_queue ();
8108 return expand_expr (TREE_OPERAND (exp, 1),
8109 (ignore ? const0_rtx : target),
8110 VOIDmode, 0);
8112 case COND_EXPR:
8113 /* If we would have a "singleton" (see below) were it not for a
8114 conversion in each arm, bring that conversion back out. */
8115 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8116 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8117 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8118 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8120 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8121 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8123 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8124 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8125 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8126 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8127 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8128 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8129 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8130 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8131 return expand_expr (build1 (NOP_EXPR, type,
8132 build (COND_EXPR, TREE_TYPE (iftrue),
8133 TREE_OPERAND (exp, 0),
8134 iftrue, iffalse)),
8135 target, tmode, modifier);
8139 /* Note that COND_EXPRs whose type is a structure or union
8140 are required to be constructed to contain assignments of
8141 a temporary variable, so that we can evaluate them here
8142 for side effect only. If type is void, we must do likewise. */
8144 /* If an arm of the branch requires a cleanup,
8145 only that cleanup is performed. */
8147 tree singleton = 0;
8148 tree binary_op = 0, unary_op = 0;
8150 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8151 convert it to our mode, if necessary. */
8152 if (integer_onep (TREE_OPERAND (exp, 1))
8153 && integer_zerop (TREE_OPERAND (exp, 2))
8154 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8156 if (ignore)
8158 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8159 ro_modifier);
8160 return const0_rtx;
8163 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
8164 if (GET_MODE (op0) == mode)
8165 return op0;
8167 if (target == 0)
8168 target = gen_reg_rtx (mode);
8169 convert_move (target, op0, unsignedp);
8170 return target;
8173 /* Check for X ? A + B : A. If we have this, we can copy A to the
8174 output and conditionally add B. Similarly for unary operations.
8175 Don't do this if X has side-effects because those side effects
8176 might affect A or B and the "?" operation is a sequence point in
8177 ANSI. (operand_equal_p tests for side effects.) */
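/* E.g. (illustrative): "p ? n + 4 : n" copies n to the output and then
   conditionally adds 4, instead of branching between two full arms.  */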
8179 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8180 && operand_equal_p (TREE_OPERAND (exp, 2),
8181 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8182 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8183 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8184 && operand_equal_p (TREE_OPERAND (exp, 1),
8185 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8186 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8187 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8188 && operand_equal_p (TREE_OPERAND (exp, 2),
8189 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8190 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8191 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8192 && operand_equal_p (TREE_OPERAND (exp, 1),
8193 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8194 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8196 /* If we are not to produce a result, we have no target. Otherwise,
8197 if a target was specified use it; it will not be used as an
8198 intermediate target unless it is safe. If no target, use a
8199 temporary. */
8201 if (ignore)
8202 temp = 0;
8203 else if (original_target
8204 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8205 || (singleton && GET_CODE (original_target) == REG
8206 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8207 && original_target == var_rtx (singleton)))
8208 && GET_MODE (original_target) == mode
8209 #ifdef HAVE_conditional_move
8210 && (! can_conditionally_move_p (mode)
8211 || GET_CODE (original_target) == REG
8212 || TREE_ADDRESSABLE (type))
8213 #endif
8214 && ! (GET_CODE (original_target) == MEM
8215 && MEM_VOLATILE_P (original_target)))
8216 temp = original_target;
8217 else if (TREE_ADDRESSABLE (type))
8218 abort ();
8219 else
8220 temp = assign_temp (type, 0, 0, 1);
8222 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8223 do the test of X as a store-flag operation, do this as
8224 A + ((X != 0) << log C). Similarly for other simple binary
8225 operators. Only do for C == 1 if BRANCH_COST is low. */
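/* E.g. (illustrative): "p ? n + 8 : n" becomes n + ((p != 0) << 3) when the
   condition can be computed as a store-flag value.  */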
8226 if (temp && singleton && binary_op
8227 && (TREE_CODE (binary_op) == PLUS_EXPR
8228 || TREE_CODE (binary_op) == MINUS_EXPR
8229 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8230 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8231 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8232 : integer_onep (TREE_OPERAND (binary_op, 1)))
8233 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8235 rtx result;
8236 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8237 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8238 ? addv_optab : add_optab)
8239 : TREE_CODE (binary_op) == MINUS_EXPR
8240 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8241 ? subv_optab : sub_optab)
8242 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8243 : xor_optab);
8245 /* If we had X ? A : A + 1, do this as A + (X == 0).
8247 We have to invert the truth value here and then put it
8248 back later if do_store_flag fails. We cannot simply copy
8249 TREE_OPERAND (exp, 0) to another variable and modify that
8250 because invert_truthvalue can modify the tree pointed to
8251 by its argument. */
8252 if (singleton == TREE_OPERAND (exp, 1))
8253 TREE_OPERAND (exp, 0)
8254 = invert_truthvalue (TREE_OPERAND (exp, 0));
8256 result = do_store_flag (TREE_OPERAND (exp, 0),
8257 (safe_from_p (temp, singleton, 1)
8258 ? temp : NULL_RTX),
8259 mode, BRANCH_COST <= 1);
8261 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8262 result = expand_shift (LSHIFT_EXPR, mode, result,
8263 build_int_2 (tree_log2
8264 (TREE_OPERAND
8265 (binary_op, 1)),
8266 0),
8267 (safe_from_p (temp, singleton, 1)
8268 ? temp : NULL_RTX), 0);
8270 if (result)
8272 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8273 return expand_binop (mode, boptab, op1, result, temp,
8274 unsignedp, OPTAB_LIB_WIDEN);
8276 else if (singleton == TREE_OPERAND (exp, 1))
8277 TREE_OPERAND (exp, 0)
8278 = invert_truthvalue (TREE_OPERAND (exp, 0));
8281 do_pending_stack_adjust ();
8282 NO_DEFER_POP;
8283 op0 = gen_label_rtx ();
8285 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8287 if (temp != 0)
8289 /* If the target conflicts with the other operand of the
8290 binary op, we can't use it. Also, we can't use the target
8291 if it is a hard register, because evaluating the condition
8292 might clobber it. */
8293 if ((binary_op
8294 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8295 || (GET_CODE (temp) == REG
8296 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8297 temp = gen_reg_rtx (mode);
8298 store_expr (singleton, temp, 0);
8300 else
8301 expand_expr (singleton,
8302 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8303 if (singleton == TREE_OPERAND (exp, 1))
8304 jumpif (TREE_OPERAND (exp, 0), op0);
8305 else
8306 jumpifnot (TREE_OPERAND (exp, 0), op0);
8308 start_cleanup_deferral ();
8309 if (binary_op && temp == 0)
8310 /* Just touch the other operand. */
8311 expand_expr (TREE_OPERAND (binary_op, 1),
8312 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8313 else if (binary_op)
8314 store_expr (build (TREE_CODE (binary_op), type,
8315 make_tree (type, temp),
8316 TREE_OPERAND (binary_op, 1)),
8317 temp, 0);
8318 else
8319 store_expr (build1 (TREE_CODE (unary_op), type,
8320 make_tree (type, temp)),
8321 temp, 0);
8322 op1 = op0;
8324 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8325 comparison operator. If we have one of these cases, set the
8326 output to A, branch on A (cse will merge these two references),
8327 then set the output to FOO. */
8328 else if (temp
8329 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8330 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8331 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8332 TREE_OPERAND (exp, 1), 0)
8333 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8334 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8335 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8337 if (GET_CODE (temp) == REG
8338 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8339 temp = gen_reg_rtx (mode);
8340 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8341 jumpif (TREE_OPERAND (exp, 0), op0);
8343 start_cleanup_deferral ();
8344 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8345 op1 = op0;
8347 else if (temp
8348 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8349 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8350 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8351 TREE_OPERAND (exp, 2), 0)
8352 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8353 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8354 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8356 if (GET_CODE (temp) == REG
8357 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8358 temp = gen_reg_rtx (mode);
8359 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8360 jumpifnot (TREE_OPERAND (exp, 0), op0);
8362 start_cleanup_deferral ();
8363 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8364 op1 = op0;
8366 else
8368 op1 = gen_label_rtx ();
8369 jumpifnot (TREE_OPERAND (exp, 0), op0);
8371 start_cleanup_deferral ();
8373 /* One branch of the cond can be void, if it never returns. For
8374 example A ? throw : E */
8375 if (temp != 0
8376 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8377 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8378 else
8379 expand_expr (TREE_OPERAND (exp, 1),
8380 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8381 end_cleanup_deferral ();
8382 emit_queue ();
8383 emit_jump_insn (gen_jump (op1));
8384 emit_barrier ();
8385 emit_label (op0);
8386 start_cleanup_deferral ();
8387 if (temp != 0
8388 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8389 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8390 else
8391 expand_expr (TREE_OPERAND (exp, 2),
8392 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8395 end_cleanup_deferral ();
8397 emit_queue ();
8398 emit_label (op1);
8399 OK_DEFER_POP;
8401 return temp;
8404 case TARGET_EXPR:
8406 /* Something needs to be initialized, but we didn't know
8407 where that thing was when building the tree. For example,
8408 it could be the return value of a function, or a parameter
8409 to a function which is laid out on the stack, or a temporary
8410 variable which must be passed by reference.
8412 We guarantee that the expression will either be constructed
8413 or copied into our original target. */
8415 tree slot = TREE_OPERAND (exp, 0);
8416 tree cleanups = NULL_TREE;
8417 tree exp1;
8419 if (TREE_CODE (slot) != VAR_DECL)
8420 abort ();
8422 if (! ignore)
8423 target = original_target;
8425 /* Set this here so that if we get a target that refers to a
8426 register variable that's already been used, put_reg_into_stack
8427 knows that it should fix up those uses. */
8428 TREE_USED (slot) = 1;
8430 if (target == 0)
8432 if (DECL_RTL_SET_P (slot))
8434 target = DECL_RTL (slot);
8435 /* We have already expanded the slot, so don't do
8436 it again. (mrs) */
8437 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8438 return target;
8440 else
8442 target = assign_temp (type, 2, 0, 1);
8443 /* All temp slots at this level must not conflict. */
8444 preserve_temp_slots (target);
8445 SET_DECL_RTL (slot, target);
8446 if (TREE_ADDRESSABLE (slot))
8447 put_var_into_stack (slot);
8449 /* Since SLOT is not known to the called function
8450 to belong to its stack frame, we must build an explicit
8451 cleanup. This case occurs when we must build up a reference
8452 to pass the reference as an argument. In this case,
8453 it is very likely that such a reference need not be
8454 built here. */
8456 if (TREE_OPERAND (exp, 2) == 0)
8457 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8458 cleanups = TREE_OPERAND (exp, 2);
8461 else
8463 /* This case does occur, when expanding a parameter which
8464 needs to be constructed on the stack. The target
8465 is the actual stack address that we want to initialize.
8466 The function we call will perform the cleanup in this case. */
8468 /* If we have already assigned it space, use that space,
8469 not the target that we were passed in, as our target
8470 parameter is only a hint. */
8471 if (DECL_RTL_SET_P (slot))
8473 target = DECL_RTL (slot);
8474 /* We have already expanded the slot, so don't do
8475 it again. (mrs) */
8476 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8477 return target;
8479 else
8481 SET_DECL_RTL (slot, target);
8482 /* If we must have an addressable slot, then make sure that
8483 the RTL that we just stored in slot is OK. */
8484 if (TREE_ADDRESSABLE (slot))
8485 put_var_into_stack (slot);
8489 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8490 /* Mark it as expanded. */
8491 TREE_OPERAND (exp, 1) = NULL_TREE;
8493 store_expr (exp1, target, 0);
8495 expand_decl_cleanup (NULL_TREE, cleanups);
8497 return target;
8500 case INIT_EXPR:
8502 tree lhs = TREE_OPERAND (exp, 0);
8503 tree rhs = TREE_OPERAND (exp, 1);
8504 tree noncopied_parts = 0;
8505 tree lhs_type = TREE_TYPE (lhs);
8507 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8508 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8509 noncopied_parts
8510 = init_noncopied_parts (stabilize_reference (lhs),
8511 TYPE_NONCOPIED_PARTS (lhs_type));
8513 while (noncopied_parts != 0)
8515 expand_assignment (TREE_VALUE (noncopied_parts),
8516 TREE_PURPOSE (noncopied_parts), 0, 0);
8517 noncopied_parts = TREE_CHAIN (noncopied_parts);
8519 return temp;
8522 case MODIFY_EXPR:
8524 /* If lhs is complex, expand calls in rhs before computing it.
8525 That's so we don't compute a pointer and save it over a call.
8526 If lhs is simple, compute it first so we can give it as a
8527 target if the rhs is just a call. This avoids an extra temp and copy
8528 and that prevents a partial-subsumption which makes bad code.
8529 Actually we could treat component_ref's of vars like vars. */
8531 tree lhs = TREE_OPERAND (exp, 0);
8532 tree rhs = TREE_OPERAND (exp, 1);
8533 tree noncopied_parts = 0;
8534 tree lhs_type = TREE_TYPE (lhs);
8536 temp = 0;
8538 /* Check for |= or &= of a bitfield of size one into another bitfield
8539 of size 1. In this case, (unless we need the result of the
8540 assignment) we can do this more efficiently with a
8541 test followed by an assignment, if necessary.
8543 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8544 things change so we do, this code should be enhanced to
8545 support it. */
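/* E.g. (illustrative): "s.f |= t.g;" with two one-bit fields is emitted as
   "if (t.g) s.f = 1;", i.e. a jump around a constant store.  */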
8546 if (ignore
8547 && TREE_CODE (lhs) == COMPONENT_REF
8548 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8549 || TREE_CODE (rhs) == BIT_AND_EXPR)
8550 && TREE_OPERAND (rhs, 0) == lhs
8551 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8552 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8553 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8555 rtx label = gen_label_rtx ();
8557 do_jump (TREE_OPERAND (rhs, 1),
8558 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8559 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8560 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8561 (TREE_CODE (rhs) == BIT_IOR_EXPR
8562 ? integer_one_node
8563 : integer_zero_node)),
8564 0, 0);
8565 do_pending_stack_adjust ();
8566 emit_label (label);
8567 return const0_rtx;
8570 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8571 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8572 noncopied_parts
8573 = save_noncopied_parts (stabilize_reference (lhs),
8574 TYPE_NONCOPIED_PARTS (lhs_type));
8576 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8577 while (noncopied_parts != 0)
8579 expand_assignment (TREE_PURPOSE (noncopied_parts),
8580 TREE_VALUE (noncopied_parts), 0, 0);
8581 noncopied_parts = TREE_CHAIN (noncopied_parts);
8583 return temp;
8586 case RETURN_EXPR:
8587 if (!TREE_OPERAND (exp, 0))
8588 expand_null_return ();
8589 else
8590 expand_return (TREE_OPERAND (exp, 0));
8591 return const0_rtx;
8593 case PREINCREMENT_EXPR:
8594 case PREDECREMENT_EXPR:
8595 return expand_increment (exp, 0, ignore);
8597 case POSTINCREMENT_EXPR:
8598 case POSTDECREMENT_EXPR:
8599 /* Faster to treat as pre-increment if result is not used. */
8600 return expand_increment (exp, ! ignore, ignore);
8602 case ADDR_EXPR:
8603 /* If nonzero, TEMP will be set to the address of something that might
8604 be a MEM corresponding to a stack slot. */
8605 temp = 0;
8607 /* Are we taking the address of a nested function? */
8608 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8609 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8610 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8611 && ! TREE_STATIC (exp))
8613 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8614 op0 = force_operand (op0, target);
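/* A trampoline is a small stub generated at run time that loads the static
   chain of the nested function before jumping to it, so the resulting
   address can be used like an ordinary function pointer.  */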
8616 /* If we are taking the address of something erroneous, just
8617 return a zero. */
8618 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8619 return const0_rtx;
8620 else
8622 /* We make sure to pass const0_rtx down if we came in with
8623 ignore set, to avoid doing the cleanups twice for something. */
8624 op0 = expand_expr (TREE_OPERAND (exp, 0),
8625 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8626 (modifier == EXPAND_INITIALIZER
8627 ? modifier : EXPAND_CONST_ADDRESS));
8629 /* If we are going to ignore the result, OP0 will have been set
8630 to const0_rtx, so just return it. Don't get confused and
8631 think we are taking the address of the constant. */
8632 if (ignore)
8633 return op0;
8635 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8636 clever and returns a REG when given a MEM. */
8637 op0 = protect_from_queue (op0, 1);
8639 /* We would like the object in memory. If it is a constant, we can
8640 have it be statically allocated into memory. For a non-constant,
8641 we need to allocate some memory and store the value into it. */
8643 if (CONSTANT_P (op0))
8644 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8645 op0);
8646 else if (GET_CODE (op0) == MEM)
8648 mark_temp_addr_taken (op0);
8649 temp = XEXP (op0, 0);
8652 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8653 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8654 || GET_CODE (op0) == PARALLEL)
8656 /* If this object is in a register, it must not
8657 be BLKmode. */
8658 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8659 tree nt = build_qualified_type (inner_type,
8660 (TYPE_QUALS (inner_type)
8661 | TYPE_QUAL_CONST));
8662 rtx memloc = assign_temp (nt, 1, 1, 1);
8664 mark_temp_addr_taken (memloc);
8665 if (GET_CODE (op0) == PARALLEL)
8666 /* Handle calls that pass values in multiple non-contiguous
8667 locations. The Irix 6 ABI has examples of this. */
8668 emit_group_store (memloc, op0,
8669 int_size_in_bytes (inner_type),
8670 TYPE_ALIGN (inner_type));
8671 else
8672 emit_move_insn (memloc, op0);
8673 op0 = memloc;
8676 if (GET_CODE (op0) != MEM)
8677 abort ();
8679 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8681 temp = XEXP (op0, 0);
8682 #ifdef POINTERS_EXTEND_UNSIGNED
8683 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8684 && mode == ptr_mode)
8685 temp = convert_memory_address (ptr_mode, temp);
8686 #endif
8687 return temp;
8690 op0 = force_operand (XEXP (op0, 0), target);
8693 if (flag_force_addr && GET_CODE (op0) != REG)
8694 op0 = force_reg (Pmode, op0);
8696 if (GET_CODE (op0) == REG
8697 && ! REG_USERVAR_P (op0))
8698 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8700 /* If we might have had a temp slot, add an equivalent address
8701 for it. */
8702 if (temp != 0)
8703 update_temp_slot_address (temp, op0);
8705 #ifdef POINTERS_EXTEND_UNSIGNED
8706 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8707 && mode == ptr_mode)
8708 op0 = convert_memory_address (ptr_mode, op0);
8709 #endif
8711 return op0;
8713 case ENTRY_VALUE_EXPR:
8714 abort ();
8716 /* COMPLEX type for Extended Pascal & Fortran */
8717 case COMPLEX_EXPR:
8719 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8720 rtx insns;
8722 /* Get the rtx code of the operands. */
8723 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8724 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8726 if (! target)
8727 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8729 start_sequence ();
8731 /* Move the real (op0) and imaginary (op1) parts to their location. */
8732 emit_move_insn (gen_realpart (mode, target), op0);
8733 emit_move_insn (gen_imagpart (mode, target), op1);
8735 insns = get_insns ();
8736 end_sequence ();
8738 /* Complex construction should appear as a single unit. */
8739 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8740 each with a separate pseudo as destination.
8741 It's not correct for flow to treat them as a unit. */
8742 if (GET_CODE (target) != CONCAT)
8743 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8744 else
8745 emit_insns (insns);
8747 return target;
8750 case REALPART_EXPR:
8751 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8752 return gen_realpart (mode, op0);
8754 case IMAGPART_EXPR:
8755 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8756 return gen_imagpart (mode, op0);
8758 case CONJ_EXPR:
8760 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8761 rtx imag_t;
8762 rtx insns;
8764 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8766 if (! target)
8767 target = gen_reg_rtx (mode);
8769 start_sequence ();
8771 /* Store the realpart and the negated imagpart to target. */
8772 emit_move_insn (gen_realpart (partmode, target),
8773 gen_realpart (partmode, op0));
8775 imag_t = gen_imagpart (partmode, target);
8776 temp = expand_unop (partmode,
8777 ! unsignedp && flag_trapv
8778 && (GET_MODE_CLASS(partmode) == MODE_INT)
8779 ? negv_optab : neg_optab,
8780 gen_imagpart (partmode, op0), imag_t, 0);
8781 if (temp != imag_t)
8782 emit_move_insn (imag_t, temp);
8784 insns = get_insns ();
8785 end_sequence ();
8787 /* Conjugate should appear as a single unit.
8788 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8789 each with a separate pseudo as destination.
8790 It's not correct for flow to treat them as a unit. */
8791 if (GET_CODE (target) != CONCAT)
8792 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8793 else
8794 emit_insns (insns);
8796 return target;
8799 case TRY_CATCH_EXPR:
8801 tree handler = TREE_OPERAND (exp, 1);
8803 expand_eh_region_start ();
8805 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8807 expand_eh_region_end_cleanup (handler);
8809 return op0;
8812 case TRY_FINALLY_EXPR:
8814 tree try_block = TREE_OPERAND (exp, 0);
8815 tree finally_block = TREE_OPERAND (exp, 1);
8816 rtx finally_label = gen_label_rtx ();
8817 rtx done_label = gen_label_rtx ();
8818 rtx return_link = gen_reg_rtx (Pmode);
8819 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8820 (tree) finally_label, (tree) return_link);
8821 TREE_SIDE_EFFECTS (cleanup) = 1;
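/* The finally block is emitted once, after the try block; both the normal
   path and any cleanup path reach it through GOTO_SUBROUTINE_EXPR, which
   stores its resume address in RETURN_LINK and jumps to FINALLY_LABEL.  */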
8823 /* Start a new binding layer that will keep track of all cleanup
8824 actions to be performed. */
8825 expand_start_bindings (2);
8827 target_temp_slot_level = temp_slot_level;
8829 expand_decl_cleanup (NULL_TREE, cleanup);
8830 op0 = expand_expr (try_block, target, tmode, modifier);
8832 preserve_temp_slots (op0);
8833 expand_end_bindings (NULL_TREE, 0, 0);
8834 emit_jump (done_label);
8835 emit_label (finally_label);
8836 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8837 emit_indirect_jump (return_link);
8838 emit_label (done_label);
8839 return op0;
8842 case GOTO_SUBROUTINE_EXPR:
8844 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8845 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8846 rtx return_address = gen_label_rtx ();
8847 emit_move_insn (return_link,
8848 gen_rtx_LABEL_REF (Pmode, return_address));
8849 emit_jump (subr);
8850 emit_label (return_address);
8851 return const0_rtx;
8854 case VA_ARG_EXPR:
8855 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8857 case EXC_PTR_EXPR:
8858 return get_exception_pointer (cfun);
8860 default:
8861 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8864 /* Here to do an ordinary binary operator, generating an instruction
8865 from the optab already placed in `this_optab'. */
8866 binop:
8867 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8868 subtarget = 0;
8869 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8870 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8871 binop2:
8872 temp = expand_binop (mode, this_optab, op0, op1, target,
8873 unsignedp, OPTAB_LIB_WIDEN);
8874 if (temp == 0)
8875 abort ();
8876 return temp;
8879 /* Similar to expand_expr, except that we don't specify a target, target
8880 mode, or modifier and we return the alignment of the inner type. This is
8881 used in cases where it is not necessary to align the result to the
8882 alignment of its type as long as we know the alignment of the result, for
8883 example for comparisons of BLKmode values. */
8885 static rtx
8886 expand_expr_unaligned (exp, palign)
8887 register tree exp;
8888 unsigned int *palign;
8890 register rtx op0;
8891 tree type = TREE_TYPE (exp);
8892 register enum machine_mode mode = TYPE_MODE (type);
8894 /* Default the alignment we return to that of the type. */
8895 *palign = TYPE_ALIGN (type);
8897 /* The only cases in which we do anything special is if the resulting mode
8898 is BLKmode. */
8899 if (mode != BLKmode)
8900 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8902 switch (TREE_CODE (exp))
8904 case CONVERT_EXPR:
8905 case NOP_EXPR:
8906 case NON_LVALUE_EXPR:
8907 /* Conversions between BLKmode values don't change the underlying
8908 alignment or value. */
8909 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8910 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8911 break;
8913 case ARRAY_REF:
8914 /* Much of the code for this case is copied directly from expand_expr.
8915 We need to duplicate it here because we will do something different
8916 in the fall-through case, so we need to handle the same exceptions
8917 it does. */
8919 tree array = TREE_OPERAND (exp, 0);
8920 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8921 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8922 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8923 HOST_WIDE_INT i;
8925 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8926 abort ();
8928 /* Optimize the special-case of a zero lower bound.
8930 We convert the low_bound to sizetype to avoid some problems
8931 with constant folding. (E.g. suppose the lower bound is 1,
8932 and its mode is QI. Without the conversion, (ARRAY
8933 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8934 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8936 if (! integer_zerop (low_bound))
8937 index = size_diffop (index, convert (sizetype, low_bound));
8939 /* If this is a constant index into a constant array,
8940 just get the value from the array. Handle both the cases when
8941 we have an explicit constructor and when our operand is a variable
8942 that was declared const. */
8944 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8945 && host_integerp (index, 0)
8946 && 0 > compare_tree_int (index,
8947 list_length (CONSTRUCTOR_ELTS
8948 (TREE_OPERAND (exp, 0)))))
8950 tree elem;
8952 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8953 i = tree_low_cst (index, 0);
8954 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8955 ;
8957 if (elem)
8958 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8961 else if (optimize >= 1
8962 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8963 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8964 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8966 if (TREE_CODE (index) == INTEGER_CST)
8968 tree init = DECL_INITIAL (array);
8970 if (TREE_CODE (init) == CONSTRUCTOR)
8972 tree elem;
8974 for (elem = CONSTRUCTOR_ELTS (init);
8975 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8976 elem = TREE_CHAIN (elem))
8977 ;
8979 if (elem)
8980 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8981 palign);
8986 /* Fall through. */
8988 case COMPONENT_REF:
8989 case BIT_FIELD_REF:
8990 case ARRAY_RANGE_REF:
8991 /* If the operand is a CONSTRUCTOR, we can just extract the
8992 appropriate field if it is present. Don't do this if we have
8993 already written the data since we want to refer to that copy
8994 and varasm.c assumes that's what we'll do. */
8995 if (TREE_CODE (exp) == COMPONENT_REF
8996 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8997 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8999 tree elt;
9001 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
9002 elt = TREE_CHAIN (elt))
9003 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
9004 /* Note that unlike the case in expand_expr, we know this is
9005 BLKmode and hence not an integer. */
9006 return expand_expr_unaligned (TREE_VALUE (elt), palign);
9010 enum machine_mode mode1;
9011 HOST_WIDE_INT bitsize, bitpos;
9012 tree offset;
9013 int volatilep = 0;
9014 unsigned int alignment;
9015 int unsignedp;
9016 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9017 &mode1, &unsignedp, &volatilep,
9018 &alignment);
9020 /* If we got back the original object, something is wrong. Perhaps
9021 we are evaluating an expression too early. In any event, don't
9022 infinitely recurse. */
9023 if (tem == exp)
9024 abort ();
9026 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9028 /* If this is a constant, put it into a register if it is a
9029 legitimate constant and OFFSET is 0, and into memory if it isn't. */
9030 if (CONSTANT_P (op0))
9032 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
9034 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
9035 && offset == 0)
9036 op0 = force_reg (inner_mode, op0);
9037 else
9038 op0 = validize_mem (force_const_mem (inner_mode, op0));
9041 if (offset != 0)
9043 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
9045 /* If this object is in a register, put it into memory.
9046 This case can't occur in C, but can in Ada if we have
9047 unchecked conversion of an expression from a scalar type to
9048 an array or record type. */
9049 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9050 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
9052 tree nt = build_qualified_type (TREE_TYPE (tem),
9053 (TYPE_QUALS (TREE_TYPE (tem))
9054 | TYPE_QUAL_CONST));
9055 rtx memloc = assign_temp (nt, 1, 1, 1);
9057 mark_temp_addr_taken (memloc);
9058 emit_move_insn (memloc, op0);
9059 op0 = memloc;
9062 if (GET_CODE (op0) != MEM)
9063 abort ();
9065 if (GET_MODE (offset_rtx) != ptr_mode)
9067 #ifdef POINTERS_EXTEND_UNSIGNED
9068 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
9069 #else
9070 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
9071 #endif
9074 op0 = change_address (op0, VOIDmode,
9075 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
9076 force_reg (ptr_mode,
9077 offset_rtx)));
9080 /* Don't forget about volatility even if this is a bitfield. */
9081 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
9083 op0 = copy_rtx (op0);
9084 MEM_VOLATILE_P (op0) = 1;
9087 /* Check the access. */
9088 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
9090 rtx to;
9091 int size;
9093 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
9094 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
9096 /* Check the access right of the pointer. */
9097 in_check_memory_usage = 1;
9098 if (size > BITS_PER_UNIT)
9099 emit_library_call (chkr_check_addr_libfunc,
9100 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
9101 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
9102 TYPE_MODE (sizetype),
9103 GEN_INT (MEMORY_USE_RO),
9104 TYPE_MODE (integer_type_node));
9105 in_check_memory_usage = 0;
9108 /* In cases where an aligned union has an unaligned object
9109 as a field, we might be extracting a BLKmode value from
9110 an integer-mode (e.g., SImode) object. Handle this case
9111 by doing the extract into an object as wide as the field
9112 (which we know to be the width of a basic mode), then
9113 storing into memory, and changing the mode to BLKmode.
9114 If we ultimately want the address (EXPAND_CONST_ADDRESS or
9115 EXPAND_INITIALIZER), then we must not copy to a temporary. */
9116 if (mode1 == VOIDmode
9117 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9118 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
9119 && (TYPE_ALIGN (type) > alignment
9120 || bitpos % TYPE_ALIGN (type) != 0)))
9122 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9124 if (ext_mode == BLKmode)
9126 /* In this case, BITPOS must start at a byte boundary. */
9127 if (GET_CODE (op0) != MEM
9128 || bitpos % BITS_PER_UNIT != 0)
9129 abort ();
9131 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
9133 else
9135 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
9136 TYPE_QUAL_CONST);
9137 rtx new = assign_temp (nt, 0, 1, 1);
9139 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
9140 unsignedp, NULL_RTX, ext_mode,
9141 ext_mode, alignment,
9142 int_size_in_bytes (TREE_TYPE (tem)));
9144 /* If the result is a record type and BITSIZE is narrower than
9145 the mode of OP0, an integral mode, and this is a big endian
9146 machine, we must put the field into the high-order bits. */
9147 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9148 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9149 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
9150 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9151 size_int (GET_MODE_BITSIZE
9152 (GET_MODE (op0))
9153 - bitsize),
9154 op0, 1);
9156 emit_move_insn (new, op0);
9157 op0 = copy_rtx (new);
9158 PUT_MODE (op0, BLKmode);
9161 else
9162 /* Get a reference to just this component. */
9163 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9165 set_mem_alias_set (op0, get_alias_set (exp));
9167 /* Adjust the alignment in case the bit position is not
9168 a multiple of the alignment of the inner object. */
9169 while (bitpos % alignment != 0)
9170 alignment >>= 1;
9172 if (GET_CODE (XEXP (op0, 0)) == REG)
9173 mark_reg_pointer (XEXP (op0, 0), alignment);
9175 MEM_IN_STRUCT_P (op0) = 1;
9176 MEM_VOLATILE_P (op0) |= volatilep;
9178 *palign = alignment;
9179 return op0;
9182 default:
9183 break;
9187 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9190 /* Return the tree node if ARG corresponds to a string constant or zero
9191 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9192 in bytes within the string that ARG is accessing. The type of the
9193 offset will be `sizetype'. */
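/* For illustration: if ARG is an ADDR_EXPR whose operand is the
   STRING_CST "hello", that STRING_CST is returned with *PTR_OFFSET set
   to zero; if ARG is the PLUS_EXPR of such an address and the value 2
   (as can arise from source like "hello" + 2), the same STRING_CST is
   returned with *PTR_OFFSET set to 2.  */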
9195 tree
9196 string_constant (arg, ptr_offset)
9197 tree arg;
9198 tree *ptr_offset;
9200 STRIP_NOPS (arg);
9202 if (TREE_CODE (arg) == ADDR_EXPR
9203 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9205 *ptr_offset = size_zero_node;
9206 return TREE_OPERAND (arg, 0);
9208 else if (TREE_CODE (arg) == PLUS_EXPR)
9210 tree arg0 = TREE_OPERAND (arg, 0);
9211 tree arg1 = TREE_OPERAND (arg, 1);
9213 STRIP_NOPS (arg0);
9214 STRIP_NOPS (arg1);
9216 if (TREE_CODE (arg0) == ADDR_EXPR
9217 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9219 *ptr_offset = convert (sizetype, arg1);
9220 return TREE_OPERAND (arg0, 0);
9222 else if (TREE_CODE (arg1) == ADDR_EXPR
9223 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9225 *ptr_offset = convert (sizetype, arg0);
9226 return TREE_OPERAND (arg1, 0);
9230 return 0;
9233 /* Expand code for a post- or pre- increment or decrement
9234 and return the RTX for the result.
9235 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9237 static rtx
9238 expand_increment (exp, post, ignore)
9239 register tree exp;
9240 int post, ignore;
9242 register rtx op0, op1;
9243 register rtx temp, value;
9244 register tree incremented = TREE_OPERAND (exp, 0);
9245 optab this_optab = add_optab;
9246 int icode;
9247 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9248 int op0_is_copy = 0;
9249 int single_insn = 0;
9250 /* 1 means we can't store into OP0 directly,
9251 because it is a subreg narrower than a word,
9252 and we don't dare clobber the rest of the word. */
9253 int bad_subreg = 0;
9255 /* Stabilize any component ref that might need to be
9256 evaluated more than once below. */
9257 if (!post
9258 || TREE_CODE (incremented) == BIT_FIELD_REF
9259 || (TREE_CODE (incremented) == COMPONENT_REF
9260 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9261 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9262 incremented = stabilize_reference (incremented);
9263 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9264 ones into save exprs so that they don't accidentally get evaluated
9265 more than once by the code below. */
9266 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9267 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9268 incremented = save_expr (incremented);
9270 /* Compute the operands as RTX.
9271 Note whether OP0 is the actual lvalue or a copy of it:
9272 I believe it is a copy iff it is a register or subreg
9273 and insns were generated in computing it. */
9275 temp = get_last_insn ();
9276 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9278 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9279 in place but instead must do sign- or zero-extension during assignment,
9280 so we copy it into a new register and let the code below use it as
9281 a copy.
9283 Note that we can safely modify this SUBREG since it is known not to be
9284 shared (it was made by the expand_expr call above). */
9286 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9288 if (post)
9289 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9290 else
9291 bad_subreg = 1;
9293 else if (GET_CODE (op0) == SUBREG
9294 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9296 /* We cannot increment this SUBREG in place. If we are
9297 post-incrementing, get a copy of the old value. Otherwise,
9298 just mark that we cannot increment in place. */
9299 if (post)
9300 op0 = copy_to_reg (op0);
9301 else
9302 bad_subreg = 1;
9305 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9306 && temp != get_last_insn ());
9307 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9308 EXPAND_MEMORY_USE_BAD);
9310 /* Decide whether incrementing or decrementing. */
9311 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9312 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9313 this_optab = sub_optab;
9315 /* Convert decrement by a constant into a negative increment. */
9316 if (this_optab == sub_optab
9317 && GET_CODE (op1) == CONST_INT)
9319 op1 = GEN_INT (-INTVAL (op1));
9320 this_optab = add_optab;
9323 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9324 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9326 /* For a preincrement, see if we can do this with a single instruction. */
9327 if (!post)
9329 icode = (int) this_optab->handlers[(int) mode].insn_code;
9330 if (icode != (int) CODE_FOR_nothing
9331 /* Make sure that OP0 is valid for operands 0 and 1
9332 of the insn we want to queue. */
9333 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9334 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9335 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9336 single_insn = 1;
9339 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9340 then we cannot just increment OP0. We must therefore contrive to
9341 increment the original value. Then, for postincrement, we can return
9342 OP0 since it is a copy of the old value. For preincrement, expand here
9343 unless we can do it with a single insn.
9345 Likewise if storing directly into OP0 would clobber high bits
9346 we need to preserve (bad_subreg). */
9347 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9349 /* This is the easiest way to increment the value wherever it is.
9350 Problems with multiple evaluation of INCREMENTED are prevented
9351 because either (1) it is a component_ref or preincrement,
9352 in which case it was stabilized above, or (2) it is an array_ref
9353 with constant index in an array in a register, which is
9354 safe to reevaluate. */
9355 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9356 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9357 ? MINUS_EXPR : PLUS_EXPR),
9358 TREE_TYPE (exp),
9359 incremented,
9360 TREE_OPERAND (exp, 1));
9362 while (TREE_CODE (incremented) == NOP_EXPR
9363 || TREE_CODE (incremented) == CONVERT_EXPR)
9365 newexp = convert (TREE_TYPE (incremented), newexp);
9366 incremented = TREE_OPERAND (incremented, 0);
9369 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9370 return post ? op0 : temp;
9373 if (post)
9375 /* We have a true reference to the value in OP0.
9376 If there is an insn to add or subtract in this mode, queue it.
9377 Queueing the increment insn avoids the register shuffling
9378 that often results if we must increment now and first save
9379 the old value for subsequent use. */
9381 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9382 op0 = stabilize (op0);
9383 #endif
9385 icode = (int) this_optab->handlers[(int) mode].insn_code;
9386 if (icode != (int) CODE_FOR_nothing
9387 /* Make sure that OP0 is valid for operands 0 and 1
9388 of the insn we want to queue. */
9389 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9390 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9392 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9393 op1 = force_reg (mode, op1);
9395 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9397 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9399 rtx addr = (general_operand (XEXP (op0, 0), mode)
9400 ? force_reg (Pmode, XEXP (op0, 0))
9401 : copy_to_reg (XEXP (op0, 0)));
9402 rtx temp, result;
9404 op0 = replace_equiv_address (op0, addr);
9405 temp = force_reg (GET_MODE (op0), op0);
9406 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9407 op1 = force_reg (mode, op1);
9409 /* The increment queue is LIFO, thus we have to `queue'
9410 the instructions in reverse order. */
9411 enqueue_insn (op0, gen_move_insn (op0, temp));
9412 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9413 return result;
9417 /* Preincrement, or we can't increment with one simple insn. */
9418 if (post)
9419 /* Save a copy of the value before inc or dec, to return it later. */
9420 temp = value = copy_to_reg (op0);
9421 else
9422 /* Arrange to return the incremented value. */
9423 /* Copy the rtx because expand_binop will protect from the queue,
9424 and the results of that would be invalid for us to return
9425 if our caller does emit_queue before using our result. */
9426 temp = copy_rtx (value = op0);
9428 /* Increment however we can. */
9429 op1 = expand_binop (mode, this_optab, value, op1,
9430 current_function_check_memory_usage ? NULL_RTX : op0,
9431 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9432 /* Make sure the value is stored into OP0. */
9433 if (op1 != op0)
9434 emit_move_insn (op0, op1);
9436 return temp;
9439 /* At the start of a function, record that we have no previously-pushed
9440 arguments waiting to be popped. */
9442 void
9443 init_pending_stack_adjust ()
9445 pending_stack_adjust = 0;
9448 /* When exiting from function, if safe, clear out any pending stack adjust
9449 so the adjustment won't get done.
9451 Note, if the current function calls alloca, then it must have a
9452 frame pointer regardless of the value of flag_omit_frame_pointer. */
9454 void
9455 clear_pending_stack_adjust ()
9457 #ifdef EXIT_IGNORE_STACK
9458 if (optimize > 0
9459 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9460 && EXIT_IGNORE_STACK
9461 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9462 && ! flag_inline_functions)
9464 stack_pointer_delta -= pending_stack_adjust,
9465 pending_stack_adjust = 0;
9467 #endif
9470 /* Pop any previously-pushed arguments that have not been popped yet. */
9472 void
9473 do_pending_stack_adjust ()
9475 if (inhibit_defer_pop == 0)
9477 if (pending_stack_adjust != 0)
9478 adjust_stack (GEN_INT (pending_stack_adjust));
9479 pending_stack_adjust = 0;
9483 /* Expand conditional expressions. */
9485 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9486 LABEL is an rtx of code CODE_LABEL, in this function and all the
9487 functions here. */
9489 void
9490 jumpifnot (exp, label)
9491 tree exp;
9492 rtx label;
9494 do_jump (exp, label, NULL_RTX);
9497 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9499 void
9500 jumpif (exp, label)
9501 tree exp;
9502 rtx label;
9504 do_jump (exp, NULL_RTX, label);
9507 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9508 the result is zero, or IF_TRUE_LABEL if the result is one.
9509 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9510 meaning fall through in that case.
9512 do_jump always does any pending stack adjust except when it does not
9513 actually perform a jump. An example where there is no jump
9514 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9516 This function is responsible for optimizing cases such as
9517 &&, || and comparison operators in EXP. */
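/* For illustration: for EXP of the form A && B with only IF_FALSE_LABEL
   supplied, the code emitted is roughly "if (!A) goto false; if (!B)
   goto false;", falling through when both operands are nonzero; A || B
   is handled symmetrically, jumping to the true label as soon as one
   operand is known to be nonzero.  */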
9519 void
9520 do_jump (exp, if_false_label, if_true_label)
9521 tree exp;
9522 rtx if_false_label, if_true_label;
9524 register enum tree_code code = TREE_CODE (exp);
9525 /* Some cases need to create a label to jump to
9526 in order to properly fall through.
9527 These cases set DROP_THROUGH_LABEL nonzero. */
9528 rtx drop_through_label = 0;
9529 rtx temp;
9530 int i;
9531 tree type;
9532 enum machine_mode mode;
9534 #ifdef MAX_INTEGER_COMPUTATION_MODE
9535 check_max_integer_computation_mode (exp);
9536 #endif
9538 emit_queue ();
9540 switch (code)
9542 case ERROR_MARK:
9543 break;
9545 case INTEGER_CST:
9546 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9547 if (temp)
9548 emit_jump (temp);
9549 break;
9551 #if 0
9552 /* This is not true with #pragma weak */
9553 case ADDR_EXPR:
9554 /* The address of something can never be zero. */
9555 if (if_true_label)
9556 emit_jump (if_true_label);
9557 break;
9558 #endif
9560 case NOP_EXPR:
9561 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9562 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9563 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9564 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9565 goto normal;
9566 case CONVERT_EXPR:
9567 /* If we are narrowing the operand, we have to do the compare in the
9568 narrower mode. */
9569 if ((TYPE_PRECISION (TREE_TYPE (exp))
9570 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9571 goto normal;
9572 case NON_LVALUE_EXPR:
9573 case REFERENCE_EXPR:
9574 case ABS_EXPR:
9575 case NEGATE_EXPR:
9576 case LROTATE_EXPR:
9577 case RROTATE_EXPR:
9578 /* These cannot change zero->non-zero or vice versa. */
9579 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9580 break;
9582 case WITH_RECORD_EXPR:
9583 /* Put the object on the placeholder list, recurse through our first
9584 operand, and pop the list. */
9585 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9586 placeholder_list);
9587 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9588 placeholder_list = TREE_CHAIN (placeholder_list);
9589 break;
9591 #if 0
9592 /* This never takes fewer insns than evaluating the PLUS_EXPR followed by
9593 a test and can be longer if the test is eliminated. */
9594 case PLUS_EXPR:
9595 /* Reduce to minus. */
9596 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9597 TREE_OPERAND (exp, 0),
9598 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9599 TREE_OPERAND (exp, 1))));
9600 /* Process as MINUS. */
9601 #endif
9603 case MINUS_EXPR:
9604 /* Non-zero iff operands of minus differ. */
9605 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9606 TREE_OPERAND (exp, 0),
9607 TREE_OPERAND (exp, 1)),
9608 NE, NE, if_false_label, if_true_label);
9609 break;
9611 case BIT_AND_EXPR:
9612 /* If we are AND'ing with a small constant, do this comparison in the
9613 smallest type that fits. If the machine doesn't have comparisons
9614 that small, it will be converted back to the wider comparison.
9615 This helps if we are testing the sign bit of a narrower object.
9616 combine can't do this for us because it can't know whether a
9617 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
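/* For illustration: a test such as "x & 0x80" on a 32-bit int has
   tree_floor_log2 equal to 7, so the comparison can be narrowed to
   QImode; on many targets that lets the sign bit of a single byte be
   tested directly instead of loading and masking a full word.  */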
9619 if (! SLOW_BYTE_ACCESS
9620 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9621 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9622 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9623 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9624 && (type = type_for_mode (mode, 1)) != 0
9625 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9626 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9627 != CODE_FOR_nothing))
9629 do_jump (convert (type, exp), if_false_label, if_true_label);
9630 break;
9632 goto normal;
9634 case TRUTH_NOT_EXPR:
9635 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9636 break;
9638 case TRUTH_ANDIF_EXPR:
9639 if (if_false_label == 0)
9640 if_false_label = drop_through_label = gen_label_rtx ();
9641 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9642 start_cleanup_deferral ();
9643 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9644 end_cleanup_deferral ();
9645 break;
9647 case TRUTH_ORIF_EXPR:
9648 if (if_true_label == 0)
9649 if_true_label = drop_through_label = gen_label_rtx ();
9650 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9651 start_cleanup_deferral ();
9652 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9653 end_cleanup_deferral ();
9654 break;
9656 case COMPOUND_EXPR:
9657 push_temp_slots ();
9658 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9659 preserve_temp_slots (NULL_RTX);
9660 free_temp_slots ();
9661 pop_temp_slots ();
9662 emit_queue ();
9663 do_pending_stack_adjust ();
9664 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9665 break;
9667 case COMPONENT_REF:
9668 case BIT_FIELD_REF:
9669 case ARRAY_REF:
9670 case ARRAY_RANGE_REF:
9672 HOST_WIDE_INT bitsize, bitpos;
9673 int unsignedp;
9674 enum machine_mode mode;
9675 tree type;
9676 tree offset;
9677 int volatilep = 0;
9678 unsigned int alignment;
9680 /* Get description of this reference. We don't actually care
9681 about the underlying object here. */
9682 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9683 &unsignedp, &volatilep, &alignment);
9685 type = type_for_size (bitsize, unsignedp);
9686 if (! SLOW_BYTE_ACCESS
9687 && type != 0 && bitsize >= 0
9688 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9689 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9690 != CODE_FOR_nothing))
9692 do_jump (convert (type, exp), if_false_label, if_true_label);
9693 break;
9695 goto normal;
9698 case COND_EXPR:
9699 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9700 if (integer_onep (TREE_OPERAND (exp, 1))
9701 && integer_zerop (TREE_OPERAND (exp, 2)))
9702 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9704 else if (integer_zerop (TREE_OPERAND (exp, 1))
9705 && integer_onep (TREE_OPERAND (exp, 2)))
9706 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9708 else
9710 register rtx label1 = gen_label_rtx ();
9711 drop_through_label = gen_label_rtx ();
9713 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9715 start_cleanup_deferral ();
9716 /* Now the THEN-expression. */
9717 do_jump (TREE_OPERAND (exp, 1),
9718 if_false_label ? if_false_label : drop_through_label,
9719 if_true_label ? if_true_label : drop_through_label);
9720 /* In case the do_jump just above never jumps. */
9721 do_pending_stack_adjust ();
9722 emit_label (label1);
9724 /* Now the ELSE-expression. */
9725 do_jump (TREE_OPERAND (exp, 2),
9726 if_false_label ? if_false_label : drop_through_label,
9727 if_true_label ? if_true_label : drop_through_label);
9728 end_cleanup_deferral ();
9730 break;
9732 case EQ_EXPR:
9734 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9736 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9737 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9739 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9740 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9741 do_jump
9742 (fold
9743 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9744 fold (build (EQ_EXPR, TREE_TYPE (exp),
9745 fold (build1 (REALPART_EXPR,
9746 TREE_TYPE (inner_type),
9747 exp0)),
9748 fold (build1 (REALPART_EXPR,
9749 TREE_TYPE (inner_type),
9750 exp1)))),
9751 fold (build (EQ_EXPR, TREE_TYPE (exp),
9752 fold (build1 (IMAGPART_EXPR,
9753 TREE_TYPE (inner_type),
9754 exp0)),
9755 fold (build1 (IMAGPART_EXPR,
9756 TREE_TYPE (inner_type),
9757 exp1)))))),
9758 if_false_label, if_true_label);
9761 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9762 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9764 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9765 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9766 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9767 else
9768 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9769 break;
9772 case NE_EXPR:
9774 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9776 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9777 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9779 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9780 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9781 do_jump
9782 (fold
9783 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9784 fold (build (NE_EXPR, TREE_TYPE (exp),
9785 fold (build1 (REALPART_EXPR,
9786 TREE_TYPE (inner_type),
9787 exp0)),
9788 fold (build1 (REALPART_EXPR,
9789 TREE_TYPE (inner_type),
9790 exp1)))),
9791 fold (build (NE_EXPR, TREE_TYPE (exp),
9792 fold (build1 (IMAGPART_EXPR,
9793 TREE_TYPE (inner_type),
9794 exp0)),
9795 fold (build1 (IMAGPART_EXPR,
9796 TREE_TYPE (inner_type),
9797 exp1)))))),
9798 if_false_label, if_true_label);
9801 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9802 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9804 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9805 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9806 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9807 else
9808 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9809 break;
9812 case LT_EXPR:
9813 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9814 if (GET_MODE_CLASS (mode) == MODE_INT
9815 && ! can_compare_p (LT, mode, ccp_jump))
9816 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9817 else
9818 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9819 break;
9821 case LE_EXPR:
9822 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9823 if (GET_MODE_CLASS (mode) == MODE_INT
9824 && ! can_compare_p (LE, mode, ccp_jump))
9825 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9826 else
9827 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9828 break;
9830 case GT_EXPR:
9831 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9832 if (GET_MODE_CLASS (mode) == MODE_INT
9833 && ! can_compare_p (GT, mode, ccp_jump))
9834 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9835 else
9836 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9837 break;
9839 case GE_EXPR:
9840 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9841 if (GET_MODE_CLASS (mode) == MODE_INT
9842 && ! can_compare_p (GE, mode, ccp_jump))
9843 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9844 else
9845 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9846 break;
9848 case UNORDERED_EXPR:
9849 case ORDERED_EXPR:
9851 enum rtx_code cmp, rcmp;
9852 int do_rev;
9854 if (code == UNORDERED_EXPR)
9855 cmp = UNORDERED, rcmp = ORDERED;
9856 else
9857 cmp = ORDERED, rcmp = UNORDERED;
9858 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9860 do_rev = 0;
9861 if (! can_compare_p (cmp, mode, ccp_jump)
9862 && (can_compare_p (rcmp, mode, ccp_jump)
9863 /* If the target doesn't provide either UNORDERED or ORDERED
9864 comparisons, canonicalize on UNORDERED for the library. */
9865 || rcmp == UNORDERED))
9866 do_rev = 1;
9868 if (! do_rev)
9869 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9870 else
9871 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9873 break;
9876 enum rtx_code rcode1;
9877 enum tree_code tcode2;
9879 case UNLT_EXPR:
9880 rcode1 = UNLT;
9881 tcode2 = LT_EXPR;
9882 goto unordered_bcc;
9883 case UNLE_EXPR:
9884 rcode1 = UNLE;
9885 tcode2 = LE_EXPR;
9886 goto unordered_bcc;
9887 case UNGT_EXPR:
9888 rcode1 = UNGT;
9889 tcode2 = GT_EXPR;
9890 goto unordered_bcc;
9891 case UNGE_EXPR:
9892 rcode1 = UNGE;
9893 tcode2 = GE_EXPR;
9894 goto unordered_bcc;
9895 case UNEQ_EXPR:
9896 rcode1 = UNEQ;
9897 tcode2 = EQ_EXPR;
9898 goto unordered_bcc;
9900 unordered_bcc:
9901 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9902 if (can_compare_p (rcode1, mode, ccp_jump))
9903 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9904 if_true_label);
9905 else
9907 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9908 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9909 tree cmp0, cmp1;
9911 /* If the target doesn't support combined unordered
9912 compares, decompose into UNORDERED + comparison. */
9913 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9914 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9915 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9916 do_jump (exp, if_false_label, if_true_label);
9919 break;
9921 /* Special case:
9922 __builtin_expect (<test>, 0) and
9923 __builtin_expect (<test>, 1)
9925 We need to do this here, so that <test> is not converted to an SCC
9926 operation on machines that use condition code registers and COMPARE
9927 like the PowerPC, and then the jump is done based on whether the SCC
9928 operation produced a 1 or 0. */
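/* For illustration: for a source test such as
   "if (__builtin_expect (x == 0, 0))", handling the call here lets
   expand_builtin_expect_jump emit the conditional branch on "x == 0"
   directly, instead of first materializing a 0/1 value and then
   branching on that value.  */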
9929 case CALL_EXPR:
9930 /* Check for a built-in function. */
9931 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9933 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9934 tree arglist = TREE_OPERAND (exp, 1);
9936 if (TREE_CODE (fndecl) == FUNCTION_DECL
9937 && DECL_BUILT_IN (fndecl)
9938 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9939 && arglist != NULL_TREE
9940 && TREE_CHAIN (arglist) != NULL_TREE)
9942 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9943 if_true_label);
9945 if (seq != NULL_RTX)
9947 emit_insn (seq);
9948 return;
9952 /* fall through and generate the normal code. */
9954 default:
9955 normal:
9956 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9957 #if 0
9958 /* This is not needed any more and causes poor code since it causes
9959 comparisons and tests from non-SI objects to have different code
9960 sequences. */
9961 /* Copy to register to avoid generating bad insns by cse
9962 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9963 if (!cse_not_expected && GET_CODE (temp) == MEM)
9964 temp = copy_to_reg (temp);
9965 #endif
9966 do_pending_stack_adjust ();
9967 /* Do any postincrements in the expression that was tested. */
9968 emit_queue ();
9970 if (GET_CODE (temp) == CONST_INT
9971 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9972 || GET_CODE (temp) == LABEL_REF)
9974 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9975 if (target)
9976 emit_jump (target);
9978 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9979 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9980 /* Note swapping the labels gives us not-equal. */
9981 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9982 else if (GET_MODE (temp) != VOIDmode)
9983 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9984 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9985 GET_MODE (temp), NULL_RTX, 0,
9986 if_false_label, if_true_label);
9987 else
9988 abort ();
9991 if (drop_through_label)
9993 /* If do_jump produces code that might be jumped around,
9994 do any stack adjusts from that code, before the place
9995 where control merges in. */
9996 do_pending_stack_adjust ();
9997 emit_label (drop_through_label);
10001 /* Given a comparison expression EXP for values too wide to be compared
10002 with one insn, test the comparison and jump to the appropriate label.
10003 The code of EXP is ignored; we always test GT if SWAP is 0,
10004 and LT if SWAP is 1. */
10006 static void
10007 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10008 tree exp;
10009 int swap;
10010 rtx if_false_label, if_true_label;
10012 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10013 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10014 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10015 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10017 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
10020 /* Compare OP0 with OP1, word at a time, in mode MODE.
10021 UNSIGNEDP says to do unsigned comparison.
10022 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
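/* For illustration: for two DImode operands on a 32-bit target this
   compares one word at a time, high-order word first: a greater-than
   result jumps to IF_TRUE_LABEL, any other inequality jumps to
   IF_FALSE_LABEL, and equality falls through to compare the low-order
   words, which are always compared unsigned.  */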
10024 void
10025 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10026 enum machine_mode mode;
10027 int unsignedp;
10028 rtx op0, op1;
10029 rtx if_false_label, if_true_label;
10031 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10032 rtx drop_through_label = 0;
10033 int i;
10035 if (! if_true_label || ! if_false_label)
10036 drop_through_label = gen_label_rtx ();
10037 if (! if_true_label)
10038 if_true_label = drop_through_label;
10039 if (! if_false_label)
10040 if_false_label = drop_through_label;
10042 /* Compare a word at a time, high order first. */
10043 for (i = 0; i < nwords; i++)
10045 rtx op0_word, op1_word;
10047 if (WORDS_BIG_ENDIAN)
10049 op0_word = operand_subword_force (op0, i, mode);
10050 op1_word = operand_subword_force (op1, i, mode);
10052 else
10054 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10055 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10058 /* All but high-order word must be compared as unsigned. */
10059 do_compare_rtx_and_jump (op0_word, op1_word, GT,
10060 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
10061 NULL_RTX, if_true_label);
10063 /* Consider lower words only if these are equal. */
10064 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
10065 NULL_RTX, 0, NULL_RTX, if_false_label);
10068 if (if_false_label)
10069 emit_jump (if_false_label);
10070 if (drop_through_label)
10071 emit_label (drop_through_label);
10074 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10075 with one insn, test the comparison and jump to the appropriate label. */
10077 static void
10078 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10079 tree exp;
10080 rtx if_false_label, if_true_label;
10082 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10083 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10084 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10085 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10086 int i;
10087 rtx drop_through_label = 0;
10089 if (! if_false_label)
10090 drop_through_label = if_false_label = gen_label_rtx ();
10092 for (i = 0; i < nwords; i++)
10093 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10094 operand_subword_force (op1, i, mode),
10095 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10096 word_mode, NULL_RTX, 0, if_false_label,
10097 NULL_RTX);
10099 if (if_true_label)
10100 emit_jump (if_true_label);
10101 if (drop_through_label)
10102 emit_label (drop_through_label);
10105 /* Jump according to whether OP0 is 0.
10106 We assume that OP0 has an integer mode that is too wide
10107 for the available compare insns. */
10109 void
10110 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10111 rtx op0;
10112 rtx if_false_label, if_true_label;
10114 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10115 rtx part;
10116 int i;
10117 rtx drop_through_label = 0;
10119 /* The fastest way of doing this comparison on almost any machine is to
10120 "or" all the words and compare the result. If all have to be loaded
10121 from memory and this is a very wide item, it's possible this may
10122 be slower, but that's highly unlikely. */
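/* For illustration: for a DImode OP0 on a 32-bit target this computes
   "low | high" into a single word_mode register and does one compare of
   that result against zero, instead of two separate word comparisons
   with a conditional jump between them.  */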
10124 part = gen_reg_rtx (word_mode);
10125 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10126 for (i = 1; i < nwords && part != 0; i++)
10127 part = expand_binop (word_mode, ior_optab, part,
10128 operand_subword_force (op0, i, GET_MODE (op0)),
10129 part, 1, OPTAB_WIDEN);
10131 if (part != 0)
10133 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
10134 NULL_RTX, 0, if_false_label, if_true_label);
10136 return;
10139 /* If we couldn't do the "or" simply, do this with a series of compares. */
10140 if (! if_false_label)
10141 drop_through_label = if_false_label = gen_label_rtx ();
10143 for (i = 0; i < nwords; i++)
10144 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10145 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
10146 if_false_label, NULL_RTX);
10148 if (if_true_label)
10149 emit_jump (if_true_label);
10151 if (drop_through_label)
10152 emit_label (drop_through_label);
10155 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
10156 (including code to compute the values to be compared)
10157 and set (CC0) according to the result.
10158 The decision as to signed or unsigned comparison must be made by the caller.
10160 We force a stack adjustment unless there are currently
10161 things pushed on the stack that aren't yet used.
10163 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10164 compared.
10166 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10167 size of MODE should be used. */
10170 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10171 register rtx op0, op1;
10172 enum rtx_code code;
10173 int unsignedp;
10174 enum machine_mode mode;
10175 rtx size;
10176 unsigned int align;
10178 rtx tem;
10180 /* If one operand is constant, make it the second one. Only do this
10181 if the other operand is not constant as well. */
10183 if (swap_commutative_operands_p (op0, op1))
10185 tem = op0;
10186 op0 = op1;
10187 op1 = tem;
10188 code = swap_condition (code);
10191 if (flag_force_mem)
10193 op0 = force_not_mem (op0);
10194 op1 = force_not_mem (op1);
10197 do_pending_stack_adjust ();
10199 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10200 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10201 return tem;
10203 #if 0
10204 /* There's no need to do this now that combine.c can eliminate lots of
10205 sign extensions. This can be less efficient in certain cases on other
10206 machines. */
10208 /* If this is a signed equality comparison, we can do it as an
10209 unsigned comparison since zero-extension is cheaper than sign
10210 extension and comparisons with zero are done as unsigned. This is
10211 the case even on machines that can do fast sign extension, since
10212 zero-extension is easier to combine with other operations than
10213 sign-extension is. If we are comparing against a constant, we must
10214 convert it to what it would look like unsigned. */
10215 if ((code == EQ || code == NE) && ! unsignedp
10216 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10218 if (GET_CODE (op1) == CONST_INT
10219 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10220 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10221 unsignedp = 1;
10223 #endif
10225 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10227 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10230 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10231 The decision as to signed or unsigned comparison must be made by the caller.
10233 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10234 compared.
10236 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10237 size of MODE should be used. */
10239 void
10240 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
10241 if_false_label, if_true_label)
10242 register rtx op0, op1;
10243 enum rtx_code code;
10244 int unsignedp;
10245 enum machine_mode mode;
10246 rtx size;
10247 unsigned int align;
10248 rtx if_false_label, if_true_label;
10250 rtx tem;
10251 int dummy_true_label = 0;
10253 /* Reverse the comparison if that is safe and we want to jump if it is
10254 false. */
10255 if (! if_true_label && ! FLOAT_MODE_P (mode))
10257 if_true_label = if_false_label;
10258 if_false_label = 0;
10259 code = reverse_condition (code);
10262 /* If one operand is constant, make it the second one. Only do this
10263 if the other operand is not constant as well. */
10265 if (swap_commutative_operands_p (op0, op1))
10267 tem = op0;
10268 op0 = op1;
10269 op1 = tem;
10270 code = swap_condition (code);
10273 if (flag_force_mem)
10275 op0 = force_not_mem (op0);
10276 op1 = force_not_mem (op1);
10279 do_pending_stack_adjust ();
10281 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10282 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10284 if (tem == const_true_rtx)
10286 if (if_true_label)
10287 emit_jump (if_true_label);
10289 else
10291 if (if_false_label)
10292 emit_jump (if_false_label);
10294 return;
10297 #if 0
10298 /* There's no need to do this now that combine.c can eliminate lots of
10299 sign extensions. This can be less efficient in certain cases on other
10300 machines. */
10302 /* If this is a signed equality comparison, we can do it as an
10303 unsigned comparison since zero-extension is cheaper than sign
10304 extension and comparisons with zero are done as unsigned. This is
10305 the case even on machines that can do fast sign extension, since
10306 zero-extension is easier to combine with other operations than
10307 sign-extension is. If we are comparing against a constant, we must
10308 convert it to what it would look like unsigned. */
10309 if ((code == EQ || code == NE) && ! unsignedp
10310 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10312 if (GET_CODE (op1) == CONST_INT
10313 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10314 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10315 unsignedp = 1;
10317 #endif
10319 if (! if_true_label)
10321 dummy_true_label = 1;
10322 if_true_label = gen_label_rtx ();
10325 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10326 if_true_label);
10328 if (if_false_label)
10329 emit_jump (if_false_label);
10330 if (dummy_true_label)
10331 emit_label (if_true_label);
10334 /* Generate code for a comparison expression EXP (including code to compute
10335 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10336 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10337 generated code will drop through.
10338 SIGNED_CODE should be the rtx operation for this comparison for
10339 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10341 We force a stack adjustment unless there are currently
10342 things pushed on the stack that aren't yet used. */
10344 static void
10345 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10346 if_true_label)
10347 register tree exp;
10348 enum rtx_code signed_code, unsigned_code;
10349 rtx if_false_label, if_true_label;
10351 unsigned int align0, align1;
10352 register rtx op0, op1;
10353 register tree type;
10354 register enum machine_mode mode;
10355 int unsignedp;
10356 enum rtx_code code;
10358 /* Don't crash if the comparison was erroneous. */
10359 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10360 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10361 return;
10363 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10364 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10365 return;
10367 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10368 mode = TYPE_MODE (type);
10369 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10370 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10371 || (GET_MODE_BITSIZE (mode)
10372 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10373 1)))))))
10375 /* op0 might have been replaced by promoted constant, in which
10376 case the type of second argument should be used. */
10377 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10378 mode = TYPE_MODE (type);
10380 unsignedp = TREE_UNSIGNED (type);
10381 code = unsignedp ? unsigned_code : signed_code;
10383 #ifdef HAVE_canonicalize_funcptr_for_compare
10384 /* If function pointers need to be "canonicalized" before they can
10385 be reliably compared, then canonicalize them. */
10386 if (HAVE_canonicalize_funcptr_for_compare
10387 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10388 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10389 == FUNCTION_TYPE))
10391 rtx new_op0 = gen_reg_rtx (mode);
10393 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10394 op0 = new_op0;
10397 if (HAVE_canonicalize_funcptr_for_compare
10398 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10399 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10400 == FUNCTION_TYPE))
10402 rtx new_op1 = gen_reg_rtx (mode);
10404 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10405 op1 = new_op1;
10407 #endif
10409 /* Do any postincrements in the expression that was tested. */
10410 emit_queue ();
10412 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10413 ((mode == BLKmode)
10414 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10415 MIN (align0, align1),
10416 if_false_label, if_true_label);
10419 /* Generate code to calculate EXP using a store-flag instruction
10420 and return an rtx for the result. EXP is either a comparison
10421 or a TRUTH_NOT_EXPR whose operand is a comparison.
10423 If TARGET is nonzero, store the result there if convenient.
10425 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10426 cheap.
10428 Return zero if there is no suitable set-flag instruction
10429 available on this machine.
10431 Once expand_expr has been called on the arguments of the comparison,
10432 we are committed to doing the store flag, since it is not safe to
10433 re-evaluate the expression. We emit the store-flag insn by calling
10434 emit_store_flag, but only expand the arguments if we have a reason
10435 to believe that emit_store_flag will be successful. If we think that
10436 it will, but it isn't, we have to simulate the store-flag with a
10437 set/jump/set sequence. */
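/* For illustration: the set/jump/set fallback at the end of this
   function emits code of roughly this shape (ignoring INVERT):

	target = 1;
	if (op0 <cond> op1) goto label;
	target = 0;
      label:

   so TARGET still ends up holding the comparison result even when no
   scc instruction is available.  */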
10439 static rtx
10440 do_store_flag (exp, target, mode, only_cheap)
10441 tree exp;
10442 rtx target;
10443 enum machine_mode mode;
10444 int only_cheap;
10446 enum rtx_code code;
10447 tree arg0, arg1, type;
10448 tree tem;
10449 enum machine_mode operand_mode;
10450 int invert = 0;
10451 int unsignedp;
10452 rtx op0, op1;
10453 enum insn_code icode;
10454 rtx subtarget = target;
10455 rtx result, label;
10457 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10458 result at the end. We can't simply invert the test since it would
10459 have already been inverted if it were valid. This case occurs for
10460 some floating-point comparisons. */
10462 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10463 invert = 1, exp = TREE_OPERAND (exp, 0);
10465 arg0 = TREE_OPERAND (exp, 0);
10466 arg1 = TREE_OPERAND (exp, 1);
10468 /* Don't crash if the comparison was erroneous. */
10469 if (arg0 == error_mark_node || arg1 == error_mark_node)
10470 return const0_rtx;
10472 type = TREE_TYPE (arg0);
10473 operand_mode = TYPE_MODE (type);
10474 unsignedp = TREE_UNSIGNED (type);
10476 /* We won't bother with BLKmode store-flag operations because it would mean
10477 passing a lot of information to emit_store_flag. */
10478 if (operand_mode == BLKmode)
10479 return 0;
10481 /* We won't bother with store-flag operations involving function pointers
10482 when function pointers must be canonicalized before comparisons. */
10483 #ifdef HAVE_canonicalize_funcptr_for_compare
10484 if (HAVE_canonicalize_funcptr_for_compare
10485 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10486 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10487 == FUNCTION_TYPE))
10488 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10489 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10490 == FUNCTION_TYPE))))
10491 return 0;
10492 #endif
10494 STRIP_NOPS (arg0);
10495 STRIP_NOPS (arg1);
10497 /* Get the rtx comparison code to use. We know that EXP is a comparison
10498 operation of some type. Some comparisons against 1 and -1 can be
10499 converted to comparisons with zero. Do so here so that the tests
10500 below will be aware that we have a comparison with zero. These
10501 tests will not catch constants in the first operand, but constants
10502 are rarely passed as the first operand. */
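/* For illustration: a signed "x <= -1" is rewritten below as "x < 0",
   an unsigned "x < 1" as "x <= 0", and "x >= 1" as "x > 0", so the
   later single-bit and sign-bit shortcuts only need to recognize
   comparisons against zero.  */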
10504 switch (TREE_CODE (exp))
10506 case EQ_EXPR:
10507 code = EQ;
10508 break;
10509 case NE_EXPR:
10510 code = NE;
10511 break;
10512 case LT_EXPR:
10513 if (integer_onep (arg1))
10514 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10515 else
10516 code = unsignedp ? LTU : LT;
10517 break;
10518 case LE_EXPR:
10519 if (! unsignedp && integer_all_onesp (arg1))
10520 arg1 = integer_zero_node, code = LT;
10521 else
10522 code = unsignedp ? LEU : LE;
10523 break;
10524 case GT_EXPR:
10525 if (! unsignedp && integer_all_onesp (arg1))
10526 arg1 = integer_zero_node, code = GE;
10527 else
10528 code = unsignedp ? GTU : GT;
10529 break;
10530 case GE_EXPR:
10531 if (integer_onep (arg1))
10532 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10533 else
10534 code = unsignedp ? GEU : GE;
10535 break;
10537 case UNORDERED_EXPR:
10538 code = UNORDERED;
10539 break;
10540 case ORDERED_EXPR:
10541 code = ORDERED;
10542 break;
10543 case UNLT_EXPR:
10544 code = UNLT;
10545 break;
10546 case UNLE_EXPR:
10547 code = UNLE;
10548 break;
10549 case UNGT_EXPR:
10550 code = UNGT;
10551 break;
10552 case UNGE_EXPR:
10553 code = UNGE;
10554 break;
10555 case UNEQ_EXPR:
10556 code = UNEQ;
10557 break;
10559 default:
10560 abort ();
10563 /* Put a constant second. */
10564 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10566 tem = arg0; arg0 = arg1; arg1 = tem;
10567 code = swap_condition (code);
10570 /* If this is an equality or inequality test of a single bit, we can
10571 do this by shifting the bit being tested to the low-order bit and
10572 masking the result with the constant 1. If the condition was EQ,
10573 we xor it with 1. This does not require an scc insn and is faster
10574 than an scc insn even if we have it. */
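/* For illustration: "(x & 8) != 0" is computed as "(x >> 3) & 1"; for
   the inverse test "(x & 8) == 0" the shifted value is additionally
   XORed with 1 before the final mask, yielding the complemented bit.  */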
10576 if ((code == NE || code == EQ)
10577 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10578 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10580 tree inner = TREE_OPERAND (arg0, 0);
10581 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10582 int ops_unsignedp;
10584 /* If INNER is a right shift of a constant and it plus BITNUM does
10585 not overflow, adjust BITNUM and INNER. */
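/* For illustration: for "((x >> 2) & 4) != 0", BITNUM starts as 2 and
   INNER as "x >> 2"; folding the shift count in gives BITNUM 4 and
   INNER x, i.e. a direct test of bit 4 of x.  */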
10587 if (TREE_CODE (inner) == RSHIFT_EXPR
10588 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10589 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10590 && bitnum < TYPE_PRECISION (type)
10591 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10592 bitnum - TYPE_PRECISION (type)))
10594 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10595 inner = TREE_OPERAND (inner, 0);
10598 /* If we are going to be able to omit the AND below, we must do our
10599 operations as unsigned. If we must use the AND, we have a choice.
10600 Normally unsigned is faster, but for some machines signed is. */
10601 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10602 #ifdef LOAD_EXTEND_OP
10603 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10604 #else
10606 #endif
10609 if (! get_subtarget (subtarget)
10610 || GET_MODE (subtarget) != operand_mode
10611 || ! safe_from_p (subtarget, inner, 1))
10612 subtarget = 0;
10614 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10616 if (bitnum != 0)
10617 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10618 size_int (bitnum), subtarget, ops_unsignedp);
10620 if (GET_MODE (op0) != mode)
10621 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10623 if ((code == EQ && ! invert) || (code == NE && invert))
10624 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10625 ops_unsignedp, OPTAB_LIB_WIDEN);
10627 /* Put the AND last so it can combine with more things. */
10628 if (bitnum != TYPE_PRECISION (type) - 1)
10629 op0 = expand_and (op0, const1_rtx, subtarget);
10631 return op0;
10634 /* Now see if we are likely to be able to do this. Return if not. */
10635 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10636 return 0;
10638 icode = setcc_gen_code[(int) code];
10639 if (icode == CODE_FOR_nothing
10640 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10642 /* We can only do this if it is one of the special cases that
10643 can be handled without an scc insn. */
10644 if ((code == LT && integer_zerop (arg1))
10645 || (! only_cheap && code == GE && integer_zerop (arg1)))
10647 else if (BRANCH_COST >= 0
10648 && ! only_cheap && (code == NE || code == EQ)
10649 && TREE_CODE (type) != REAL_TYPE
10650 && ((abs_optab->handlers[(int) operand_mode].insn_code
10651 != CODE_FOR_nothing)
10652 || (ffs_optab->handlers[(int) operand_mode].insn_code
10653 != CODE_FOR_nothing)))
10655 else
10656 return 0;
10659 if (! get_subtarget (target)
10660 || GET_MODE (subtarget) != operand_mode
10661 || ! safe_from_p (subtarget, arg1, 1))
10662 subtarget = 0;
10664 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10665 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10667 if (target == 0)
10668 target = gen_reg_rtx (mode);
10670 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10671 because, if emit_store_flag does anything, it will succeed and
10672 OP0 and OP1 will not be used subsequently. */
10674 result = emit_store_flag (target, code,
10675 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10676 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10677 operand_mode, unsignedp, 1);
10679 if (result)
10681 if (invert)
10682 result = expand_binop (mode, xor_optab, result, const1_rtx,
10683 result, 0, OPTAB_LIB_WIDEN);
10684 return result;
10687 /* If this failed, we have to do this with set/compare/jump/set code. */
10688 if (GET_CODE (target) != REG
10689 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10690 target = gen_reg_rtx (GET_MODE (target));
10692 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10693 result = compare_from_rtx (op0, op1, code, unsignedp,
10694 operand_mode, NULL_RTX, 0);
10695 if (GET_CODE (result) == CONST_INT)
10696 return (((result == const0_rtx && ! invert)
10697 || (result != const0_rtx && invert))
10698 ? const0_rtx : const1_rtx);
10700 label = gen_label_rtx ();
10701 if (bcc_gen_fctn[(int) code] == 0)
10702 abort ();
10704 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10705 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10706 emit_label (label);
10708 return target;
10711 /* Generate a tablejump instruction (used for switch statements). */
10713 #ifdef HAVE_tablejump
10715 /* INDEX is the value being switched on, with the lowest value
10716 in the table already subtracted.
10717 MODE is its expected mode (needed if INDEX is constant).
10718 RANGE is the length of the jump table.
10719 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10721 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10722 index value is out of range. */
10724 void
10725 do_tablejump (index, mode, range, table_label, default_label)
10726 rtx index, range, table_label, default_label;
10727 enum machine_mode mode;
10729 register rtx temp, vector;
10731 /* Do an unsigned comparison (in the proper mode) between the index
10732 expression and the value which represents the length of the range.
10733 Since we just finished subtracting the lower bound of the range
10734 from the index expression, this comparison allows us to simultaneously
10735 check that the original index expression value is both greater than
10736 or equal to the minimum value of the range and less than or equal to
10737 the maximum value of the range. */
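/* For illustration: with case values 3 through 10, INDEX has already
   had 3 subtracted, so valid adjusted indices run from 0 through 7.
   An original value of 1 becomes the unsigned quantity 1 - 3, which
   wraps to a huge number, so the single GTU comparison against RANGE
   below rejects values under the minimum as well as those over the
   maximum.  */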
10739 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10740 0, default_label);
10742 /* If index is in range, it must fit in Pmode.
10743 Convert to Pmode so we can index with it. */
10744 if (mode != Pmode)
10745 index = convert_to_mode (Pmode, index, 1);
10747 /* Don't let a MEM slip through, because then INDEX that comes
10748 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10749 and break_out_memory_refs will go to work on it and mess it up. */
10750 #ifdef PIC_CASE_VECTOR_ADDRESS
10751 if (flag_pic && GET_CODE (index) != REG)
10752 index = copy_to_mode_reg (Pmode, index);
10753 #endif
10755 /* If flag_force_addr were to affect this address
10756 it could interfere with the tricky assumptions made
10757 about addresses that contain label-refs,
10758 which may be valid only very near the tablejump itself. */
10759 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10760 GET_MODE_SIZE, because this indicates how large insns are. The other
10761 uses should all be Pmode, because they are addresses. This code
10762 could fail if addresses and insns are not the same size. */
10763 index = gen_rtx_PLUS (Pmode,
10764 gen_rtx_MULT (Pmode, index,
10765 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10766 gen_rtx_LABEL_REF (Pmode, table_label));
10767 #ifdef PIC_CASE_VECTOR_ADDRESS
10768 if (flag_pic)
10769 index = PIC_CASE_VECTOR_ADDRESS (index);
10770 else
10771 #endif
10772 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10773 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10774 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10775 RTX_UNCHANGING_P (vector) = 1;
10776 convert_move (temp, vector, 0);
10778 emit_jump_insn (gen_tablejump (temp, table_label));
10780 /* If we are generating PIC code or if the table is PC-relative, the
10781 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10782 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10783 emit_barrier ();
10786 #endif /* HAVE_tablejump */