1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
3 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-flags.h"
34 #include "insn-codes.h"
35 #include "insn-config.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "recog.h"
39 #include "reload.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "defaults.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "intl.h"
46 #include "tm_p.h"
48 #ifndef ACCUMULATE_OUTGOING_ARGS
49 #define ACCUMULATE_OUTGOING_ARGS 0
50 #endif
52 /* Supply a default definition for PUSH_ARGS. */
53 #ifndef PUSH_ARGS
54 #ifdef PUSH_ROUNDING
55 #define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
56 #else
57 #define PUSH_ARGS 0
58 #endif
59 #endif
61 /* Decide whether a function's arguments should be processed
62 from first to last or from last to first.
64 They should if the stack and args grow in opposite directions, but
65 only if we have push insns. */
67 #ifdef PUSH_ROUNDING
69 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
70 #define PUSH_ARGS_REVERSED /* If it's last to first. */
71 #endif
73 #endif
75 #ifndef STACK_PUSH_CODE
76 #ifdef STACK_GROWS_DOWNWARD
77 #define STACK_PUSH_CODE PRE_DEC
78 #else
79 #define STACK_PUSH_CODE PRE_INC
80 #endif
81 #endif
83 /* Assume that case vectors are not pc-relative. */
84 #ifndef CASE_VECTOR_PC_RELATIVE
85 #define CASE_VECTOR_PC_RELATIVE 0
86 #endif
88 /* Hook called by safe_from_p for language-specific tree codes. It is
89 up to the language front-end to install a hook if it has any such
 90 codes that safe_from_p needs to know about.  Since safe_from_p will
91 recursively explore the TREE_OPERANDs of an expression, this hook
92 should not reexamine those pieces. This routine may recursively
93 call safe_from_p; it should always pass `0' as the TOP_P
94 parameter. */
95 int (*lang_safe_from_p) PARAMS ((rtx, tree));
97 /* If this is nonzero, we do not bother generating VOLATILE
98 around volatile memory references, and we are willing to
99 output indirect addresses. If cse is to follow, we reject
100 indirect addresses so a useful potential cse is generated;
101 if it is used only once, instruction combination will produce
102 the same indirect address eventually. */
103 int cse_not_expected;
 105 /* Don't check memory usage, since code is being emitted to check memory
106 usage. Used when current_function_check_memory_usage is true, to avoid
107 infinite recursion. */
108 static int in_check_memory_usage;
110 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
111 static tree placeholder_list = 0;
113 /* This structure is used by move_by_pieces to describe the move to
114 be performed. */
115 struct move_by_pieces
117 rtx to;
118 rtx to_addr;
119 int autinc_to;
120 int explicit_inc_to;
121 rtx from;
122 rtx from_addr;
123 int autinc_from;
124 int explicit_inc_from;
125 unsigned HOST_WIDE_INT len;
126 HOST_WIDE_INT offset;
127 int reverse;
130 /* This structure is used by store_by_pieces to describe the clear to
131 be performed. */
133 struct store_by_pieces
135 rtx to;
136 rtx to_addr;
137 int autinc_to;
138 int explicit_inc_to;
139 unsigned HOST_WIDE_INT len;
140 HOST_WIDE_INT offset;
141 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
142 PTR constfundata;
143 int reverse;
146 extern struct obstack permanent_obstack;
148 static rtx get_push_address PARAMS ((int));
150 static rtx enqueue_insn PARAMS ((rtx, rtx));
151 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
152 PARAMS ((unsigned HOST_WIDE_INT,
153 unsigned int));
154 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
155 struct move_by_pieces *));
156 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
157 enum machine_mode));
158 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
159 unsigned int));
160 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
161 unsigned int));
162 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
163 enum machine_mode,
164 struct store_by_pieces *));
165 static rtx get_subtarget PARAMS ((rtx));
166 static int is_zeros_p PARAMS ((tree));
167 static int mostly_zeros_p PARAMS ((tree));
168 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
169 HOST_WIDE_INT, enum machine_mode,
170 tree, tree, unsigned int, int,
171 int));
172 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
173 HOST_WIDE_INT));
174 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
175 HOST_WIDE_INT, enum machine_mode,
176 tree, enum machine_mode, int,
177 unsigned int, HOST_WIDE_INT, int));
178 static enum memory_use_mode
179 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
180 static tree save_noncopied_parts PARAMS ((tree, tree));
181 static tree init_noncopied_parts PARAMS ((tree, tree));
182 static int fixed_type_p PARAMS ((tree));
183 static rtx var_rtx PARAMS ((tree));
184 static int readonly_fields_p PARAMS ((tree));
185 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
186 static rtx expand_increment PARAMS ((tree, int, int));
187 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
188 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
189 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
190 rtx, rtx));
191 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
193 /* Record for each mode whether we can move a register directly to or
194 from an object of that mode in memory. If we can't, we won't try
195 to use that mode directly when accessing a field of that mode. */
197 static char direct_load[NUM_MACHINE_MODES];
198 static char direct_store[NUM_MACHINE_MODES];
200 /* If a memory-to-memory move would take MOVE_RATIO or more simple
201 move-instruction sequences, we will do a movstr or libcall instead. */
203 #ifndef MOVE_RATIO
204 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
205 #define MOVE_RATIO 2
206 #else
207 /* If we are optimizing for space (-Os), cut down the default move ratio. */
208 #define MOVE_RATIO (optimize_size ? 3 : 15)
209 #endif
210 #endif
212 /* This macro is used to determine whether move_by_pieces should be called
213 to perform a structure copy. */
214 #ifndef MOVE_BY_PIECES_P
215 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
216 (move_by_pieces_ninsns (SIZE, ALIGN) < MOVE_RATIO)
217 #endif
219 /* This array records the insn_code of insns to perform block moves. */
220 enum insn_code movstr_optab[NUM_MACHINE_MODES];
222 /* This array records the insn_code of insns to perform block clears. */
223 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
225 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
227 #ifndef SLOW_UNALIGNED_ACCESS
228 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
229 #endif
231 /* This is run once per compilation to set up which modes can be used
232 directly in memory and to initialize the block move optab. */
234 void
235 init_expr_once ()
237 rtx insn, pat;
238 enum machine_mode mode;
239 int num_clobbers;
240 rtx mem, mem1;
242 start_sequence ();
244 /* Try indexing by frame ptr and try by stack ptr.
245 It is known that on the Convex the stack ptr isn't a valid index.
246 With luck, one or the other is valid on any machine. */
247 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
248 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
250 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
251 pat = PATTERN (insn);
253 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
254 mode = (enum machine_mode) ((int) mode + 1))
256 int regno;
257 rtx reg;
259 direct_load[(int) mode] = direct_store[(int) mode] = 0;
260 PUT_MODE (mem, mode);
261 PUT_MODE (mem1, mode);
263 /* See if there is some register that can be used in this mode and
264 directly loaded or stored from memory. */
266 if (mode != VOIDmode && mode != BLKmode)
267 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
268 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
269 regno++)
271 if (! HARD_REGNO_MODE_OK (regno, mode))
272 continue;
274 reg = gen_rtx_REG (mode, regno);
276 SET_SRC (pat) = mem;
277 SET_DEST (pat) = reg;
278 if (recog (pat, insn, &num_clobbers) >= 0)
279 direct_load[(int) mode] = 1;
281 SET_SRC (pat) = mem1;
282 SET_DEST (pat) = reg;
283 if (recog (pat, insn, &num_clobbers) >= 0)
284 direct_load[(int) mode] = 1;
286 SET_SRC (pat) = reg;
287 SET_DEST (pat) = mem;
288 if (recog (pat, insn, &num_clobbers) >= 0)
289 direct_store[(int) mode] = 1;
291 SET_SRC (pat) = reg;
292 SET_DEST (pat) = mem1;
293 if (recog (pat, insn, &num_clobbers) >= 0)
294 direct_store[(int) mode] = 1;
298 end_sequence ();
301 /* This is run at the start of compiling a function. */
303 void
304 init_expr ()
306 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
308 pending_chain = 0;
309 pending_stack_adjust = 0;
310 stack_pointer_delta = 0;
311 inhibit_defer_pop = 0;
312 saveregs_value = 0;
313 apply_args_value = 0;
314 forced_labels = 0;
317 void
318 mark_expr_status (p)
319 struct expr_status *p;
321 if (p == NULL)
322 return;
324 ggc_mark_rtx (p->x_saveregs_value);
325 ggc_mark_rtx (p->x_apply_args_value);
326 ggc_mark_rtx (p->x_forced_labels);
329 void
330 free_expr_status (f)
331 struct function *f;
333 free (f->expr);
334 f->expr = NULL;
337 /* Small sanity check that the queue is empty at the end of a function. */
339 void
340 finish_expr_for_function ()
342 if (pending_chain)
343 abort ();
346 /* Manage the queue of increment instructions to be output
347 for POSTINCREMENT_EXPR expressions, etc. */
349 /* Queue up to increment (or change) VAR later. BODY says how:
350 BODY should be the same thing you would pass to emit_insn
351 to increment right away. It will go to emit_insn later on.
353 The value is a QUEUED expression to be used in place of VAR
354 where you want to guarantee the pre-incrementation value of VAR. */
356 static rtx
357 enqueue_insn (var, body)
358 rtx var, body;
360 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
361 body, pending_chain);
362 return pending_chain;
365 /* Use protect_from_queue to convert a QUEUED expression
366 into something that you can put immediately into an instruction.
367 If the queued incrementation has not happened yet,
368 protect_from_queue returns the variable itself.
369 If the incrementation has happened, protect_from_queue returns a temp
370 that contains a copy of the old value of the variable.
372 Any time an rtx which might possibly be a QUEUED is to be put
373 into an instruction, it must be passed through protect_from_queue first.
374 QUEUED expressions are not meaningful in instructions.
376 Do not pass a value through protect_from_queue and then hold
377 on to it for a while before putting it in an instruction!
378 If the queue is flushed in between, incorrect code will result. */
381 protect_from_queue (x, modify)
382 register rtx x;
383 int modify;
385 register RTX_CODE code = GET_CODE (x);
387 #if 0 /* A QUEUED can hang around after the queue is forced out. */
388 /* Shortcut for most common case. */
389 if (pending_chain == 0)
390 return x;
391 #endif
393 if (code != QUEUED)
395 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
396 use of autoincrement. Make a copy of the contents of the memory
397 location rather than a copy of the address, but not if the value is
398 of mode BLKmode. Don't modify X in place since it might be
399 shared. */
400 if (code == MEM && GET_MODE (x) != BLKmode
401 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
403 register rtx y = XEXP (x, 0);
404 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
406 MEM_COPY_ATTRIBUTES (new, x);
408 if (QUEUED_INSN (y))
410 register rtx temp = gen_reg_rtx (GET_MODE (new));
411 emit_insn_before (gen_move_insn (temp, new),
412 QUEUED_INSN (y));
413 return temp;
415 return new;
417 /* Otherwise, recursively protect the subexpressions of all
418 the kinds of rtx's that can contain a QUEUED. */
419 if (code == MEM)
421 rtx tem = protect_from_queue (XEXP (x, 0), 0);
422 if (tem != XEXP (x, 0))
424 x = copy_rtx (x);
425 XEXP (x, 0) = tem;
428 else if (code == PLUS || code == MULT)
430 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
431 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
432 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
434 x = copy_rtx (x);
435 XEXP (x, 0) = new0;
436 XEXP (x, 1) = new1;
439 return x;
441 /* If the increment has not happened, use the variable itself. */
442 if (QUEUED_INSN (x) == 0)
443 return QUEUED_VAR (x);
444 /* If the increment has happened and a pre-increment copy exists,
445 use that copy. */
446 if (QUEUED_COPY (x) != 0)
447 return QUEUED_COPY (x);
448 /* The increment has happened but we haven't set up a pre-increment copy.
449 Set one up now, and use it. */
450 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
451 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
452 QUEUED_INSN (x));
453 return QUEUED_COPY (x);
456 /* Return nonzero if X contains a QUEUED expression:
457 if it contains anything that will be altered by a queued increment.
458 We handle only combinations of MEM, PLUS, MINUS and MULT operators
459 since memory addresses generally contain only those. */
462 queued_subexp_p (x)
463 rtx x;
465 register enum rtx_code code = GET_CODE (x);
466 switch (code)
468 case QUEUED:
469 return 1;
470 case MEM:
471 return queued_subexp_p (XEXP (x, 0));
472 case MULT:
473 case PLUS:
474 case MINUS:
475 return (queued_subexp_p (XEXP (x, 0))
476 || queued_subexp_p (XEXP (x, 1)));
477 default:
478 return 0;
482 /* Perform all the pending incrementations. */
484 void
485 emit_queue ()
487 register rtx p;
488 while ((p = pending_chain))
490 rtx body = QUEUED_BODY (p);
492 if (GET_CODE (body) == SEQUENCE)
494 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
495 emit_insn (QUEUED_BODY (p));
497 else
498 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
499 pending_chain = QUEUED_NEXT (p);
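/* A minimal usage sketch (editorial, not part of the original source):
   the calling discipline for the queue machinery above.  EXP, TARGET and
   OP are hypothetical; the point is that any rtx that might be a QUEUED
   is passed through protect_from_queue before it is put into an insn, and
   the queue is flushed with emit_queue at a statement boundary.

       rtx op = expand_expr (exp, NULL_RTX, VOIDmode, 0);
       op = protect_from_queue (op, 0);   (read access, so MODIFY is 0)
       emit_move_insn (target, op);
       emit_queue ();                     (perform pending post-increments)
*/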
503 /* Copy data from FROM to TO, where the machine modes are not the same.
504 Both modes may be integer, or both may be floating.
505 UNSIGNEDP should be nonzero if FROM is an unsigned type.
506 This causes zero-extension instead of sign-extension. */
508 void
509 convert_move (to, from, unsignedp)
510 register rtx to, from;
511 int unsignedp;
513 enum machine_mode to_mode = GET_MODE (to);
514 enum machine_mode from_mode = GET_MODE (from);
515 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
516 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
517 enum insn_code code;
518 rtx libcall;
520 /* rtx code for making an equivalent value. */
521 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
523 to = protect_from_queue (to, 1);
524 from = protect_from_queue (from, 0);
526 if (to_real != from_real)
527 abort ();
529 /* If FROM is a SUBREG that indicates that we have already done at least
530 the required extension, strip it. We don't handle such SUBREGs as
531 TO here. */
533 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
534 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
535 >= GET_MODE_SIZE (to_mode))
536 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
537 from = gen_lowpart (to_mode, from), from_mode = to_mode;
539 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
540 abort ();
542 if (to_mode == from_mode
543 || (from_mode == VOIDmode && CONSTANT_P (from)))
545 emit_move_insn (to, from);
546 return;
549 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
551 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
552 abort ();
554 if (VECTOR_MODE_P (to_mode))
555 from = gen_rtx_SUBREG (to_mode, from, 0);
556 else
557 to = gen_rtx_SUBREG (from_mode, to, 0);
559 emit_move_insn (to, from);
560 return;
563 if (to_real != from_real)
564 abort ();
566 if (to_real)
568 rtx value, insns;
570 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
572 /* Try converting directly if the insn is supported. */
573 if ((code = can_extend_p (to_mode, from_mode, 0))
574 != CODE_FOR_nothing)
576 emit_unop_insn (code, to, from, UNKNOWN);
577 return;
581 #ifdef HAVE_trunchfqf2
582 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
584 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
585 return;
587 #endif
588 #ifdef HAVE_trunctqfqf2
589 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
591 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
592 return;
594 #endif
595 #ifdef HAVE_truncsfqf2
596 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
598 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
599 return;
601 #endif
602 #ifdef HAVE_truncdfqf2
603 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
605 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
606 return;
608 #endif
609 #ifdef HAVE_truncxfqf2
610 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
612 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
613 return;
615 #endif
616 #ifdef HAVE_trunctfqf2
617 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
619 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
620 return;
622 #endif
624 #ifdef HAVE_trunctqfhf2
625 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
627 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
628 return;
630 #endif
631 #ifdef HAVE_truncsfhf2
632 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
634 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
635 return;
637 #endif
638 #ifdef HAVE_truncdfhf2
639 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
641 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
642 return;
644 #endif
645 #ifdef HAVE_truncxfhf2
646 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
648 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
649 return;
651 #endif
652 #ifdef HAVE_trunctfhf2
653 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
655 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
656 return;
658 #endif
660 #ifdef HAVE_truncsftqf2
661 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
663 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
664 return;
666 #endif
667 #ifdef HAVE_truncdftqf2
668 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
670 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
671 return;
673 #endif
674 #ifdef HAVE_truncxftqf2
675 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
677 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
678 return;
680 #endif
681 #ifdef HAVE_trunctftqf2
682 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
684 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
685 return;
687 #endif
689 #ifdef HAVE_truncdfsf2
690 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
692 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
693 return;
695 #endif
696 #ifdef HAVE_truncxfsf2
697 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
699 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
700 return;
702 #endif
703 #ifdef HAVE_trunctfsf2
704 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
706 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
707 return;
709 #endif
710 #ifdef HAVE_truncxfdf2
711 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
713 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
714 return;
716 #endif
717 #ifdef HAVE_trunctfdf2
718 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
720 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
721 return;
723 #endif
725 libcall = (rtx) 0;
726 switch (from_mode)
728 case SFmode:
729 switch (to_mode)
731 case DFmode:
732 libcall = extendsfdf2_libfunc;
733 break;
735 case XFmode:
736 libcall = extendsfxf2_libfunc;
737 break;
739 case TFmode:
740 libcall = extendsftf2_libfunc;
741 break;
743 default:
744 break;
746 break;
748 case DFmode:
749 switch (to_mode)
751 case SFmode:
752 libcall = truncdfsf2_libfunc;
753 break;
755 case XFmode:
756 libcall = extenddfxf2_libfunc;
757 break;
759 case TFmode:
760 libcall = extenddftf2_libfunc;
761 break;
763 default:
764 break;
766 break;
768 case XFmode:
769 switch (to_mode)
771 case SFmode:
772 libcall = truncxfsf2_libfunc;
773 break;
775 case DFmode:
776 libcall = truncxfdf2_libfunc;
777 break;
779 default:
780 break;
782 break;
784 case TFmode:
785 switch (to_mode)
787 case SFmode:
788 libcall = trunctfsf2_libfunc;
789 break;
791 case DFmode:
792 libcall = trunctfdf2_libfunc;
793 break;
795 default:
796 break;
798 break;
800 default:
801 break;
804 if (libcall == (rtx) 0)
805 /* This conversion is not implemented yet. */
806 abort ();
808 start_sequence ();
809 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
810 1, from, from_mode);
811 insns = get_insns ();
812 end_sequence ();
813 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
814 from));
815 return;
818 /* Now both modes are integers. */
820 /* Handle expanding beyond a word. */
821 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
822 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
824 rtx insns;
825 rtx lowpart;
826 rtx fill_value;
827 rtx lowfrom;
828 int i;
829 enum machine_mode lowpart_mode;
830 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
832 /* Try converting directly if the insn is supported. */
833 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
834 != CODE_FOR_nothing)
836 /* If FROM is a SUBREG, put it into a register. Do this
837 so that we always generate the same set of insns for
838 better cse'ing; if an intermediate assignment occurred,
839 we won't be doing the operation directly on the SUBREG. */
840 if (optimize > 0 && GET_CODE (from) == SUBREG)
841 from = force_reg (from_mode, from);
842 emit_unop_insn (code, to, from, equiv_code);
843 return;
845 /* Next, try converting via full word. */
846 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
847 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
848 != CODE_FOR_nothing))
850 if (GET_CODE (to) == REG)
851 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
852 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
853 emit_unop_insn (code, to,
854 gen_lowpart (word_mode, to), equiv_code);
855 return;
858 /* No special multiword conversion insn; do it by hand. */
859 start_sequence ();
861 /* Since we will turn this into a no conflict block, we must ensure
862 that the source does not overlap the target. */
864 if (reg_overlap_mentioned_p (to, from))
865 from = force_reg (from_mode, from);
867 /* Get a copy of FROM widened to a word, if necessary. */
868 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
869 lowpart_mode = word_mode;
870 else
871 lowpart_mode = from_mode;
873 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
875 lowpart = gen_lowpart (lowpart_mode, to);
876 emit_move_insn (lowpart, lowfrom);
878 /* Compute the value to put in each remaining word. */
879 if (unsignedp)
880 fill_value = const0_rtx;
881 else
883 #ifdef HAVE_slt
884 if (HAVE_slt
885 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
886 && STORE_FLAG_VALUE == -1)
888 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
889 lowpart_mode, 0, 0);
890 fill_value = gen_reg_rtx (word_mode);
891 emit_insn (gen_slt (fill_value));
893 else
894 #endif
896 fill_value
897 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
898 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
899 NULL_RTX, 0);
900 fill_value = convert_to_mode (word_mode, fill_value, 1);
904 /* Fill the remaining words. */
905 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
907 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
908 rtx subword = operand_subword (to, index, 1, to_mode);
910 if (subword == 0)
911 abort ();
913 if (fill_value != subword)
914 emit_move_insn (subword, fill_value);
917 insns = get_insns ();
918 end_sequence ();
920 emit_no_conflict_block (insns, to, from, NULL_RTX,
921 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
922 return;
925 /* Truncating multi-word to a word or less. */
926 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
927 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
929 if (!((GET_CODE (from) == MEM
930 && ! MEM_VOLATILE_P (from)
931 && direct_load[(int) to_mode]
932 && ! mode_dependent_address_p (XEXP (from, 0)))
933 || GET_CODE (from) == REG
934 || GET_CODE (from) == SUBREG))
935 from = force_reg (from_mode, from);
936 convert_move (to, gen_lowpart (word_mode, from), 0);
937 return;
940 /* Handle pointer conversion. */ /* SPEE 900220. */
941 if (to_mode == PQImode)
943 if (from_mode != QImode)
944 from = convert_to_mode (QImode, from, unsignedp);
946 #ifdef HAVE_truncqipqi2
947 if (HAVE_truncqipqi2)
949 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
950 return;
952 #endif /* HAVE_truncqipqi2 */
953 abort ();
956 if (from_mode == PQImode)
958 if (to_mode != QImode)
960 from = convert_to_mode (QImode, from, unsignedp);
961 from_mode = QImode;
963 else
965 #ifdef HAVE_extendpqiqi2
966 if (HAVE_extendpqiqi2)
968 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
969 return;
971 #endif /* HAVE_extendpqiqi2 */
972 abort ();
976 if (to_mode == PSImode)
978 if (from_mode != SImode)
979 from = convert_to_mode (SImode, from, unsignedp);
981 #ifdef HAVE_truncsipsi2
982 if (HAVE_truncsipsi2)
984 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
985 return;
987 #endif /* HAVE_truncsipsi2 */
988 abort ();
991 if (from_mode == PSImode)
993 if (to_mode != SImode)
995 from = convert_to_mode (SImode, from, unsignedp);
996 from_mode = SImode;
998 else
1000 #ifdef HAVE_extendpsisi2
1001 if (! unsignedp && HAVE_extendpsisi2)
1003 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1004 return;
1006 #endif /* HAVE_extendpsisi2 */
1007 #ifdef HAVE_zero_extendpsisi2
1008 if (unsignedp && HAVE_zero_extendpsisi2)
1010 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1011 return;
1013 #endif /* HAVE_zero_extendpsisi2 */
1014 abort ();
1018 if (to_mode == PDImode)
1020 if (from_mode != DImode)
1021 from = convert_to_mode (DImode, from, unsignedp);
1023 #ifdef HAVE_truncdipdi2
1024 if (HAVE_truncdipdi2)
1026 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1027 return;
1029 #endif /* HAVE_truncdipdi2 */
1030 abort ();
1033 if (from_mode == PDImode)
1035 if (to_mode != DImode)
1037 from = convert_to_mode (DImode, from, unsignedp);
1038 from_mode = DImode;
1040 else
1042 #ifdef HAVE_extendpdidi2
1043 if (HAVE_extendpdidi2)
1045 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1046 return;
1048 #endif /* HAVE_extendpdidi2 */
1049 abort ();
1053 /* Now follow all the conversions between integers
1054 no more than a word long. */
1056 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1057 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1058 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1059 GET_MODE_BITSIZE (from_mode)))
1061 if (!((GET_CODE (from) == MEM
1062 && ! MEM_VOLATILE_P (from)
1063 && direct_load[(int) to_mode]
1064 && ! mode_dependent_address_p (XEXP (from, 0)))
1065 || GET_CODE (from) == REG
1066 || GET_CODE (from) == SUBREG))
1067 from = force_reg (from_mode, from);
1068 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1069 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1070 from = copy_to_reg (from);
1071 emit_move_insn (to, gen_lowpart (to_mode, from));
1072 return;
1075 /* Handle extension. */
1076 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1078 /* Convert directly if that works. */
1079 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1080 != CODE_FOR_nothing)
1082 emit_unop_insn (code, to, from, equiv_code);
1083 return;
1085 else
1087 enum machine_mode intermediate;
1088 rtx tmp;
1089 tree shift_amount;
1091 /* Search for a mode to convert via. */
1092 for (intermediate = from_mode; intermediate != VOIDmode;
1093 intermediate = GET_MODE_WIDER_MODE (intermediate))
1094 if (((can_extend_p (to_mode, intermediate, unsignedp)
1095 != CODE_FOR_nothing)
1096 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1097 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1098 GET_MODE_BITSIZE (intermediate))))
1099 && (can_extend_p (intermediate, from_mode, unsignedp)
1100 != CODE_FOR_nothing))
1102 convert_move (to, convert_to_mode (intermediate, from,
1103 unsignedp), unsignedp);
1104 return;
1107 /* No suitable intermediate mode.
1108 Generate what we need with shifts. */
1109 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1110 - GET_MODE_BITSIZE (from_mode), 0);
1111 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1112 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1113 to, unsignedp);
1114 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1115 to, unsignedp);
1116 if (tmp != to)
1117 emit_move_insn (to, tmp);
1118 return;
1122 /* Support special truncate insns for certain modes. */
1124 if (from_mode == DImode && to_mode == SImode)
1126 #ifdef HAVE_truncdisi2
1127 if (HAVE_truncdisi2)
1129 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1130 return;
1132 #endif
1133 convert_move (to, force_reg (from_mode, from), unsignedp);
1134 return;
1137 if (from_mode == DImode && to_mode == HImode)
1139 #ifdef HAVE_truncdihi2
1140 if (HAVE_truncdihi2)
1142 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1143 return;
1145 #endif
1146 convert_move (to, force_reg (from_mode, from), unsignedp);
1147 return;
1150 if (from_mode == DImode && to_mode == QImode)
1152 #ifdef HAVE_truncdiqi2
1153 if (HAVE_truncdiqi2)
1155 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1156 return;
1158 #endif
1159 convert_move (to, force_reg (from_mode, from), unsignedp);
1160 return;
1163 if (from_mode == SImode && to_mode == HImode)
1165 #ifdef HAVE_truncsihi2
1166 if (HAVE_truncsihi2)
1168 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1169 return;
1171 #endif
1172 convert_move (to, force_reg (from_mode, from), unsignedp);
1173 return;
1176 if (from_mode == SImode && to_mode == QImode)
1178 #ifdef HAVE_truncsiqi2
1179 if (HAVE_truncsiqi2)
1181 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1182 return;
1184 #endif
1185 convert_move (to, force_reg (from_mode, from), unsignedp);
1186 return;
1189 if (from_mode == HImode && to_mode == QImode)
1191 #ifdef HAVE_trunchiqi2
1192 if (HAVE_trunchiqi2)
1194 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1195 return;
1197 #endif
1198 convert_move (to, force_reg (from_mode, from), unsignedp);
1199 return;
1202 if (from_mode == TImode && to_mode == DImode)
1204 #ifdef HAVE_trunctidi2
1205 if (HAVE_trunctidi2)
1207 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1208 return;
1210 #endif
1211 convert_move (to, force_reg (from_mode, from), unsignedp);
1212 return;
1215 if (from_mode == TImode && to_mode == SImode)
1217 #ifdef HAVE_trunctisi2
1218 if (HAVE_trunctisi2)
1220 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1221 return;
1223 #endif
1224 convert_move (to, force_reg (from_mode, from), unsignedp);
1225 return;
1228 if (from_mode == TImode && to_mode == HImode)
1230 #ifdef HAVE_trunctihi2
1231 if (HAVE_trunctihi2)
1233 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1234 return;
1236 #endif
1237 convert_move (to, force_reg (from_mode, from), unsignedp);
1238 return;
1241 if (from_mode == TImode && to_mode == QImode)
1243 #ifdef HAVE_trunctiqi2
1244 if (HAVE_trunctiqi2)
1246 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1247 return;
1249 #endif
1250 convert_move (to, force_reg (from_mode, from), unsignedp);
1251 return;
1254 /* Handle truncation of volatile memrefs, and so on;
1255 the things that couldn't be truncated directly,
1256 and for which there was no special instruction. */
1257 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1259 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1260 emit_move_insn (to, temp);
1261 return;
1264 /* Mode combination is not recognized. */
1265 abort ();
1268 /* Return an rtx for a value that would result
1269 from converting X to mode MODE.
1270 Both X and MODE may be floating, or both integer.
1271 UNSIGNEDP is nonzero if X is an unsigned value.
1272 This can be done by referring to a part of X in place
1273 or by copying to a new temporary with conversion.
1275 This function *must not* call protect_from_queue
1276 except when putting X into an insn (in which case convert_move does it). */
1279 convert_to_mode (mode, x, unsignedp)
1280 enum machine_mode mode;
1281 rtx x;
1282 int unsignedp;
1284 return convert_modes (mode, VOIDmode, x, unsignedp);
1287 /* Return an rtx for a value that would result
1288 from converting X from mode OLDMODE to mode MODE.
1289 Both modes may be floating, or both integer.
1290 UNSIGNEDP is nonzero if X is an unsigned value.
1292 This can be done by referring to a part of X in place
1293 or by copying to a new temporary with conversion.
1295 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1297 This function *must not* call protect_from_queue
1298 except when putting X into an insn (in which case convert_move does it). */
1301 convert_modes (mode, oldmode, x, unsignedp)
1302 enum machine_mode mode, oldmode;
1303 rtx x;
1304 int unsignedp;
1306 register rtx temp;
1308 /* If FROM is a SUBREG that indicates that we have already done at least
1309 the required extension, strip it. */
1311 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1312 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1313 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1314 x = gen_lowpart (mode, x);
1316 if (GET_MODE (x) != VOIDmode)
1317 oldmode = GET_MODE (x);
1319 if (mode == oldmode)
1320 return x;
1322 /* There is one case that we must handle specially: If we are converting
1323 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1324 we are to interpret the constant as unsigned, gen_lowpart will do
 1325 the wrong thing if the constant appears negative. What we want to do is
1326 make the high-order word of the constant zero, not all ones. */
1328 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1329 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1330 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1332 HOST_WIDE_INT val = INTVAL (x);
1334 if (oldmode != VOIDmode
1335 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1337 int width = GET_MODE_BITSIZE (oldmode);
1339 /* We need to zero extend VAL. */
1340 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1343 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1346 /* We can do this with a gen_lowpart if both desired and current modes
1347 are integer, and this is either a constant integer, a register, or a
1348 non-volatile MEM. Except for the constant case where MODE is no
1349 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1351 if ((GET_CODE (x) == CONST_INT
1352 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1353 || (GET_MODE_CLASS (mode) == MODE_INT
1354 && GET_MODE_CLASS (oldmode) == MODE_INT
1355 && (GET_CODE (x) == CONST_DOUBLE
1356 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1357 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1358 && direct_load[(int) mode])
1359 || (GET_CODE (x) == REG
1360 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1361 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1363 /* ?? If we don't know OLDMODE, we have to assume here that
1364 X does not need sign- or zero-extension. This may not be
1365 the case, but it's the best we can do. */
1366 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1367 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1369 HOST_WIDE_INT val = INTVAL (x);
1370 int width = GET_MODE_BITSIZE (oldmode);
1372 /* We must sign or zero-extend in this case. Start by
1373 zero-extending, then sign extend if we need to. */
1374 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1375 if (! unsignedp
1376 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1377 val |= (HOST_WIDE_INT) (-1) << width;
1379 return GEN_INT (val);
1382 return gen_lowpart (mode, x);
1385 temp = gen_reg_rtx (mode);
1386 convert_move (temp, x, unsignedp);
1387 return temp;
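/* A minimal usage sketch (editorial, not part of the original source) of
   the conversion entry points above.  VAL is a hypothetical QImode rtx;
   UNSIGNEDP selects zero- versus sign-extension.

       rtx wide = convert_to_mode (SImode, val, 1);     (fresh SImode pseudo,
                                                          zero-extended)
       rtx dest = gen_reg_rtx (SImode);
       convert_move (dest, val, 0);                     (sign-extend into an
                                                          existing register)
*/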
 1390 /* This macro is used to determine the largest unit size that
 1391    move_by_pieces can use.  */
1393 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1394 move efficiently, as opposed to MOVE_MAX which is the maximum
1395 number of bytes we can move with a single instruction. */
1397 #ifndef MOVE_MAX_PIECES
1398 #define MOVE_MAX_PIECES MOVE_MAX
1399 #endif
1401 /* Generate several move instructions to copy LEN bytes
1402 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1403 The caller must pass FROM and TO
1404 through protect_from_queue before calling.
1405 ALIGN is maximum alignment we can assume. */
1407 void
1408 move_by_pieces (to, from, len, align)
1409 rtx to, from;
1410 unsigned HOST_WIDE_INT len;
1411 unsigned int align;
1413 struct move_by_pieces data;
1414 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1415 unsigned int max_size = MOVE_MAX_PIECES + 1;
1416 enum machine_mode mode = VOIDmode, tmode;
1417 enum insn_code icode;
1419 data.offset = 0;
1420 data.to_addr = to_addr;
1421 data.from_addr = from_addr;
1422 data.to = to;
1423 data.from = from;
1424 data.autinc_to
1425 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1426 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1427 data.autinc_from
1428 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1429 || GET_CODE (from_addr) == POST_INC
1430 || GET_CODE (from_addr) == POST_DEC);
1432 data.explicit_inc_from = 0;
1433 data.explicit_inc_to = 0;
1434 data.reverse
1435 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1436 if (data.reverse) data.offset = len;
1437 data.len = len;
1439 /* If copying requires more than two move insns,
1440 copy addresses to registers (to make displacements shorter)
1441 and use post-increment if available. */
1442 if (!(data.autinc_from && data.autinc_to)
1443 && move_by_pieces_ninsns (len, align) > 2)
1445 /* Find the mode of the largest move... */
1446 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1447 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1448 if (GET_MODE_SIZE (tmode) < max_size)
1449 mode = tmode;
1451 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1453 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1454 data.autinc_from = 1;
1455 data.explicit_inc_from = -1;
1457 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1459 data.from_addr = copy_addr_to_reg (from_addr);
1460 data.autinc_from = 1;
1461 data.explicit_inc_from = 1;
1463 if (!data.autinc_from && CONSTANT_P (from_addr))
1464 data.from_addr = copy_addr_to_reg (from_addr);
1465 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1467 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1468 data.autinc_to = 1;
1469 data.explicit_inc_to = -1;
1471 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1473 data.to_addr = copy_addr_to_reg (to_addr);
1474 data.autinc_to = 1;
1475 data.explicit_inc_to = 1;
1477 if (!data.autinc_to && CONSTANT_P (to_addr))
1478 data.to_addr = copy_addr_to_reg (to_addr);
1481 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1482 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1483 align = MOVE_MAX * BITS_PER_UNIT;
1485 /* First move what we can in the largest integer mode, then go to
1486 successively smaller modes. */
1488 while (max_size > 1)
1490 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1491 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1492 if (GET_MODE_SIZE (tmode) < max_size)
1493 mode = tmode;
1495 if (mode == VOIDmode)
1496 break;
1498 icode = mov_optab->handlers[(int) mode].insn_code;
1499 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1500 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1502 max_size = GET_MODE_SIZE (mode);
1505 /* The code above should have handled everything. */
1506 if (data.len > 0)
1507 abort ();
1510 /* Return number of insns required to move L bytes by pieces.
 1511    ALIGN (in bits) is the maximum alignment we can assume.  */
1513 static unsigned HOST_WIDE_INT
1514 move_by_pieces_ninsns (l, align)
1515 unsigned HOST_WIDE_INT l;
1516 unsigned int align;
1518 unsigned HOST_WIDE_INT n_insns = 0;
1519 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1521 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1522 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1523 align = MOVE_MAX * BITS_PER_UNIT;
1525 while (max_size > 1)
1527 enum machine_mode mode = VOIDmode, tmode;
1528 enum insn_code icode;
1530 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1531 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1532 if (GET_MODE_SIZE (tmode) < max_size)
1533 mode = tmode;
1535 if (mode == VOIDmode)
1536 break;
1538 icode = mov_optab->handlers[(int) mode].insn_code;
1539 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1540 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1542 max_size = GET_MODE_SIZE (mode);
1545 if (l)
1546 abort ();
1547 return n_insns;
1550 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1551 with move instructions for mode MODE. GENFUN is the gen_... function
1552 to make a move insn for that mode. DATA has all the other info. */
1554 static void
1555 move_by_pieces_1 (genfun, mode, data)
1556 rtx (*genfun) PARAMS ((rtx, ...));
1557 enum machine_mode mode;
1558 struct move_by_pieces *data;
1560 unsigned int size = GET_MODE_SIZE (mode);
1561 rtx to1, from1;
1563 while (data->len >= size)
1565 if (data->reverse)
1566 data->offset -= size;
1568 if (data->autinc_to)
1570 to1 = gen_rtx_MEM (mode, data->to_addr);
1571 MEM_COPY_ATTRIBUTES (to1, data->to);
1573 else
1574 to1 = change_address (data->to, mode,
1575 plus_constant (data->to_addr, data->offset));
1577 if (data->autinc_from)
1579 from1 = gen_rtx_MEM (mode, data->from_addr);
1580 MEM_COPY_ATTRIBUTES (from1, data->from);
1582 else
1583 from1 = change_address (data->from, mode,
1584 plus_constant (data->from_addr, data->offset));
1586 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1587 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1588 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1589 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1591 emit_insn ((*genfun) (to1, from1));
1593 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1594 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1595 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1596 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1598 if (! data->reverse)
1599 data->offset += size;
1601 data->len -= size;
1605 /* Emit code to move a block Y to a block X.
1606 This may be done with string-move instructions,
1607 with multiple scalar move instructions, or with a library call.
1609 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1610 with mode BLKmode.
1611 SIZE is an rtx that says how long they are.
1612 ALIGN is the maximum alignment we can assume they have.
1614 Return the address of the new block, if memcpy is called and returns it,
1615 0 otherwise. */
1618 emit_block_move (x, y, size, align)
1619 rtx x, y;
1620 rtx size;
1621 unsigned int align;
1623 rtx retval = 0;
1624 #ifdef TARGET_MEM_FUNCTIONS
1625 static tree fn;
1626 tree call_expr, arg_list;
1627 #endif
1629 if (GET_MODE (x) != BLKmode)
1630 abort ();
1632 if (GET_MODE (y) != BLKmode)
1633 abort ();
1635 x = protect_from_queue (x, 1);
1636 y = protect_from_queue (y, 0);
1637 size = protect_from_queue (size, 0);
1639 if (GET_CODE (x) != MEM)
1640 abort ();
1641 if (GET_CODE (y) != MEM)
1642 abort ();
1643 if (size == 0)
1644 abort ();
1646 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1647 move_by_pieces (x, y, INTVAL (size), align);
1648 else
1650 /* Try the most limited insn first, because there's no point
1651 including more than one in the machine description unless
1652 the more limited one has some advantage. */
1654 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1655 enum machine_mode mode;
1657 /* Since this is a move insn, we don't care about volatility. */
1658 volatile_ok = 1;
1660 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1661 mode = GET_MODE_WIDER_MODE (mode))
1663 enum insn_code code = movstr_optab[(int) mode];
1664 insn_operand_predicate_fn pred;
1666 if (code != CODE_FOR_nothing
1667 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1668 here because if SIZE is less than the mode mask, as it is
1669 returned by the macro, it will definitely be less than the
1670 actual mode mask. */
1671 && ((GET_CODE (size) == CONST_INT
1672 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1673 <= (GET_MODE_MASK (mode) >> 1)))
1674 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1675 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1676 || (*pred) (x, BLKmode))
1677 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1678 || (*pred) (y, BLKmode))
1679 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1680 || (*pred) (opalign, VOIDmode)))
1682 rtx op2;
1683 rtx last = get_last_insn ();
1684 rtx pat;
1686 op2 = convert_to_mode (mode, size, 1);
1687 pred = insn_data[(int) code].operand[2].predicate;
1688 if (pred != 0 && ! (*pred) (op2, mode))
1689 op2 = copy_to_mode_reg (mode, op2);
1691 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1692 if (pat)
1694 emit_insn (pat);
1695 volatile_ok = 0;
1696 return 0;
1698 else
1699 delete_insns_since (last);
1703 volatile_ok = 0;
1705 /* X, Y, or SIZE may have been passed through protect_from_queue.
1707 It is unsafe to save the value generated by protect_from_queue
1708 and reuse it later. Consider what happens if emit_queue is
1709 called before the return value from protect_from_queue is used.
1711 Expansion of the CALL_EXPR below will call emit_queue before
1712 we are finished emitting RTL for argument setup. So if we are
1713 not careful we could get the wrong value for an argument.
1715 To avoid this problem we go ahead and emit code to copy X, Y &
1716 SIZE into new pseudos. We can then place those new pseudos
1717 into an RTL_EXPR and use them later, even after a call to
1718 emit_queue.
1720 Note this is not strictly needed for library calls since they
1721 do not call emit_queue before loading their arguments. However,
1722 we may need to have library calls call emit_queue in the future
1723 since failing to do so could cause problems for targets which
1724 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1725 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1726 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1728 #ifdef TARGET_MEM_FUNCTIONS
1729 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1730 #else
1731 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1732 TREE_UNSIGNED (integer_type_node));
1733 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1734 #endif
1736 #ifdef TARGET_MEM_FUNCTIONS
1737 /* It is incorrect to use the libcall calling conventions to call
1738 memcpy in this context.
1740 This could be a user call to memcpy and the user may wish to
1741 examine the return value from memcpy.
1743 For targets where libcalls and normal calls have different conventions
1744 for returning pointers, we could end up generating incorrect code.
1746 So instead of using a libcall sequence we build up a suitable
1747 CALL_EXPR and expand the call in the normal fashion. */
1748 if (fn == NULL_TREE)
1750 tree fntype;
 1752       /* This was copied from except.c; I don't know if all this is
1753 necessary in this context or not. */
1754 fn = get_identifier ("memcpy");
1755 fntype = build_pointer_type (void_type_node);
1756 fntype = build_function_type (fntype, NULL_TREE);
1757 fn = build_decl (FUNCTION_DECL, fn, fntype);
1758 ggc_add_tree_root (&fn, 1);
1759 DECL_EXTERNAL (fn) = 1;
1760 TREE_PUBLIC (fn) = 1;
1761 DECL_ARTIFICIAL (fn) = 1;
1762 make_decl_rtl (fn, NULL_PTR, 1);
1763 assemble_external (fn);
1766 /* We need to make an argument list for the function call.
1768 memcpy has three arguments, the first two are void * addresses and
1769 the last is a size_t byte count for the copy. */
1770 arg_list
1771 = build_tree_list (NULL_TREE,
1772 make_tree (build_pointer_type (void_type_node), x));
1773 TREE_CHAIN (arg_list)
1774 = build_tree_list (NULL_TREE,
1775 make_tree (build_pointer_type (void_type_node), y));
1776 TREE_CHAIN (TREE_CHAIN (arg_list))
1777 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1778 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1780 /* Now we have to build up the CALL_EXPR itself. */
1781 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1782 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1783 call_expr, arg_list, NULL_TREE);
1784 TREE_SIDE_EFFECTS (call_expr) = 1;
1786 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1787 #else
1788 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1789 VOIDmode, 3, y, Pmode, x, Pmode,
1790 convert_to_mode (TYPE_MODE (integer_type_node), size,
1791 TREE_UNSIGNED (integer_type_node)),
1792 TYPE_MODE (integer_type_node));
1793 #endif
1796 return retval;
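/* A minimal usage sketch (editorial, not part of the original source) of
   emit_block_move.  DST_ADDR and SRC_ADDR are hypothetical Pmode address
   rtxes; both operands must be BLKmode MEMs, SIZE is an rtx byte count,
   and the last argument is the common alignment, in bits as the code
   above uses it.

       rtx dst = gen_rtx_MEM (BLKmode, dst_addr);
       rtx src = gen_rtx_MEM (BLKmode, src_addr);
       emit_block_move (dst, src, GEN_INT (32), BITS_PER_WORD);
*/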
1799 /* Copy all or part of a value X into registers starting at REGNO.
1800 The number of registers to be filled is NREGS. */
1802 void
1803 move_block_to_reg (regno, x, nregs, mode)
1804 int regno;
1805 rtx x;
1806 int nregs;
1807 enum machine_mode mode;
1809 int i;
1810 #ifdef HAVE_load_multiple
1811 rtx pat;
1812 rtx last;
1813 #endif
1815 if (nregs == 0)
1816 return;
1818 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1819 x = validize_mem (force_const_mem (mode, x));
1821 /* See if the machine can do this with a load multiple insn. */
1822 #ifdef HAVE_load_multiple
1823 if (HAVE_load_multiple)
1825 last = get_last_insn ();
1826 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1827 GEN_INT (nregs));
1828 if (pat)
1830 emit_insn (pat);
1831 return;
1833 else
1834 delete_insns_since (last);
1836 #endif
1838 for (i = 0; i < nregs; i++)
1839 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1840 operand_subword_force (x, i, mode));
1843 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1844 The number of registers to be filled is NREGS. SIZE indicates the number
1845 of bytes in the object X. */
1847 void
1848 move_block_from_reg (regno, x, nregs, size)
1849 int regno;
1850 rtx x;
1851 int nregs;
1852 int size;
1854 int i;
1855 #ifdef HAVE_store_multiple
1856 rtx pat;
1857 rtx last;
1858 #endif
1859 enum machine_mode mode;
1861 /* If SIZE is that of a mode no bigger than a word, just use that
1862 mode's store operation. */
1863 if (size <= UNITS_PER_WORD
1864 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1866 emit_move_insn (change_address (x, mode, NULL),
1867 gen_rtx_REG (mode, regno));
1868 return;
1871 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1872 to the left before storing to memory. Note that the previous test
1873 doesn't handle all cases (e.g. SIZE == 3). */
1874 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1876 rtx tem = operand_subword (x, 0, 1, BLKmode);
1877 rtx shift;
1879 if (tem == 0)
1880 abort ();
1882 shift = expand_shift (LSHIFT_EXPR, word_mode,
1883 gen_rtx_REG (word_mode, regno),
1884 build_int_2 ((UNITS_PER_WORD - size)
1885 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1886 emit_move_insn (tem, shift);
1887 return;
1890 /* See if the machine can do this with a store multiple insn. */
1891 #ifdef HAVE_store_multiple
1892 if (HAVE_store_multiple)
1894 last = get_last_insn ();
1895 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1896 GEN_INT (nregs));
1897 if (pat)
1899 emit_insn (pat);
1900 return;
1902 else
1903 delete_insns_since (last);
1905 #endif
1907 for (i = 0; i < nregs; i++)
1909 rtx tem = operand_subword (x, i, 1, BLKmode);
1911 if (tem == 0)
1912 abort ();
1914 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1918 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1919 registers represented by a PARALLEL. SSIZE represents the total size of
1920 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1921 SRC in bits. */
 1922 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1923 the balance will be in what would be the low-order memory addresses, i.e.
1924 left justified for big endian, right justified for little endian. This
1925 happens to be true for the targets currently using this support. If this
1926 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1927 would be needed. */
1929 void
1930 emit_group_load (dst, orig_src, ssize, align)
1931 rtx dst, orig_src;
1932 unsigned int align;
1933 int ssize;
1935 rtx *tmps, src;
1936 int start, i;
1938 if (GET_CODE (dst) != PARALLEL)
1939 abort ();
1941 /* Check for a NULL entry, used to indicate that the parameter goes
1942 both on the stack and in registers. */
1943 if (XEXP (XVECEXP (dst, 0, 0), 0))
1944 start = 0;
1945 else
1946 start = 1;
1948 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1950 /* If we won't be loading directly from memory, protect the real source
1951 from strange tricks we might play. */
1952 src = orig_src;
1953 if (GET_CODE (src) != MEM && ! CONSTANT_P (src))
1955 if (GET_MODE (src) == VOIDmode)
1956 src = gen_reg_rtx (GET_MODE (dst));
1957 else
1958 src = gen_reg_rtx (GET_MODE (orig_src));
1959 emit_move_insn (src, orig_src);
1962 /* Process the pieces. */
1963 for (i = start; i < XVECLEN (dst, 0); i++)
1965 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1966 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1967 unsigned int bytelen = GET_MODE_SIZE (mode);
1968 int shift = 0;
1970 /* Handle trailing fragments that run over the size of the struct. */
1971 if (ssize >= 0 && bytepos + bytelen > ssize)
1973 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1974 bytelen = ssize - bytepos;
1975 if (bytelen <= 0)
1976 abort ();
1979 /* Optimize the access just a bit. */
1980 if (GET_CODE (src) == MEM
1981 && align >= GET_MODE_ALIGNMENT (mode)
1982 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1983 && bytelen == GET_MODE_SIZE (mode))
1985 tmps[i] = gen_reg_rtx (mode);
1986 emit_move_insn (tmps[i],
1987 change_address (src, mode,
1988 plus_constant (XEXP (src, 0),
1989 bytepos)));
1991 else if (GET_CODE (src) == CONCAT)
1993 if (bytepos == 0
1994 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1995 tmps[i] = XEXP (src, 0);
1996 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1997 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1998 tmps[i] = XEXP (src, 1);
1999 else
2000 abort ();
2002 else if ((CONSTANT_P (src)
2003 && (GET_MODE (src) == VOIDmode || GET_MODE (src) == mode))
2004 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2005 tmps[i] = src;
2006 else
2007 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2008 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2009 mode, mode, align, ssize);
2011 if (BYTES_BIG_ENDIAN && shift)
2012 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2013 tmps[i], 0, OPTAB_WIDEN);
2016 emit_queue ();
2018 /* Copy the extracted pieces into the proper (probable) hard regs. */
2019 for (i = start; i < XVECLEN (dst, 0); i++)
2020 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2023 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2024 registers represented by a PARALLEL. SSIZE represents the total size of
2025 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
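/* Editorial note: the SRC PARALLEL here has the same shape as the one
   sketched above for emit_group_load -- (expr_list REG (const_int
   BYTEPOS)) elements, with an optional leading null-register entry
   when part of the value is also passed on the stack.  */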
2027 void
2028 emit_group_store (orig_dst, src, ssize, align)
2029 rtx orig_dst, src;
2030 int ssize;
2031 unsigned int align;
2033 rtx *tmps, dst;
2034 int start, i;
2036 if (GET_CODE (src) != PARALLEL)
2037 abort ();
2039 /* Check for a NULL entry, used to indicate that the parameter goes
2040 both on the stack and in registers. */
2041 if (XEXP (XVECEXP (src, 0, 0), 0))
2042 start = 0;
2043 else
2044 start = 1;
2046 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2048 /* Copy the (probable) hard regs into pseudos. */
2049 for (i = start; i < XVECLEN (src, 0); i++)
2051 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2052 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2053 emit_move_insn (tmps[i], reg);
2055 emit_queue ();
2057 /* If we won't be storing directly into memory, protect the real destination
2058 from strange tricks we might play. */
2059 dst = orig_dst;
2060 if (GET_CODE (dst) == PARALLEL)
2062 rtx temp;
2064 /* We can get a PARALLEL dst if there is a conditional expression in
2065 a return statement. In that case, the dst and src are the same,
2066 so no action is necessary. */
2067 if (rtx_equal_p (dst, src))
2068 return;
2070 /* It is unclear if we can ever reach here, but we may as well handle
2071 it. Allocate a temporary, and split this into a store/load to/from
2072 the temporary. */
2074 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2075 emit_group_store (temp, src, ssize, align);
2076 emit_group_load (dst, temp, ssize, align);
2077 return;
2079 else if (GET_CODE (dst) != MEM)
2081 dst = gen_reg_rtx (GET_MODE (orig_dst));
2082 /* Make life a bit easier for combine. */
2083 emit_move_insn (dst, const0_rtx);
2086 /* Process the pieces. */
2087 for (i = start; i < XVECLEN (src, 0); i++)
2089 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2090 enum machine_mode mode = GET_MODE (tmps[i]);
2091 unsigned int bytelen = GET_MODE_SIZE (mode);
2093 /* Handle trailing fragments that run over the size of the struct. */
2094 if (ssize >= 0 && bytepos + bytelen > ssize)
2096 if (BYTES_BIG_ENDIAN)
2098 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2099 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2100 tmps[i], 0, OPTAB_WIDEN);
2102 bytelen = ssize - bytepos;
2105 /* Optimize the access just a bit. */
2106 if (GET_CODE (dst) == MEM
2107 && align >= GET_MODE_ALIGNMENT (mode)
2108 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2109 && bytelen == GET_MODE_SIZE (mode))
2110 emit_move_insn (change_address (dst, mode,
2111 plus_constant (XEXP (dst, 0),
2112 bytepos)),
2113 tmps[i]);
2114 else
2115 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2116 mode, tmps[i], align, ssize);
2119 emit_queue ();
2121 /* Copy from the pseudo into the (probable) hard reg. */
2122 if (GET_CODE (dst) == REG)
2123 emit_move_insn (orig_dst, dst);
2126 /* Generate code to copy a BLKmode object of TYPE out of a
2127 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2128 is null, a stack temporary is created. TGTBLK is returned.
2130 The primary purpose of this routine is to handle functions
2131 that return BLKmode structures in registers. Some machines
2132 (the PA for example) want to return all small structures
2133 in registers regardless of the structure's alignment. */
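/* Illustrative use (hypothetical caller, not from this file):

       rtx blk = copy_blkmode_from_reg (NULL_RTX, value_reg, type);

   With a null TGTBLK a stack temporary sized from TYPE is allocated
   and returned; VALUE_REG stands for whatever hard register block the
   ABI returned the structure in.  */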
2135 rtx
2136 copy_blkmode_from_reg (tgtblk, srcreg, type)
2137 rtx tgtblk;
2138 rtx srcreg;
2139 tree type;
2141 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2142 rtx src = NULL, dst = NULL;
2143 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2144 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2146 if (tgtblk == 0)
2148 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2149 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2150 preserve_temp_slots (tgtblk);
2153 /* This code assumes srcreg is at least a full word. If it isn't,
2154 copy it into a new pseudo which is a full word. */
2155 if (GET_MODE (srcreg) != BLKmode
2156 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2157 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2159 /* Structures whose size is not a multiple of a word are aligned
2160 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2161 machine, this means we must skip the empty high order bytes when
2162 calculating the bit offset. */
2163 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2164 big_endian_correction
2165 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2167 /* Copy the structure BITSIZE bits at a time.
2169 We could probably emit more efficient code for machines which do not use
2170 strict alignment, but it doesn't seem worth the effort at the current
2171 time. */
2172 for (bitpos = 0, xbitpos = big_endian_correction;
2173 bitpos < bytes * BITS_PER_UNIT;
2174 bitpos += bitsize, xbitpos += bitsize)
2176 /* We need a new source operand each time xbitpos is on a
2177 word boundary and when xbitpos == big_endian_correction
2178 (the first time through). */
2179 if (xbitpos % BITS_PER_WORD == 0
2180 || xbitpos == big_endian_correction)
2181 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, BLKmode);
2183 /* We need a new destination operand each time bitpos is on
2184 a word boundary. */
2185 if (bitpos % BITS_PER_WORD == 0)
2186 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2188 /* Use xbitpos for the source extraction (right justified) and
2189 bitpos for the destination store (left justified). */
2190 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2191 extract_bit_field (src, bitsize,
2192 xbitpos % BITS_PER_WORD, 1,
2193 NULL_RTX, word_mode, word_mode,
2194 bitsize, BITS_PER_WORD),
2195 bitsize, BITS_PER_WORD);
2198 return tgtblk;
2201 /* Add a USE expression for REG to the (possibly empty) list pointed
2202 to by CALL_FUSAGE. REG must denote a hard register. */
2204 void
2205 use_reg (call_fusage, reg)
2206 rtx *call_fusage, reg;
2208 if (GET_CODE (reg) != REG
2209 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2210 abort ();
2212 *call_fusage
2213 = gen_rtx_EXPR_LIST (VOIDmode,
2214 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2217 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2218 starting at REGNO. All of these registers must be hard registers. */
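/* Illustrative call (hypothetical): use_regs (&call_fusage, 3, 2)
   records USEs of hard registers 3 and 4, so a following call insn is
   known to read both of them.  */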
2220 void
2221 use_regs (call_fusage, regno, nregs)
2222 rtx *call_fusage;
2223 int regno;
2224 int nregs;
2226 int i;
2228 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2229 abort ();
2231 for (i = 0; i < nregs; i++)
2232 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2235 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2236 PARALLEL REGS. This is for calls that pass values in multiple
2237 non-contiguous locations. The Irix 6 ABI has examples of this. */
2239 void
2240 use_group_regs (call_fusage, regs)
2241 rtx *call_fusage;
2242 rtx regs;
2244 int i;
2246 for (i = 0; i < XVECLEN (regs, 0); i++)
2248 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2250 /* A NULL entry means the parameter goes both on the stack and in
2251 registers. This can also be a MEM for targets that pass values
2252 partially on the stack and partially in registers. */
2253 if (reg != 0 && GET_CODE (reg) == REG)
2254 use_reg (call_fusage, reg);
2259 int
2260 can_store_by_pieces (len, constfun, constfundata, align)
2261 unsigned HOST_WIDE_INT len;
2262 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2263 PTR constfundata;
2264 unsigned int align;
2266 unsigned HOST_WIDE_INT max_size, l;
2267 HOST_WIDE_INT offset = 0;
2268 enum machine_mode mode, tmode;
2269 enum insn_code icode;
2270 int reverse;
2271 rtx cst;
2273 if (! MOVE_BY_PIECES_P (len, align))
2274 return 0;
2276 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2277 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2278 align = MOVE_MAX * BITS_PER_UNIT;
2280 /* We would first store what we can in the largest integer mode, then go to
2281 successively smaller modes. */
2283 for (reverse = 0;
2284 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2285 reverse++)
2287 l = len;
2288 mode = VOIDmode;
2289 max_size = MOVE_MAX_PIECES + 1;
2290 while (max_size > 1)
2292 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2293 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2294 if (GET_MODE_SIZE (tmode) < max_size)
2295 mode = tmode;
2297 if (mode == VOIDmode)
2298 break;
2300 icode = mov_optab->handlers[(int) mode].insn_code;
2301 if (icode != CODE_FOR_nothing
2302 && align >= GET_MODE_ALIGNMENT (mode))
2304 unsigned int size = GET_MODE_SIZE (mode);
2306 while (l >= size)
2308 if (reverse)
2309 offset -= size;
2311 cst = (*constfun) (constfundata, offset, mode);
2312 if (!LEGITIMATE_CONSTANT_P (cst))
2313 return 0;
2315 if (!reverse)
2316 offset += size;
2318 l -= size;
2322 max_size = GET_MODE_SIZE (mode);
2325 /* The code above should have handled everything. */
2326 if (l != 0)
2327 abort ();
2330 return 1;
2333 /* Generate several move instructions to store LEN bytes generated by
2334 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2335 pointer which will be passed as argument in every CONSTFUN call.
2336 ALIGN is maximum alignment we can assume. */
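/* Illustrative sketch of a CONSTFUN callback (editorial; compare
   clear_by_pieces_1 further down, which is the real thing for the
   all-zeros case):

       static rtx
       zero_piece (data, offset, mode)
            PTR data ATTRIBUTE_UNUSED;
            HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
            enum machine_mode mode ATTRIBUTE_UNUSED;
       {
         return const0_rtx;
       }

   store_by_pieces (to, len, zero_piece, NULL_PTR, align) would then
   clear LEN bytes of TO with individual move insns.  The name
   zero_piece is hypothetical.  */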
2338 void
2339 store_by_pieces (to, len, constfun, constfundata, align)
2340 rtx to;
2341 unsigned HOST_WIDE_INT len;
2342 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2343 PTR constfundata;
2344 unsigned int align;
2346 struct store_by_pieces data;
2348 if (! MOVE_BY_PIECES_P (len, align))
2349 abort ();
2350 to = protect_from_queue (to, 1);
2351 data.constfun = constfun;
2352 data.constfundata = constfundata;
2353 data.len = len;
2354 data.to = to;
2355 store_by_pieces_1 (&data, align);
2358 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2359 rtx with BLKmode). The caller must pass TO through protect_from_queue
2360 before calling. ALIGN is maximum alignment we can assume. */
2362 static void
2363 clear_by_pieces (to, len, align)
2364 rtx to;
2365 unsigned HOST_WIDE_INT len;
2366 unsigned int align;
2368 struct store_by_pieces data;
2370 data.constfun = clear_by_pieces_1;
2371 data.constfundata = NULL_PTR;
2372 data.len = len;
2373 data.to = to;
2374 store_by_pieces_1 (&data, align);
2377 /* Callback routine for clear_by_pieces.
2378 Return const0_rtx unconditionally. */
2380 static rtx
2381 clear_by_pieces_1 (data, offset, mode)
2382 PTR data ATTRIBUTE_UNUSED;
2383 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2384 enum machine_mode mode ATTRIBUTE_UNUSED;
2386 return const0_rtx;
2389 /* Subroutine of clear_by_pieces and store_by_pieces.
2390 Generate several move instructions to store LEN bytes of block TO. (A MEM
2391 rtx with BLKmode). The caller must pass TO through protect_from_queue
2392 before calling. ALIGN is maximum alignment we can assume. */
2394 static void
2395 store_by_pieces_1 (data, align)
2396 struct store_by_pieces *data;
2397 unsigned int align;
2399 rtx to_addr = XEXP (data->to, 0);
2400 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2401 enum machine_mode mode = VOIDmode, tmode;
2402 enum insn_code icode;
2404 data->offset = 0;
2405 data->to_addr = to_addr;
2406 data->autinc_to
2407 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2408 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2410 data->explicit_inc_to = 0;
2411 data->reverse
2412 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2413 if (data->reverse)
2414 data->offset = data->len;
2416 /* If storing requires more than two move insns,
2417 copy addresses to registers (to make displacements shorter)
2418 and use post-increment if available. */
2419 if (!data->autinc_to
2420 && move_by_pieces_ninsns (data->len, align) > 2)
2422 /* Determine the main mode we'll be using. */
2423 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2424 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2425 if (GET_MODE_SIZE (tmode) < max_size)
2426 mode = tmode;
2428 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2430 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2431 data->autinc_to = 1;
2432 data->explicit_inc_to = -1;
2435 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2436 && ! data->autinc_to)
2438 data->to_addr = copy_addr_to_reg (to_addr);
2439 data->autinc_to = 1;
2440 data->explicit_inc_to = 1;
2443 if ( !data->autinc_to && CONSTANT_P (to_addr))
2444 data->to_addr = copy_addr_to_reg (to_addr);
2447 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2448 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2449 align = MOVE_MAX * BITS_PER_UNIT;
2451 /* First store what we can in the largest integer mode, then go to
2452 successively smaller modes. */
2454 while (max_size > 1)
2456 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2457 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2458 if (GET_MODE_SIZE (tmode) < max_size)
2459 mode = tmode;
2461 if (mode == VOIDmode)
2462 break;
2464 icode = mov_optab->handlers[(int) mode].insn_code;
2465 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2466 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2468 max_size = GET_MODE_SIZE (mode);
2471 /* The code above should have handled everything. */
2472 if (data->len != 0)
2473 abort ();
2476 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2477 with move instructions for mode MODE. GENFUN is the gen_... function
2478 to make a move insn for that mode. DATA has all the other info. */
2480 static void
2481 store_by_pieces_2 (genfun, mode, data)
2482 rtx (*genfun) PARAMS ((rtx, ...));
2483 enum machine_mode mode;
2484 struct store_by_pieces *data;
2486 unsigned int size = GET_MODE_SIZE (mode);
2487 rtx to1, cst;
2489 while (data->len >= size)
2491 if (data->reverse)
2492 data->offset -= size;
2494 if (data->autinc_to)
2496 to1 = gen_rtx_MEM (mode, data->to_addr);
2497 MEM_COPY_ATTRIBUTES (to1, data->to);
2499 else
2500 to1 = change_address (data->to, mode,
2501 plus_constant (data->to_addr, data->offset));
2503 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2504 emit_insn (gen_add2_insn (data->to_addr,
2505 GEN_INT (-(HOST_WIDE_INT) size)));
2507 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2508 emit_insn ((*genfun) (to1, cst));
2510 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2511 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2513 if (! data->reverse)
2514 data->offset += size;
2516 data->len -= size;
2520 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2521 its length in bytes and ALIGN is the maximum alignment we can assume.
2523 If we call a function that returns the length of the block, return it. */
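/* Illustrative call (hypothetical):

       clear_storage (dest_mem, GEN_INT (int_size_in_bytes (type)),
                      TYPE_ALIGN (type));

   zeroes a BLKmode DEST_MEM either with a single move of zero, with a
   clrstr pattern, by pieces, or through memset/bzero, as decided
   below.  DEST_MEM and TYPE are placeholders.  */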
2525 rtx
2526 clear_storage (object, size, align)
2527 rtx object;
2528 rtx size;
2529 unsigned int align;
2531 #ifdef TARGET_MEM_FUNCTIONS
2532 static tree fn;
2533 tree call_expr, arg_list;
2534 #endif
2535 rtx retval = 0;
2537 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2538 just move a zero. Otherwise, do this a piece at a time. */
2539 if (GET_MODE (object) != BLKmode
2540 && GET_CODE (size) == CONST_INT
2541 && GET_MODE_SIZE (GET_MODE (object)) == INTVAL (size))
2542 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2543 else
2545 object = protect_from_queue (object, 1);
2546 size = protect_from_queue (size, 0);
2548 if (GET_CODE (size) == CONST_INT
2549 && MOVE_BY_PIECES_P (INTVAL (size), align))
2550 clear_by_pieces (object, INTVAL (size), align);
2551 else
2553 /* Try the most limited insn first, because there's no point
2554 including more than one in the machine description unless
2555 the more limited one has some advantage. */
2557 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2558 enum machine_mode mode;
2560 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2561 mode = GET_MODE_WIDER_MODE (mode))
2563 enum insn_code code = clrstr_optab[(int) mode];
2564 insn_operand_predicate_fn pred;
2566 if (code != CODE_FOR_nothing
2567 /* We don't need MODE to be narrower than
2568 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2569 the mode mask, as it is returned by the macro, it will
2570 definitely be less than the actual mode mask. */
2571 && ((GET_CODE (size) == CONST_INT
2572 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2573 <= (GET_MODE_MASK (mode) >> 1)))
2574 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2575 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2576 || (*pred) (object, BLKmode))
2577 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2578 || (*pred) (opalign, VOIDmode)))
2580 rtx op1;
2581 rtx last = get_last_insn ();
2582 rtx pat;
2584 op1 = convert_to_mode (mode, size, 1);
2585 pred = insn_data[(int) code].operand[1].predicate;
2586 if (pred != 0 && ! (*pred) (op1, mode))
2587 op1 = copy_to_mode_reg (mode, op1);
2589 pat = GEN_FCN ((int) code) (object, op1, opalign);
2590 if (pat)
2592 emit_insn (pat);
2593 return 0;
2595 else
2596 delete_insns_since (last);
2600 /* OBJECT or SIZE may have been passed through protect_from_queue.
2602 It is unsafe to save the value generated by protect_from_queue
2603 and reuse it later. Consider what happens if emit_queue is
2604 called before the return value from protect_from_queue is used.
2606 Expansion of the CALL_EXPR below will call emit_queue before
2607 we are finished emitting RTL for argument setup. So if we are
2608 not careful we could get the wrong value for an argument.
2610 To avoid this problem we go ahead and emit code to copy OBJECT
2611 and SIZE into new pseudos. We can then place those new pseudos
2612 into an RTL_EXPR and use them later, even after a call to
2613 emit_queue.
2615 Note this is not strictly needed for library calls since they
2616 do not call emit_queue before loading their arguments. However,
2617 we may need to have library calls call emit_queue in the future
2618 since failing to do so could cause problems for targets which
2619 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2620 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2622 #ifdef TARGET_MEM_FUNCTIONS
2623 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2624 #else
2625 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2626 TREE_UNSIGNED (integer_type_node));
2627 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2628 #endif
2630 #ifdef TARGET_MEM_FUNCTIONS
2631 /* It is incorrect to use the libcall calling conventions to call
2632 memset in this context.
2634 This could be a user call to memset and the user may wish to
2635 examine the return value from memset.
2637 For targets where libcalls and normal calls have different
2638 conventions for returning pointers, we could end up generating
2639 incorrect code.
2641 So instead of using a libcall sequence we build up a suitable
2642 CALL_EXPR and expand the call in the normal fashion. */
2643 if (fn == NULL_TREE)
2645 tree fntype;
2647 /* This was copied from except.c; I don't know whether all of this is
2648 necessary in this context or not. */
2649 fn = get_identifier ("memset");
2650 fntype = build_pointer_type (void_type_node);
2651 fntype = build_function_type (fntype, NULL_TREE);
2652 fn = build_decl (FUNCTION_DECL, fn, fntype);
2653 ggc_add_tree_root (&fn, 1);
2654 DECL_EXTERNAL (fn) = 1;
2655 TREE_PUBLIC (fn) = 1;
2656 DECL_ARTIFICIAL (fn) = 1;
2657 make_decl_rtl (fn, NULL_PTR, 1);
2658 assemble_external (fn);
2661 /* We need to make an argument list for the function call.
2663 memset has three arguments, the first is a void * address, the
2664 second an integer with the initialization value, the last is a
2665 size_t byte count for the copy. */
2666 arg_list
2667 = build_tree_list (NULL_TREE,
2668 make_tree (build_pointer_type (void_type_node),
2669 object));
2670 TREE_CHAIN (arg_list)
2671 = build_tree_list (NULL_TREE,
2672 make_tree (integer_type_node, const0_rtx));
2673 TREE_CHAIN (TREE_CHAIN (arg_list))
2674 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2675 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2677 /* Now we have to build up the CALL_EXPR itself. */
2678 call_expr = build1 (ADDR_EXPR,
2679 build_pointer_type (TREE_TYPE (fn)), fn);
2680 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2681 call_expr, arg_list, NULL_TREE);
2682 TREE_SIDE_EFFECTS (call_expr) = 1;
2684 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2685 #else
2686 emit_library_call (bzero_libfunc, LCT_NORMAL,
2687 VOIDmode, 2, object, Pmode, size,
2688 TYPE_MODE (integer_type_node));
2689 #endif
2693 return retval;
2696 /* Generate code to copy Y into X.
2697 Both Y and X must have the same mode, except that
2698 Y can be a constant with VOIDmode.
2699 This mode cannot be BLKmode; use emit_block_move for that.
2701 Return the last instruction emitted. */
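/* Illustrative call (hypothetical):

       rtx insn = emit_move_insn (gen_reg_rtx (SImode), const0_rtx);

   emits a move of zero into a fresh SImode pseudo and hands back the
   emitted insn.  */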
2703 rtx
2704 emit_move_insn (x, y)
2705 rtx x, y;
2707 enum machine_mode mode = GET_MODE (x);
2709 x = protect_from_queue (x, 1);
2710 y = protect_from_queue (y, 0);
2712 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2713 abort ();
2715 /* Never force constant_p_rtx to memory. */
2716 if (GET_CODE (y) == CONSTANT_P_RTX)
2718 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2719 y = force_const_mem (mode, y);
2721 /* If X or Y are memory references, verify that their addresses are valid
2722 for the machine. */
2723 if (GET_CODE (x) == MEM
2724 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2725 && ! push_operand (x, GET_MODE (x)))
2726 || (flag_force_addr
2727 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2728 x = change_address (x, VOIDmode, XEXP (x, 0));
2730 if (GET_CODE (y) == MEM
2731 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2732 || (flag_force_addr
2733 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2734 y = change_address (y, VOIDmode, XEXP (y, 0));
2736 if (mode == BLKmode)
2737 abort ();
2739 return emit_move_insn_1 (x, y);
2742 /* Low level part of emit_move_insn.
2743 Called just like emit_move_insn, but assumes X and Y
2744 are basically valid. */
2746 rtx
2747 emit_move_insn_1 (x, y)
2748 rtx x, y;
2750 enum machine_mode mode = GET_MODE (x);
2751 enum machine_mode submode;
2752 enum mode_class class = GET_MODE_CLASS (mode);
2753 unsigned int i;
2755 if (mode >= MAX_MACHINE_MODE)
2756 abort ();
2758 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2759 return
2760 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2762 /* Expand complex moves by moving real part and imag part, if possible. */
2763 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2764 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2765 * BITS_PER_UNIT),
2766 (class == MODE_COMPLEX_INT
2767 ? MODE_INT : MODE_FLOAT),
2769 && (mov_optab->handlers[(int) submode].insn_code
2770 != CODE_FOR_nothing))
2772 /* Don't split destination if it is a stack push. */
2773 int stack = push_operand (x, GET_MODE (x));
2775 /* If this is a stack, push the highpart first, so it
2776 will be in the argument order.
2778 In that case, change_address is used only to convert
2779 the mode, not to change the address. */
2780 if (stack)
2782 /* Note that the real part always precedes the imag part in memory
2783 regardless of the machine's endianness. */
2784 #ifdef STACK_GROWS_DOWNWARD
2785 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2786 (gen_rtx_MEM (submode, XEXP (x, 0)),
2787 gen_imagpart (submode, y)));
2788 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2789 (gen_rtx_MEM (submode, XEXP (x, 0)),
2790 gen_realpart (submode, y)));
2791 #else
2792 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2793 (gen_rtx_MEM (submode, XEXP (x, 0)),
2794 gen_realpart (submode, y)));
2795 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2796 (gen_rtx_MEM (submode, XEXP (x, 0)),
2797 gen_imagpart (submode, y)));
2798 #endif
2800 else
2802 rtx realpart_x, realpart_y;
2803 rtx imagpart_x, imagpart_y;
2805 /* If this is a complex value with each part being smaller than a
2806 word, the usual calling sequence will likely pack the pieces into
2807 a single register. Unfortunately, SUBREG of hard registers only
2808 deals in terms of words, so we have a problem converting input
2809 arguments to the CONCAT of two registers that is used elsewhere
2810 for complex values. If this is before reload, we can copy it into
2811 memory and reload. FIXME, we should see about using extract and
2812 insert on integer registers, but complex short and complex char
2813 variables should be rarely used. */
2814 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2815 && (reload_in_progress | reload_completed) == 0)
2817 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2818 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2820 if (packed_dest_p || packed_src_p)
2822 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2823 ? MODE_FLOAT : MODE_INT);
2825 enum machine_mode reg_mode =
2826 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2828 if (reg_mode != BLKmode)
2830 rtx mem = assign_stack_temp (reg_mode,
2831 GET_MODE_SIZE (mode), 0);
2833 rtx cmem = change_address (mem, mode, NULL_RTX);
2835 cfun->cannot_inline = N_("function using short complex types cannot be inline");
2837 if (packed_dest_p)
2839 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2840 emit_move_insn_1 (cmem, y);
2841 return emit_move_insn_1 (sreg, mem);
2843 else
2845 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2846 emit_move_insn_1 (mem, sreg);
2847 return emit_move_insn_1 (x, cmem);
2853 realpart_x = gen_realpart (submode, x);
2854 realpart_y = gen_realpart (submode, y);
2855 imagpart_x = gen_imagpart (submode, x);
2856 imagpart_y = gen_imagpart (submode, y);
2858 /* Show the output dies here. This is necessary for SUBREGs
2859 of pseudos since we cannot track their lifetimes correctly;
2860 hard regs shouldn't appear here except as return values.
2861 We never want to emit such a clobber after reload. */
2862 if (x != y
2863 && ! (reload_in_progress || reload_completed)
2864 && (GET_CODE (realpart_x) == SUBREG
2865 || GET_CODE (imagpart_x) == SUBREG))
2867 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2870 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2871 (realpart_x, realpart_y));
2872 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2873 (imagpart_x, imagpart_y));
2876 return get_last_insn ();
2879 /* This will handle any multi-word mode that lacks a move_insn pattern.
2880 However, you will get better code if you define such patterns,
2881 even if they must turn into multiple assembler instructions. */
2882 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2884 rtx last_insn = 0;
2885 rtx seq, inner;
2886 int need_clobber;
2888 #ifdef PUSH_ROUNDING
2890 /* If X is a push on the stack, do the push now and replace
2891 X with a reference to the stack pointer. */
2892 if (push_operand (x, GET_MODE (x)))
2894 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2895 x = change_address (x, VOIDmode, stack_pointer_rtx);
2897 #endif
2899 /* If we are in reload, see if either operand is a MEM whose address
2900 is scheduled for replacement. */
2901 if (reload_in_progress && GET_CODE (x) == MEM
2902 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2904 rtx new = gen_rtx_MEM (GET_MODE (x), inner);
2906 MEM_COPY_ATTRIBUTES (new, x);
2907 x = new;
2909 if (reload_in_progress && GET_CODE (y) == MEM
2910 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2912 rtx new = gen_rtx_MEM (GET_MODE (y), inner);
2914 MEM_COPY_ATTRIBUTES (new, y);
2915 y = new;
2918 start_sequence ();
2920 need_clobber = 0;
2921 for (i = 0;
2922 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2923 i++)
2925 rtx xpart = operand_subword (x, i, 1, mode);
2926 rtx ypart = operand_subword (y, i, 1, mode);
2928 /* If we can't get a part of Y, put Y into memory if it is a
2929 constant. Otherwise, force it into a register. If we still
2930 can't get a part of Y, abort. */
2931 if (ypart == 0 && CONSTANT_P (y))
2933 y = force_const_mem (mode, y);
2934 ypart = operand_subword (y, i, 1, mode);
2936 else if (ypart == 0)
2937 ypart = operand_subword_force (y, i, mode);
2939 if (xpart == 0 || ypart == 0)
2940 abort ();
2942 need_clobber |= (GET_CODE (xpart) == SUBREG);
2944 last_insn = emit_move_insn (xpart, ypart);
2947 seq = gen_sequence ();
2948 end_sequence ();
2950 /* Show the output dies here. This is necessary for SUBREGs
2951 of pseudos since we cannot track their lifetimes correctly;
2952 hard regs shouldn't appear here except as return values.
2953 We never want to emit such a clobber after reload. */
2954 if (x != y
2955 && ! (reload_in_progress || reload_completed)
2956 && need_clobber != 0)
2958 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2961 emit_insn (seq);
2963 return last_insn;
2965 else
2966 abort ();
2969 /* Pushing data onto the stack. */
2971 /* Push a block of length SIZE (perhaps variable)
2972 and return an rtx to address the beginning of the block.
2973 Note that it is not possible for the value returned to be a QUEUED.
2974 The value may be virtual_outgoing_args_rtx.
2976 EXTRA is the number of bytes of padding to push in addition to SIZE.
2977 BELOW nonzero means this padding comes at low addresses;
2978 otherwise, the padding comes at high addresses. */
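/* Illustrative call (hypothetical): push_block (GEN_INT (16), 0, 0)
   adjusts the stack for a 16-byte block and returns an rtx addressing
   the start of that block.  */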
2980 rtx
2981 push_block (size, extra, below)
2982 rtx size;
2983 int extra, below;
2985 register rtx temp;
2987 size = convert_modes (Pmode, ptr_mode, size, 1);
2988 if (CONSTANT_P (size))
2989 anti_adjust_stack (plus_constant (size, extra));
2990 else if (GET_CODE (size) == REG && extra == 0)
2991 anti_adjust_stack (size);
2992 else
2994 temp = copy_to_mode_reg (Pmode, size);
2995 if (extra != 0)
2996 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2997 temp, 0, OPTAB_LIB_WIDEN);
2998 anti_adjust_stack (temp);
3001 #ifndef STACK_GROWS_DOWNWARD
3002 #ifdef ARGS_GROW_DOWNWARD
3003 if (!ACCUMULATE_OUTGOING_ARGS)
3004 #else
3005 if (0)
3006 #endif
3007 #else
3008 if (1)
3009 #endif
3011 /* Return the lowest stack address when STACK or ARGS grow downward and
3012 we are not accumulating outgoing arguments (the c4x port uses such
3013 conventions). */
3014 temp = virtual_outgoing_args_rtx;
3015 if (extra != 0 && below)
3016 temp = plus_constant (temp, extra);
3018 else
3020 if (GET_CODE (size) == CONST_INT)
3021 temp = plus_constant (virtual_outgoing_args_rtx,
3022 -INTVAL (size) - (below ? 0 : extra));
3023 else if (extra != 0 && !below)
3024 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3025 negate_rtx (Pmode, plus_constant (size, extra)));
3026 else
3027 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3028 negate_rtx (Pmode, size));
3031 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3034 rtx
3035 gen_push_operand ()
3037 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3040 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3041 block of SIZE bytes. */
3043 static rtx
3044 get_push_address (size)
3045 int size;
3047 register rtx temp;
3049 if (STACK_PUSH_CODE == POST_DEC)
3050 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3051 else if (STACK_PUSH_CODE == POST_INC)
3052 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3053 else
3054 temp = stack_pointer_rtx;
3056 return copy_to_reg (temp);
3059 /* Generate code to push X onto the stack, assuming it has mode MODE and
3060 type TYPE.
3061 MODE is redundant except when X is a CONST_INT (since they don't
3062 carry mode info).
3063 SIZE is an rtx for the size of data to be copied (in bytes),
3064 needed only if X is BLKmode.
3066 ALIGN is maximum alignment we can assume.
3068 If PARTIAL and REG are both nonzero, then copy that many of the first
3069 words of X into registers starting with REG, and push the rest of X.
3070 The amount of space pushed is decreased by PARTIAL words,
3071 rounded *down* to a multiple of PARM_BOUNDARY.
3072 REG must be a hard register in this case.
3073 If REG is zero but PARTIAL is not, take all other actions for an
3074 argument partially in registers, but do not actually load any
3075 registers.
3077 EXTRA is the amount in bytes of extra space to leave next to this arg.
3078 This is ignored if an argument block has already been allocated.
3080 On a machine that lacks real push insns, ARGS_ADDR is the address of
3081 the bottom of the argument block for this call. We use indexing off there
3082 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3083 argument block has not been preallocated.
3085 ARGS_SO_FAR is the size of args previously pushed for this call.
3087 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3088 for arguments passed in registers. If nonzero, it will be the number
3089 of bytes required. */
3091 void
3092 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3093 args_addr, args_so_far, reg_parm_stack_space,
3094 alignment_pad)
3095 register rtx x;
3096 enum machine_mode mode;
3097 tree type;
3098 rtx size;
3099 unsigned int align;
3100 int partial;
3101 rtx reg;
3102 int extra;
3103 rtx args_addr;
3104 rtx args_so_far;
3105 int reg_parm_stack_space;
3106 rtx alignment_pad;
3108 rtx xinner;
3109 enum direction stack_direction
3110 #ifdef STACK_GROWS_DOWNWARD
3111 = downward;
3112 #else
3113 = upward;
3114 #endif
3116 /* Decide where to pad the argument: `downward' for below,
3117 `upward' for above, or `none' for don't pad it.
3118 Default is below for small data on big-endian machines; else above. */
3119 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3121 /* Invert direction if stack is post-update. */
3122 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
3123 if (where_pad != none)
3124 where_pad = (where_pad == downward ? upward : downward);
3126 xinner = x = protect_from_queue (x, 0);
3128 if (mode == BLKmode)
3130 /* Copy a block into the stack, entirely or partially. */
3132 register rtx temp;
3133 int used = partial * UNITS_PER_WORD;
3134 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3135 int skip;
3137 if (size == 0)
3138 abort ();
3140 used -= offset;
3142 /* USED is now the # of bytes we need not copy to the stack
3143 because registers will take care of them. */
3145 if (partial != 0)
3146 xinner = change_address (xinner, BLKmode,
3147 plus_constant (XEXP (xinner, 0), used));
3149 /* If the partial register-part of the arg counts in its stack size,
3150 skip the part of stack space corresponding to the registers.
3151 Otherwise, start copying to the beginning of the stack space,
3152 by setting SKIP to 0. */
3153 skip = (reg_parm_stack_space == 0) ? 0 : used;
3155 #ifdef PUSH_ROUNDING
3156 /* Do it with several push insns if that doesn't take lots of insns
3157 and if there is no difficulty with push insns that skip bytes
3158 on the stack for alignment purposes. */
3159 if (args_addr == 0
3160 && PUSH_ARGS
3161 && GET_CODE (size) == CONST_INT
3162 && skip == 0
3163 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3164 /* Here we avoid the case of a structure whose weak alignment
3165 forces many pushes of a small amount of data,
3166 and such small pushes do rounding that causes trouble. */
3167 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3168 || align >= BIGGEST_ALIGNMENT
3169 || PUSH_ROUNDING (align) == align)
3170 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3172 /* Push padding now if padding above and stack grows down,
3173 or if padding below and stack grows up.
3174 But if space already allocated, this has already been done. */
3175 if (extra && args_addr == 0
3176 && where_pad != none && where_pad != stack_direction)
3177 anti_adjust_stack (GEN_INT (extra));
3179 stack_pointer_delta += INTVAL (size) - used;
3180 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
3181 INTVAL (size) - used, align);
3183 if (current_function_check_memory_usage && ! in_check_memory_usage)
3185 rtx temp;
3187 in_check_memory_usage = 1;
3188 temp = get_push_address (INTVAL (size) - used);
3189 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3190 emit_library_call (chkr_copy_bitmap_libfunc,
3191 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3192 Pmode, XEXP (xinner, 0), Pmode,
3193 GEN_INT (INTVAL (size) - used),
3194 TYPE_MODE (sizetype));
3195 else
3196 emit_library_call (chkr_set_right_libfunc,
3197 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3198 Pmode, GEN_INT (INTVAL (size) - used),
3199 TYPE_MODE (sizetype),
3200 GEN_INT (MEMORY_USE_RW),
3201 TYPE_MODE (integer_type_node));
3202 in_check_memory_usage = 0;
3205 else
3206 #endif /* PUSH_ROUNDING */
3208 rtx target;
3210 /* Otherwise make space on the stack and copy the data
3211 to the address of that space. */
3213 /* Deduct words put into registers from the size we must copy. */
3214 if (partial != 0)
3216 if (GET_CODE (size) == CONST_INT)
3217 size = GEN_INT (INTVAL (size) - used);
3218 else
3219 size = expand_binop (GET_MODE (size), sub_optab, size,
3220 GEN_INT (used), NULL_RTX, 0,
3221 OPTAB_LIB_WIDEN);
3224 /* Get the address of the stack space.
3225 In this case, we do not deal with EXTRA separately.
3226 A single stack adjust will do. */
3227 if (! args_addr)
3229 temp = push_block (size, extra, where_pad == downward);
3230 extra = 0;
3232 else if (GET_CODE (args_so_far) == CONST_INT)
3233 temp = memory_address (BLKmode,
3234 plus_constant (args_addr,
3235 skip + INTVAL (args_so_far)));
3236 else
3237 temp = memory_address (BLKmode,
3238 plus_constant (gen_rtx_PLUS (Pmode,
3239 args_addr,
3240 args_so_far),
3241 skip));
3242 if (current_function_check_memory_usage && ! in_check_memory_usage)
3244 in_check_memory_usage = 1;
3245 target = copy_to_reg (temp);
3246 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3247 emit_library_call (chkr_copy_bitmap_libfunc,
3248 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3249 target, Pmode,
3250 XEXP (xinner, 0), Pmode,
3251 size, TYPE_MODE (sizetype));
3252 else
3253 emit_library_call (chkr_set_right_libfunc,
3254 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3255 target, Pmode,
3256 size, TYPE_MODE (sizetype),
3257 GEN_INT (MEMORY_USE_RW),
3258 TYPE_MODE (integer_type_node));
3259 in_check_memory_usage = 0;
3262 target = gen_rtx_MEM (BLKmode, temp);
3264 if (type != 0)
3266 set_mem_attributes (target, type, 1);
3267 /* Function incoming arguments may overlap with sibling call
3268 outgoing arguments and we cannot allow reordering of reads
3269 from function arguments with stores to outgoing arguments
3270 of sibling calls. */
3271 MEM_ALIAS_SET (target) = 0;
3274 /* TEMP is the address of the block. Copy the data there. */
3275 if (GET_CODE (size) == CONST_INT
3276 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3278 move_by_pieces (target, xinner, INTVAL (size), align);
3279 goto ret;
3281 else
3283 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3284 enum machine_mode mode;
3286 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3287 mode != VOIDmode;
3288 mode = GET_MODE_WIDER_MODE (mode))
3290 enum insn_code code = movstr_optab[(int) mode];
3291 insn_operand_predicate_fn pred;
3293 if (code != CODE_FOR_nothing
3294 && ((GET_CODE (size) == CONST_INT
3295 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3296 <= (GET_MODE_MASK (mode) >> 1)))
3297 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3298 && (!(pred = insn_data[(int) code].operand[0].predicate)
3299 || ((*pred) (target, BLKmode)))
3300 && (!(pred = insn_data[(int) code].operand[1].predicate)
3301 || ((*pred) (xinner, BLKmode)))
3302 && (!(pred = insn_data[(int) code].operand[3].predicate)
3303 || ((*pred) (opalign, VOIDmode))))
3305 rtx op2 = convert_to_mode (mode, size, 1);
3306 rtx last = get_last_insn ();
3307 rtx pat;
3309 pred = insn_data[(int) code].operand[2].predicate;
3310 if (pred != 0 && ! (*pred) (op2, mode))
3311 op2 = copy_to_mode_reg (mode, op2);
3313 pat = GEN_FCN ((int) code) (target, xinner,
3314 op2, opalign);
3315 if (pat)
3317 emit_insn (pat);
3318 goto ret;
3320 else
3321 delete_insns_since (last);
3326 if (!ACCUMULATE_OUTGOING_ARGS)
3328 /* If the source is referenced relative to the stack pointer,
3329 copy it to another register to stabilize it. We do not need
3330 to do this if we know that we won't be changing sp. */
3332 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3333 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3334 temp = copy_to_reg (temp);
3337 /* Make inhibit_defer_pop nonzero around the library call
3338 to force it to pop the bcopy-arguments right away. */
3339 NO_DEFER_POP;
3340 #ifdef TARGET_MEM_FUNCTIONS
3341 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3342 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3343 convert_to_mode (TYPE_MODE (sizetype),
3344 size, TREE_UNSIGNED (sizetype)),
3345 TYPE_MODE (sizetype));
3346 #else
3347 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3348 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3349 convert_to_mode (TYPE_MODE (integer_type_node),
3350 size,
3351 TREE_UNSIGNED (integer_type_node)),
3352 TYPE_MODE (integer_type_node));
3353 #endif
3354 OK_DEFER_POP;
3357 else if (partial > 0)
3359 /* Scalar partly in registers. */
3361 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3362 int i;
3363 int not_stack;
3364 /* # words of start of argument
3365 that we must make space for but need not store. */
3366 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3367 int args_offset = INTVAL (args_so_far);
3368 int skip;
3370 /* Push padding now if padding above and stack grows down,
3371 or if padding below and stack grows up.
3372 But if space already allocated, this has already been done. */
3373 if (extra && args_addr == 0
3374 && where_pad != none && where_pad != stack_direction)
3375 anti_adjust_stack (GEN_INT (extra));
3377 /* If we make space by pushing it, we might as well push
3378 the real data. Otherwise, we can leave OFFSET nonzero
3379 and leave the space uninitialized. */
3380 if (args_addr == 0)
3381 offset = 0;
3383 /* Now NOT_STACK gets the number of words that we don't need to
3384 allocate on the stack. */
3385 not_stack = partial - offset;
3387 /* If the partial register-part of the arg counts in its stack size,
3388 skip the part of stack space corresponding to the registers.
3389 Otherwise, start copying to the beginning of the stack space,
3390 by setting SKIP to 0. */
3391 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3393 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3394 x = validize_mem (force_const_mem (mode, x));
3396 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3397 SUBREGs of such registers are not allowed. */
3398 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3399 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3400 x = copy_to_reg (x);
3402 /* Loop over all the words allocated on the stack for this arg. */
3403 /* We can do it by words, because any scalar bigger than a word
3404 has a size a multiple of a word. */
3405 #ifndef PUSH_ARGS_REVERSED
3406 for (i = not_stack; i < size; i++)
3407 #else
3408 for (i = size - 1; i >= not_stack; i--)
3409 #endif
3410 if (i >= not_stack + offset)
3411 emit_push_insn (operand_subword_force (x, i, mode),
3412 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3413 0, args_addr,
3414 GEN_INT (args_offset + ((i - not_stack + skip)
3415 * UNITS_PER_WORD)),
3416 reg_parm_stack_space, alignment_pad);
3418 else
3420 rtx addr;
3421 rtx target = NULL_RTX;
3422 rtx dest;
3424 /* Push padding now if padding above and stack grows down,
3425 or if padding below and stack grows up.
3426 But if space already allocated, this has already been done. */
3427 if (extra && args_addr == 0
3428 && where_pad != none && where_pad != stack_direction)
3429 anti_adjust_stack (GEN_INT (extra));
3431 #ifdef PUSH_ROUNDING
3432 if (args_addr == 0 && PUSH_ARGS)
3434 addr = gen_push_operand ();
3435 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3437 else
3438 #endif
3440 if (GET_CODE (args_so_far) == CONST_INT)
3441 addr
3442 = memory_address (mode,
3443 plus_constant (args_addr,
3444 INTVAL (args_so_far)));
3445 else
3446 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3447 args_so_far));
3448 target = addr;
3451 dest = gen_rtx_MEM (mode, addr);
3452 if (type != 0)
3454 set_mem_attributes (dest, type, 1);
3455 /* Function incoming arguments may overlap with sibling call
3456 outgoing arguments and we cannot allow reordering of reads
3457 from function arguments with stores to outgoing arguments
3458 of sibling calls. */
3459 MEM_ALIAS_SET (dest) = 0;
3462 emit_move_insn (dest, x);
3464 if (current_function_check_memory_usage && ! in_check_memory_usage)
3466 in_check_memory_usage = 1;
3467 if (target == 0)
3468 target = get_push_address (GET_MODE_SIZE (mode));
3470 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3471 emit_library_call (chkr_copy_bitmap_libfunc,
3472 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3473 Pmode, XEXP (x, 0), Pmode,
3474 GEN_INT (GET_MODE_SIZE (mode)),
3475 TYPE_MODE (sizetype));
3476 else
3477 emit_library_call (chkr_set_right_libfunc,
3478 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3479 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3480 TYPE_MODE (sizetype),
3481 GEN_INT (MEMORY_USE_RW),
3482 TYPE_MODE (integer_type_node));
3483 in_check_memory_usage = 0;
3487 ret:
3488 /* If part should go in registers, copy that part
3489 into the appropriate registers. Do this now, at the end,
3490 since mem-to-mem copies above may do function calls. */
3491 if (partial > 0 && reg != 0)
3493 /* Handle calls that pass values in multiple non-contiguous locations.
3494 The Irix 6 ABI has examples of this. */
3495 if (GET_CODE (reg) == PARALLEL)
3496 emit_group_load (reg, x, -1, align); /* ??? size? */
3497 else
3498 move_block_to_reg (REGNO (reg), x, partial, mode);
3501 if (extra && args_addr == 0 && where_pad == stack_direction)
3502 anti_adjust_stack (GEN_INT (extra));
3504 if (alignment_pad && args_addr == 0)
3505 anti_adjust_stack (alignment_pad);
3508 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3509 operations. */
3511 static rtx
3512 get_subtarget (x)
3513 rtx x;
3515 return ((x == 0
3516 /* Only registers can be subtargets. */
3517 || GET_CODE (x) != REG
3518 /* If the register is readonly, it can't be set more than once. */
3519 || RTX_UNCHANGING_P (x)
3520 /* Don't use hard regs to avoid extending their life. */
3521 || REGNO (x) < FIRST_PSEUDO_REGISTER
3522 /* Avoid subtargets inside loops,
3523 since they hide some invariant expressions. */
3524 || preserve_subexpressions_p ())
3525 ? 0 : x);
3528 /* Expand an assignment that stores the value of FROM into TO.
3529 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3530 (This may contain a QUEUED rtx;
3531 if the value is constant, this rtx is a constant.)
3532 Otherwise, the returned value is NULL_RTX.
3534 SUGGEST_REG is no longer actually used.
3535 It used to mean, copy the value through a register
3536 and return that register, if that is possible.
3537 We now use WANT_VALUE to decide whether to do this. */
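/* Illustrative use (hypothetical): for a C assignment `s.f = x' the
   front end hands the COMPONENT_REF for `s.f' and the tree for `x' to
   expand_assignment (to, from, 0, 0), which takes the bit-field path
   below when the rtx for `s.f' is not a simple MEM.  */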
3539 rtx
3540 expand_assignment (to, from, want_value, suggest_reg)
3541 tree to, from;
3542 int want_value;
3543 int suggest_reg ATTRIBUTE_UNUSED;
3545 register rtx to_rtx = 0;
3546 rtx result;
3548 /* Don't crash if the lhs of the assignment was erroneous. */
3550 if (TREE_CODE (to) == ERROR_MARK)
3552 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3553 return want_value ? result : NULL_RTX;
3556 /* Assignment of a structure component needs special treatment
3557 if the structure component's rtx is not simply a MEM.
3558 Assignment of an array element at a constant index, and assignment of
3559 an array element in an unaligned packed structure field, has the same
3560 problem. */
3562 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3563 || TREE_CODE (to) == ARRAY_REF)
3565 enum machine_mode mode1;
3566 HOST_WIDE_INT bitsize, bitpos;
3567 tree offset;
3568 int unsignedp;
3569 int volatilep = 0;
3570 tree tem;
3571 unsigned int alignment;
3573 push_temp_slots ();
3574 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3575 &unsignedp, &volatilep, &alignment);
3577 /* If we are going to use store_bit_field and extract_bit_field,
3578 make sure to_rtx will be safe for multiple use. */
3580 if (mode1 == VOIDmode && want_value)
3581 tem = stabilize_reference (tem);
3583 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3584 if (offset != 0)
3586 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3588 if (GET_CODE (to_rtx) != MEM)
3589 abort ();
3591 if (GET_MODE (offset_rtx) != ptr_mode)
3593 #ifdef POINTERS_EXTEND_UNSIGNED
3594 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3595 #else
3596 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3597 #endif
3600 /* A constant address in TO_RTX can have VOIDmode, we must not try
3601 to call force_reg for that case. Avoid that case. */
3602 if (GET_CODE (to_rtx) == MEM
3603 && GET_MODE (to_rtx) == BLKmode
3604 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3605 && bitsize
3606 && (bitpos % bitsize) == 0
3607 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3608 && alignment == GET_MODE_ALIGNMENT (mode1))
3610 rtx temp = change_address (to_rtx, mode1,
3611 plus_constant (XEXP (to_rtx, 0),
3612 (bitpos /
3613 BITS_PER_UNIT)));
3614 if (GET_CODE (XEXP (temp, 0)) == REG)
3615 to_rtx = temp;
3616 else
3617 to_rtx = change_address (to_rtx, mode1,
3618 force_reg (GET_MODE (XEXP (temp, 0)),
3619 XEXP (temp, 0)));
3620 bitpos = 0;
3623 to_rtx = change_address (to_rtx, VOIDmode,
3624 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3625 force_reg (ptr_mode,
3626 offset_rtx)));
3629 if (volatilep)
3631 if (GET_CODE (to_rtx) == MEM)
3633 /* When the offset is zero, to_rtx is the address of the
3634 structure we are storing into, and hence may be shared.
3635 We must make a new MEM before setting the volatile bit. */
3636 if (offset == 0)
3637 to_rtx = copy_rtx (to_rtx);
3639 MEM_VOLATILE_P (to_rtx) = 1;
3641 #if 0 /* This was turned off because, when a field is volatile
3642 in an object which is not volatile, the object may be in a register,
3643 and then we would abort over here. */
3644 else
3645 abort ();
3646 #endif
3649 if (TREE_CODE (to) == COMPONENT_REF
3650 && TREE_READONLY (TREE_OPERAND (to, 1)))
3652 if (offset == 0)
3653 to_rtx = copy_rtx (to_rtx);
3655 RTX_UNCHANGING_P (to_rtx) = 1;
3658 /* Check the access. */
3659 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3661 rtx to_addr;
3662 int size;
3663 int best_mode_size;
3664 enum machine_mode best_mode;
3666 best_mode = get_best_mode (bitsize, bitpos,
3667 TYPE_ALIGN (TREE_TYPE (tem)),
3668 mode1, volatilep);
3669 if (best_mode == VOIDmode)
3670 best_mode = QImode;
3672 best_mode_size = GET_MODE_BITSIZE (best_mode);
3673 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3674 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3675 size *= GET_MODE_SIZE (best_mode);
3677 /* Check the access right of the pointer. */
3678 in_check_memory_usage = 1;
3679 if (size)
3680 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3681 VOIDmode, 3, to_addr, Pmode,
3682 GEN_INT (size), TYPE_MODE (sizetype),
3683 GEN_INT (MEMORY_USE_WO),
3684 TYPE_MODE (integer_type_node));
3685 in_check_memory_usage = 0;
3688 /* If this is a varying-length object, we must get the address of
3689 the source and do an explicit block move. */
3690 if (bitsize < 0)
3692 unsigned int from_align;
3693 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3694 rtx inner_to_rtx
3695 = change_address (to_rtx, VOIDmode,
3696 plus_constant (XEXP (to_rtx, 0),
3697 bitpos / BITS_PER_UNIT));
3699 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3700 MIN (alignment, from_align));
3701 free_temp_slots ();
3702 pop_temp_slots ();
3703 return to_rtx;
3705 else
3707 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3708 (want_value
3709 /* Spurious cast for HPUX compiler. */
3710 ? ((enum machine_mode)
3711 TYPE_MODE (TREE_TYPE (to)))
3712 : VOIDmode),
3713 unsignedp,
3714 alignment,
3715 int_size_in_bytes (TREE_TYPE (tem)),
3716 get_alias_set (to));
3718 preserve_temp_slots (result);
3719 free_temp_slots ();
3720 pop_temp_slots ();
3722 /* If the value is meaningful, convert RESULT to the proper mode.
3723 Otherwise, return nothing. */
3724 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3725 TYPE_MODE (TREE_TYPE (from)),
3726 result,
3727 TREE_UNSIGNED (TREE_TYPE (to)))
3728 : NULL_RTX);
3732 /* If the rhs is a function call and its value is not an aggregate,
3733 call the function before we start to compute the lhs.
3734 This is needed for correct code for cases such as
3735 val = setjmp (buf) on machines where reference to val
3736 requires loading up part of an address in a separate insn.
3738 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3739 since it might be a promoted variable where the zero- or sign- extension
3740 needs to be done. Handling this in the normal way is safe because no
3741 computation is done before the call. */
3742 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3743 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3744 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3745 && GET_CODE (DECL_RTL (to)) == REG))
3747 rtx value;
3749 push_temp_slots ();
3750 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3751 if (to_rtx == 0)
3752 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3754 /* Handle calls that return values in multiple non-contiguous locations.
3755 The Irix 6 ABI has examples of this. */
3756 if (GET_CODE (to_rtx) == PARALLEL)
3757 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3758 TYPE_ALIGN (TREE_TYPE (from)));
3759 else if (GET_MODE (to_rtx) == BLKmode)
3760 emit_block_move (to_rtx, value, expr_size (from),
3761 TYPE_ALIGN (TREE_TYPE (from)));
3762 else
3764 #ifdef POINTERS_EXTEND_UNSIGNED
3765 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3766 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3767 value = convert_memory_address (GET_MODE (to_rtx), value);
3768 #endif
3769 emit_move_insn (to_rtx, value);
3771 preserve_temp_slots (to_rtx);
3772 free_temp_slots ();
3773 pop_temp_slots ();
3774 return want_value ? to_rtx : NULL_RTX;
3777 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3778 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3780 if (to_rtx == 0)
3782 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3783 if (GET_CODE (to_rtx) == MEM)
3784 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3787 /* Don't move directly into a return register. */
3788 if (TREE_CODE (to) == RESULT_DECL
3789 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3791 rtx temp;
3793 push_temp_slots ();
3794 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3796 if (GET_CODE (to_rtx) == PARALLEL)
3797 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3798 TYPE_ALIGN (TREE_TYPE (from)));
3799 else
3800 emit_move_insn (to_rtx, temp);
3802 preserve_temp_slots (to_rtx);
3803 free_temp_slots ();
3804 pop_temp_slots ();
3805 return want_value ? to_rtx : NULL_RTX;
3808 /* In case we are returning the contents of an object which overlaps
3809 the place the value is being stored, use a safe function when copying
3810 a value through a pointer into a structure value return block. */
3811 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3812 && current_function_returns_struct
3813 && !current_function_returns_pcc_struct)
3815 rtx from_rtx, size;
3817 push_temp_slots ();
3818 size = expr_size (from);
3819 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3820 EXPAND_MEMORY_USE_DONT);
3822 /* Copy the rights of the bitmap. */
3823 if (current_function_check_memory_usage)
3824 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3825 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3826 XEXP (from_rtx, 0), Pmode,
3827 convert_to_mode (TYPE_MODE (sizetype),
3828 size, TREE_UNSIGNED (sizetype)),
3829 TYPE_MODE (sizetype));
3831 #ifdef TARGET_MEM_FUNCTIONS
3832 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3833 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3834 XEXP (from_rtx, 0), Pmode,
3835 convert_to_mode (TYPE_MODE (sizetype),
3836 size, TREE_UNSIGNED (sizetype)),
3837 TYPE_MODE (sizetype));
3838 #else
3839 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3840 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3841 XEXP (to_rtx, 0), Pmode,
3842 convert_to_mode (TYPE_MODE (integer_type_node),
3843 size, TREE_UNSIGNED (integer_type_node)),
3844 TYPE_MODE (integer_type_node));
3845 #endif
3847 preserve_temp_slots (to_rtx);
3848 free_temp_slots ();
3849 pop_temp_slots ();
3850 return want_value ? to_rtx : NULL_RTX;
3853 /* Compute FROM and store the value in the rtx we got. */
3855 push_temp_slots ();
3856 result = store_expr (from, to_rtx, want_value);
3857 preserve_temp_slots (result);
3858 free_temp_slots ();
3859 pop_temp_slots ();
3860 return want_value ? result : NULL_RTX;
3863 /* Generate code for computing expression EXP,
3864 and storing the value into TARGET.
3865 TARGET may contain a QUEUED rtx.
3867 If WANT_VALUE is nonzero, return a copy of the value
3868 not in TARGET, so that we can be sure to use the proper
3869 value in a containing expression even if TARGET has something
3870 else stored in it. If possible, we copy the value through a pseudo
3871 and return that pseudo. Or, if the value is constant, we try to
3872 return the constant. In some cases, we return a pseudo
3873 copied *from* TARGET.
3875 If the mode is BLKmode then we may return TARGET itself.
3876 It turns out that in BLKmode it doesn't cause a problem,
3877 because C has no operators that could combine two different
3878 assignments into the same BLKmode object with different values
3879 with no sequence point. Will other languages need this to
3880 be more thorough?
3882 If WANT_VALUE is 0, we return NULL, to make sure
3883 to catch quickly any cases where the caller uses the value
3884 and fails to set WANT_VALUE. */
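/* A rough usage sketch: a caller that only needs the side effect can
   say
       store_expr (rhs, to_rtx, 0);
   while expand_assignment above does
       result = store_expr (from, to_rtx, want_value);
   and, when WANT_VALUE is nonzero, may hand RESULT (typically a
   pseudo or a constant) to a containing expression instead of
   re-reading TO_RTX.  RHS here is just a placeholder name.  */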
3887 store_expr (exp, target, want_value)
3888 register tree exp;
3889 register rtx target;
3890 int want_value;
3892 register rtx temp;
3893 int dont_return_target = 0;
3895 if (TREE_CODE (exp) == COMPOUND_EXPR)
3897 /* Perform first part of compound expression, then assign from second
3898 part. */
3899 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3900 emit_queue ();
3901 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3903 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3905 /* For conditional expression, get safe form of the target. Then
3906 test the condition, doing the appropriate assignment on either
3907 side. This avoids the creation of unnecessary temporaries.
3908 For non-BLKmode, it is more efficient not to do this. */
3910 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3912 emit_queue ();
3913 target = protect_from_queue (target, 1);
3915 do_pending_stack_adjust ();
3916 NO_DEFER_POP;
3917 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3918 start_cleanup_deferral ();
3919 store_expr (TREE_OPERAND (exp, 1), target, 0);
3920 end_cleanup_deferral ();
3921 emit_queue ();
3922 emit_jump_insn (gen_jump (lab2));
3923 emit_barrier ();
3924 emit_label (lab1);
3925 start_cleanup_deferral ();
3926 store_expr (TREE_OPERAND (exp, 2), target, 0);
3927 end_cleanup_deferral ();
3928 emit_queue ();
3929 emit_label (lab2);
3930 OK_DEFER_POP;
3932 return want_value ? target : NULL_RTX;
3934 else if (queued_subexp_p (target))
3935 /* If target contains a postincrement, let's not risk
3936 using it as the place to generate the rhs. */
3938 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3940 /* Expand EXP into a new pseudo. */
3941 temp = gen_reg_rtx (GET_MODE (target));
3942 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3944 else
3945 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3947 /* If target is volatile, ANSI requires accessing the value
3948 *from* the target, if it is accessed. So make that happen.
3949 In no case return the target itself. */
3950 if (! MEM_VOLATILE_P (target) && want_value)
3951 dont_return_target = 1;
3953 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3954 && GET_MODE (target) != BLKmode)
3955 /* If target is in memory and caller wants value in a register instead,
3956 arrange that. Pass TARGET as target for expand_expr so that,
3957 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3958 We know expand_expr will not use the target in that case.
3959 Don't do this if TARGET is volatile because we are supposed
3960 to write it and then read it. */
3962 temp = expand_expr (exp, target, GET_MODE (target), 0);
3963 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3964 temp = copy_to_reg (temp);
3965 dont_return_target = 1;
3967 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3968 /* If this is a scalar in a register that is stored in a wider mode
3969 than the declared mode, compute the result into its declared mode
3970 and then convert to the wider mode. Our value is the computed
3971 expression. */
3973 /* If we don't want a value, we can do the conversion inside EXP,
3974 which will often result in some optimizations. Do the conversion
3975 in two steps: first change the signedness, if needed, then
3976 the extend. But don't do this if the type of EXP is a subtype
3977 of something else since then the conversion might involve
3978 more than just converting modes. */
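/* For instance, if EXP has type signed char but the variable was
   promoted to an unsigned SImode register, EXP is first converted to
   the corresponding unsigned char type and only then to the SImode
   type, so the signedness change happens at the narrow width.  The
   particular types are a hypothetical illustration.  */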
3979 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3980 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3982 if (TREE_UNSIGNED (TREE_TYPE (exp))
3983 != SUBREG_PROMOTED_UNSIGNED_P (target))
3984 exp
3985 = convert
3986 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3987 TREE_TYPE (exp)),
3988 exp);
3990 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3991 SUBREG_PROMOTED_UNSIGNED_P (target)),
3992 exp);
3995 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3997 /* If TEMP is a volatile MEM and we want a result value, make
3998 the access now so it gets done only once. Likewise if
3999 it contains TARGET. */
4000 if (GET_CODE (temp) == MEM && want_value
4001 && (MEM_VOLATILE_P (temp)
4002 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4003 temp = copy_to_reg (temp);
4005 /* If TEMP is a VOIDmode constant, use convert_modes to make
4006 sure that we properly convert it. */
4007 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4008 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4009 TYPE_MODE (TREE_TYPE (exp)), temp,
4010 SUBREG_PROMOTED_UNSIGNED_P (target));
4012 convert_move (SUBREG_REG (target), temp,
4013 SUBREG_PROMOTED_UNSIGNED_P (target));
4015 /* If we promoted a constant, change the mode back down to match
4016 target. Otherwise, the caller might get confused by a result whose
4017 mode is larger than expected. */
4019 if (want_value && GET_MODE (temp) != GET_MODE (target)
4020 && GET_MODE (temp) != VOIDmode)
4022 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
4023 SUBREG_PROMOTED_VAR_P (temp) = 1;
4024 SUBREG_PROMOTED_UNSIGNED_P (temp)
4025 = SUBREG_PROMOTED_UNSIGNED_P (target);
4028 return want_value ? temp : NULL_RTX;
4030 else
4032 temp = expand_expr (exp, target, GET_MODE (target), 0);
4033 /* Return TARGET if it's a specified hardware register.
4034 If TARGET is a volatile mem ref, either return TARGET
4035 or return a reg copied *from* TARGET; ANSI requires this.
4037 Otherwise, if TEMP is not TARGET, return TEMP
4038 if it is constant (for efficiency),
4039 or if we really want the correct value. */
4040 if (!(target && GET_CODE (target) == REG
4041 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4042 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4043 && ! rtx_equal_p (temp, target)
4044 && (CONSTANT_P (temp) || want_value))
4045 dont_return_target = 1;
4048 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4049 the same as that of TARGET, adjust the constant. This is needed, for
4050 example, in case it is a CONST_DOUBLE and we want only a word-sized
4051 value. */
4052 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4053 && TREE_CODE (exp) != ERROR_MARK
4054 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4055 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4056 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4058 if (current_function_check_memory_usage
4059 && GET_CODE (target) == MEM
4060 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4062 in_check_memory_usage = 1;
4063 if (GET_CODE (temp) == MEM)
4064 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4065 VOIDmode, 3, XEXP (target, 0), Pmode,
4066 XEXP (temp, 0), Pmode,
4067 expr_size (exp), TYPE_MODE (sizetype));
4068 else
4069 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4070 VOIDmode, 3, XEXP (target, 0), Pmode,
4071 expr_size (exp), TYPE_MODE (sizetype),
4072 GEN_INT (MEMORY_USE_WO),
4073 TYPE_MODE (integer_type_node));
4074 in_check_memory_usage = 0;
4077 /* If value was not generated in the target, store it there.
4078 Convert the value to TARGET's type first if necessary. */
4079 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4080 one or both of them are volatile memory refs, we have to distinguish
4081 two cases:
4082 - expand_expr has used TARGET. In this case, we must not generate
4083 another copy. This can be detected by TARGET being equal according
4084 to == .
4085 - expand_expr has not used TARGET - that means that the source just
4086 happens to have the same RTX form. Since temp will have been created
4087 by expand_expr, it will compare unequal according to == .
4088 We must generate a copy in this case, to reach the correct number
4089 of volatile memory references. */
4091 if ((! rtx_equal_p (temp, target)
4092 || (temp != target && (side_effects_p (temp)
4093 || side_effects_p (target))))
4094 && TREE_CODE (exp) != ERROR_MARK)
4096 target = protect_from_queue (target, 1);
4097 if (GET_MODE (temp) != GET_MODE (target)
4098 && GET_MODE (temp) != VOIDmode)
4100 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4101 if (dont_return_target)
4103 /* In this case, we will return TEMP,
4104 so make sure it has the proper mode.
4105 But don't forget to store the value into TARGET. */
4106 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4107 emit_move_insn (target, temp);
4109 else
4110 convert_move (target, temp, unsignedp);
4113 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4115 /* Handle copying a string constant into an array.
4116 The string constant may be shorter than the array.
4117 So copy just the string's actual length, and clear the rest. */
4118 rtx size;
4119 rtx addr;
4121 /* Get the size of the data type of the string,
4122 which is actually the size of the target. */
4123 size = expr_size (exp);
4124 if (GET_CODE (size) == CONST_INT
4125 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4126 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
4127 else
4129 /* Compute the size of the data to copy from the string. */
4130 tree copy_size
4131 = size_binop (MIN_EXPR,
4132 make_tree (sizetype, size),
4133 size_int (TREE_STRING_LENGTH (exp)));
4134 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
4135 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4136 VOIDmode, 0);
4137 rtx label = 0;
4139 /* Copy that much. */
4140 emit_block_move (target, temp, copy_size_rtx,
4141 TYPE_ALIGN (TREE_TYPE (exp)));
4143 /* Figure out how much is left in TARGET that we have to clear.
4144 Do all calculations in ptr_mode. */
4146 addr = XEXP (target, 0);
4147 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4149 if (GET_CODE (copy_size_rtx) == CONST_INT)
4151 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4152 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4153 align = MIN (align, (BITS_PER_UNIT
4154 * (INTVAL (copy_size_rtx)
4155 & - INTVAL (copy_size_rtx))));
4157 else
4159 addr = force_reg (ptr_mode, addr);
4160 addr = expand_binop (ptr_mode, add_optab, addr,
4161 copy_size_rtx, NULL_RTX, 0,
4162 OPTAB_LIB_WIDEN);
4164 size = expand_binop (ptr_mode, sub_optab, size,
4165 copy_size_rtx, NULL_RTX, 0,
4166 OPTAB_LIB_WIDEN);
4168 align = BITS_PER_UNIT;
4169 label = gen_label_rtx ();
4170 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4171 GET_MODE (size), 0, 0, label);
4173 align = MIN (align, expr_align (copy_size));
4175 if (size != const0_rtx)
4177 rtx dest = gen_rtx_MEM (BLKmode, addr);
4179 MEM_COPY_ATTRIBUTES (dest, target);
4181 /* Be sure we can write on ADDR. */
4182 in_check_memory_usage = 1;
4183 if (current_function_check_memory_usage)
4184 emit_library_call (chkr_check_addr_libfunc,
4185 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4186 addr, Pmode,
4187 size, TYPE_MODE (sizetype),
4188 GEN_INT (MEMORY_USE_WO),
4189 TYPE_MODE (integer_type_node));
4190 in_check_memory_usage = 0;
4191 clear_storage (dest, size, align);
4194 if (label)
4195 emit_label (label);
4198 /* Handle calls that return values in multiple non-contiguous locations.
4199 The Irix 6 ABI has examples of this. */
4200 else if (GET_CODE (target) == PARALLEL)
4201 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4202 TYPE_ALIGN (TREE_TYPE (exp)));
4203 else if (GET_MODE (temp) == BLKmode)
4204 emit_block_move (target, temp, expr_size (exp),
4205 TYPE_ALIGN (TREE_TYPE (exp)));
4206 else
4207 emit_move_insn (target, temp);
4210 /* If we don't want a value, return NULL_RTX. */
4211 if (! want_value)
4212 return NULL_RTX;
4214 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4215 ??? The latter test doesn't seem to make sense. */
4216 else if (dont_return_target && GET_CODE (temp) != MEM)
4217 return temp;
4219 /* Return TARGET itself if it is a hard register. */
4220 else if (want_value && GET_MODE (target) != BLKmode
4221 && ! (GET_CODE (target) == REG
4222 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4223 return copy_to_reg (target);
4225 else
4226 return target;
4229 /* Return 1 if EXP just contains zeros. */
4231 static int
4232 is_zeros_p (exp)
4233 tree exp;
4235 tree elt;
4237 switch (TREE_CODE (exp))
4239 case CONVERT_EXPR:
4240 case NOP_EXPR:
4241 case NON_LVALUE_EXPR:
4242 return is_zeros_p (TREE_OPERAND (exp, 0));
4244 case INTEGER_CST:
4245 return integer_zerop (exp);
4247 case COMPLEX_CST:
4248 return
4249 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4251 case REAL_CST:
4252 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4254 case CONSTRUCTOR:
4255 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4256 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4257 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4258 if (! is_zeros_p (TREE_VALUE (elt)))
4259 return 0;
4261 return 1;
4263 default:
4264 return 0;
4268 /* Return 1 if EXP contains mostly (3/4) zeros. */
4270 static int
4271 mostly_zeros_p (exp)
4272 tree exp;
4274 if (TREE_CODE (exp) == CONSTRUCTOR)
4276 int elts = 0, zeros = 0;
4277 tree elt = CONSTRUCTOR_ELTS (exp);
4278 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4280 /* If there are no ranges of true bits, it is all zero. */
4281 return elt == NULL_TREE;
4283 for (; elt; elt = TREE_CHAIN (elt))
4285 /* We do not handle the case where the index is a RANGE_EXPR,
4286 so the statistic will be somewhat inaccurate.
4287 We do make a more accurate count in store_constructor itself,
4288 and since this function is only used for nested array elements,
4289 this should be close enough. */
4290 if (mostly_zeros_p (TREE_VALUE (elt)))
4291 zeros++;
4292 elts++;
4295 return 4 * zeros >= 3 * elts;
4298 return is_zeros_p (exp);
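/* A small worked example of the 3/4 test above: a constructor such as
   { 0, 0, 0, 5 } gives zeros == 3 and elts == 4, and 4 * 3 >= 3 * 4,
   so it counts as mostly zero; { 0, 0, 5, 5 } (zeros == 2) does not,
   since 4 * 2 < 3 * 4.  */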
4301 /* Helper function for store_constructor.
4302 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4303 TYPE is the type of the CONSTRUCTOR, not the element type.
4304 ALIGN and CLEARED are as for store_constructor.
4305 ALIAS_SET is the alias set to use for any stores.
4307 This provides a recursive shortcut back to store_constructor when it isn't
4308 necessary to go through store_field. This is so that we can pass through
4309 the cleared field to let store_constructor know that we may not have to
4310 clear a substructure if the outer structure has already been cleared. */
4312 static void
4313 store_constructor_field (target, bitsize, bitpos,
4314 mode, exp, type, align, cleared, alias_set)
4315 rtx target;
4316 unsigned HOST_WIDE_INT bitsize;
4317 HOST_WIDE_INT bitpos;
4318 enum machine_mode mode;
4319 tree exp, type;
4320 unsigned int align;
4321 int cleared;
4322 int alias_set;
4324 if (TREE_CODE (exp) == CONSTRUCTOR
4325 && bitpos % BITS_PER_UNIT == 0
4326 /* If we have a non-zero bitpos for a register target, then we just
4327 let store_field do the bitfield handling. This is unlikely to
4328 generate unnecessary clear instructions anyways. */
4329 && (bitpos == 0 || GET_CODE (target) == MEM))
4331 if (bitpos != 0)
4332 target
4333 = change_address (target,
4334 GET_MODE (target) == BLKmode
4335 || 0 != (bitpos
4336 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4337 ? BLKmode : VOIDmode,
4338 plus_constant (XEXP (target, 0),
4339 bitpos / BITS_PER_UNIT));
4341 if (GET_CODE (target) == MEM)
4342 MEM_ALIAS_SET (target) = alias_set;
4343 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4345 else
4346 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4347 int_size_in_bytes (type), alias_set);
4350 /* Store the value of constructor EXP into the rtx TARGET.
4351 TARGET is either a REG or a MEM.
4352 ALIGN is the maximum known alignment for TARGET.
4353 CLEARED is true if TARGET is known to have been zero'd.
4354 SIZE is the number of bytes of TARGET we are allowed to modify: this
4355 may not be the same as the size of EXP if we are assigning to a field
4356 which has been packed to exclude padding bits. */
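/* An informal example of the clearing strategy below: for a
   three-field struct initialized with { 1 }, the constructor names
   fewer fields than the type has, so the whole object is cleared
   first and only the explicitly given non-zero elements are stored;
   CLEARED is then passed down so nested constructors can skip their
   own clearing.  */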
4358 static void
4359 store_constructor (exp, target, align, cleared, size)
4360 tree exp;
4361 rtx target;
4362 unsigned int align;
4363 int cleared;
4364 HOST_WIDE_INT size;
4366 tree type = TREE_TYPE (exp);
4367 #ifdef WORD_REGISTER_OPERATIONS
4368 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4369 #endif
4371 /* We know our target cannot conflict, since safe_from_p has been called. */
4372 #if 0
4373 /* Don't try copying piece by piece into a hard register
4374 since that is vulnerable to being clobbered by EXP.
4375 Instead, construct in a pseudo register and then copy it all. */
4376 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4378 rtx temp = gen_reg_rtx (GET_MODE (target));
4379 store_constructor (exp, temp, align, cleared, size);
4380 emit_move_insn (target, temp);
4381 return;
4383 #endif
4385 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4386 || TREE_CODE (type) == QUAL_UNION_TYPE)
4388 register tree elt;
4390 /* Inform later passes that the whole union value is dead. */
4391 if ((TREE_CODE (type) == UNION_TYPE
4392 || TREE_CODE (type) == QUAL_UNION_TYPE)
4393 && ! cleared)
4395 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4397 /* If the constructor is empty, clear the union. */
4398 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4399 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4402 /* If we are building a static constructor into a register,
4403 set the initial value as zero so we can fold the value into
4404 a constant. But if more than one register is involved,
4405 this probably loses. */
4406 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4407 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4409 if (! cleared)
4410 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4412 cleared = 1;
4415 /* If the constructor has fewer fields than the structure
4416 or if we are initializing the structure to mostly zeros,
4417 clear the whole structure first. Don't do this if TARGET is a
4418 register whose mode size isn't equal to SIZE since clear_storage
4419 can't handle this case. */
4420 else if (size > 0
4421 && ((list_length (CONSTRUCTOR_ELTS (exp))
4422 != fields_length (type))
4423 || mostly_zeros_p (exp))
4424 && (GET_CODE (target) != REG
4425 || GET_MODE_SIZE (GET_MODE (target)) == size))
4427 if (! cleared)
4428 clear_storage (target, GEN_INT (size), align);
4430 cleared = 1;
4432 else if (! cleared)
4433 /* Inform later passes that the old value is dead. */
4434 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4436 /* Store each element of the constructor into
4437 the corresponding field of TARGET. */
4439 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4441 register tree field = TREE_PURPOSE (elt);
4442 #ifdef WORD_REGISTER_OPERATIONS
4443 tree value = TREE_VALUE (elt);
4444 #endif
4445 register enum machine_mode mode;
4446 HOST_WIDE_INT bitsize;
4447 HOST_WIDE_INT bitpos = 0;
4448 int unsignedp;
4449 tree offset;
4450 rtx to_rtx = target;
4452 /* Just ignore missing fields.
4453 We cleared the whole structure, above,
4454 if any fields are missing. */
4455 if (field == 0)
4456 continue;
4458 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4459 continue;
4461 if (host_integerp (DECL_SIZE (field), 1))
4462 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4463 else
4464 bitsize = -1;
4466 unsignedp = TREE_UNSIGNED (field);
4467 mode = DECL_MODE (field);
4468 if (DECL_BIT_FIELD (field))
4469 mode = VOIDmode;
4471 offset = DECL_FIELD_OFFSET (field);
4472 if (host_integerp (offset, 0)
4473 && host_integerp (bit_position (field), 0))
4475 bitpos = int_bit_position (field);
4476 offset = 0;
4478 else
4479 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4481 if (offset)
4483 rtx offset_rtx;
4485 if (contains_placeholder_p (offset))
4486 offset = build (WITH_RECORD_EXPR, sizetype,
4487 offset, make_tree (TREE_TYPE (exp), target));
4489 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4490 if (GET_CODE (to_rtx) != MEM)
4491 abort ();
4493 if (GET_MODE (offset_rtx) != ptr_mode)
4495 #ifdef POINTERS_EXTEND_UNSIGNED
4496 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4497 #else
4498 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4499 #endif
4502 to_rtx
4503 = change_address (to_rtx, VOIDmode,
4504 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4505 force_reg (ptr_mode,
4506 offset_rtx)));
4507 align = DECL_OFFSET_ALIGN (field);
4510 if (TREE_READONLY (field))
4512 if (GET_CODE (to_rtx) == MEM)
4513 to_rtx = copy_rtx (to_rtx);
4515 RTX_UNCHANGING_P (to_rtx) = 1;
4518 #ifdef WORD_REGISTER_OPERATIONS
4519 /* If this initializes a field that is smaller than a word, at the
4520 start of a word, try to widen it to a full word.
4521 This special case allows us to output C++ member function
4522 initializations in a form that the optimizers can understand. */
4523 if (GET_CODE (target) == REG
4524 && bitsize < BITS_PER_WORD
4525 && bitpos % BITS_PER_WORD == 0
4526 && GET_MODE_CLASS (mode) == MODE_INT
4527 && TREE_CODE (value) == INTEGER_CST
4528 && exp_size >= 0
4529 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4531 tree type = TREE_TYPE (value);
4532 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4534 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4535 value = convert (type, value);
4537 if (BYTES_BIG_ENDIAN)
4538 value
4539 = fold (build (LSHIFT_EXPR, type, value,
4540 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4541 bitsize = BITS_PER_WORD;
4542 mode = word_mode;
4544 #endif
4545 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4546 TREE_VALUE (elt), type, align, cleared,
4547 (DECL_NONADDRESSABLE_P (field)
4548 && GET_CODE (to_rtx) == MEM)
4549 ? MEM_ALIAS_SET (to_rtx)
4550 : get_alias_set (TREE_TYPE (field)));
4553 else if (TREE_CODE (type) == ARRAY_TYPE)
4555 register tree elt;
4556 register int i;
4557 int need_to_clear;
4558 tree domain = TYPE_DOMAIN (type);
4559 tree elttype = TREE_TYPE (type);
4560 int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0)
4561 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4562 HOST_WIDE_INT minelt;
4563 HOST_WIDE_INT maxelt;
4565 /* If we have constant bounds for the range of the type, get them. */
4566 if (const_bounds_p)
4568 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4569 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4572 /* If the constructor has fewer elements than the array,
4573 clear the whole array first. Similarly if this is a
4574 static constructor of a non-BLKmode object. */
4575 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4576 need_to_clear = 1;
4577 else
4579 HOST_WIDE_INT count = 0, zero_count = 0;
4580 need_to_clear = ! const_bounds_p;
4582 /* This loop is a more accurate version of the loop in
4583 mostly_zeros_p (it handles RANGE_EXPR in an index).
4584 It is also needed to check for missing elements. */
4585 for (elt = CONSTRUCTOR_ELTS (exp);
4586 elt != NULL_TREE && ! need_to_clear;
4587 elt = TREE_CHAIN (elt))
4589 tree index = TREE_PURPOSE (elt);
4590 HOST_WIDE_INT this_node_count;
4592 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4594 tree lo_index = TREE_OPERAND (index, 0);
4595 tree hi_index = TREE_OPERAND (index, 1);
4597 if (! host_integerp (lo_index, 1)
4598 || ! host_integerp (hi_index, 1))
4600 need_to_clear = 1;
4601 break;
4604 this_node_count = (tree_low_cst (hi_index, 1)
4605 - tree_low_cst (lo_index, 1) + 1);
4607 else
4608 this_node_count = 1;
4610 count += this_node_count;
4611 if (mostly_zeros_p (TREE_VALUE (elt)))
4612 zero_count += this_node_count;
4615 /* Clear the entire array first if there are any missing elements,
4616 or if the incidence of zero elements is >= 75%. */
4617 if (! need_to_clear
4618 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4619 need_to_clear = 1;
4622 if (need_to_clear && size > 0)
4624 if (! cleared)
4625 clear_storage (target, GEN_INT (size), align);
4626 cleared = 1;
4628 else
4629 /* Inform later passes that the old value is dead. */
4630 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4632 /* Store each element of the constructor into
4633 the corresponding element of TARGET, determined
4634 by counting the elements. */
4635 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4636 elt;
4637 elt = TREE_CHAIN (elt), i++)
4639 register enum machine_mode mode;
4640 HOST_WIDE_INT bitsize;
4641 HOST_WIDE_INT bitpos;
4642 int unsignedp;
4643 tree value = TREE_VALUE (elt);
4644 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4645 tree index = TREE_PURPOSE (elt);
4646 rtx xtarget = target;
4648 if (cleared && is_zeros_p (value))
4649 continue;
4651 unsignedp = TREE_UNSIGNED (elttype);
4652 mode = TYPE_MODE (elttype);
4653 if (mode == BLKmode)
4654 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4655 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4656 : -1);
4657 else
4658 bitsize = GET_MODE_BITSIZE (mode);
4660 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4662 tree lo_index = TREE_OPERAND (index, 0);
4663 tree hi_index = TREE_OPERAND (index, 1);
4664 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4665 struct nesting *loop;
4666 HOST_WIDE_INT lo, hi, count;
4667 tree position;
4669 /* If the range is constant and "small", unroll the loop. */
4670 if (const_bounds_p
4671 && host_integerp (lo_index, 0)
4672 && host_integerp (hi_index, 0)
4673 && (lo = tree_low_cst (lo_index, 0),
4674 hi = tree_low_cst (hi_index, 0),
4675 count = hi - lo + 1,
4676 (GET_CODE (target) != MEM
4677 || count <= 2
4678 || (host_integerp (TYPE_SIZE (elttype), 1)
4679 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4680 <= 40 * 8)))))
4682 lo -= minelt; hi -= minelt;
4683 for (; lo <= hi; lo++)
4685 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4686 store_constructor_field
4687 (target, bitsize, bitpos, mode, value, type, align,
4688 cleared,
4689 TYPE_NONALIASED_COMPONENT (type)
4690 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
4693 else
4695 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4696 loop_top = gen_label_rtx ();
4697 loop_end = gen_label_rtx ();
4699 unsignedp = TREE_UNSIGNED (domain);
4701 index = build_decl (VAR_DECL, NULL_TREE, domain);
4703 DECL_RTL (index) = index_r
4704 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4705 &unsignedp, 0));
4707 if (TREE_CODE (value) == SAVE_EXPR
4708 && SAVE_EXPR_RTL (value) == 0)
4710 /* Make sure value gets expanded once before the
4711 loop. */
4712 expand_expr (value, const0_rtx, VOIDmode, 0);
4713 emit_queue ();
4715 store_expr (lo_index, index_r, 0);
4716 loop = expand_start_loop (0);
4718 /* Assign value to element index. */
4719 position
4720 = convert (ssizetype,
4721 fold (build (MINUS_EXPR, TREE_TYPE (index),
4722 index, TYPE_MIN_VALUE (domain))));
4723 position = size_binop (MULT_EXPR, position,
4724 convert (ssizetype,
4725 TYPE_SIZE_UNIT (elttype)));
4727 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4728 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4729 xtarget = change_address (target, mode, addr);
4730 if (TREE_CODE (value) == CONSTRUCTOR)
4731 store_constructor (value, xtarget, align, cleared,
4732 bitsize / BITS_PER_UNIT);
4733 else
4734 store_expr (value, xtarget, 0);
4736 expand_exit_loop_if_false (loop,
4737 build (LT_EXPR, integer_type_node,
4738 index, hi_index));
4740 expand_increment (build (PREINCREMENT_EXPR,
4741 TREE_TYPE (index),
4742 index, integer_one_node), 0, 0);
4743 expand_end_loop ();
4744 emit_label (loop_end);
4747 else if ((index != 0 && ! host_integerp (index, 0))
4748 || ! host_integerp (TYPE_SIZE (elttype), 1))
4750 rtx pos_rtx, addr;
4751 tree position;
4753 if (index == 0)
4754 index = ssize_int (i);
4756 if (minelt)
4757 index = convert (ssizetype,
4758 fold (build (MINUS_EXPR, index,
4759 TYPE_MIN_VALUE (domain))));
4761 position = size_binop (MULT_EXPR, index,
4762 convert (ssizetype,
4763 TYPE_SIZE_UNIT (elttype)));
4764 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4765 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4766 xtarget = change_address (target, mode, addr);
4767 store_expr (value, xtarget, 0);
4769 else
4771 if (index != 0)
4772 bitpos = ((tree_low_cst (index, 0) - minelt)
4773 * tree_low_cst (TYPE_SIZE (elttype), 1));
4774 else
4775 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4777 store_constructor_field (target, bitsize, bitpos, mode, value,
4778 type, align, cleared,
4779 TYPE_NONALIASED_COMPONENT (type)
4780 ? MEM_ALIAS_SET (target) :
4781 get_alias_set (elttype));
4787 /* Set constructor assignments. */
4788 else if (TREE_CODE (type) == SET_TYPE)
4790 tree elt = CONSTRUCTOR_ELTS (exp);
4791 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4792 tree domain = TYPE_DOMAIN (type);
4793 tree domain_min, domain_max, bitlength;
4795 /* The default implementation strategy is to extract the constant
4796 parts of the constructor, use that to initialize the target,
4797 and then "or" in whatever non-constant ranges we need in addition.
4799 If a large set is all zero or all ones, it is
4800 probably better to set it using memset (if available) or bzero.
4801 Also, if a large set has just a single range, it may also be
4802 better to first clear the set (using
4803 bzero/memset), and then set the bits we want. */
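/* Roughly: a constant set such as [3..10] that fits in one set word
   is built below by or-ing bits into WORD and storing it with a
   single move, whereas a range with run-time bounds falls through to
   the "__setbits" library call (or to memset when the bounds are
   byte-aligned constants).  The bounds 3 and 10 are only an
   illustration.  */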
4805 /* Check for all zeros. */
4806 if (elt == NULL_TREE && size > 0)
4808 if (!cleared)
4809 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4810 return;
4813 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4814 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4815 bitlength = size_binop (PLUS_EXPR,
4816 size_diffop (domain_max, domain_min),
4817 ssize_int (1));
4819 nbits = tree_low_cst (bitlength, 1);
4821 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4822 are "complicated" (more than one range), initialize (the
4823 constant parts) by copying from a constant. */
4824 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4825 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4827 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4828 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4829 char *bit_buffer = (char *) alloca (nbits);
4830 HOST_WIDE_INT word = 0;
4831 unsigned int bit_pos = 0;
4832 unsigned int ibit = 0;
4833 unsigned int offset = 0; /* In bytes from beginning of set. */
4835 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4836 for (;;)
4838 if (bit_buffer[ibit])
4840 if (BYTES_BIG_ENDIAN)
4841 word |= (1 << (set_word_size - 1 - bit_pos));
4842 else
4843 word |= 1 << bit_pos;
4846 bit_pos++; ibit++;
4847 if (bit_pos >= set_word_size || ibit == nbits)
4849 if (word != 0 || ! cleared)
4851 rtx datum = GEN_INT (word);
4852 rtx to_rtx;
4854 /* The assumption here is that it is safe to use
4855 XEXP if the set is multi-word, but not if
4856 it's single-word. */
4857 if (GET_CODE (target) == MEM)
4859 to_rtx = plus_constant (XEXP (target, 0), offset);
4860 to_rtx = change_address (target, mode, to_rtx);
4862 else if (offset == 0)
4863 to_rtx = target;
4864 else
4865 abort ();
4866 emit_move_insn (to_rtx, datum);
4869 if (ibit == nbits)
4870 break;
4871 word = 0;
4872 bit_pos = 0;
4873 offset += set_word_size / BITS_PER_UNIT;
4877 else if (!cleared)
4878 /* Don't bother clearing storage if the set is all ones. */
4879 if (TREE_CHAIN (elt) != NULL_TREE
4880 || (TREE_PURPOSE (elt) == NULL_TREE
4881 ? nbits != 1
4882 : ( ! host_integerp (TREE_VALUE (elt), 0)
4883 || ! host_integerp (TREE_PURPOSE (elt), 0)
4884 || (tree_low_cst (TREE_VALUE (elt), 0)
4885 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4886 != (HOST_WIDE_INT) nbits))))
4887 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4889 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4891 /* Start of range of element or NULL. */
4892 tree startbit = TREE_PURPOSE (elt);
4893 /* End of range of element, or element value. */
4894 tree endbit = TREE_VALUE (elt);
4895 #ifdef TARGET_MEM_FUNCTIONS
4896 HOST_WIDE_INT startb, endb;
4897 #endif
4898 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4900 bitlength_rtx = expand_expr (bitlength,
4901 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4903 /* Handle non-range tuple element like [ expr ]. */
4904 if (startbit == NULL_TREE)
4906 startbit = save_expr (endbit);
4907 endbit = startbit;
4910 startbit = convert (sizetype, startbit);
4911 endbit = convert (sizetype, endbit);
4912 if (! integer_zerop (domain_min))
4914 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4915 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4917 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4918 EXPAND_CONST_ADDRESS);
4919 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4920 EXPAND_CONST_ADDRESS);
4922 if (REG_P (target))
4924 targetx = assign_stack_temp (GET_MODE (target),
4925 GET_MODE_SIZE (GET_MODE (target)),
4926 0);
4927 emit_move_insn (targetx, target);
4930 else if (GET_CODE (target) == MEM)
4931 targetx = target;
4932 else
4933 abort ();
4935 #ifdef TARGET_MEM_FUNCTIONS
4936 /* Optimization: If startbit and endbit are
4937 constants divisible by BITS_PER_UNIT,
4938 call memset instead. */
4939 if (TREE_CODE (startbit) == INTEGER_CST
4940 && TREE_CODE (endbit) == INTEGER_CST
4941 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4942 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4944 emit_library_call (memset_libfunc, LCT_NORMAL,
4945 VOIDmode, 3,
4946 plus_constant (XEXP (targetx, 0),
4947 startb / BITS_PER_UNIT),
4948 Pmode,
4949 constm1_rtx, TYPE_MODE (integer_type_node),
4950 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4951 TYPE_MODE (sizetype));
4953 else
4954 #endif
4955 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4956 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
4957 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
4958 startbit_rtx, TYPE_MODE (sizetype),
4959 endbit_rtx, TYPE_MODE (sizetype));
4961 if (REG_P (target))
4962 emit_move_insn (target, targetx);
4966 else
4967 abort ();
4970 /* Store the value of EXP (an expression tree)
4971 into a subfield of TARGET which has mode MODE and occupies
4972 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4973 If MODE is VOIDmode, it means that we are storing into a bit-field.
4975 If VALUE_MODE is VOIDmode, return nothing in particular.
4976 UNSIGNEDP is not used in this case.
4978 Otherwise, return an rtx for the value stored. This rtx
4979 has mode VALUE_MODE if that is convenient to do.
4980 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4982 ALIGN is the alignment that TARGET is known to have.
4983 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4985 ALIAS_SET is the alias set for the destination. This value will
4986 (in general) be different from that for TARGET, since TARGET is a
4987 reference to the containing structure. */
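/* A small illustration: a 5-bit field placed 3 bits into its byte
   arrives here with BITSIZE == 5, BITPOS == 3 and MODE == VOIDmode
   and is handled by store_bit_field below, while a naturally aligned
   word-sized member gets an ordinary memory reference and goes
   through store_expr instead.  The field layout is a made-up
   example.  */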
4989 static rtx
4990 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4991 unsignedp, align, total_size, alias_set)
4992 rtx target;
4993 HOST_WIDE_INT bitsize;
4994 HOST_WIDE_INT bitpos;
4995 enum machine_mode mode;
4996 tree exp;
4997 enum machine_mode value_mode;
4998 int unsignedp;
4999 unsigned int align;
5000 HOST_WIDE_INT total_size;
5001 int alias_set;
5003 HOST_WIDE_INT width_mask = 0;
5005 if (TREE_CODE (exp) == ERROR_MARK)
5006 return const0_rtx;
5008 if (bitsize < HOST_BITS_PER_WIDE_INT)
5009 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5011 /* If we are storing into an unaligned field of an aligned union that is
5012 in a register, we may have the mode of TARGET being an integer mode but
5013 MODE == BLKmode. In that case, get an aligned object whose size and
5014 alignment are the same as TARGET and store TARGET into it (we can avoid
5015 the store if the field being stored is the entire width of TARGET). Then
5016 call ourselves recursively to store the field into a BLKmode version of
5017 that object. Finally, load from the object into TARGET. This is not
5018 very efficient in general, but should only be slightly more expensive
5019 than the otherwise-required unaligned accesses. Perhaps this can be
5020 cleaned up later. */
5022 if (mode == BLKmode
5023 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5025 rtx object = assign_stack_temp (GET_MODE (target),
5026 GET_MODE_SIZE (GET_MODE (target)), 0);
5027 rtx blk_object = copy_rtx (object);
5029 MEM_SET_IN_STRUCT_P (object, 1);
5030 MEM_SET_IN_STRUCT_P (blk_object, 1);
5031 PUT_MODE (blk_object, BLKmode);
5033 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
5034 emit_move_insn (object, target);
5036 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5037 align, total_size, alias_set);
5039 /* Even though we aren't returning target, we need to
5040 give it the updated value. */
5041 emit_move_insn (target, object);
5043 return blk_object;
5046 if (GET_CODE (target) == CONCAT)
5048 /* We're storing into a struct containing a single __complex. */
5050 if (bitpos != 0)
5051 abort ();
5052 return store_expr (exp, target, 0);
5055 /* If the structure is in a register or if the component
5056 is a bit field, we cannot use addressing to access it.
5057 Use bit-field techniques or SUBREG to store in it. */
5059 if (mode == VOIDmode
5060 || (mode != BLKmode && ! direct_store[(int) mode]
5061 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5062 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5063 || GET_CODE (target) == REG
5064 || GET_CODE (target) == SUBREG
5065 /* If the field isn't aligned enough to store as an ordinary memref,
5066 store it as a bit field. */
5067 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5068 && (align < GET_MODE_ALIGNMENT (mode)
5069 || bitpos % GET_MODE_ALIGNMENT (mode)))
5070 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5071 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
5072 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5073 /* If the RHS and field are a constant size and the size of the
5074 RHS isn't the same size as the bitfield, we must use bitfield
5075 operations. */
5076 || (bitsize >= 0
5077 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5078 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5080 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5082 /* If BITSIZE is narrower than the size of the type of EXP
5083 we will be narrowing TEMP. Normally, what's wanted are the
5084 low-order bits. However, if EXP's type is a record and this is a
5085 big-endian machine, we want the upper BITSIZE bits. */
5086 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5087 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5088 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5089 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5090 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5091 - bitsize),
5092 temp, 1);
5094 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5095 MODE. */
5096 if (mode != VOIDmode && mode != BLKmode
5097 && mode != TYPE_MODE (TREE_TYPE (exp)))
5098 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5100 /* If the modes of TARGET and TEMP are both BLKmode, both
5101 must be in memory and BITPOS must be aligned on a byte
5102 boundary. If so, we simply do a block copy. */
5103 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5105 unsigned int exp_align = expr_align (exp);
5107 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5108 || bitpos % BITS_PER_UNIT != 0)
5109 abort ();
5111 target = change_address (target, VOIDmode,
5112 plus_constant (XEXP (target, 0),
5113 bitpos / BITS_PER_UNIT));
5115 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5116 align = MIN (exp_align, align);
5118 /* Find an alignment that is consistent with the bit position. */
5119 while ((bitpos % align) != 0)
5120 align >>= 1;
5122 emit_block_move (target, temp,
5123 bitsize == -1 ? expr_size (exp)
5124 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5125 / BITS_PER_UNIT),
5126 align);
5128 return value_mode == VOIDmode ? const0_rtx : target;
5131 /* Store the value in the bitfield. */
5132 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5133 if (value_mode != VOIDmode)
5135 /* The caller wants an rtx for the value. */
5136 /* If possible, avoid refetching from the bitfield itself. */
5137 if (width_mask != 0
5138 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5140 tree count;
5141 enum machine_mode tmode;
5143 if (unsignedp)
5144 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
5145 tmode = GET_MODE (temp);
5146 if (tmode == VOIDmode)
5147 tmode = value_mode;
5148 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5149 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5150 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5152 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5153 NULL_RTX, value_mode, 0, align,
5154 total_size);
5156 return const0_rtx;
5158 else
5160 rtx addr = XEXP (target, 0);
5161 rtx to_rtx;
5163 /* If a value is wanted, it must be the lhs;
5164 so make the address stable for multiple use. */
5166 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5167 && ! CONSTANT_ADDRESS_P (addr)
5168 /* A frame-pointer reference is already stable. */
5169 && ! (GET_CODE (addr) == PLUS
5170 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5171 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5172 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5173 addr = copy_to_reg (addr);
5175 /* Now build a reference to just the desired component. */
5177 to_rtx = copy_rtx (change_address (target, mode,
5178 plus_constant (addr,
5179 (bitpos
5180 / BITS_PER_UNIT))));
5181 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5182 MEM_ALIAS_SET (to_rtx) = alias_set;
5184 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5188 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5189 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
5190 ARRAY_REFs and find the ultimate containing object, which we return.
5192 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5193 bit position, and *PUNSIGNEDP to the signedness of the field.
5194 If the position of the field is variable, we store a tree
5195 giving the variable offset (in units) in *POFFSET.
5196 This offset is in addition to the bit position.
5197 If the position is not variable, we store 0 in *POFFSET.
5198 We set *PALIGNMENT to the alignment of the address that will be
5199 computed. This is the alignment of the thing we return if *POFFSET
5200 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5202 If any of the extraction expressions is volatile,
5203 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5205 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5206 is a mode that can be used to access the field. In that case, *PBITSIZE
5207 is redundant.
5209 If the field describes a variable-sized object, *PMODE is set to
5210 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5211 this case, but the address of the object can be found. */
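/* For example, for an access S.F where F is a bit-field of width 5
   placed 3 bits into S, this returns the tree for S with
   *PBITSIZE == 5, *PBITPOS == 3, *POFFSET == 0 and *PMODE == VOIDmode;
   an element A[I] with a variable index I instead comes back with
   *POFFSET set to the tree for I times the element size in bytes.
   S, F, A and I are placeholder names.  */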
5213 tree
5214 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5215 punsignedp, pvolatilep, palignment)
5216 tree exp;
5217 HOST_WIDE_INT *pbitsize;
5218 HOST_WIDE_INT *pbitpos;
5219 tree *poffset;
5220 enum machine_mode *pmode;
5221 int *punsignedp;
5222 int *pvolatilep;
5223 unsigned int *palignment;
5225 tree size_tree = 0;
5226 enum machine_mode mode = VOIDmode;
5227 tree offset = size_zero_node;
5228 tree bit_offset = bitsize_zero_node;
5229 unsigned int alignment = BIGGEST_ALIGNMENT;
5230 tree tem;
5232 /* First get the mode, signedness, and size. We do this from just the
5233 outermost expression. */
5234 if (TREE_CODE (exp) == COMPONENT_REF)
5236 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5237 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5238 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5240 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5242 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5244 size_tree = TREE_OPERAND (exp, 1);
5245 *punsignedp = TREE_UNSIGNED (exp);
5247 else
5249 mode = TYPE_MODE (TREE_TYPE (exp));
5250 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5252 if (mode == BLKmode)
5253 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5254 else
5255 *pbitsize = GET_MODE_BITSIZE (mode);
5258 if (size_tree != 0)
5260 if (! host_integerp (size_tree, 1))
5261 mode = BLKmode, *pbitsize = -1;
5262 else
5263 *pbitsize = tree_low_cst (size_tree, 1);
5266 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5267 and find the ultimate containing object. */
5268 while (1)
5270 if (TREE_CODE (exp) == BIT_FIELD_REF)
5271 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5272 else if (TREE_CODE (exp) == COMPONENT_REF)
5274 tree field = TREE_OPERAND (exp, 1);
5275 tree this_offset = DECL_FIELD_OFFSET (field);
5277 /* If this field hasn't been filled in yet, don't go
5278 past it. This should only happen when folding expressions
5279 made during type construction. */
5280 if (this_offset == 0)
5281 break;
5282 else if (! TREE_CONSTANT (this_offset)
5283 && contains_placeholder_p (this_offset))
5284 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5286 offset = size_binop (PLUS_EXPR, offset, this_offset);
5287 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5288 DECL_FIELD_BIT_OFFSET (field));
5290 if (! host_integerp (offset, 0))
5291 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5294 else if (TREE_CODE (exp) == ARRAY_REF)
5296 tree index = TREE_OPERAND (exp, 1);
5297 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5298 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5299 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (exp));
5301 /* We assume all arrays have sizes that are a multiple of a byte.
5302 First subtract the lower bound, if any, in the type of the
5303 index, then convert to sizetype and multiply by the size of the
5304 array element. */
5305 if (low_bound != 0 && ! integer_zerop (low_bound))
5306 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5307 index, low_bound));
5309 /* If the index has a self-referential type, pass it to a
5310 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5311 component to one. */
5312 if (! TREE_CONSTANT (index)
5313 && contains_placeholder_p (index))
5314 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5315 if (! TREE_CONSTANT (unit_size)
5316 && contains_placeholder_p (unit_size))
5317 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size,
5318 TREE_OPERAND (exp, 0));
5320 offset = size_binop (PLUS_EXPR, offset,
5321 size_binop (MULT_EXPR,
5322 convert (sizetype, index),
5323 unit_size));
5326 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5327 && ! ((TREE_CODE (exp) == NOP_EXPR
5328 || TREE_CODE (exp) == CONVERT_EXPR)
5329 && (TYPE_MODE (TREE_TYPE (exp))
5330 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5331 break;
5333 /* If any reference in the chain is volatile, the effect is volatile. */
5334 if (TREE_THIS_VOLATILE (exp))
5335 *pvolatilep = 1;
5337 /* If the offset is non-constant already, then we can't assume any
5338 alignment more than the alignment here. */
5339 if (! TREE_CONSTANT (offset))
5340 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5342 exp = TREE_OPERAND (exp, 0);
5345 if (DECL_P (exp))
5346 alignment = MIN (alignment, DECL_ALIGN (exp));
5347 else if (TREE_TYPE (exp) != 0)
5348 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5350 /* If OFFSET is constant, see if we can return the whole thing as a
5351 constant bit position. Otherwise, split it up. */
5352 if (host_integerp (offset, 0)
5353 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5354 bitsize_unit_node))
5355 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5356 && host_integerp (tem, 0))
5357 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5358 else
5359 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5361 *pmode = mode;
5362 *palignment = alignment;
5363 return exp;
5366 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5368 static enum memory_use_mode
5369 get_memory_usage_from_modifier (modifier)
5370 enum expand_modifier modifier;
5372 switch (modifier)
5374 case EXPAND_NORMAL:
5375 case EXPAND_SUM:
5376 return MEMORY_USE_RO;
5377 break;
5378 case EXPAND_MEMORY_USE_WO:
5379 return MEMORY_USE_WO;
5380 break;
5381 case EXPAND_MEMORY_USE_RW:
5382 return MEMORY_USE_RW;
5383 break;
5384 case EXPAND_MEMORY_USE_DONT:
5385 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5386 MEMORY_USE_DONT, because they are modifiers to a call of
5387 expand_expr in the ADDR_EXPR case of expand_expr. */
5388 case EXPAND_CONST_ADDRESS:
5389 case EXPAND_INITIALIZER:
5390 return MEMORY_USE_DONT;
5391 case EXPAND_MEMORY_USE_BAD:
5392 default:
5393 abort ();
5397 /* Given an rtx VALUE that may contain additions and multiplications,
5398 return an equivalent value that just refers to a register or memory.
5399 This is done by generating instructions to perform the arithmetic
5400 and returning a pseudo-register containing the value.
5402 The returned value may be a REG, SUBREG, MEM or constant. */
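/* A minimal example of what this does: handed something like
       (plus:SI (reg:SI 100) (const_int 4))
   it emits the addition via expand_binop below and returns an rtx
   (typically a pseudo) holding the sum, so the caller can use the
   result wherever a plain operand is required.  Register number 100
   is arbitrary.  */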
5405 force_operand (value, target)
5406 rtx value, target;
5408 register optab binoptab = 0;
5409 /* Use a temporary to force order of execution of calls to
5410 `force_operand'. */
5411 rtx tmp;
5412 register rtx op2;
5413 /* Use subtarget as the target for operand 0 of a binary operation. */
5414 register rtx subtarget = get_subtarget (target);
5416 /* Check for a PIC address load. */
5417 if (flag_pic
5418 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5419 && XEXP (value, 0) == pic_offset_table_rtx
5420 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5421 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5422 || GET_CODE (XEXP (value, 1)) == CONST))
5424 if (!subtarget)
5425 subtarget = gen_reg_rtx (GET_MODE (value));
5426 emit_move_insn (subtarget, value);
5427 return subtarget;
5430 if (GET_CODE (value) == PLUS)
5431 binoptab = add_optab;
5432 else if (GET_CODE (value) == MINUS)
5433 binoptab = sub_optab;
5434 else if (GET_CODE (value) == MULT)
5436 op2 = XEXP (value, 1);
5437 if (!CONSTANT_P (op2)
5438 && !(GET_CODE (op2) == REG && op2 != subtarget))
5439 subtarget = 0;
5440 tmp = force_operand (XEXP (value, 0), subtarget);
5441 return expand_mult (GET_MODE (value), tmp,
5442 force_operand (op2, NULL_RTX),
5443 target, 1);
5446 if (binoptab)
5448 op2 = XEXP (value, 1);
5449 if (!CONSTANT_P (op2)
5450 && !(GET_CODE (op2) == REG && op2 != subtarget))
5451 subtarget = 0;
5452 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5454 binoptab = add_optab;
5455 op2 = negate_rtx (GET_MODE (value), op2);
5458 /* Check for an addition with OP2 a constant integer and our first
5459 operand a PLUS of a virtual register and something else. In that
5460 case, we want to emit the sum of the virtual register and the
5461 constant first and then add the other value. This allows virtual
5462 register instantiation to simply modify the constant rather than
5463 creating another one around this addition. */
5464 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5465 && GET_CODE (XEXP (value, 0)) == PLUS
5466 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5467 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5468 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5470 rtx temp = expand_binop (GET_MODE (value), binoptab,
5471 XEXP (XEXP (value, 0), 0), op2,
5472 subtarget, 0, OPTAB_LIB_WIDEN);
5473 return expand_binop (GET_MODE (value), binoptab, temp,
5474 force_operand (XEXP (XEXP (value, 0), 1), 0),
5475 target, 0, OPTAB_LIB_WIDEN);
5478 tmp = force_operand (XEXP (value, 0), subtarget);
5479 return expand_binop (GET_MODE (value), binoptab, tmp,
5480 force_operand (op2, NULL_RTX),
5481 target, 0, OPTAB_LIB_WIDEN);
5482 /* We give UNSIGNEDP = 0 to expand_binop
5483 because the only operations we are expanding here are signed ones. */
5485 return value;
5488 /* Subroutine of expand_expr:
5489 save the non-copied parts (LIST) of an expr (LHS), and return a list
5490 which can restore these values to their previous values,
5491 should something modify their storage. */
5493 static tree
5494 save_noncopied_parts (lhs, list)
5495 tree lhs;
5496 tree list;
5498 tree tail;
5499 tree parts = 0;
5501 for (tail = list; tail; tail = TREE_CHAIN (tail))
5502 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5503 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5504 else
5506 tree part = TREE_VALUE (tail);
5507 tree part_type = TREE_TYPE (part);
5508 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5509 rtx target = assign_temp (part_type, 0, 1, 1);
5510 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5511 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5512 parts = tree_cons (to_be_saved,
5513 build (RTL_EXPR, part_type, NULL_TREE,
5514 (tree) target),
5515 parts);
5516 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5518 return parts;
5521 /* Subroutine of expand_expr:
5522 record the non-copied parts (LIST) of an expr (LHS), and return a list
5523 which specifies the initial values of these parts. */
5525 static tree
5526 init_noncopied_parts (lhs, list)
5527 tree lhs;
5528 tree list;
5530 tree tail;
5531 tree parts = 0;
5533 for (tail = list; tail; tail = TREE_CHAIN (tail))
5534 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5535 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5536 else if (TREE_PURPOSE (tail))
5538 tree part = TREE_VALUE (tail);
5539 tree part_type = TREE_TYPE (part);
5540 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5541 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5543 return parts;
5546 /* Subroutine of expand_expr: return nonzero iff there is no way that
5547 EXP can reference X, which is being modified. TOP_P is nonzero if this
5548 call is going to be used to determine whether we need a temporary
5549 for EXP, as opposed to a recursive call to this function.
5551 It is always safe for this routine to return zero since it merely
5552 searches for optimization opportunities. */
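/* Informally: if X is a MEM and EXP contains a CALL_EXPR, the call is
   assumed to clobber all of memory (see the CALL_EXPR case below), so
   the answer is 0 and the caller must use a temporary; returning 0 is
   always the conservative, safe answer here.  */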
5555 safe_from_p (x, exp, top_p)
5556 rtx x;
5557 tree exp;
5558 int top_p;
5560 rtx exp_rtl = 0;
5561 int i, nops;
5562 static int save_expr_count;
5563 static int save_expr_size = 0;
5564 static tree *save_expr_rewritten;
5565 static tree save_expr_trees[256];
5567 if (x == 0
5568 /* If EXP has varying size, we MUST use a target since we currently
5569 have no way of allocating temporaries of variable size
5570 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5571 So we assume here that something at a higher level has prevented a
5572 clash. This is somewhat bogus, but the best we can do. Only
5573 do this when X is BLKmode and when we are at the top level. */
5574 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5575 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5576 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5577 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5578 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5579 != INTEGER_CST)
5580 && GET_MODE (x) == BLKmode))
5581 return 1;
5583 if (top_p && save_expr_size == 0)
5585 int rtn;
5587 save_expr_count = 0;
5588 save_expr_size = ARRAY_SIZE (save_expr_trees);
5589 save_expr_rewritten = &save_expr_trees[0];
5591 rtn = safe_from_p (x, exp, 1);
5593 for (i = 0; i < save_expr_count; ++i)
5595 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5596 abort ();
5597 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5600 save_expr_size = 0;
5602 return rtn;
5605 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5606 find the underlying pseudo. */
5607 if (GET_CODE (x) == SUBREG)
5609 x = SUBREG_REG (x);
5610 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5611 return 0;
5614 /* If X is a location in the outgoing argument area, it is always safe. */
5615 if (GET_CODE (x) == MEM
5616 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5617 || (GET_CODE (XEXP (x, 0)) == PLUS
5618 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5619 return 1;
5621 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5623 case 'd':
5624 exp_rtl = DECL_RTL (exp);
5625 break;
5627 case 'c':
5628 return 1;
5630 case 'x':
5631 if (TREE_CODE (exp) == TREE_LIST)
5632 return ((TREE_VALUE (exp) == 0
5633 || safe_from_p (x, TREE_VALUE (exp), 0))
5634 && (TREE_CHAIN (exp) == 0
5635 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5636 else if (TREE_CODE (exp) == ERROR_MARK)
5637 return 1; /* An already-visited SAVE_EXPR? */
5638 else
5639 return 0;
5641 case '1':
5642 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5644 case '2':
5645 case '<':
5646 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5647 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5649 case 'e':
5650 case 'r':
5651 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5652 the expression. If it is set, we conflict iff we are that rtx or
5653 both are in memory. Otherwise, we check all operands of the
5654 expression recursively. */
5656 switch (TREE_CODE (exp))
5658 case ADDR_EXPR:
5659 return (staticp (TREE_OPERAND (exp, 0))
5660 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5661 || TREE_STATIC (exp));
5663 case INDIRECT_REF:
5664 if (GET_CODE (x) == MEM)
5665 return 0;
5666 break;
5668 case CALL_EXPR:
5669 /* Assume that the call will clobber all hard registers and
5670 all of memory. */
5671 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5672 || GET_CODE (x) == MEM)
5673 return 0;
5674 break;
5676 case RTL_EXPR:
5677 /* If a sequence exists, we would have to scan every instruction
5678 in the sequence to see if it was safe. This is probably not
5679 worthwhile. */
5680 if (RTL_EXPR_SEQUENCE (exp))
5681 return 0;
5683 exp_rtl = RTL_EXPR_RTL (exp);
5684 break;
5686 case WITH_CLEANUP_EXPR:
5687 exp_rtl = RTL_EXPR_RTL (exp);
5688 break;
5690 case CLEANUP_POINT_EXPR:
5691 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5693 case SAVE_EXPR:
5694 exp_rtl = SAVE_EXPR_RTL (exp);
5695 if (exp_rtl)
5696 break;
5698 /* This SAVE_EXPR might appear many times in the top-level
5699 safe_from_p() expression, and if it has a complex
5700 subexpression, examining it multiple times could result
5701 in a combinatorial explosion. E.g. on an Alpha
5702 running at least 200MHz, a Fortran test case compiled with
5703 optimization took about 28 minutes to compile -- even though
5704 it was only a few lines long, and the complicated line causing
5705 so much time to be spent in the earlier version of safe_from_p()
5706 had only 293 or so unique nodes.
5708 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5709 where it is so we can turn it back in the top-level safe_from_p()
5710 when we're done. */
5712 /* For now, don't bother re-sizing the array. */
5713 if (save_expr_count >= save_expr_size)
5714 return 0;
5715 save_expr_rewritten[save_expr_count++] = exp;
5717 nops = TREE_CODE_LENGTH (SAVE_EXPR);
5718 for (i = 0; i < nops; i++)
5720 tree operand = TREE_OPERAND (exp, i);
5721 if (operand == NULL_TREE)
5722 continue;
5723 TREE_SET_CODE (exp, ERROR_MARK);
5724 if (!safe_from_p (x, operand, 0))
5725 return 0;
5726 TREE_SET_CODE (exp, SAVE_EXPR);
5728 TREE_SET_CODE (exp, ERROR_MARK);
5729 return 1;
5731 case BIND_EXPR:
5732 /* The only operand we look at is operand 1. The rest aren't
5733 part of the expression. */
5734 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5736 case METHOD_CALL_EXPR:
5738 /* This takes an rtx argument, but shouldn't appear here. */
5738 abort ();
5740 default:
5741 break;
5744 /* If we have an rtx, we do not need to scan our operands. */
5745 if (exp_rtl)
5746 break;
5748 nops = first_rtl_op (TREE_CODE (exp));
5749 for (i = 0; i < nops; i++)
5750 if (TREE_OPERAND (exp, i) != 0
5751 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5752 return 0;
5754 /* If this is a language-specific tree code, it may require
5755 special handling. */
5756 if (TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE
5757 && lang_safe_from_p
5758 && !(*lang_safe_from_p) (x, exp))
5759 return 0;
5762 /* If we have an rtl, find any enclosed object. Then see if we conflict
5763 with it. */
5764 if (exp_rtl)
5766 if (GET_CODE (exp_rtl) == SUBREG)
5768 exp_rtl = SUBREG_REG (exp_rtl);
5769 if (GET_CODE (exp_rtl) == REG
5770 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5771 return 0;
5774 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5775 are memory and EXP is not readonly. */
5776 return ! (rtx_equal_p (x, exp_rtl)
5777 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5778 && ! TREE_READONLY (exp)));
5781 /* If we reach here, it is safe. */
5782 return 1;
5785 /* Subroutine of expand_expr: return nonzero iff EXP is an
5786 expression whose type is statically determinable. */
5788 static int
5789 fixed_type_p (exp)
5790 tree exp;
5792 if (TREE_CODE (exp) == PARM_DECL
5793 || TREE_CODE (exp) == VAR_DECL
5794 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5795 || TREE_CODE (exp) == COMPONENT_REF
5796 || TREE_CODE (exp) == ARRAY_REF)
5797 return 1;
5798 return 0;
5801 /* Subroutine of expand_expr: return rtx if EXP is a
5802 variable or parameter; else return 0. */
5804 static rtx
5805 var_rtx (exp)
5806 tree exp;
5808 STRIP_NOPS (exp);
5809 switch (TREE_CODE (exp))
5811 case PARM_DECL:
5812 case VAR_DECL:
5813 return DECL_RTL (exp);
5814 default:
5815 return 0;
5819 #ifdef MAX_INTEGER_COMPUTATION_MODE
5820 void
5821 check_max_integer_computation_mode (exp)
5822 tree exp;
5824 enum tree_code code;
5825 enum machine_mode mode;
5827 /* Strip any NOPs that don't change the mode. */
5828 STRIP_NOPS (exp);
5829 code = TREE_CODE (exp);
5831 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5832 if (code == NOP_EXPR
5833 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5834 return;
5836 /* First check the type of the overall operation. We need only look at
5837 unary, binary and relational operations. */
5838 if (TREE_CODE_CLASS (code) == '1'
5839 || TREE_CODE_CLASS (code) == '2'
5840 || TREE_CODE_CLASS (code) == '<')
5842 mode = TYPE_MODE (TREE_TYPE (exp));
5843 if (GET_MODE_CLASS (mode) == MODE_INT
5844 && mode > MAX_INTEGER_COMPUTATION_MODE)
5845 fatal ("unsupported wide integer operation");
5848 /* Check operand of a unary op. */
5849 if (TREE_CODE_CLASS (code) == '1')
5851 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5852 if (GET_MODE_CLASS (mode) == MODE_INT
5853 && mode > MAX_INTEGER_COMPUTATION_MODE)
5854 fatal ("unsupported wide integer operation");
5857 /* Check operands of a binary/comparison op. */
5858 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5860 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5861 if (GET_MODE_CLASS (mode) == MODE_INT
5862 && mode > MAX_INTEGER_COMPUTATION_MODE)
5863 fatal ("unsupported wide integer operation");
5865 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5866 if (GET_MODE_CLASS (mode) == MODE_INT
5867 && mode > MAX_INTEGER_COMPUTATION_MODE)
5868 fatal ("unsupported wide integer operation");
5871 #endif
5873 /* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5874 has any readonly fields. If any of the fields have types that
5875 contain readonly fields, return true as well. */
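/* Rough sketch with hypothetical declarations (not from this file):
     struct inner { const int x; };
     struct outer { struct inner i; };
   this predicate returns nonzero for both types, for the second one
   because one of its fields has a type that itself contains a readonly
   field.  */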
5877 static int
5878 readonly_fields_p (type)
5879 tree type;
5881 tree field;
5883 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
5884 if (TREE_CODE (field) == FIELD_DECL
5885 && (TREE_READONLY (field)
5886 || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
5887 && readonly_fields_p (TREE_TYPE (field)))))
5888 return 1;
5890 return 0;
5893 /* expand_expr: generate code for computing expression EXP.
5894 An rtx for the computed value is returned. The value is never null.
5895 In the case of a void EXP, const0_rtx is returned.
5897 The value may be stored in TARGET if TARGET is nonzero.
5898 TARGET is just a suggestion; callers must assume that
5899 the rtx returned may not be the same as TARGET.
5901 If TARGET is CONST0_RTX, it means that the value will be ignored.
5903 If TMODE is not VOIDmode, it suggests generating the
5904 result in mode TMODE. But this is done only when convenient.
5905 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5906 TMODE is just a suggestion; callers must assume that
5907 the rtx returned may not have mode TMODE.
5909 Note that TARGET may have neither TMODE nor MODE. In that case, it
5910 probably will not be used.
5912 If MODIFIER is EXPAND_SUM then when EXP is an addition
5913 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5914 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5915 products as above, or REG or MEM, or constant.
5916 Ordinarily in such cases we would output mul or add instructions
5917 and then return a pseudo reg containing the sum.
5919 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5920 it also marks a label as absolutely required (it can't be dead).
5921 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5922 This is used for outputting expressions used in initializers.
5924 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5925 with a constant address even if that address is not normally legitimate.
5926 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
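/* For illustration, with a hypothetical source expression: under
   EXPAND_SUM an address computation such as &arr[i] may come back as a
   nest like (plus (mult (reg) (const_int 4)) (symbol_ref "arr")) rather
   than being forced into a single pseudo register.  */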
5928 rtx
5929 expand_expr (exp, target, tmode, modifier)
5930 register tree exp;
5931 rtx target;
5932 enum machine_mode tmode;
5933 enum expand_modifier modifier;
5935 register rtx op0, op1, temp;
5936 tree type = TREE_TYPE (exp);
5937 int unsignedp = TREE_UNSIGNED (type);
5938 register enum machine_mode mode;
5939 register enum tree_code code = TREE_CODE (exp);
5940 optab this_optab;
5941 rtx subtarget, original_target;
5942 int ignore;
5943 tree context;
5944 /* Used by check-memory-usage to make modifier read only. */
5945 enum expand_modifier ro_modifier;
5947 /* Handle ERROR_MARK before anybody tries to access its type. */
5948 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
5950 op0 = CONST0_RTX (tmode);
5951 if (op0 != 0)
5952 return op0;
5953 return const0_rtx;
5956 mode = TYPE_MODE (type);
5957 /* Use subtarget as the target for operand 0 of a binary operation. */
5958 subtarget = get_subtarget (target);
5959 original_target = target;
5960 ignore = (target == const0_rtx
5961 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5962 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5963 || code == COND_EXPR)
5964 && TREE_CODE (type) == VOID_TYPE));
5966 /* Make a read-only version of the modifier. */
5967 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5968 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5969 ro_modifier = modifier;
5970 else
5971 ro_modifier = EXPAND_NORMAL;
5973 /* If we are going to ignore this result, we need only do something
5974 if there is a side-effect somewhere in the expression. If there
5975 is, short-circuit the most common cases here. Note that we must
5976 not call expand_expr with anything but const0_rtx in case this
5977 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5979 if (ignore)
5981 if (! TREE_SIDE_EFFECTS (exp))
5982 return const0_rtx;
5984 /* Ensure we reference a volatile object even if value is ignored, but
5985 don't do this if all we are doing is taking its address. */
5986 if (TREE_THIS_VOLATILE (exp)
5987 && TREE_CODE (exp) != FUNCTION_DECL
5988 && mode != VOIDmode && mode != BLKmode
5989 && modifier != EXPAND_CONST_ADDRESS)
5991 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5992 if (GET_CODE (temp) == MEM)
5993 temp = copy_to_reg (temp);
5994 return const0_rtx;
5997 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5998 || code == INDIRECT_REF || code == BUFFER_REF)
5999 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6000 VOIDmode, ro_modifier);
6001 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6002 || code == ARRAY_REF)
6004 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
6005 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
6006 return const0_rtx;
6008 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6009 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6010 /* If the second operand has no side effects, just evaluate
6011 the first. */
6012 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6013 VOIDmode, ro_modifier);
6014 else if (code == BIT_FIELD_REF)
6016 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
6017 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
6018 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
6019 return const0_rtx;
6022 target = 0;
6025 #ifdef MAX_INTEGER_COMPUTATION_MODE
6026 /* Only check stuff here if the mode we want is different from the mode
6027 of the expression; if it's the same, check_max_integer_computation_mode
6028 will handle it. Do we really need to check this stuff at all? */
6030 if (target
6031 && GET_MODE (target) != mode
6032 && TREE_CODE (exp) != INTEGER_CST
6033 && TREE_CODE (exp) != PARM_DECL
6034 && TREE_CODE (exp) != ARRAY_REF
6035 && TREE_CODE (exp) != COMPONENT_REF
6036 && TREE_CODE (exp) != BIT_FIELD_REF
6037 && TREE_CODE (exp) != INDIRECT_REF
6038 && TREE_CODE (exp) != CALL_EXPR
6039 && TREE_CODE (exp) != VAR_DECL
6040 && TREE_CODE (exp) != RTL_EXPR)
6042 enum machine_mode mode = GET_MODE (target);
6044 if (GET_MODE_CLASS (mode) == MODE_INT
6045 && mode > MAX_INTEGER_COMPUTATION_MODE)
6046 fatal ("unsupported wide integer operation");
6049 if (tmode != mode
6050 && TREE_CODE (exp) != INTEGER_CST
6051 && TREE_CODE (exp) != PARM_DECL
6052 && TREE_CODE (exp) != ARRAY_REF
6053 && TREE_CODE (exp) != COMPONENT_REF
6054 && TREE_CODE (exp) != BIT_FIELD_REF
6055 && TREE_CODE (exp) != INDIRECT_REF
6056 && TREE_CODE (exp) != VAR_DECL
6057 && TREE_CODE (exp) != CALL_EXPR
6058 && TREE_CODE (exp) != RTL_EXPR
6059 && GET_MODE_CLASS (tmode) == MODE_INT
6060 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6061 fatal ("unsupported wide integer operation");
6063 check_max_integer_computation_mode (exp);
6064 #endif
6066 /* If we will do cse, generate all results into pseudo registers
6067 since 1) that allows cse to find more things
6068 and 2) otherwise cse could produce an insn the machine
6069 cannot support. */
6071 if (! cse_not_expected && mode != BLKmode && target
6072 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6073 target = subtarget;
6075 switch (code)
6077 case LABEL_DECL:
6079 tree function = decl_function_context (exp);
6080 /* Handle using a label in a containing function. */
6081 if (function != current_function_decl
6082 && function != inline_function_decl && function != 0)
6084 struct function *p = find_function_data (function);
6085 p->expr->x_forced_labels
6086 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6087 p->expr->x_forced_labels);
6089 else
6091 if (modifier == EXPAND_INITIALIZER)
6092 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6093 label_rtx (exp),
6094 forced_labels);
6097 temp = gen_rtx_MEM (FUNCTION_MODE,
6098 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6099 if (function != current_function_decl
6100 && function != inline_function_decl && function != 0)
6101 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6102 return temp;
6105 case PARM_DECL:
6106 if (DECL_RTL (exp) == 0)
6108 error_with_decl (exp, "prior parameter's size depends on `%s'");
6109 return CONST0_RTX (mode);
6112 /* ... fall through ... */
6114 case VAR_DECL:
6115 /* If a static var's type was incomplete when the decl was written,
6116 but the type is complete now, lay out the decl now. */
6117 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6118 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6120 layout_decl (exp, 0);
6121 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6124 /* Although static-storage variables start off initialized, according to
6125 ANSI C, a memcpy could overwrite them with uninitialized values. So
6126 we check them too. This also lets us check for read-only variables
6127 accessed via a non-const declaration, in case it won't be detected
6128 any other way (e.g., in an embedded system or OS kernel without
6129 memory protection).
6131 Aggregates are not checked here; they're handled elsewhere. */
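/* For illustration: when checking is enabled, the library call emitted
   below hands the checker runtime the object's address, its size in
   bytes, and the kind of access derived from MODIFIER (read, write or
   read-write), so invalid accesses can be reported at run time.  */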
6132 if (cfun && current_function_check_memory_usage
6133 && code == VAR_DECL
6134 && GET_CODE (DECL_RTL (exp)) == MEM
6135 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6137 enum memory_use_mode memory_usage;
6138 memory_usage = get_memory_usage_from_modifier (modifier);
6140 in_check_memory_usage = 1;
6141 if (memory_usage != MEMORY_USE_DONT)
6142 emit_library_call (chkr_check_addr_libfunc,
6143 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6144 XEXP (DECL_RTL (exp), 0), Pmode,
6145 GEN_INT (int_size_in_bytes (type)),
6146 TYPE_MODE (sizetype),
6147 GEN_INT (memory_usage),
6148 TYPE_MODE (integer_type_node));
6149 in_check_memory_usage = 0;
6152 /* ... fall through ... */
6154 case FUNCTION_DECL:
6155 case RESULT_DECL:
6156 if (DECL_RTL (exp) == 0)
6157 abort ();
6159 /* Ensure variable marked as used even if it doesn't go through
6160 a parser. If it hasn't been used yet, write out an external
6161 definition. */
6162 if (! TREE_USED (exp))
6164 assemble_external (exp);
6165 TREE_USED (exp) = 1;
6168 /* Show we haven't gotten RTL for this yet. */
6169 temp = 0;
6171 /* Handle variables inherited from containing functions. */
6172 context = decl_function_context (exp);
6174 /* We treat inline_function_decl as an alias for the current function
6175 because that is the inline function whose vars, types, etc.
6176 are being merged into the current function.
6177 See expand_inline_function. */
6179 if (context != 0 && context != current_function_decl
6180 && context != inline_function_decl
6181 /* If var is static, we don't need a static chain to access it. */
6182 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6183 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6185 rtx addr;
6187 /* Mark as non-local and addressable. */
6188 DECL_NONLOCAL (exp) = 1;
6189 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6190 abort ();
6191 mark_addressable (exp);
6192 if (GET_CODE (DECL_RTL (exp)) != MEM)
6193 abort ();
6194 addr = XEXP (DECL_RTL (exp), 0);
6195 if (GET_CODE (addr) == MEM)
6196 addr = change_address (addr, Pmode,
6197 fix_lexical_addr (XEXP (addr, 0), exp));
6198 else
6199 addr = fix_lexical_addr (addr, exp);
6201 temp = change_address (DECL_RTL (exp), mode, addr);
6204 /* This is the case of an array whose size is to be determined
6205 from its initializer, while the initializer is still being parsed.
6206 See expand_decl. */
6208 else if (GET_CODE (DECL_RTL (exp)) == MEM
6209 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6210 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
6211 XEXP (DECL_RTL (exp), 0));
6213 /* If DECL_RTL is memory, we are in the normal case; if either
6214 the address is not valid or it is not a register and -fforce-addr
6215 is specified, get the address into a register. */
6217 else if (GET_CODE (DECL_RTL (exp)) == MEM
6218 && modifier != EXPAND_CONST_ADDRESS
6219 && modifier != EXPAND_SUM
6220 && modifier != EXPAND_INITIALIZER
6221 && (! memory_address_p (DECL_MODE (exp),
6222 XEXP (DECL_RTL (exp), 0))
6223 || (flag_force_addr
6224 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6225 temp = change_address (DECL_RTL (exp), VOIDmode,
6226 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6228 /* If we got something, return it. But first, set the alignment
6229 if the address is a register. */
6230 if (temp != 0)
6232 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6233 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6235 return temp;
6238 /* If the mode of DECL_RTL does not match that of the decl, it
6239 must be a promoted value. We return a SUBREG of the wanted mode,
6240 but mark it so that we know that it was already extended. */
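/* For illustration: on a target that promotes, say, a QImode variable
   into an SImode pseudo, the SUBREG built here carries
   SUBREG_PROMOTED_VAR_P so later code knows the value has already been
   sign- or zero-extended and need not be extended again.  */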
6242 if (GET_CODE (DECL_RTL (exp)) == REG
6243 && GET_MODE (DECL_RTL (exp)) != mode)
6245 /* Get the signedness used for this variable. Ensure we get the
6246 same mode we got when the variable was declared. */
6247 if (GET_MODE (DECL_RTL (exp))
6248 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6249 abort ();
6251 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
6252 SUBREG_PROMOTED_VAR_P (temp) = 1;
6253 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6254 return temp;
6257 return DECL_RTL (exp);
6259 case INTEGER_CST:
6260 return immed_double_const (TREE_INT_CST_LOW (exp),
6261 TREE_INT_CST_HIGH (exp), mode);
6263 case CONST_DECL:
6264 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6265 EXPAND_MEMORY_USE_BAD);
6267 case REAL_CST:
6268 /* If optimized, generate immediate CONST_DOUBLE
6269 which will be turned into memory by reload if necessary.
6271 We used to force a register so that loop.c could see it. But
6272 this does not allow gen_* patterns to perform optimizations with
6273 the constants. It also produces two insns in cases like "x = 1.0;".
6274 On most machines, floating-point constants are not permitted in
6275 many insns, so we'd end up copying it to a register in any case.
6277 Now, we do the copying in expand_binop, if appropriate. */
6278 return immed_real_const (exp);
6280 case COMPLEX_CST:
6281 case STRING_CST:
6282 if (! TREE_CST_RTL (exp))
6283 output_constant_def (exp, 1);
6285 /* TREE_CST_RTL probably contains a constant address.
6286 On RISC machines where a constant address isn't valid,
6287 make some insns to get that address into a register. */
6288 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6289 && modifier != EXPAND_CONST_ADDRESS
6290 && modifier != EXPAND_INITIALIZER
6291 && modifier != EXPAND_SUM
6292 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6293 || (flag_force_addr
6294 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6295 return change_address (TREE_CST_RTL (exp), VOIDmode,
6296 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6297 return TREE_CST_RTL (exp);
6299 case EXPR_WITH_FILE_LOCATION:
6301 rtx to_return;
6302 const char *saved_input_filename = input_filename;
6303 int saved_lineno = lineno;
6304 input_filename = EXPR_WFL_FILENAME (exp);
6305 lineno = EXPR_WFL_LINENO (exp);
6306 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6307 emit_line_note (input_filename, lineno);
6308 /* Possibly avoid switching back and forth here. */
6309 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6310 input_filename = saved_input_filename;
6311 lineno = saved_lineno;
6312 return to_return;
6315 case SAVE_EXPR:
6316 context = decl_function_context (exp);
6318 /* If this SAVE_EXPR was at global context, assume we are an
6319 initialization function and move it into our context. */
6320 if (context == 0)
6321 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6323 /* We treat inline_function_decl as an alias for the current function
6324 because that is the inline function whose vars, types, etc.
6325 are being merged into the current function.
6326 See expand_inline_function. */
6327 if (context == current_function_decl || context == inline_function_decl)
6328 context = 0;
6330 /* If this is non-local, handle it. */
6331 if (context)
6333 /* The following call just exists to abort if the context is
6334 not of a containing function. */
6335 find_function_data (context);
6337 temp = SAVE_EXPR_RTL (exp);
6338 if (temp && GET_CODE (temp) == REG)
6340 put_var_into_stack (exp);
6341 temp = SAVE_EXPR_RTL (exp);
6343 if (temp == 0 || GET_CODE (temp) != MEM)
6344 abort ();
6345 return change_address (temp, mode,
6346 fix_lexical_addr (XEXP (temp, 0), exp));
6348 if (SAVE_EXPR_RTL (exp) == 0)
6350 if (mode == VOIDmode)
6351 temp = const0_rtx;
6352 else
6354 temp = assign_temp (type, 3, 0, 0);
6355 if (GET_CODE (temp) == MEM)
6356 RTX_UNCHANGING_P (temp) = 1;
6359 SAVE_EXPR_RTL (exp) = temp;
6360 if (!optimize && GET_CODE (temp) == REG)
6361 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6362 save_expr_regs);
6364 /* If the mode of TEMP does not match that of the expression, it
6365 must be a promoted value. We pass store_expr a SUBREG of the
6366 wanted mode but mark it so that we know that it was already
6367 extended. Note that `unsignedp' was modified above in
6368 this case. */
6370 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6372 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6373 SUBREG_PROMOTED_VAR_P (temp) = 1;
6374 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6377 if (temp == const0_rtx)
6378 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6379 EXPAND_MEMORY_USE_BAD);
6380 else
6381 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6383 TREE_USED (exp) = 1;
6386 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6387 must be a promoted value. We return a SUBREG of the wanted mode,
6388 but mark it so that we know that it was already extended. */
6390 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6391 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6393 /* Compute the signedness and make the proper SUBREG. */
6394 promote_mode (type, mode, &unsignedp, 0);
6395 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6396 SUBREG_PROMOTED_VAR_P (temp) = 1;
6397 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6398 return temp;
6401 return SAVE_EXPR_RTL (exp);
6403 case UNSAVE_EXPR:
6405 rtx temp;
6406 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6407 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6408 return temp;
6411 case PLACEHOLDER_EXPR:
6413 tree placeholder_expr;
6415 /* If there is an object on the head of the placeholder list,
6416 see if some object in it is of type TYPE or is a pointer to it. For
6417 further information, see tree.def. */
6418 for (placeholder_expr = placeholder_list;
6419 placeholder_expr != 0;
6420 placeholder_expr = TREE_CHAIN (placeholder_expr))
6422 tree need_type = TYPE_MAIN_VARIANT (type);
6423 tree object = 0;
6424 tree old_list = placeholder_list;
6425 tree elt;
6427 /* Find the outermost reference that is of the type we want.
6428 If none, see if any object has a type that is a pointer to
6429 the type we want. */
6430 for (elt = TREE_PURPOSE (placeholder_expr);
6431 elt != 0 && object == 0;
6433 = ((TREE_CODE (elt) == COMPOUND_EXPR
6434 || TREE_CODE (elt) == COND_EXPR)
6435 ? TREE_OPERAND (elt, 1)
6436 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6437 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6438 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6439 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6440 ? TREE_OPERAND (elt, 0) : 0))
6441 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6442 object = elt;
6444 for (elt = TREE_PURPOSE (placeholder_expr);
6445 elt != 0 && object == 0;
6447 = ((TREE_CODE (elt) == COMPOUND_EXPR
6448 || TREE_CODE (elt) == COND_EXPR)
6449 ? TREE_OPERAND (elt, 1)
6450 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6451 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6452 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6453 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6454 ? TREE_OPERAND (elt, 0) : 0))
6455 if (POINTER_TYPE_P (TREE_TYPE (elt))
6456 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6457 == need_type))
6458 object = build1 (INDIRECT_REF, need_type, elt);
6460 if (object != 0)
6462 /* Expand this object skipping the list entries before
6463 it was found in case it is also a PLACEHOLDER_EXPR.
6464 In that case, we want to translate it using subsequent
6465 entries. */
6466 placeholder_list = TREE_CHAIN (placeholder_expr);
6467 temp = expand_expr (object, original_target, tmode,
6468 ro_modifier);
6469 placeholder_list = old_list;
6470 return temp;
6475 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6476 abort ();
6478 case WITH_RECORD_EXPR:
6479 /* Put the object on the placeholder list, expand our first operand,
6480 and pop the list. */
6481 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6482 placeholder_list);
6483 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6484 tmode, ro_modifier);
6485 placeholder_list = TREE_CHAIN (placeholder_list);
6486 return target;
6488 case GOTO_EXPR:
6489 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6490 expand_goto (TREE_OPERAND (exp, 0));
6491 else
6492 expand_computed_goto (TREE_OPERAND (exp, 0));
6493 return const0_rtx;
6495 case EXIT_EXPR:
6496 expand_exit_loop_if_false (NULL_PTR,
6497 invert_truthvalue (TREE_OPERAND (exp, 0)));
6498 return const0_rtx;
6500 case LABELED_BLOCK_EXPR:
6501 if (LABELED_BLOCK_BODY (exp))
6502 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6503 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6504 return const0_rtx;
6506 case EXIT_BLOCK_EXPR:
6507 if (EXIT_BLOCK_RETURN (exp))
6508 sorry ("returned value in block_exit_expr");
6509 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6510 return const0_rtx;
6512 case LOOP_EXPR:
6513 push_temp_slots ();
6514 expand_start_loop (1);
6515 expand_expr_stmt (TREE_OPERAND (exp, 0));
6516 expand_end_loop ();
6517 pop_temp_slots ();
6519 return const0_rtx;
6521 case BIND_EXPR:
6523 tree vars = TREE_OPERAND (exp, 0);
6524 int vars_need_expansion = 0;
6526 /* Need to open a binding contour here because
6527 if there are any cleanups they must be contained here. */
6528 expand_start_bindings (2);
6530 /* Mark the corresponding BLOCK for output in its proper place. */
6531 if (TREE_OPERAND (exp, 2) != 0
6532 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6533 insert_block (TREE_OPERAND (exp, 2));
6535 /* If VARS have not yet been expanded, expand them now. */
6536 while (vars)
6538 if (DECL_RTL (vars) == 0)
6540 vars_need_expansion = 1;
6541 expand_decl (vars);
6543 expand_decl_init (vars);
6544 vars = TREE_CHAIN (vars);
6547 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6549 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6551 return temp;
6554 case RTL_EXPR:
6555 if (RTL_EXPR_SEQUENCE (exp))
6557 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6558 abort ();
6559 emit_insns (RTL_EXPR_SEQUENCE (exp));
6560 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6562 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6563 free_temps_for_rtl_expr (exp);
6564 return RTL_EXPR_RTL (exp);
6566 case CONSTRUCTOR:
6567 /* If we don't need the result, just ensure we evaluate any
6568 subexpressions. */
6569 if (ignore)
6571 tree elt;
6572 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6573 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6574 EXPAND_MEMORY_USE_BAD);
6575 return const0_rtx;
6578 /* All elts simple constants => refer to a constant in memory. But
6579 if this is a non-BLKmode mode, let it store a field at a time
6580 since that should make a CONST_INT or CONST_DOUBLE when we
6581 fold. Likewise, if we have a target we can use, it is best to
6582 store directly into the target unless the type is large enough
6583 that memcpy will be used. If we are making an initializer and
6584 all operands are constant, put it in memory as well. */
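/* For illustration: the test below sends static, addressable or large
   constant constructors to output_constant_def (a reference to a
   constant laid out in memory), while the remaining cases fall through
   to store_constructor, which assembles the value field by field into
   TARGET or a temporary.  */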
6585 else if ((TREE_STATIC (exp)
6586 && ((mode == BLKmode
6587 && ! (target != 0 && safe_from_p (target, exp, 1)))
6588 || TREE_ADDRESSABLE (exp)
6589 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6590 && (! MOVE_BY_PIECES_P
6591 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6592 TYPE_ALIGN (type)))
6593 && ! mostly_zeros_p (exp))))
6594 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6596 rtx constructor = output_constant_def (exp, 1);
6598 if (modifier != EXPAND_CONST_ADDRESS
6599 && modifier != EXPAND_INITIALIZER
6600 && modifier != EXPAND_SUM
6601 && (! memory_address_p (GET_MODE (constructor),
6602 XEXP (constructor, 0))
6603 || (flag_force_addr
6604 && GET_CODE (XEXP (constructor, 0)) != REG)))
6605 constructor = change_address (constructor, VOIDmode,
6606 XEXP (constructor, 0));
6607 return constructor;
6610 else
6612 /* Handle calls that pass values in multiple non-contiguous
6613 locations. The Irix 6 ABI has examples of this. */
6614 if (target == 0 || ! safe_from_p (target, exp, 1)
6615 || GET_CODE (target) == PARALLEL)
6617 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6618 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6619 else
6620 target = assign_temp (type, 0, 1, 1);
6623 if (TREE_READONLY (exp))
6625 if (GET_CODE (target) == MEM)
6626 target = copy_rtx (target);
6628 RTX_UNCHANGING_P (target) = 1;
6631 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6632 int_size_in_bytes (TREE_TYPE (exp)));
6633 return target;
6636 case INDIRECT_REF:
6638 tree exp1 = TREE_OPERAND (exp, 0);
6639 tree index;
6640 tree string = string_constant (exp1, &index);
6642 /* Try to optimize reads from const strings. */
6643 if (string
6644 && TREE_CODE (string) == STRING_CST
6645 && TREE_CODE (index) == INTEGER_CST
6646 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6647 && GET_MODE_CLASS (mode) == MODE_INT
6648 && GET_MODE_SIZE (mode) == 1
6649 && modifier != EXPAND_MEMORY_USE_WO)
6650 return
6651 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6653 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6654 op0 = memory_address (mode, op0);
6656 if (cfun && current_function_check_memory_usage
6657 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6659 enum memory_use_mode memory_usage;
6660 memory_usage = get_memory_usage_from_modifier (modifier);
6662 if (memory_usage != MEMORY_USE_DONT)
6664 in_check_memory_usage = 1;
6665 emit_library_call (chkr_check_addr_libfunc,
6666 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6667 Pmode, GEN_INT (int_size_in_bytes (type)),
6668 TYPE_MODE (sizetype),
6669 GEN_INT (memory_usage),
6670 TYPE_MODE (integer_type_node));
6671 in_check_memory_usage = 0;
6675 temp = gen_rtx_MEM (mode, op0);
6676 set_mem_attributes (temp, exp, 0);
6678 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6679 here, because, in C and C++, the fact that a location is accessed
6680 through a pointer to const does not mean that the value there can
6681 never change. Languages where it can never change should
6682 also set TREE_STATIC. */
6683 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6685 /* If we are writing to this object and its type is a record with
6686 readonly fields, we must mark it as readonly so it will
6687 conflict with readonly references to those fields. */
6688 if (modifier == EXPAND_MEMORY_USE_WO
6689 && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
6690 RTX_UNCHANGING_P (temp) = 1;
6692 return temp;
6695 case ARRAY_REF:
6696 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6697 abort ();
6700 tree array = TREE_OPERAND (exp, 0);
6701 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6702 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6703 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6704 HOST_WIDE_INT i;
6706 /* Optimize the special-case of a zero lower bound.
6708 We convert the low_bound to sizetype to avoid some problems
6709 with constant folding. (E.g. suppose the lower bound is 1,
6710 and its mode is QI. Without the conversion, (ARRAY
6711 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6712 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6714 if (! integer_zerop (low_bound))
6715 index = size_diffop (index, convert (sizetype, low_bound));
6717 /* Fold an expression like: "foo"[2].
6718 This is not done in fold so it won't happen inside &.
6719 Don't fold if this is for wide characters since it's too
6720 difficult to do correctly and this is a very rare case. */
6722 if (TREE_CODE (array) == STRING_CST
6723 && TREE_CODE (index) == INTEGER_CST
6724 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6725 && GET_MODE_CLASS (mode) == MODE_INT
6726 && GET_MODE_SIZE (mode) == 1)
6727 return
6728 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6730 /* If this is a constant index into a constant array,
6731 just get the value from the array. Handle both the cases when
6732 we have an explicit constructor and when our operand is a variable
6733 that was declared const. */
6735 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6736 && TREE_CODE (index) == INTEGER_CST
6737 && 0 > compare_tree_int (index,
6738 list_length (CONSTRUCTOR_ELTS
6739 (TREE_OPERAND (exp, 0)))))
6741 tree elem;
6743 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6744 i = TREE_INT_CST_LOW (index);
6745 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6746 ;
6748 if (elem)
6749 return expand_expr (fold (TREE_VALUE (elem)), target,
6750 tmode, ro_modifier);
6753 else if (optimize >= 1
6754 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6755 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6756 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6758 if (TREE_CODE (index) == INTEGER_CST)
6760 tree init = DECL_INITIAL (array);
6762 if (TREE_CODE (init) == CONSTRUCTOR)
6764 tree elem;
6766 for (elem = CONSTRUCTOR_ELTS (init);
6767 (elem
6768 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6769 elem = TREE_CHAIN (elem))
6770 ;
6772 if (elem)
6773 return expand_expr (fold (TREE_VALUE (elem)), target,
6774 tmode, ro_modifier);
6776 else if (TREE_CODE (init) == STRING_CST
6777 && 0 > compare_tree_int (index,
6778 TREE_STRING_LENGTH (init)))
6780 tree type = TREE_TYPE (TREE_TYPE (init));
6781 enum machine_mode mode = TYPE_MODE (type);
6783 if (GET_MODE_CLASS (mode) == MODE_INT
6784 && GET_MODE_SIZE (mode) == 1)
6785 return (GEN_INT
6786 (TREE_STRING_POINTER
6787 (init)[TREE_INT_CST_LOW (index)]));
6792 /* Fall through. */
6794 case COMPONENT_REF:
6795 case BIT_FIELD_REF:
6796 /* If the operand is a CONSTRUCTOR, we can just extract the
6797 appropriate field if it is present. Don't do this if we have
6798 already written the data since we want to refer to that copy
6799 and varasm.c assumes that's what we'll do. */
6800 if (code != ARRAY_REF
6801 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6802 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6804 tree elt;
6806 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6807 elt = TREE_CHAIN (elt))
6808 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6809 /* We can normally use the value of the field in the
6810 CONSTRUCTOR. However, if this is a bitfield in
6811 an integral mode that we can fit in a HOST_WIDE_INT,
6812 we must mask only the number of bits in the bitfield,
6813 since this is done implicitly by the constructor. If
6814 the bitfield does not meet either of those conditions,
6815 we can't do this optimization. */
6816 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6817 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6818 == MODE_INT)
6819 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6820 <= HOST_BITS_PER_WIDE_INT))))
6822 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6823 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6825 HOST_WIDE_INT bitsize
6826 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6828 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6830 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6831 op0 = expand_and (op0, op1, target);
6833 else
6835 enum machine_mode imode
6836 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6837 tree count
6838 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6839 0);
6841 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6842 target, 0);
6843 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6844 target, 0);
6848 return op0;
6853 enum machine_mode mode1;
6854 HOST_WIDE_INT bitsize, bitpos;
6855 tree offset;
6856 int volatilep = 0;
6857 unsigned int alignment;
6858 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6859 &mode1, &unsignedp, &volatilep,
6860 &alignment);
6862 /* If we got back the original object, something is wrong. Perhaps
6863 we are evaluating an expression too early. In any event, don't
6864 infinitely recurse. */
6865 if (tem == exp)
6866 abort ();
6868 /* If TEM's type is a union of variable size, pass TARGET to the inner
6869 computation, since it will need a temporary and TARGET is known
6870 to be usable as one. This occurs in unchecked conversion in Ada. */
6872 op0 = expand_expr (tem,
6873 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6874 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6875 != INTEGER_CST)
6876 ? target : NULL_RTX),
6877 VOIDmode,
6878 (modifier == EXPAND_INITIALIZER
6879 || modifier == EXPAND_CONST_ADDRESS)
6880 ? modifier : EXPAND_NORMAL);
6882 /* If this is a constant, put it into a register if it is a
6883 legitimate constant and OFFSET is 0; otherwise put it into memory. */
6884 if (CONSTANT_P (op0))
6886 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6887 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6888 && offset == 0)
6889 op0 = force_reg (mode, op0);
6890 else
6891 op0 = validize_mem (force_const_mem (mode, op0));
6894 if (offset != 0)
6896 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6898 /* If this object is in memory, put it into a register.
6899 This case can't occur in C, but can in Ada if we have
6900 unchecked conversion of an expression from a scalar type to
6901 an array or record type. */
6902 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6903 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6905 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
6907 mark_temp_addr_taken (memloc);
6908 emit_move_insn (memloc, op0);
6909 op0 = memloc;
6912 if (GET_CODE (op0) != MEM)
6913 abort ();
6915 if (GET_MODE (offset_rtx) != ptr_mode)
6917 #ifdef POINTERS_EXTEND_UNSIGNED
6918 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6919 #else
6920 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6921 #endif
6924 /* A constant address in OP0 can have VOIDmode; we must not try
6925 to call force_reg in that case, so avoid it. */
6926 if (GET_CODE (op0) == MEM
6927 && GET_MODE (op0) == BLKmode
6928 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6929 && bitsize != 0
6930 && (bitpos % bitsize) == 0
6931 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6932 && alignment == GET_MODE_ALIGNMENT (mode1))
6934 rtx temp = change_address (op0, mode1,
6935 plus_constant (XEXP (op0, 0),
6936 (bitpos /
6937 BITS_PER_UNIT)));
6938 if (GET_CODE (XEXP (temp, 0)) == REG)
6939 op0 = temp;
6940 else
6941 op0 = change_address (op0, mode1,
6942 force_reg (GET_MODE (XEXP (temp, 0)),
6943 XEXP (temp, 0)));
6944 bitpos = 0;
6947 op0 = change_address (op0, VOIDmode,
6948 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6949 force_reg (ptr_mode,
6950 offset_rtx)));
6953 /* Don't forget about volatility even if this is a bitfield. */
6954 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6956 op0 = copy_rtx (op0);
6957 MEM_VOLATILE_P (op0) = 1;
6960 /* Check the access. */
6961 if (cfun != 0 && current_function_check_memory_usage
6962 && GET_CODE (op0) == MEM)
6964 enum memory_use_mode memory_usage;
6965 memory_usage = get_memory_usage_from_modifier (modifier);
6967 if (memory_usage != MEMORY_USE_DONT)
6969 rtx to;
6970 int size;
6972 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6973 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6975 /* Check the access right of the pointer. */
6976 in_check_memory_usage = 1;
6977 if (size > BITS_PER_UNIT)
6978 emit_library_call (chkr_check_addr_libfunc,
6979 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
6980 Pmode, GEN_INT (size / BITS_PER_UNIT),
6981 TYPE_MODE (sizetype),
6982 GEN_INT (memory_usage),
6983 TYPE_MODE (integer_type_node));
6984 in_check_memory_usage = 0;
6988 /* In cases where an aligned union has an unaligned object
6989 as a field, we might be extracting a BLKmode value from
6990 an integer-mode (e.g., SImode) object. Handle this case
6991 by doing the extract into an object as wide as the field
6992 (which we know to be the width of a basic mode), then
6993 storing into memory, and changing the mode to BLKmode.
6994 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6995 EXPAND_INITIALIZER), then we must not copy to a temporary. */
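/* For illustration: a BLKmode field packed inside an SImode-sized union
   member, say, is handled below by extracting the bits in an integer
   mode, spilling them to a stack temporary, and relabelling that
   temporary as BLKmode (see the ext_mode / assign_stack_temp code that
   follows).  */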
6996 if (mode1 == VOIDmode
6997 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6998 || (modifier != EXPAND_CONST_ADDRESS
6999 && modifier != EXPAND_INITIALIZER
7000 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
7001 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7002 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
7003 /* If the field isn't aligned enough to fetch as a memref,
7004 fetch it as a bit field. */
7005 || (mode1 != BLKmode
7006 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
7007 && ((TYPE_ALIGN (TREE_TYPE (tem))
7008 < GET_MODE_ALIGNMENT (mode))
7009 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7010 /* If the type and the field are a constant size and the
7011 size of the type isn't the same size as the bitfield,
7012 we must use bitfield operations. */
7013 || ((bitsize >= 0
7014 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7015 == INTEGER_CST)
7016 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7017 bitsize)))))
7018 || (modifier != EXPAND_CONST_ADDRESS
7019 && modifier != EXPAND_INITIALIZER
7020 && mode == BLKmode
7021 && SLOW_UNALIGNED_ACCESS (mode, alignment)
7022 && (TYPE_ALIGN (type) > alignment
7023 || bitpos % TYPE_ALIGN (type) != 0)))
7025 enum machine_mode ext_mode = mode;
7027 if (ext_mode == BLKmode
7028 && ! (target != 0 && GET_CODE (op0) == MEM
7029 && GET_CODE (target) == MEM
7030 && bitpos % BITS_PER_UNIT == 0))
7031 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7033 if (ext_mode == BLKmode)
7035 /* In this case, BITPOS must start at a byte boundary and
7036 TARGET, if specified, must be a MEM. */
7037 if (GET_CODE (op0) != MEM
7038 || (target != 0 && GET_CODE (target) != MEM)
7039 || bitpos % BITS_PER_UNIT != 0)
7040 abort ();
7042 op0 = change_address (op0, VOIDmode,
7043 plus_constant (XEXP (op0, 0),
7044 bitpos / BITS_PER_UNIT));
7045 if (target == 0)
7046 target = assign_temp (type, 0, 1, 1);
7048 emit_block_move (target, op0,
7049 bitsize == -1 ? expr_size (exp)
7050 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7051 / BITS_PER_UNIT),
7052 BITS_PER_UNIT);
7054 return target;
7057 op0 = validize_mem (op0);
7059 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7060 mark_reg_pointer (XEXP (op0, 0), alignment);
7062 op0 = extract_bit_field (op0, bitsize, bitpos,
7063 unsignedp, target, ext_mode, ext_mode,
7064 alignment,
7065 int_size_in_bytes (TREE_TYPE (tem)));
7067 /* If the result is a record type and BITSIZE is narrower than
7068 the mode of OP0, an integral mode, and this is a big endian
7069 machine, we must put the field into the high-order bits. */
7070 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7071 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7072 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7073 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7074 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7075 - bitsize),
7076 op0, 1);
7078 if (mode == BLKmode)
7080 rtx new = assign_stack_temp (ext_mode,
7081 bitsize / BITS_PER_UNIT, 0);
7083 emit_move_insn (new, op0);
7084 op0 = copy_rtx (new);
7085 PUT_MODE (op0, BLKmode);
7086 MEM_SET_IN_STRUCT_P (op0, 1);
7089 return op0;
7092 /* If the result is BLKmode, use that to access the object
7093 now as well. */
7094 if (mode == BLKmode)
7095 mode1 = BLKmode;
7097 /* Get a reference to just this component. */
7098 if (modifier == EXPAND_CONST_ADDRESS
7099 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7101 rtx new = gen_rtx_MEM (mode1,
7102 plus_constant (XEXP (op0, 0),
7103 (bitpos / BITS_PER_UNIT)));
7105 MEM_COPY_ATTRIBUTES (new, op0);
7106 op0 = new;
7108 else
7109 op0 = change_address (op0, mode1,
7110 plus_constant (XEXP (op0, 0),
7111 (bitpos / BITS_PER_UNIT)));
7113 set_mem_attributes (op0, exp, 0);
7114 if (GET_CODE (XEXP (op0, 0)) == REG)
7115 mark_reg_pointer (XEXP (op0, 0), alignment);
7117 MEM_VOLATILE_P (op0) |= volatilep;
7118 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7119 || modifier == EXPAND_CONST_ADDRESS
7120 || modifier == EXPAND_INITIALIZER)
7121 return op0;
7122 else if (target == 0)
7123 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7125 convert_move (target, op0, unsignedp);
7126 return target;
7129 /* Intended for a reference to a buffer of a file-object in Pascal.
7130 But it's not certain that a special tree code will really be
7131 necessary for these. INDIRECT_REF might work for them. */
7132 case BUFFER_REF:
7133 abort ();
7135 case IN_EXPR:
7137 /* Pascal set IN expression.
7139 Algorithm:
7140 rlo = set_low - (set_low%bits_per_word);
7141 the_word = set [ (index - rlo)/bits_per_word ];
7142 bit_index = index % bits_per_word;
7143 bitmask = 1 << bit_index;
7144 return !!(the_word & bitmask); */
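/* Worked example with hypothetical numbers, assuming 8-bit words purely
   for illustration: with set_low = 3 and index = 13,
   rlo = 3 - (3 % 8) = 0, the_word = set[(13 - 0) / 8] = set[1],
   bit_index = 13 % 8 = 5, bitmask = 1 << 5 = 32, and the result is
   nonzero iff bit 5 of set[1] is set.  */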
7146 tree set = TREE_OPERAND (exp, 0);
7147 tree index = TREE_OPERAND (exp, 1);
7148 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7149 tree set_type = TREE_TYPE (set);
7150 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7151 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7152 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7153 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7154 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7155 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7156 rtx setaddr = XEXP (setval, 0);
7157 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7158 rtx rlow;
7159 rtx diff, quo, rem, addr, bit, result;
7161 /* If domain is empty, answer is no. Likewise if index is constant
7162 and out of bounds. */
7163 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7164 && TREE_CODE (set_low_bound) == INTEGER_CST
7165 && tree_int_cst_lt (set_high_bound, set_low_bound))
7166 || (TREE_CODE (index) == INTEGER_CST
7167 && TREE_CODE (set_low_bound) == INTEGER_CST
7168 && tree_int_cst_lt (index, set_low_bound))
7169 || (TREE_CODE (set_high_bound) == INTEGER_CST
7170 && TREE_CODE (index) == INTEGER_CST
7171 && tree_int_cst_lt (set_high_bound, index))))
7172 return const0_rtx;
7174 if (target == 0)
7175 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7177 /* If we get here, we have to generate the code for both cases
7178 (in range and out of range). */
7180 op0 = gen_label_rtx ();
7181 op1 = gen_label_rtx ();
7183 if (! (GET_CODE (index_val) == CONST_INT
7184 && GET_CODE (lo_r) == CONST_INT))
7186 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7187 GET_MODE (index_val), iunsignedp, 0, op1);
7190 if (! (GET_CODE (index_val) == CONST_INT
7191 && GET_CODE (hi_r) == CONST_INT))
7193 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7194 GET_MODE (index_val), iunsignedp, 0, op1);
7197 /* Calculate the element number of bit zero in the first word
7198 of the set. */
7199 if (GET_CODE (lo_r) == CONST_INT)
7200 rlow = GEN_INT (INTVAL (lo_r)
7201 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7202 else
7203 rlow = expand_binop (index_mode, and_optab, lo_r,
7204 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7205 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7207 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7208 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7210 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7211 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7212 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7213 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7215 addr = memory_address (byte_mode,
7216 expand_binop (index_mode, add_optab, diff,
7217 setaddr, NULL_RTX, iunsignedp,
7218 OPTAB_LIB_WIDEN));
7220 /* Extract the bit we want to examine. */
7221 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7222 gen_rtx_MEM (byte_mode, addr),
7223 make_tree (TREE_TYPE (index), rem),
7224 NULL_RTX, 1);
7225 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7226 GET_MODE (target) == byte_mode ? target : 0,
7227 1, OPTAB_LIB_WIDEN);
7229 if (result != target)
7230 convert_move (target, result, 1);
7232 /* Output the code to handle the out-of-range case. */
7233 emit_jump (op0);
7234 emit_label (op1);
7235 emit_move_insn (target, const0_rtx);
7236 emit_label (op0);
7237 return target;
7240 case WITH_CLEANUP_EXPR:
7241 if (RTL_EXPR_RTL (exp) == 0)
7243 RTL_EXPR_RTL (exp)
7244 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7245 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7247 /* That's it for this cleanup. */
7248 TREE_OPERAND (exp, 2) = 0;
7250 return RTL_EXPR_RTL (exp);
7252 case CLEANUP_POINT_EXPR:
7254 /* Start a new binding layer that will keep track of all cleanup
7255 actions to be performed. */
7256 expand_start_bindings (2);
7258 target_temp_slot_level = temp_slot_level;
7260 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7261 /* If we're going to use this value, load it up now. */
7262 if (! ignore)
7263 op0 = force_not_mem (op0);
7264 preserve_temp_slots (op0);
7265 expand_end_bindings (NULL_TREE, 0, 0);
7267 return op0;
7269 case CALL_EXPR:
7270 /* Check for a built-in function. */
7271 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7272 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7273 == FUNCTION_DECL)
7274 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7276 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7277 == BUILT_IN_FRONTEND)
7278 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7279 else
7280 return expand_builtin (exp, target, subtarget, tmode, ignore);
7283 return expand_call (exp, target, ignore);
7285 case NON_LVALUE_EXPR:
7286 case NOP_EXPR:
7287 case CONVERT_EXPR:
7288 case REFERENCE_EXPR:
7289 if (TREE_OPERAND (exp, 0) == error_mark_node)
7290 return const0_rtx;
7292 if (TREE_CODE (type) == UNION_TYPE)
7294 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7296 /* If both input and output are BLKmode, this conversion
7297 isn't actually doing anything unless we need to make the
7298 alignment stricter. */
7299 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7300 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7301 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7302 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7303 modifier);
7305 if (target == 0)
7307 if (mode != BLKmode)
7308 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7309 else
7310 target = assign_temp (type, 0, 1, 1);
7313 if (GET_CODE (target) == MEM)
7314 /* Store data into beginning of memory target. */
7315 store_expr (TREE_OPERAND (exp, 0),
7316 change_address (target, TYPE_MODE (valtype), 0), 0);
7318 else if (GET_CODE (target) == REG)
7319 /* Store this field into a union of the proper type. */
7320 store_field (target,
7321 MIN ((int_size_in_bytes (TREE_TYPE
7322 (TREE_OPERAND (exp, 0)))
7323 * BITS_PER_UNIT),
7324 GET_MODE_BITSIZE (mode)),
7325 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7326 VOIDmode, 0, BITS_PER_UNIT,
7327 int_size_in_bytes (type), 0);
7328 else
7329 abort ();
7331 /* Return the entire union. */
7332 return target;
7335 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7337 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7338 ro_modifier);
7340 /* If the signedness of the conversion differs and OP0 is
7341 a promoted SUBREG, clear that indication since we now
7342 have to do the proper extension. */
7343 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7344 && GET_CODE (op0) == SUBREG)
7345 SUBREG_PROMOTED_VAR_P (op0) = 0;
7347 return op0;
7350 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7351 if (GET_MODE (op0) == mode)
7352 return op0;
7354 /* If OP0 is a constant, just convert it into the proper mode. */
7355 if (CONSTANT_P (op0))
7356 return
7357 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7358 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7360 if (modifier == EXPAND_INITIALIZER)
7361 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7363 if (target == 0)
7364 return
7365 convert_to_mode (mode, op0,
7366 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7367 else
7368 convert_move (target, op0,
7369 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7370 return target;
7372 case PLUS_EXPR:
7373 /* We come here from MINUS_EXPR when the second operand is a
7374 constant. */
7375 plus_expr:
7376 this_optab = ! unsignedp && flag_trapv
7377 && (GET_MODE_CLASS(mode) == MODE_INT)
7378 ? addv_optab : add_optab;
7380 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7381 something else, make sure we add the register to the constant and
7382 then to the other thing. This case can occur during strength
7383 reduction and doing it this way will produce better code if the
7384 frame pointer or argument pointer is eliminated.
7386 fold-const.c will ensure that the constant is always in the inner
7387 PLUS_EXPR, so the only case we need to do anything about is if
7388 sp, ap, or fp is our second argument, in which case we must swap
7389 the innermost first argument and our second argument. */
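/* A rough illustration of the swap below: for a tree of the shape
   (PLUS_EXPR (PLUS_EXPR A 4) FP), where FP is the frame-pointer
   RTL_EXPR, the operands are rearranged so that we effectively expand
   (PLUS_EXPR (PLUS_EXPR FP 4) A); FP+4 can then fold into a single
   offset when the frame pointer is eliminated.  */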
7391 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7392 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7393 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7394 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7395 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7396 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7398 tree t = TREE_OPERAND (exp, 1);
7400 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7401 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7404 /* If the result is to be ptr_mode and we are adding an integer to
7405 something, we might be forming a constant. So try to use
7406 plus_constant. If it produces a sum and we can't accept it,
7407 use force_operand. This allows P = &ARR[const] to generate
7408 efficient code on machines where a SYMBOL_REF is not a valid
7409 address.
7411 If this is an EXPAND_SUM call, always return the sum. */
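/* For instance, with a static array of 4-byte ints, P = &ARR[3] can
   come out as the compile-time sum (plus (symbol_ref "ARR")
   (const_int 12)); if that form is not a valid address or operand on
   the target, force_operand rebuilds it into registers.  */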
7412 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7413 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7415 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7416 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7417 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7419 rtx constant_part;
7421 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7422 EXPAND_SUM);
7423 /* Use immed_double_const to ensure that the constant is
7424 truncated according to the mode of OP1, then sign extended
7425 to a HOST_WIDE_INT. Using the constant directly can result
7426 in non-canonical RTL in a 64x32 cross compile. */
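/* E.g., on a 64-bit host targeting a 32-bit machine, an SImode
   constant with the bit pattern 0x80000000 must be represented as the
   sign-extended (const_int -2147483648); passing the raw low bits
   through unchanged would produce non-canonical RTL.  */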
7427 constant_part
7428 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7429 (HOST_WIDE_INT) 0,
7430 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7431 op1 = plus_constant (op1, INTVAL (constant_part));
7432 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7433 op1 = force_operand (op1, target);
7434 return op1;
7437 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7438 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7439 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7441 rtx constant_part;
7443 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7444 EXPAND_SUM);
7445 if (! CONSTANT_P (op0))
7447 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7448 VOIDmode, modifier);
7449 /* Don't go to both_summands if modifier
7450 says it's not right to return a PLUS. */
7451 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7452 goto binop2;
7453 goto both_summands;
7455 /* Use immed_double_const to ensure that the constant is
7456 truncated according to the mode of OP0, then sign extended

7457 to a HOST_WIDE_INT. Using the constant directly can result
7458 in non-canonical RTL in a 64x32 cross compile. */
7459 constant_part
7460 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7461 (HOST_WIDE_INT) 0,
7462 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7463 op0 = plus_constant (op0, INTVAL (constant_part));
7464 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7465 op0 = force_operand (op0, target);
7466 return op0;
7470 /* No sense saving up arithmetic to be done
7471 if it's all in the wrong mode to form part of an address.
7472 And force_operand won't know whether to sign-extend or
7473 zero-extend. */
7474 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7475 || mode != ptr_mode)
7476 goto binop;
7478 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7479 subtarget = 0;
7481 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7482 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7484 both_summands:
7485 /* Make sure any term that's a sum with a constant comes last. */
7486 if (GET_CODE (op0) == PLUS
7487 && CONSTANT_P (XEXP (op0, 1)))
7489 temp = op0;
7490 op0 = op1;
7491 op1 = temp;
7493 /* If adding to a sum including a constant,
7494 associate it to put the constant outside. */
7495 if (GET_CODE (op1) == PLUS
7496 && CONSTANT_P (XEXP (op1, 1)))
7498 rtx constant_term = const0_rtx;
7500 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7501 if (temp != 0)
7502 op0 = temp;
7503 /* Ensure that MULT comes first if there is one. */
7504 else if (GET_CODE (op0) == MULT)
7505 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7506 else
7507 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7509 /* Let's also eliminate constants from op0 if possible. */
7510 op0 = eliminate_constant_term (op0, &constant_term);
7512 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7513 their sum should be a constant. Form it into OP1, since the
7514 result we want will then be OP0 + OP1. */
7516 temp = simplify_binary_operation (PLUS, mode, constant_term,
7517 XEXP (op1, 1));
7518 if (temp != 0)
7519 op1 = temp;
7520 else
7521 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7524 /* Put a constant term last and put a multiplication first. */
7525 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7526 temp = op1, op1 = op0, op0 = temp;
7528 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7529 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
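/* Taken together, the rearrangements above mean that something like
   (a + 3) + (b + 5), expanded as part of an address, ends up roughly
   as (plus (plus a b) (const_int 8)), with the constants merged and
   pushed to the outermost position.  */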
7531 case MINUS_EXPR:
7532 /* For initializers, we are allowed to return a MINUS of two
7533 symbolic constants. Here we handle all cases when both operands
7534 are constant. */
7535 /* Handle difference of two symbolic constants,
7536 for the sake of an initializer. */
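/* For example, a file-scope initializer holding the difference of the
   addresses of two statics can be emitted directly as
   (minus (symbol_ref "b") (symbol_ref "a")) and resolved by the
   assembler or linker, rather than computed at run time.  */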
7537 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7538 && really_constant_p (TREE_OPERAND (exp, 0))
7539 && really_constant_p (TREE_OPERAND (exp, 1)))
7541 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7542 VOIDmode, ro_modifier);
7543 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7544 VOIDmode, ro_modifier);
7546 /* If the last operand is a CONST_INT, use plus_constant of
7547 the negated constant. Else make the MINUS. */
7548 if (GET_CODE (op1) == CONST_INT)
7549 return plus_constant (op0, - INTVAL (op1));
7550 else
7551 return gen_rtx_MINUS (mode, op0, op1);
7553 /* Convert A - const to A + (-const). */
7554 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7556 tree negated = fold (build1 (NEGATE_EXPR, type,
7557 TREE_OPERAND (exp, 1)));
7559 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7560 /* If we can't negate the constant in TYPE, leave it alone and
7561 expand_binop will negate it for us. We used to try to do it
7562 here in the signed version of TYPE, but that doesn't work
7563 on POINTER_TYPEs. */;
7564 else
7566 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7567 goto plus_expr;
7570 this_optab = ! unsignedp && flag_trapv
7571 && (GET_MODE_CLASS(mode) == MODE_INT)
7572 ? subv_optab : sub_optab;
7573 goto binop;
7575 case MULT_EXPR:
7576 /* If first operand is constant, swap them.
7577 Thus the following special case checks need only
7578 check the second operand. */
7579 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7581 register tree t1 = TREE_OPERAND (exp, 0);
7582 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7583 TREE_OPERAND (exp, 1) = t1;
7586 /* Attempt to return something suitable for generating an
7587 indexed address, for machines that support that. */
7589 if (modifier == EXPAND_SUM && mode == ptr_mode
7590 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7591 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7593 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7594 EXPAND_SUM);
7596 /* Apply distributive law if OP0 is x+c. */
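/* E.g. if OP0 came out as (plus x (const_int 3)) and the multiplier
   is 4, the result below is (plus (mult x 4) (const_int 12)), which
   is the shape an indexed address wants.  */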
7597 if (GET_CODE (op0) == PLUS
7598 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7599 return
7600 gen_rtx_PLUS
7601 (mode,
7602 gen_rtx_MULT
7603 (mode, XEXP (op0, 0),
7604 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7605 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7606 * INTVAL (XEXP (op0, 1))));
7608 if (GET_CODE (op0) != REG)
7609 op0 = force_operand (op0, NULL_RTX);
7610 if (GET_CODE (op0) != REG)
7611 op0 = copy_to_mode_reg (mode, op0);
7613 return
7614 gen_rtx_MULT (mode, op0,
7615 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7618 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7619 subtarget = 0;
7621 /* Check for multiplying things that have been extended
7622 from a narrower type. If this machine supports multiplying
7623 in that narrower type with a result in the desired type,
7624 do it that way, and avoid the explicit type-conversion. */
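/* For instance, (int) s1 * (int) s2 with short operands can use a
   HImode-to-SImode widening multiply directly on targets that provide
   one, instead of extending both operands and doing a full SImode
   multiply.  */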
7625 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7626 && TREE_CODE (type) == INTEGER_TYPE
7627 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7628 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7629 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7630 && int_fits_type_p (TREE_OPERAND (exp, 1),
7631 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7632 /* Don't use a widening multiply if a shift will do. */
7633 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7634 > HOST_BITS_PER_WIDE_INT)
7635 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7637 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7638 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7640 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7641 /* If both operands are extended, they must either both
7642 be zero-extended or both be sign-extended. */
7643 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7645 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7647 enum machine_mode innermode
7648 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7649 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7650 ? smul_widen_optab : umul_widen_optab);
7651 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7652 ? umul_widen_optab : smul_widen_optab);
7653 if (mode == GET_MODE_WIDER_MODE (innermode))
7655 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7657 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7658 NULL_RTX, VOIDmode, 0);
7659 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7660 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7661 VOIDmode, 0);
7662 else
7663 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7664 NULL_RTX, VOIDmode, 0);
7665 goto binop2;
7667 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7668 && innermode == word_mode)
7670 rtx htem;
7671 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7672 NULL_RTX, VOIDmode, 0);
7673 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7674 op1 = convert_modes (innermode, mode,
7675 expand_expr (TREE_OPERAND (exp, 1),
7676 NULL_RTX, VOIDmode, 0),
7677 unsignedp);
7678 else
7679 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7680 NULL_RTX, VOIDmode, 0);
7681 temp = expand_binop (mode, other_optab, op0, op1, target,
7682 unsignedp, OPTAB_LIB_WIDEN);
7683 htem = expand_mult_highpart_adjust (innermode,
7684 gen_highpart (innermode, temp),
7685 op0, op1,
7686 gen_highpart (innermode, temp),
7687 unsignedp);
7688 emit_move_insn (gen_highpart (innermode, temp), htem);
7689 return temp;
7693 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7694 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7695 return expand_mult (mode, op0, op1, target, unsignedp);
7697 case TRUNC_DIV_EXPR:
7698 case FLOOR_DIV_EXPR:
7699 case CEIL_DIV_EXPR:
7700 case ROUND_DIV_EXPR:
7701 case EXACT_DIV_EXPR:
7702 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7703 subtarget = 0;
7704 /* Possible optimization: compute the dividend with EXPAND_SUM;
7705 then, if the divisor is constant, we can optimize the case
7706 where some terms of the dividend have coefficients divisible by it. */
7707 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7708 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7709 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7711 case RDIV_EXPR:
7712 this_optab = flodiv_optab;
7713 goto binop;
7715 case TRUNC_MOD_EXPR:
7716 case FLOOR_MOD_EXPR:
7717 case CEIL_MOD_EXPR:
7718 case ROUND_MOD_EXPR:
7719 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7720 subtarget = 0;
7721 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7722 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7723 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7725 case FIX_ROUND_EXPR:
7726 case FIX_FLOOR_EXPR:
7727 case FIX_CEIL_EXPR:
7728 abort (); /* Not used for C. */
7730 case FIX_TRUNC_EXPR:
7731 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7732 if (target == 0)
7733 target = gen_reg_rtx (mode);
7734 expand_fix (target, op0, unsignedp);
7735 return target;
7737 case FLOAT_EXPR:
7738 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7739 if (target == 0)
7740 target = gen_reg_rtx (mode);
7741 /* expand_float can't figure out what to do if FROM has VOIDmode.
7742 So give it the correct mode. With -O, cse will optimize this. */
7743 if (GET_MODE (op0) == VOIDmode)
7744 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7745 op0);
7746 expand_float (target, op0,
7747 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7748 return target;
7750 case NEGATE_EXPR:
7751 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7752 temp = expand_unop (mode,
7753 ! unsignedp && flag_trapv
7754 && (GET_MODE_CLASS(mode) == MODE_INT)
7755 ? negv_optab : neg_optab, op0, target, 0);
7756 if (temp == 0)
7757 abort ();
7758 return temp;
7760 case ABS_EXPR:
7761 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7763 /* Handle complex values specially. */
7764 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7765 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7766 return expand_complex_abs (mode, op0, target, unsignedp);
7768 /* Unsigned abs is simply the operand. Testing here means we don't
7769 risk generating incorrect code below. */
7770 if (TREE_UNSIGNED (type))
7771 return op0;
7773 return expand_abs (mode, op0, target, unsignedp,
7774 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7776 case MAX_EXPR:
7777 case MIN_EXPR:
7778 target = original_target;
7779 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7780 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7781 || GET_MODE (target) != mode
7782 || (GET_CODE (target) == REG
7783 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7784 target = gen_reg_rtx (mode);
7785 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7786 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7788 /* First try to do it with a special MIN or MAX instruction.
7789 If that does not win, use a conditional jump to select the proper
7790 value. */
7791 this_optab = (TREE_UNSIGNED (type)
7792 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7793 : (code == MIN_EXPR ? smin_optab : smax_optab));
7795 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7796 OPTAB_WIDEN);
7797 if (temp != 0)
7798 return temp;
7800 /* At this point, a MEM target is no longer useful; we will get better
7801 code without it. */
7803 if (GET_CODE (target) == MEM)
7804 target = gen_reg_rtx (mode);
7806 if (target != op0)
7807 emit_move_insn (target, op0);
7809 op0 = gen_label_rtx ();
7811 /* If this mode is an integer too wide to compare properly,
7812 compare word by word. Rely on cse to optimize constant cases. */
7813 if (GET_MODE_CLASS (mode) == MODE_INT
7814 && ! can_compare_p (GE, mode, ccp_jump))
7816 if (code == MAX_EXPR)
7817 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7818 target, op1, NULL_RTX, op0);
7819 else
7820 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7821 op1, target, NULL_RTX, op0);
7823 else
7825 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7826 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7827 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7828 op0);
7830 emit_move_insn (target, op1);
7831 emit_label (op0);
7832 return target;
7834 case BIT_NOT_EXPR:
7835 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7836 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7837 if (temp == 0)
7838 abort ();
7839 return temp;
7841 case FFS_EXPR:
7842 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7843 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7844 if (temp == 0)
7845 abort ();
7846 return temp;
7848 /* ??? Can optimize bitwise operations with one arg constant.
7849 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7850 and (a bitwise1 b) bitwise2 b (etc)
7851 but that is probably not worthwhile. */
7853 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7854 boolean values when we want in all cases to compute both of them. In
7855 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7856 as actual zero-or-1 values and then bitwise anding. In cases where
7857 there cannot be any side effects, better code would be made by
7858 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7859 how to recognize those cases. */
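/* Concretely, A TRUTH_AND_EXPR B is expanded below by reducing both
   A and B to 0-or-1 values and ANDing the bits, with no branches;
   only if the operands were known to be side-effect free could we
   instead short-circuit B the way TRUTH_ANDIF_EXPR does.  */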
7861 case TRUTH_AND_EXPR:
7862 case BIT_AND_EXPR:
7863 this_optab = and_optab;
7864 goto binop;
7866 case TRUTH_OR_EXPR:
7867 case BIT_IOR_EXPR:
7868 this_optab = ior_optab;
7869 goto binop;
7871 case TRUTH_XOR_EXPR:
7872 case BIT_XOR_EXPR:
7873 this_optab = xor_optab;
7874 goto binop;
7876 case LSHIFT_EXPR:
7877 case RSHIFT_EXPR:
7878 case LROTATE_EXPR:
7879 case RROTATE_EXPR:
7880 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7881 subtarget = 0;
7882 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7883 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7884 unsignedp);
7886 /* Could determine the answer when only additive constants differ. Also,
7887 the addition of one can be handled by changing the condition. */
7888 case LT_EXPR:
7889 case LE_EXPR:
7890 case GT_EXPR:
7891 case GE_EXPR:
7892 case EQ_EXPR:
7893 case NE_EXPR:
7894 case UNORDERED_EXPR:
7895 case ORDERED_EXPR:
7896 case UNLT_EXPR:
7897 case UNLE_EXPR:
7898 case UNGT_EXPR:
7899 case UNGE_EXPR:
7900 case UNEQ_EXPR:
7901 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7902 if (temp != 0)
7903 return temp;
7905 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7906 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7907 && original_target
7908 && GET_CODE (original_target) == REG
7909 && (GET_MODE (original_target)
7910 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7912 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7913 VOIDmode, 0);
7915 if (temp != original_target)
7916 temp = copy_to_reg (temp);
7918 op1 = gen_label_rtx ();
7919 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7920 GET_MODE (temp), unsignedp, 0, op1);
7921 emit_move_insn (temp, const1_rtx);
7922 emit_label (op1);
7923 return temp;
7926 /* If no set-flag instruction, must generate a conditional
7927 store into a temporary variable. Drop through
7928 and handle this like && and ||. */
7930 case TRUTH_ANDIF_EXPR:
7931 case TRUTH_ORIF_EXPR:
7932 if (! ignore
7933 && (target == 0 || ! safe_from_p (target, exp, 1)
7934 /* Make sure we don't have a hard reg (such as function's return
7935 value) live across basic blocks, if not optimizing. */
7936 || (!optimize && GET_CODE (target) == REG
7937 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7938 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7940 if (target)
7941 emit_clr_insn (target);
7943 op1 = gen_label_rtx ();
7944 jumpifnot (exp, op1);
7946 if (target)
7947 emit_0_to_1_insn (target);
7949 emit_label (op1);
7950 return ignore ? const0_rtx : target;
7952 case TRUTH_NOT_EXPR:
7953 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7954 /* The parser is careful to generate TRUTH_NOT_EXPR
7955 only with operands that are always zero or one. */
7956 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7957 target, 1, OPTAB_LIB_WIDEN);
7958 if (temp == 0)
7959 abort ();
7960 return temp;
7962 case COMPOUND_EXPR:
7963 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7964 emit_queue ();
7965 return expand_expr (TREE_OPERAND (exp, 1),
7966 (ignore ? const0_rtx : target),
7967 VOIDmode, 0);
7969 case COND_EXPR:
7970 /* If we would have a "singleton" (see below) were it not for a
7971 conversion in each arm, bring that conversion back out. */
7972 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7973 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7974 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7975 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7977 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7978 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7980 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7981 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7982 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7983 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7984 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7985 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7986 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7987 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7988 return expand_expr (build1 (NOP_EXPR, type,
7989 build (COND_EXPR, TREE_TYPE (true),
7990 TREE_OPERAND (exp, 0),
7991 true, false)),
7992 target, tmode, modifier);
7996 /* Note that COND_EXPRs whose type is a structure or union
7997 are required to be constructed to contain assignments of
7998 a temporary variable, so that we can evaluate them here
7999 for side effect only. If type is void, we must do likewise. */
8001 /* If an arm of the branch requires a cleanup,
8002 only that cleanup is performed. */
8004 tree singleton = 0;
8005 tree binary_op = 0, unary_op = 0;
8007 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8008 convert it to our mode, if necessary. */
8009 if (integer_onep (TREE_OPERAND (exp, 1))
8010 && integer_zerop (TREE_OPERAND (exp, 2))
8011 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8013 if (ignore)
8015 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8016 ro_modifier);
8017 return const0_rtx;
8020 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
8021 if (GET_MODE (op0) == mode)
8022 return op0;
8024 if (target == 0)
8025 target = gen_reg_rtx (mode);
8026 convert_move (target, op0, unsignedp);
8027 return target;
8030 /* Check for X ? A + B : A. If we have this, we can copy A to the
8031 output and conditionally add B. Similarly for unary operations.
8032 Don't do this if X has side-effects because those side effects
8033 might affect A or B and the "?" operation is a sequence point in
8034 ANSI. (operand_equal_p tests for side effects.) */
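/* For instance, r = x ? a + b : a is handled by storing A into the
   result and then, only when X is true, adding B to the stored copy;
   A is evaluated once and no jump over an else-arm is needed.  */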
8036 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8037 && operand_equal_p (TREE_OPERAND (exp, 2),
8038 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8039 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8040 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8041 && operand_equal_p (TREE_OPERAND (exp, 1),
8042 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8043 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8044 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8045 && operand_equal_p (TREE_OPERAND (exp, 2),
8046 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8047 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8048 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8049 && operand_equal_p (TREE_OPERAND (exp, 1),
8050 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8051 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8053 /* If we are not to produce a result, we have no target. Otherwise,
8054 if a target was specified, use it; it will not be used as an
8055 intermediate target unless it is safe. If no target, use a
8056 temporary. */
8058 if (ignore)
8059 temp = 0;
8060 else if (original_target
8061 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8062 || (singleton && GET_CODE (original_target) == REG
8063 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8064 && original_target == var_rtx (singleton)))
8065 && GET_MODE (original_target) == mode
8066 #ifdef HAVE_conditional_move
8067 && (! can_conditionally_move_p (mode)
8068 || GET_CODE (original_target) == REG
8069 || TREE_ADDRESSABLE (type))
8070 #endif
8071 && ! (GET_CODE (original_target) == MEM
8072 && MEM_VOLATILE_P (original_target)))
8073 temp = original_target;
8074 else if (TREE_ADDRESSABLE (type))
8075 abort ();
8076 else
8077 temp = assign_temp (type, 0, 0, 1);
8079 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8080 do the test of X as a store-flag operation, do this as
8081 A + ((X != 0) << log C). Similarly for other simple binary
8082 operators. Only do for C == 1 if BRANCH_COST is low. */
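/* So, with a suitable store-flag instruction, something like
   x ? a + 4 : a can become a + ((x != 0) << 2) with no branches at
   all; whether this pays off is governed by BRANCH_COST.  */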
8083 if (temp && singleton && binary_op
8084 && (TREE_CODE (binary_op) == PLUS_EXPR
8085 || TREE_CODE (binary_op) == MINUS_EXPR
8086 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8087 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8088 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8089 : integer_onep (TREE_OPERAND (binary_op, 1)))
8090 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8092 rtx result;
8093 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8094 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8095 ? addv_optab : add_optab)
8096 : TREE_CODE (binary_op) == MINUS_EXPR
8097 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8098 ? subv_optab : sub_optab)
8099 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8100 : xor_optab);
8102 /* If we had X ? A : A + 1, do this as A + (X == 0).
8104 We have to invert the truth value here and then put it
8105 back later if do_store_flag fails. We cannot simply copy
8106 TREE_OPERAND (exp, 0) to another variable and modify that
8107 because invert_truthvalue can modify the tree pointed to
8108 by its argument. */
8109 if (singleton == TREE_OPERAND (exp, 1))
8110 TREE_OPERAND (exp, 0)
8111 = invert_truthvalue (TREE_OPERAND (exp, 0));
8113 result = do_store_flag (TREE_OPERAND (exp, 0),
8114 (safe_from_p (temp, singleton, 1)
8115 ? temp : NULL_RTX),
8116 mode, BRANCH_COST <= 1);
8118 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8119 result = expand_shift (LSHIFT_EXPR, mode, result,
8120 build_int_2 (tree_log2
8121 (TREE_OPERAND
8122 (binary_op, 1)),
8124 (safe_from_p (temp, singleton, 1)
8125 ? temp : NULL_RTX), 0);
8127 if (result)
8129 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8130 return expand_binop (mode, boptab, op1, result, temp,
8131 unsignedp, OPTAB_LIB_WIDEN);
8133 else if (singleton == TREE_OPERAND (exp, 1))
8134 TREE_OPERAND (exp, 0)
8135 = invert_truthvalue (TREE_OPERAND (exp, 0));
8138 do_pending_stack_adjust ();
8139 NO_DEFER_POP;
8140 op0 = gen_label_rtx ();
8142 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8144 if (temp != 0)
8146 /* If the target conflicts with the other operand of the
8147 binary op, we can't use it. Also, we can't use the target
8148 if it is a hard register, because evaluating the condition
8149 might clobber it. */
8150 if ((binary_op
8151 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8152 || (GET_CODE (temp) == REG
8153 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8154 temp = gen_reg_rtx (mode);
8155 store_expr (singleton, temp, 0);
8157 else
8158 expand_expr (singleton,
8159 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8160 if (singleton == TREE_OPERAND (exp, 1))
8161 jumpif (TREE_OPERAND (exp, 0), op0);
8162 else
8163 jumpifnot (TREE_OPERAND (exp, 0), op0);
8165 start_cleanup_deferral ();
8166 if (binary_op && temp == 0)
8167 /* Just touch the other operand. */
8168 expand_expr (TREE_OPERAND (binary_op, 1),
8169 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8170 else if (binary_op)
8171 store_expr (build (TREE_CODE (binary_op), type,
8172 make_tree (type, temp),
8173 TREE_OPERAND (binary_op, 1)),
8174 temp, 0);
8175 else
8176 store_expr (build1 (TREE_CODE (unary_op), type,
8177 make_tree (type, temp)),
8178 temp, 0);
8179 op1 = op0;
8181 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8182 comparison operator. If we have one of these cases, set the
8183 output to A, branch on A (cse will merge these two references),
8184 then set the output to FOO. */
8185 else if (temp
8186 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8187 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8188 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8189 TREE_OPERAND (exp, 1), 0)
8190 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8191 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8192 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8194 if (GET_CODE (temp) == REG
8195 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8196 temp = gen_reg_rtx (mode);
8197 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8198 jumpif (TREE_OPERAND (exp, 0), op0);
8200 start_cleanup_deferral ();
8201 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8202 op1 = op0;
8204 else if (temp
8205 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8206 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8207 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8208 TREE_OPERAND (exp, 2), 0)
8209 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8210 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8211 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8213 if (GET_CODE (temp) == REG
8214 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8215 temp = gen_reg_rtx (mode);
8216 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8217 jumpifnot (TREE_OPERAND (exp, 0), op0);
8219 start_cleanup_deferral ();
8220 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8221 op1 = op0;
8223 else
8225 op1 = gen_label_rtx ();
8226 jumpifnot (TREE_OPERAND (exp, 0), op0);
8228 start_cleanup_deferral ();
8230 /* One branch of the cond can be void if it never returns. For
8231 example A ? throw : E */
8232 if (temp != 0
8233 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8234 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8235 else
8236 expand_expr (TREE_OPERAND (exp, 1),
8237 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8238 end_cleanup_deferral ();
8239 emit_queue ();
8240 emit_jump_insn (gen_jump (op1));
8241 emit_barrier ();
8242 emit_label (op0);
8243 start_cleanup_deferral ();
8244 if (temp != 0
8245 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8246 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8247 else
8248 expand_expr (TREE_OPERAND (exp, 2),
8249 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8252 end_cleanup_deferral ();
8254 emit_queue ();
8255 emit_label (op1);
8256 OK_DEFER_POP;
8258 return temp;
8261 case TARGET_EXPR:
8263 /* Something needs to be initialized, but we didn't know
8264 where that thing was when building the tree. For example,
8265 it could be the return value of a function, or a parameter
8266 to a function which is laid out on the stack, or a temporary
8267 variable which must be passed by reference.
8269 We guarantee that the expression will either be constructed
8270 or copied into our original target. */
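/* A typical instance: a call whose aggregate return value must land
   in a particular variable or stack slot.  The front end wraps the
   initializer in a TARGET_EXPR naming that slot, and the code below
   either reuses the slot's RTL, uses the target we were handed, or
   assigns a temporary for it.  */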
8272 tree slot = TREE_OPERAND (exp, 0);
8273 tree cleanups = NULL_TREE;
8274 tree exp1;
8276 if (TREE_CODE (slot) != VAR_DECL)
8277 abort ();
8279 if (! ignore)
8280 target = original_target;
8282 /* Set this here so that if we get a target that refers to a
8283 register variable that's already been used, put_reg_into_stack
8284 knows that it should fix up those uses. */
8285 TREE_USED (slot) = 1;
8287 if (target == 0)
8289 if (DECL_RTL (slot) != 0)
8291 target = DECL_RTL (slot);
8292 /* If we have already expanded the slot, don't do
8293 it again. (mrs) */
8294 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8295 return target;
8297 else
8299 target = assign_temp (type, 2, 0, 1);
8300 /* All temp slots at this level must not conflict. */
8301 preserve_temp_slots (target);
8302 DECL_RTL (slot) = target;
8303 if (TREE_ADDRESSABLE (slot))
8304 put_var_into_stack (slot);
8306 /* Since SLOT is not known to the called function
8307 to belong to its stack frame, we must build an explicit
8308 cleanup. This case occurs when we must build up a reference
8309 to pass the reference as an argument. In this case,
8310 it is very likely that such a reference need not be
8311 built here. */
8313 if (TREE_OPERAND (exp, 2) == 0)
8314 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8315 cleanups = TREE_OPERAND (exp, 2);
8318 else
8320 /* This case does occur when expanding a parameter which
8321 needs to be constructed on the stack. The target
8322 is the actual stack address that we want to initialize.
8323 The function we call will perform the cleanup in this case. */
8325 /* If we have already assigned it space, use that space,
8326 not the target that we were passed in, as our target
8327 parameter is only a hint. */
8328 if (DECL_RTL (slot) != 0)
8330 target = DECL_RTL (slot);
8331 /* If we have already expanded the slot, don't do
8332 it again. (mrs) */
8333 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8334 return target;
8336 else
8338 DECL_RTL (slot) = target;
8339 /* If we must have an addressable slot, then make sure that
8340 the RTL that we just stored in slot is OK. */
8341 if (TREE_ADDRESSABLE (slot))
8342 put_var_into_stack (slot);
8346 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8347 /* Mark it as expanded. */
8348 TREE_OPERAND (exp, 1) = NULL_TREE;
8350 store_expr (exp1, target, 0);
8352 expand_decl_cleanup (NULL_TREE, cleanups);
8354 return target;
8357 case INIT_EXPR:
8359 tree lhs = TREE_OPERAND (exp, 0);
8360 tree rhs = TREE_OPERAND (exp, 1);
8361 tree noncopied_parts = 0;
8362 tree lhs_type = TREE_TYPE (lhs);
8364 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8365 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8366 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8367 TYPE_NONCOPIED_PARTS (lhs_type));
8368 while (noncopied_parts != 0)
8370 expand_assignment (TREE_VALUE (noncopied_parts),
8371 TREE_PURPOSE (noncopied_parts), 0, 0);
8372 noncopied_parts = TREE_CHAIN (noncopied_parts);
8374 return temp;
8377 case MODIFY_EXPR:
8379 /* If lhs is complex, expand calls in rhs before computing it.
8380 That's so we don't compute a pointer and save it over a call.
8381 If lhs is simple, compute it first so we can give it as a
8382 target if the rhs is just a call. This avoids an extra temp and copy
8383 and prevents a partial subsumption which makes bad code.
8384 Actually we could treat component_ref's of vars like vars. */
8386 tree lhs = TREE_OPERAND (exp, 0);
8387 tree rhs = TREE_OPERAND (exp, 1);
8388 tree noncopied_parts = 0;
8389 tree lhs_type = TREE_TYPE (lhs);
8391 temp = 0;
8393 if (TREE_CODE (lhs) != VAR_DECL
8394 && TREE_CODE (lhs) != RESULT_DECL
8395 && TREE_CODE (lhs) != PARM_DECL
8396 && ! (TREE_CODE (lhs) == INDIRECT_REF
8397 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8399 /* Check for |= or &= of a bitfield of size one into another bitfield
8400 of size 1. In this case, (unless we need the result of the
8401 assignment) we can do this more efficiently with a
8402 test followed by an assignment, if necessary.
8404 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8405 things change so we do, this code should be enhanced to
8406 support it. */
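/* Sketch of the transformation: for s.f |= t.g with two 1-bit fields
   and an ignored result, we test t.g, skip the store when it is zero,
   and otherwise store 1 into s.f, avoiding a read-modify-write of the
   destination bitfield.  */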
8407 if (ignore
8408 && TREE_CODE (lhs) == COMPONENT_REF
8409 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8410 || TREE_CODE (rhs) == BIT_AND_EXPR)
8411 && TREE_OPERAND (rhs, 0) == lhs
8412 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8413 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8414 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8416 rtx label = gen_label_rtx ();
8418 do_jump (TREE_OPERAND (rhs, 1),
8419 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8420 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8421 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8422 (TREE_CODE (rhs) == BIT_IOR_EXPR
8423 ? integer_one_node
8424 : integer_zero_node)),
8425 0, 0);
8426 do_pending_stack_adjust ();
8427 emit_label (label);
8428 return const0_rtx;
8431 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8432 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8433 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8434 TYPE_NONCOPIED_PARTS (lhs_type));
8436 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8437 while (noncopied_parts != 0)
8439 expand_assignment (TREE_PURPOSE (noncopied_parts),
8440 TREE_VALUE (noncopied_parts), 0, 0);
8441 noncopied_parts = TREE_CHAIN (noncopied_parts);
8443 return temp;
8446 case RETURN_EXPR:
8447 if (!TREE_OPERAND (exp, 0))
8448 expand_null_return ();
8449 else
8450 expand_return (TREE_OPERAND (exp, 0));
8451 return const0_rtx;
8453 case PREINCREMENT_EXPR:
8454 case PREDECREMENT_EXPR:
8455 return expand_increment (exp, 0, ignore);
8457 case POSTINCREMENT_EXPR:
8458 case POSTDECREMENT_EXPR:
8459 /* Faster to treat as pre-increment if result is not used. */
8460 return expand_increment (exp, ! ignore, ignore);
8462 case ADDR_EXPR:
8463 /* If nonzero, TEMP will be set to the address of something that might
8464 be a MEM corresponding to a stack slot. */
8465 temp = 0;
8467 /* Are we taking the address of a nested function? */
8468 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8469 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8470 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8471 && ! TREE_STATIC (exp))
8473 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8474 op0 = force_operand (op0, target);
8476 /* If we are taking the address of something erroneous, just
8477 return a zero. */
8478 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8479 return const0_rtx;
8480 else
8482 /* We make sure to pass const0_rtx down if we came in with
8483 ignore set, to avoid doing the cleanups twice for something. */
8484 op0 = expand_expr (TREE_OPERAND (exp, 0),
8485 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8486 (modifier == EXPAND_INITIALIZER
8487 ? modifier : EXPAND_CONST_ADDRESS));
8489 /* If we are going to ignore the result, OP0 will have been set
8490 to const0_rtx, so just return it. Don't get confused and
8491 think we are taking the address of the constant. */
8492 if (ignore)
8493 return op0;
8495 op0 = protect_from_queue (op0, 0);
8497 /* We would like the object in memory. If it is a constant, we can
8498 have it statically allocated in memory. For a non-constant,
8499 we need to allocate some memory and store the value into it. */
8501 if (CONSTANT_P (op0))
8502 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8503 op0);
8504 else if (GET_CODE (op0) == MEM)
8506 mark_temp_addr_taken (op0);
8507 temp = XEXP (op0, 0);
8510 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8511 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8512 || GET_CODE (op0) == PARALLEL)
8514 /* If this object is in a register, it must not
8515 be BLKmode. */
8516 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8517 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8519 mark_temp_addr_taken (memloc);
8520 if (GET_CODE (op0) == PARALLEL)
8521 /* Handle calls that pass values in multiple non-contiguous
8522 locations. The Irix 6 ABI has examples of this. */
8523 emit_group_store (memloc, op0,
8524 int_size_in_bytes (inner_type),
8525 TYPE_ALIGN (inner_type));
8526 else
8527 emit_move_insn (memloc, op0);
8528 op0 = memloc;
8531 if (GET_CODE (op0) != MEM)
8532 abort ();
8534 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8536 temp = XEXP (op0, 0);
8537 #ifdef POINTERS_EXTEND_UNSIGNED
8538 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8539 && mode == ptr_mode)
8540 temp = convert_memory_address (ptr_mode, temp);
8541 #endif
8542 return temp;
8545 op0 = force_operand (XEXP (op0, 0), target);
8548 if (flag_force_addr && GET_CODE (op0) != REG)
8549 op0 = force_reg (Pmode, op0);
8551 if (GET_CODE (op0) == REG
8552 && ! REG_USERVAR_P (op0))
8553 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8555 /* If we might have had a temp slot, add an equivalent address
8556 for it. */
8557 if (temp != 0)
8558 update_temp_slot_address (temp, op0);
8560 #ifdef POINTERS_EXTEND_UNSIGNED
8561 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8562 && mode == ptr_mode)
8563 op0 = convert_memory_address (ptr_mode, op0);
8564 #endif
8566 return op0;
8568 case ENTRY_VALUE_EXPR:
8569 abort ();
8571 /* COMPLEX type for Extended Pascal & Fortran */
8572 case COMPLEX_EXPR:
8574 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8575 rtx insns;
8577 /* Get the rtx code of the operands. */
8578 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8579 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8581 if (! target)
8582 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8584 start_sequence ();
8586 /* Move the real (op0) and imaginary (op1) parts to their location. */
8587 emit_move_insn (gen_realpart (mode, target), op0);
8588 emit_move_insn (gen_imagpart (mode, target), op1);
8590 insns = get_insns ();
8591 end_sequence ();
8593 /* Complex construction should appear as a single unit. */
8594 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8595 each with a separate pseudo as destination.
8596 It's not correct for flow to treat them as a unit. */
8597 if (GET_CODE (target) != CONCAT)
8598 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8599 else
8600 emit_insns (insns);
8602 return target;
8605 case REALPART_EXPR:
8606 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8607 return gen_realpart (mode, op0);
8609 case IMAGPART_EXPR:
8610 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8611 return gen_imagpart (mode, op0);
8613 case CONJ_EXPR:
8615 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8616 rtx imag_t;
8617 rtx insns;
8619 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8621 if (! target)
8622 target = gen_reg_rtx (mode);
8624 start_sequence ();
8626 /* Store the realpart and the negated imagpart to target. */
8627 emit_move_insn (gen_realpart (partmode, target),
8628 gen_realpart (partmode, op0));
8630 imag_t = gen_imagpart (partmode, target);
8631 temp = expand_unop (partmode,
8632 ! unsignedp && flag_trapv
8633 && (GET_MODE_CLASS(partmode) == MODE_INT)
8634 ? negv_optab : neg_optab,
8635 gen_imagpart (partmode, op0), imag_t, 0);
8636 if (temp != imag_t)
8637 emit_move_insn (imag_t, temp);
8639 insns = get_insns ();
8640 end_sequence ();
8642 /* Conjugate should appear as a single unit.
8643 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8644 each with a separate pseudo as destination.
8645 It's not correct for flow to treat them as a unit. */
8646 if (GET_CODE (target) != CONCAT)
8647 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8648 else
8649 emit_insns (insns);
8651 return target;
8654 case TRY_CATCH_EXPR:
8656 tree handler = TREE_OPERAND (exp, 1);
8658 expand_eh_region_start ();
8660 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8662 expand_eh_region_end (handler);
8664 return op0;
8667 case TRY_FINALLY_EXPR:
8669 tree try_block = TREE_OPERAND (exp, 0);
8670 tree finally_block = TREE_OPERAND (exp, 1);
8671 rtx finally_label = gen_label_rtx ();
8672 rtx done_label = gen_label_rtx ();
8673 rtx return_link = gen_reg_rtx (Pmode);
8674 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8675 (tree) finally_label, (tree) return_link);
8676 TREE_SIDE_EFFECTS (cleanup) = 1;
8678 /* Start a new binding layer that will keep track of all cleanup
8679 actions to be performed. */
8680 expand_start_bindings (2);
8682 target_temp_slot_level = temp_slot_level;
8684 expand_decl_cleanup (NULL_TREE, cleanup);
8685 op0 = expand_expr (try_block, target, tmode, modifier);
8687 preserve_temp_slots (op0);
8688 expand_end_bindings (NULL_TREE, 0, 0);
8689 emit_jump (done_label);
8690 emit_label (finally_label);
8691 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8692 emit_indirect_jump (return_link);
8693 emit_label (done_label);
8694 return op0;
8697 case GOTO_SUBROUTINE_EXPR:
8699 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8700 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8701 rtx return_address = gen_label_rtx ();
8702 emit_move_insn (return_link,
8703 gen_rtx_LABEL_REF (Pmode, return_address));
8704 emit_jump (subr);
8705 emit_label (return_address);
8706 return const0_rtx;
8709 case POPDCC_EXPR:
8711 rtx dcc = get_dynamic_cleanup_chain ();
8712 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8713 return const0_rtx;
8716 case POPDHC_EXPR:
8718 rtx dhc = get_dynamic_handler_chain ();
8719 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8720 return const0_rtx;
8723 case VA_ARG_EXPR:
8724 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8726 default:
8727 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8730 /* Here to do an ordinary binary operator, generating an instruction
8731 from the optab already placed in `this_optab'. */
8732 binop:
8733 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8734 subtarget = 0;
8735 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8736 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8737 binop2:
8738 temp = expand_binop (mode, this_optab, op0, op1, target,
8739 unsignedp, OPTAB_LIB_WIDEN);
8740 if (temp == 0)
8741 abort ();
8742 return temp;
8745 /* Similar to expand_expr, except that we don't specify a target, target
8746 mode, or modifier and we return the alignment of the inner type. This is
8747 used in cases where it is not necessary to align the result to the
8748 alignment of its type as long as we know the alignment of the result, for
8749 example for comparisons of BLKmode values. */
8751 static rtx
8752 expand_expr_unaligned (exp, palign)
8753 register tree exp;
8754 unsigned int *palign;
8756 register rtx op0;
8757 tree type = TREE_TYPE (exp);
8758 register enum machine_mode mode = TYPE_MODE (type);
8760 /* Default the alignment we return to that of the type. */
8761 *palign = TYPE_ALIGN (type);
8763 /* The only case in which we do anything special is when the resulting mode
8764 is BLKmode. */
8765 if (mode != BLKmode)
8766 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8768 switch (TREE_CODE (exp))
8770 case CONVERT_EXPR:
8771 case NOP_EXPR:
8772 case NON_LVALUE_EXPR:
8773 /* Conversions between BLKmode values don't change the underlying
8774 alignment or value. */
8775 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8776 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8777 break;
8779 case ARRAY_REF:
8780 /* Much of the code for this case is copied directly from expand_expr.
8781 We need to duplicate it here because we will do something different
8782 in the fall-through case, so we need to handle the same exceptions
8783 it does. */
8785 tree array = TREE_OPERAND (exp, 0);
8786 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8787 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8788 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8789 HOST_WIDE_INT i;
8791 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8792 abort ();
8794 /* Optimize the special case of a zero lower bound.
8796 We convert the low_bound to sizetype to avoid some problems
8797 with constant folding. (E.g. suppose the lower bound is 1,
8798 and its mode is QI. Without the conversion, (ARRAY
8799 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8800 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8802 if (! integer_zerop (low_bound))
8803 index = size_diffop (index, convert (sizetype, low_bound));
8805 /* If this is a constant index into a constant array,
8806 just get the value from the array. Handle both cases: when
8807 we have an explicit constructor and when our operand is a variable
8808 that was declared const. */
8810 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8811 && host_integerp (index, 0)
8812 && 0 > compare_tree_int (index,
8813 list_length (CONSTRUCTOR_ELTS
8814 (TREE_OPERAND (exp, 0)))))
8816 tree elem;
8818 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8819 i = tree_low_cst (index, 0);
8820 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8823 if (elem)
8824 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8827 else if (optimize >= 1
8828 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8829 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8830 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8832 if (TREE_CODE (index) == INTEGER_CST)
8834 tree init = DECL_INITIAL (array);
8836 if (TREE_CODE (init) == CONSTRUCTOR)
8838 tree elem;
8840 for (elem = CONSTRUCTOR_ELTS (init);
8841 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8842 elem = TREE_CHAIN (elem))
8845 if (elem)
8846 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8847 palign);
8852 /* Fall through. */
8854 case COMPONENT_REF:
8855 case BIT_FIELD_REF:
8856 /* If the operand is a CONSTRUCTOR, we can just extract the
8857 appropriate field if it is present. Don't do this if we have
8858 already written the data since we want to refer to that copy
8859 and varasm.c assumes that's what we'll do. */
8860 if (TREE_CODE (exp) != ARRAY_REF
8861 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8862 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8864 tree elt;
8866 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8867 elt = TREE_CHAIN (elt))
8868 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8869 /* Note that unlike the case in expand_expr, we know this is
8870 BLKmode and hence not an integer. */
8871 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8875 enum machine_mode mode1;
8876 HOST_WIDE_INT bitsize, bitpos;
8877 tree offset;
8878 int volatilep = 0;
8879 unsigned int alignment;
8880 int unsignedp;
8881 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8882 &mode1, &unsignedp, &volatilep,
8883 &alignment);
8885 /* If we got back the original object, something is wrong. Perhaps
8886 we are evaluating an expression too early. In any event, don't
8887 infinitely recurse. */
8888 if (tem == exp)
8889 abort ();
8891 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8893 /* If this is a constant, put it into a register if it is a
8894 legitimate constant and OFFSET is 0, and into memory if it isn't. */
8895 if (CONSTANT_P (op0))
8897 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8899 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8900 && offset == 0)
8901 op0 = force_reg (inner_mode, op0);
8902 else
8903 op0 = validize_mem (force_const_mem (inner_mode, op0));
8906 if (offset != 0)
8908 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8910 /* If this object is in a register, put it into memory.
8911 This case can't occur in C, but can in Ada if we have
8912 unchecked conversion of an expression from a scalar type to
8913 an array or record type. */
8914 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8915 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8917 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8919 mark_temp_addr_taken (memloc);
8920 emit_move_insn (memloc, op0);
8921 op0 = memloc;
8924 if (GET_CODE (op0) != MEM)
8925 abort ();
8927 if (GET_MODE (offset_rtx) != ptr_mode)
8929 #ifdef POINTERS_EXTEND_UNSIGNED
8930 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8931 #else
8932 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8933 #endif
8936 op0 = change_address (op0, VOIDmode,
8937 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8938 force_reg (ptr_mode,
8939 offset_rtx)));
8942 /* Don't forget about volatility even if this is a bitfield. */
8943 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8945 op0 = copy_rtx (op0);
8946 MEM_VOLATILE_P (op0) = 1;
8949 /* Check the access. */
8950 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8952 rtx to;
8953 int size;
8955 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8956 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8958 /* Check the access right of the pointer. */
8959 in_check_memory_usage = 1;
8960 if (size > BITS_PER_UNIT)
8961 emit_library_call (chkr_check_addr_libfunc,
8962 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
8963 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8964 TYPE_MODE (sizetype),
8965 GEN_INT (MEMORY_USE_RO),
8966 TYPE_MODE (integer_type_node));
8967 in_check_memory_usage = 0;
8970 /* In cases where an aligned union has an unaligned object
8971 as a field, we might be extracting a BLKmode value from
8972 an integer-mode (e.g., SImode) object. Handle this case
8973 by doing the extract into an object as wide as the field
8974 (which we know to be the width of a basic mode), then
8975 storing into memory, and changing the mode to BLKmode.
8976 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8977 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8978 if (mode1 == VOIDmode
8979 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8980 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8981 && (TYPE_ALIGN (type) > alignment
8982 || bitpos % TYPE_ALIGN (type) != 0)))
8984 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8986 if (ext_mode == BLKmode)
8988 /* In this case, BITPOS must start at a byte boundary. */
8989 if (GET_CODE (op0) != MEM
8990 || bitpos % BITS_PER_UNIT != 0)
8991 abort ();
8993 op0 = change_address (op0, VOIDmode,
8994 plus_constant (XEXP (op0, 0),
8995 bitpos / BITS_PER_UNIT));
8997 else
8999 rtx new = assign_stack_temp (ext_mode,
9000 bitsize / BITS_PER_UNIT, 0);
9002 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
9003 unsignedp, NULL_RTX, ext_mode,
9004 ext_mode, alignment,
9005 int_size_in_bytes (TREE_TYPE (tem)));
9007 /* If the result is a record type and BITSIZE is narrower than
9008 the mode of OP0, an integral mode, and this is a big endian
9009 machine, we must put the field into the high-order bits. */
9010 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9011 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9012 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
9013 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9014 size_int (GET_MODE_BITSIZE
9015 (GET_MODE (op0))
9016 - bitsize),
9017 op0, 1);
9019 emit_move_insn (new, op0);
9020 op0 = copy_rtx (new);
9021 PUT_MODE (op0, BLKmode);
9024 else
9025 /* Get a reference to just this component. */
9026 op0 = change_address (op0, mode1,
9027 plus_constant (XEXP (op0, 0),
9028 (bitpos / BITS_PER_UNIT)));
9030 MEM_ALIAS_SET (op0) = get_alias_set (exp);
9032 /* Adjust the alignment in case the bit position is not
9033 a multiple of the alignment of the inner object. */
9034 while (bitpos % alignment != 0)
9035 alignment >>= 1;
9037 if (GET_CODE (XEXP (op0, 0)) == REG)
9038 mark_reg_pointer (XEXP (op0, 0), alignment);
9040 MEM_IN_STRUCT_P (op0) = 1;
9041 MEM_VOLATILE_P (op0) |= volatilep;
9043 *palign = alignment;
9044 return op0;
9047 default:
9048 break;
9052 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9055 /* Return the tree node if ARG corresponds to a string constant, or zero
9056 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9057 in bytes within the string that ARG is accessing. The type of the
9058 offset will be `sizetype'. */
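/* For example, if ARG folds to the PLUS_EXPR `&"hello" + 3', the
   STRING_CST for "hello" is returned and *PTR_OFFSET is set to the
   sizetype constant 3; for a bare `&"hello"' the offset is zero.  */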
9060 tree
9061 string_constant (arg, ptr_offset)
9062 tree arg;
9063 tree *ptr_offset;
9065 STRIP_NOPS (arg);
9067 if (TREE_CODE (arg) == ADDR_EXPR
9068 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9070 *ptr_offset = size_zero_node;
9071 return TREE_OPERAND (arg, 0);
9073 else if (TREE_CODE (arg) == PLUS_EXPR)
9075 tree arg0 = TREE_OPERAND (arg, 0);
9076 tree arg1 = TREE_OPERAND (arg, 1);
9078 STRIP_NOPS (arg0);
9079 STRIP_NOPS (arg1);
9081 if (TREE_CODE (arg0) == ADDR_EXPR
9082 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9084 *ptr_offset = convert (sizetype, arg1);
9085 return TREE_OPERAND (arg0, 0);
9087 else if (TREE_CODE (arg1) == ADDR_EXPR
9088 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9090 *ptr_offset = convert (sizetype, arg0);
9091 return TREE_OPERAND (arg1, 0);
9095 return 0;
9098 /* Expand code for a post- or pre- increment or decrement
9099 and return the RTX for the result.
9100 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9102 static rtx
9103 expand_increment (exp, post, ignore)
9104 register tree exp;
9105 int post, ignore;
9107 register rtx op0, op1;
9108 register rtx temp, value;
9109 register tree incremented = TREE_OPERAND (exp, 0);
9110 optab this_optab = add_optab;
9111 int icode;
9112 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9113 int op0_is_copy = 0;
9114 int single_insn = 0;
9115 /* 1 means we can't store into OP0 directly,
9116 because it is a subreg narrower than a word,
9117 and we don't dare clobber the rest of the word. */
9118 int bad_subreg = 0;
9120 /* Stabilize any component ref that might need to be
9121 evaluated more than once below. */
9122 if (!post
9123 || TREE_CODE (incremented) == BIT_FIELD_REF
9124 || (TREE_CODE (incremented) == COMPONENT_REF
9125 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9126 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9127 incremented = stabilize_reference (incremented);
9128 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9129 ones into save exprs so that they don't accidentally get evaluated
9130 more than once by the code below. */
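/* (E.g. `++ ++x' in C++ arrives here as one PREINCREMENT_EXPR nested
   directly inside another.)  */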
9131 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9132 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9133 incremented = save_expr (incremented);
9135 /* Compute the operands as RTX.
9136 Note whether OP0 is the actual lvalue or a copy of it:
9137 I believe it is a copy iff it is a register or subreg
9138 and insns were generated in computing it. */
9140 temp = get_last_insn ();
9141 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9143 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9144 in place but instead must do sign- or zero-extension during assignment,
9145 so we copy it into a new register and let the code below use it as
9146 a copy.
9148 Note that we can safely modify this SUBREG since it is known not to be
9149 shared (it was made by the expand_expr call above). */
9151 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9153 if (post)
9154 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9155 else
9156 bad_subreg = 1;
9158 else if (GET_CODE (op0) == SUBREG
9159 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9161 /* We cannot increment this SUBREG in place. If we are
9162 post-incrementing, get a copy of the old value. Otherwise,
9163 just mark that we cannot increment in place. */
9164 if (post)
9165 op0 = copy_to_reg (op0);
9166 else
9167 bad_subreg = 1;
9170 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9171 && temp != get_last_insn ());
9172 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9173 EXPAND_MEMORY_USE_BAD);
9175 /* Decide whether incrementing or decrementing. */
9176 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9177 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9178 this_optab = sub_optab;
9180 /* Convert decrement by a constant into a negative increment. */
9181 if (this_optab == sub_optab
9182 && GET_CODE (op1) == CONST_INT)
9184 op1 = GEN_INT (-INTVAL (op1));
9185 this_optab = add_optab;
9188 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9189 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9191 /* For a preincrement, see if we can do this with a single instruction. */
9192 if (!post)
9194 icode = (int) this_optab->handlers[(int) mode].insn_code;
9195 if (icode != (int) CODE_FOR_nothing
9196 /* Make sure that OP0 is valid for operands 0 and 1
9197 of the insn we want to queue. */
9198 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9199 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9200 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9201 single_insn = 1;
9204 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9205 then we cannot just increment OP0. We must therefore contrive to
9206 increment the original value. Then, for postincrement, we can return
9207 OP0 since it is a copy of the old value. For preincrement, expand here
9208 unless we can do it with a single insn.
9210 Likewise if storing directly into OP0 would clobber high bits
9211 we need to preserve (bad_subreg). */
9212 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9214 /* This is the easiest way to increment the value wherever it is.
9215 Problems with multiple evaluation of INCREMENTED are prevented
9216 because either (1) it is a component_ref or preincrement,
9217 in which case it was stabilized above, or (2) it is an array_ref
9218 with constant index in an array in a register, which is
9219 safe to reevaluate. */
9220 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9221 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9222 ? MINUS_EXPR : PLUS_EXPR),
9223 TREE_TYPE (exp),
9224 incremented,
9225 TREE_OPERAND (exp, 1));
9227 while (TREE_CODE (incremented) == NOP_EXPR
9228 || TREE_CODE (incremented) == CONVERT_EXPR)
9230 newexp = convert (TREE_TYPE (incremented), newexp);
9231 incremented = TREE_OPERAND (incremented, 0);
9234 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9235 return post ? op0 : temp;
9238 if (post)
9240 /* We have a true reference to the value in OP0.
9241 If there is an insn to add or subtract in this mode, queue it.
9242 Queueing the increment insn avoids the register shuffling
9243 that often results if we must increment now and first save
9244 the old value for subsequent use. */
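/* E.g. for `a[i++]', when the target has a suitable add insn the addition
   to I is merely queued here; the surrounding expression uses the
   not-yet-incremented value and the add is emitted when emit_queue
   flushes the pending increments.  */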
9246 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9247 op0 = stabilize (op0);
9248 #endif
9250 icode = (int) this_optab->handlers[(int) mode].insn_code;
9251 if (icode != (int) CODE_FOR_nothing
9252 /* Make sure that OP0 is valid for operands 0 and 1
9253 of the insn we want to queue. */
9254 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9255 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9257 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9258 op1 = force_reg (mode, op1);
9260 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9262 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9264 rtx addr = (general_operand (XEXP (op0, 0), mode)
9265 ? force_reg (Pmode, XEXP (op0, 0))
9266 : copy_to_reg (XEXP (op0, 0)));
9267 rtx temp, result;
9269 op0 = change_address (op0, VOIDmode, addr);
9270 temp = force_reg (GET_MODE (op0), op0);
9271 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9272 op1 = force_reg (mode, op1);
9274 /* The increment queue is LIFO, thus we have to `queue'
9275 the instructions in reverse order. */
9276 enqueue_insn (op0, gen_move_insn (op0, temp));
9277 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9278 return result;
9282 /* Preincrement, or we can't increment with one simple insn. */
9283 if (post)
9284 /* Save a copy of the value before inc or dec, to return it later. */
9285 temp = value = copy_to_reg (op0);
9286 else
9287 /* Arrange to return the incremented value. */
9288 /* Copy the rtx because expand_binop will protect from the queue,
9289 and the results of that would be invalid for us to return
9290 if our caller does emit_queue before using our result. */
9291 temp = copy_rtx (value = op0);
9293 /* Increment however we can. */
9294 op1 = expand_binop (mode, this_optab, value, op1,
9295 current_function_check_memory_usage ? NULL_RTX : op0,
9296 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9297 /* Make sure the value is stored into OP0. */
9298 if (op1 != op0)
9299 emit_move_insn (op0, op1);
9301 return temp;
9304 /* At the start of a function, record that we have no previously-pushed
9305 arguments waiting to be popped. */
9307 void
9308 init_pending_stack_adjust ()
9310 pending_stack_adjust = 0;
9313 /* When exiting from a function, if safe, clear out any pending stack adjust
9314 so the adjustment won't get done.
9316 Note, if the current function calls alloca, then it must have a
9317 frame pointer regardless of the value of flag_omit_frame_pointer. */
9319 void
9320 clear_pending_stack_adjust ()
9322 #ifdef EXIT_IGNORE_STACK
9323 if (optimize > 0
9324 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9325 && EXIT_IGNORE_STACK
9326 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9327 && ! flag_inline_functions)
9329 stack_pointer_delta -= pending_stack_adjust,
9330 pending_stack_adjust = 0;
9332 #endif
9335 /* Pop any previously-pushed arguments that have not been popped yet. */
9337 void
9338 do_pending_stack_adjust ()
9340 if (inhibit_defer_pop == 0)
9342 if (pending_stack_adjust != 0)
9343 adjust_stack (GEN_INT (pending_stack_adjust));
9344 pending_stack_adjust = 0;
9348 /* Expand conditional expressions. */
9350 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9351 LABEL is an rtx of code CODE_LABEL, in this function and all the
9352 functions here. */
9354 void
9355 jumpifnot (exp, label)
9356 tree exp;
9357 rtx label;
9359 do_jump (exp, label, NULL_RTX);
9362 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9364 void
9365 jumpif (exp, label)
9366 tree exp;
9367 rtx label;
9369 do_jump (exp, NULL_RTX, label);
9372 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9373 the result is zero, or IF_TRUE_LABEL if the result is one.
9374 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9375 meaning fall through in that case.
9377 do_jump always does any pending stack adjust except when it does not
9378 actually perform a jump. An example where there is no jump
9379 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9381 This function is responsible for optimizing cases such as
9382 &&, || and comparison operators in EXP. */
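/* For instance, for `if (a && b)' the TRUTH_ANDIF_EXPR case below jumps
   straight to the false label when A is zero and then tests B, so no
   boolean value for the whole expression is ever materialized.  */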
9384 void
9385 do_jump (exp, if_false_label, if_true_label)
9386 tree exp;
9387 rtx if_false_label, if_true_label;
9389 register enum tree_code code = TREE_CODE (exp);
9390 /* Some cases need to create a label to jump to
9391 in order to properly fall through.
9392 These cases set DROP_THROUGH_LABEL nonzero. */
9393 rtx drop_through_label = 0;
9394 rtx temp;
9395 int i;
9396 tree type;
9397 enum machine_mode mode;
9399 #ifdef MAX_INTEGER_COMPUTATION_MODE
9400 check_max_integer_computation_mode (exp);
9401 #endif
9403 emit_queue ();
9405 switch (code)
9407 case ERROR_MARK:
9408 break;
9410 case INTEGER_CST:
9411 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9412 if (temp)
9413 emit_jump (temp);
9414 break;
9416 #if 0
9417 /* This is not true with #pragma weak */
9418 case ADDR_EXPR:
9419 /* The address of something can never be zero. */
9420 if (if_true_label)
9421 emit_jump (if_true_label);
9422 break;
9423 #endif
9425 case NOP_EXPR:
9426 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9427 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9428 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9429 goto normal;
9430 case CONVERT_EXPR:
9431 /* If we are narrowing the operand, we have to do the compare in the
9432 narrower mode. */
9433 if ((TYPE_PRECISION (TREE_TYPE (exp))
9434 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9435 goto normal;
9436 case NON_LVALUE_EXPR:
9437 case REFERENCE_EXPR:
9438 case ABS_EXPR:
9439 case NEGATE_EXPR:
9440 case LROTATE_EXPR:
9441 case RROTATE_EXPR:
9442 /* These cannot change zero->non-zero or vice versa. */
9443 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9444 break;
9446 case WITH_RECORD_EXPR:
9447 /* Put the object on the placeholder list, recurse through our first
9448 operand, and pop the list. */
9449 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9450 placeholder_list);
9451 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9452 placeholder_list = TREE_CHAIN (placeholder_list);
9453 break;
9455 #if 0
9456 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9457 a test and can be longer if the test is eliminated. */
9458 case PLUS_EXPR:
9459 /* Reduce to minus. */
9460 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9461 TREE_OPERAND (exp, 0),
9462 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9463 TREE_OPERAND (exp, 1))));
9464 /* Process as MINUS. */
9465 #endif
9467 case MINUS_EXPR:
9468 /* Non-zero iff operands of minus differ. */
9469 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9470 TREE_OPERAND (exp, 0),
9471 TREE_OPERAND (exp, 1)),
9472 NE, NE, if_false_label, if_true_label);
9473 break;
9475 case BIT_AND_EXPR:
9476 /* If we are AND'ing with a small constant, do this comparison in the
9477 smallest type that fits. If the machine doesn't have comparisons
9478 that small, it will be converted back to the wider comparison.
9479 This helps if we are testing the sign bit of a narrower object.
9480 combine can't do this for us because it can't know whether a
9481 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
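/* For example, a test of `x & 0x80' on a 32-bit int needs only the low
   byte, so when the target has a QImode compare insn the test is redone
   as a QImode comparison.  */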
9483 if (! SLOW_BYTE_ACCESS
9484 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9485 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9486 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9487 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9488 && (type = type_for_mode (mode, 1)) != 0
9489 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9490 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9491 != CODE_FOR_nothing))
9493 do_jump (convert (type, exp), if_false_label, if_true_label);
9494 break;
9496 goto normal;
9498 case TRUTH_NOT_EXPR:
9499 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9500 break;
9502 case TRUTH_ANDIF_EXPR:
9503 if (if_false_label == 0)
9504 if_false_label = drop_through_label = gen_label_rtx ();
9505 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9506 start_cleanup_deferral ();
9507 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9508 end_cleanup_deferral ();
9509 break;
9511 case TRUTH_ORIF_EXPR:
9512 if (if_true_label == 0)
9513 if_true_label = drop_through_label = gen_label_rtx ();
9514 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9515 start_cleanup_deferral ();
9516 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9517 end_cleanup_deferral ();
9518 break;
9520 case COMPOUND_EXPR:
9521 push_temp_slots ();
9522 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9523 preserve_temp_slots (NULL_RTX);
9524 free_temp_slots ();
9525 pop_temp_slots ();
9526 emit_queue ();
9527 do_pending_stack_adjust ();
9528 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9529 break;
9531 case COMPONENT_REF:
9532 case BIT_FIELD_REF:
9533 case ARRAY_REF:
9535 HOST_WIDE_INT bitsize, bitpos;
9536 int unsignedp;
9537 enum machine_mode mode;
9538 tree type;
9539 tree offset;
9540 int volatilep = 0;
9541 unsigned int alignment;
9543 /* Get description of this reference. We don't actually care
9544 about the underlying object here. */
9545 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9546 &unsignedp, &volatilep, &alignment);
9548 type = type_for_size (bitsize, unsignedp);
9549 if (! SLOW_BYTE_ACCESS
9550 && type != 0 && bitsize >= 0
9551 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9552 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9553 != CODE_FOR_nothing))
9555 do_jump (convert (type, exp), if_false_label, if_true_label);
9556 break;
9558 goto normal;
9561 case COND_EXPR:
9562 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9563 if (integer_onep (TREE_OPERAND (exp, 1))
9564 && integer_zerop (TREE_OPERAND (exp, 2)))
9565 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9567 else if (integer_zerop (TREE_OPERAND (exp, 1))
9568 && integer_onep (TREE_OPERAND (exp, 2)))
9569 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9571 else
9573 register rtx label1 = gen_label_rtx ();
9574 drop_through_label = gen_label_rtx ();
9576 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9578 start_cleanup_deferral ();
9579 /* Now the THEN-expression. */
9580 do_jump (TREE_OPERAND (exp, 1),
9581 if_false_label ? if_false_label : drop_through_label,
9582 if_true_label ? if_true_label : drop_through_label);
9583 /* In case the do_jump just above never jumps. */
9584 do_pending_stack_adjust ();
9585 emit_label (label1);
9587 /* Now the ELSE-expression. */
9588 do_jump (TREE_OPERAND (exp, 2),
9589 if_false_label ? if_false_label : drop_through_label,
9590 if_true_label ? if_true_label : drop_through_label);
9591 end_cleanup_deferral ();
9593 break;
9595 case EQ_EXPR:
9597 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9599 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9600 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9602 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9603 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9604 do_jump
9605 (fold
9606 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9607 fold (build (EQ_EXPR, TREE_TYPE (exp),
9608 fold (build1 (REALPART_EXPR,
9609 TREE_TYPE (inner_type),
9610 exp0)),
9611 fold (build1 (REALPART_EXPR,
9612 TREE_TYPE (inner_type),
9613 exp1)))),
9614 fold (build (EQ_EXPR, TREE_TYPE (exp),
9615 fold (build1 (IMAGPART_EXPR,
9616 TREE_TYPE (inner_type),
9617 exp0)),
9618 fold (build1 (IMAGPART_EXPR,
9619 TREE_TYPE (inner_type),
9620 exp1)))))),
9621 if_false_label, if_true_label);
9624 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9625 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9627 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9628 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9629 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9630 else
9631 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9632 break;
9635 case NE_EXPR:
9637 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9639 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9640 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9642 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9643 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9644 do_jump
9645 (fold
9646 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9647 fold (build (NE_EXPR, TREE_TYPE (exp),
9648 fold (build1 (REALPART_EXPR,
9649 TREE_TYPE (inner_type),
9650 exp0)),
9651 fold (build1 (REALPART_EXPR,
9652 TREE_TYPE (inner_type),
9653 exp1)))),
9654 fold (build (NE_EXPR, TREE_TYPE (exp),
9655 fold (build1 (IMAGPART_EXPR,
9656 TREE_TYPE (inner_type),
9657 exp0)),
9658 fold (build1 (IMAGPART_EXPR,
9659 TREE_TYPE (inner_type),
9660 exp1)))))),
9661 if_false_label, if_true_label);
9664 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9665 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9667 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9668 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9669 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9670 else
9671 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9672 break;
9675 case LT_EXPR:
9676 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9677 if (GET_MODE_CLASS (mode) == MODE_INT
9678 && ! can_compare_p (LT, mode, ccp_jump))
9679 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9680 else
9681 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9682 break;
9684 case LE_EXPR:
9685 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9686 if (GET_MODE_CLASS (mode) == MODE_INT
9687 && ! can_compare_p (LE, mode, ccp_jump))
9688 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9689 else
9690 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9691 break;
9693 case GT_EXPR:
9694 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9695 if (GET_MODE_CLASS (mode) == MODE_INT
9696 && ! can_compare_p (GT, mode, ccp_jump))
9697 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9698 else
9699 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9700 break;
9702 case GE_EXPR:
9703 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9704 if (GET_MODE_CLASS (mode) == MODE_INT
9705 && ! can_compare_p (GE, mode, ccp_jump))
9706 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9707 else
9708 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9709 break;
9711 case UNORDERED_EXPR:
9712 case ORDERED_EXPR:
9714 enum rtx_code cmp, rcmp;
9715 int do_rev;
9717 if (code == UNORDERED_EXPR)
9718 cmp = UNORDERED, rcmp = ORDERED;
9719 else
9720 cmp = ORDERED, rcmp = UNORDERED;
9721 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9723 do_rev = 0;
9724 if (! can_compare_p (cmp, mode, ccp_jump)
9725 && (can_compare_p (rcmp, mode, ccp_jump)
9726 /* If the target doesn't provide either UNORDERED or ORDERED
9727 comparisons, canonicalize on UNORDERED for the library. */
9728 || rcmp == UNORDERED))
9729 do_rev = 1;
9731 if (! do_rev)
9732 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9733 else
9734 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9736 break;
9739 enum rtx_code rcode1;
9740 enum tree_code tcode2;
9742 case UNLT_EXPR:
9743 rcode1 = UNLT;
9744 tcode2 = LT_EXPR;
9745 goto unordered_bcc;
9746 case UNLE_EXPR:
9747 rcode1 = UNLE;
9748 tcode2 = LE_EXPR;
9749 goto unordered_bcc;
9750 case UNGT_EXPR:
9751 rcode1 = UNGT;
9752 tcode2 = GT_EXPR;
9753 goto unordered_bcc;
9754 case UNGE_EXPR:
9755 rcode1 = UNGE;
9756 tcode2 = GE_EXPR;
9757 goto unordered_bcc;
9758 case UNEQ_EXPR:
9759 rcode1 = UNEQ;
9760 tcode2 = EQ_EXPR;
9761 goto unordered_bcc;
9763 unordered_bcc:
9764 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9765 if (can_compare_p (rcode1, mode, ccp_jump))
9766 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9767 if_true_label);
9768 else
9770 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9771 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9772 tree cmp0, cmp1;
9774 /* If the target doesn't support combined unordered
9775 compares, decompose into UNORDERED + comparison. */
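/* I.e. UNLT (a, b) becomes UNORDERED (a, b) || a < b, and likewise for
   the other UN* codes.  */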
9776 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9777 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9778 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9779 do_jump (exp, if_false_label, if_true_label);
9782 break;
9784 default:
9785 normal:
9786 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9787 #if 0
9788 /* This is not needed any more and causes poor code since it causes
9789 comparisons and tests from non-SI objects to have different code
9790 sequences. */
9791 /* Copy to register to avoid generating bad insns by cse
9792 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9793 if (!cse_not_expected && GET_CODE (temp) == MEM)
9794 temp = copy_to_reg (temp);
9795 #endif
9796 do_pending_stack_adjust ();
9797 /* Do any postincrements in the expression that was tested. */
9798 emit_queue ();
9800 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9802 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9803 if (target)
9804 emit_jump (target);
9806 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9807 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9808 /* Note swapping the labels gives us not-equal. */
9809 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9810 else if (GET_MODE (temp) != VOIDmode)
9811 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9812 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9813 GET_MODE (temp), NULL_RTX, 0,
9814 if_false_label, if_true_label);
9815 else
9816 abort ();
9819 if (drop_through_label)
9821 /* If do_jump produces code that might be jumped around,
9822 do any stack adjusts from that code, before the place
9823 where control merges in. */
9824 do_pending_stack_adjust ();
9825 emit_label (drop_through_label);
9829 /* Given a comparison expression EXP for values too wide to be compared
9830 with one insn, test the comparison and jump to the appropriate label.
9831 The code of EXP is ignored; we always test GT if SWAP is 0,
9832 and LT if SWAP is 1. */
9834 static void
9835 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9836 tree exp;
9837 int swap;
9838 rtx if_false_label, if_true_label;
9840 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9841 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9842 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9843 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9845 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9848 /* Compare OP0 with OP1, word at a time, in mode MODE.
9849 UNSIGNEDP says to do unsigned comparison.
9850 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
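/* E.g. for a DImode comparison on a 32-bit target this emits two
   word-sized compares: the high-order words decide the result unless
   they are equal, in which case the unsigned comparison of the
   low-order words decides it.  */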
9852 void
9853 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9854 enum machine_mode mode;
9855 int unsignedp;
9856 rtx op0, op1;
9857 rtx if_false_label, if_true_label;
9859 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9860 rtx drop_through_label = 0;
9861 int i;
9863 if (! if_true_label || ! if_false_label)
9864 drop_through_label = gen_label_rtx ();
9865 if (! if_true_label)
9866 if_true_label = drop_through_label;
9867 if (! if_false_label)
9868 if_false_label = drop_through_label;
9870 /* Compare a word at a time, high order first. */
9871 for (i = 0; i < nwords; i++)
9873 rtx op0_word, op1_word;
9875 if (WORDS_BIG_ENDIAN)
9877 op0_word = operand_subword_force (op0, i, mode);
9878 op1_word = operand_subword_force (op1, i, mode);
9880 else
9882 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9883 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9886 /* All but the high-order word must be compared as unsigned. */
9887 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9888 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9889 NULL_RTX, if_true_label);
9891 /* Consider lower words only if these are equal. */
9892 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9893 NULL_RTX, 0, NULL_RTX, if_false_label);
9896 if (if_false_label)
9897 emit_jump (if_false_label);
9898 if (drop_through_label)
9899 emit_label (drop_through_label);
9902 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9903 with one insn, test the comparison and jump to the appropriate label. */
9905 static void
9906 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9907 tree exp;
9908 rtx if_false_label, if_true_label;
9910 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9911 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9912 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9913 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9914 int i;
9915 rtx drop_through_label = 0;
9917 if (! if_false_label)
9918 drop_through_label = if_false_label = gen_label_rtx ();
9920 for (i = 0; i < nwords; i++)
9921 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9922 operand_subword_force (op1, i, mode),
9923 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9924 word_mode, NULL_RTX, 0, if_false_label,
9925 NULL_RTX);
9927 if (if_true_label)
9928 emit_jump (if_true_label);
9929 if (drop_through_label)
9930 emit_label (drop_through_label);
9933 /* Jump according to whether OP0 is 0.
9934 We assume that OP0 has an integer mode that is too wide
9935 for the available compare insns. */
9937 void
9938 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9939 rtx op0;
9940 rtx if_false_label, if_true_label;
9942 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9943 rtx part;
9944 int i;
9945 rtx drop_through_label = 0;
9947 /* The fastest way of doing this comparison on almost any machine is to
9948 "or" all the words and compare the result. If all have to be loaded
9949 from memory and this is a very wide item, it's possible this may
9950 be slower, but that's highly unlikely. */
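/* E.g. a DImode value on a 32-bit target is zero exactly when the IOR
   of its two words is zero, so a single IOR plus one compare against
   zero replaces a chain of per-word compares.  */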
9952 part = gen_reg_rtx (word_mode);
9953 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9954 for (i = 1; i < nwords && part != 0; i++)
9955 part = expand_binop (word_mode, ior_optab, part,
9956 operand_subword_force (op0, i, GET_MODE (op0)),
9957 part, 1, OPTAB_WIDEN);
9959 if (part != 0)
9961 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9962 NULL_RTX, 0, if_false_label, if_true_label);
9964 return;
9967 /* If we couldn't do the "or" simply, do this with a series of compares. */
9968 if (! if_false_label)
9969 drop_through_label = if_false_label = gen_label_rtx ();
9971 for (i = 0; i < nwords; i++)
9972 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9973 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9974 if_false_label, NULL_RTX);
9976 if (if_true_label)
9977 emit_jump (if_true_label);
9979 if (drop_through_label)
9980 emit_label (drop_through_label);
9983 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
9984 (including code to compute the values to be compared)
9985 and set (CC0) according to the result.
9986 The decision as to signed or unsigned comparison must be made by the caller.
9988 We force a stack adjustment unless there are currently
9989 things pushed on the stack that aren't yet used.
9991 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9992 compared.
9994 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9995 size of MODE should be used. */
9997 rtx
9998 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9999 register rtx op0, op1;
10000 enum rtx_code code;
10001 int unsignedp;
10002 enum machine_mode mode;
10003 rtx size;
10004 unsigned int align;
10006 rtx tem;
10008 /* If one operand is constant, make it the second one. Only do this
10009 if the other operand is not constant as well. */
10011 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10012 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10014 tem = op0;
10015 op0 = op1;
10016 op1 = tem;
10017 code = swap_condition (code);
10020 if (flag_force_mem)
10022 op0 = force_not_mem (op0);
10023 op1 = force_not_mem (op1);
10026 do_pending_stack_adjust ();
10028 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10029 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10030 return tem;
10032 #if 0
10033 /* There's no need to do this now that combine.c can eliminate lots of
10034 sign extensions. This can be less efficient in certain cases on other
10035 machines. */
10037 /* If this is a signed equality comparison, we can do it as an
10038 unsigned comparison since zero-extension is cheaper than sign
10039 extension and comparisons with zero are done as unsigned. This is
10040 the case even on machines that can do fast sign extension, since
10041 zero-extension is easier to combine with other operations than
10042 sign-extension is. If we are comparing against a constant, we must
10043 convert it to what it would look like unsigned. */
10044 if ((code == EQ || code == NE) && ! unsignedp
10045 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10047 if (GET_CODE (op1) == CONST_INT
10048 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10049 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10050 unsignedp = 1;
10052 #endif
10054 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10056 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10059 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10060 The decision as to signed or unsigned comparison must be made by the caller.
10062 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10063 compared.
10065 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10066 size of MODE should be used. */
10068 void
10069 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
10070 if_false_label, if_true_label)
10071 register rtx op0, op1;
10072 enum rtx_code code;
10073 int unsignedp;
10074 enum machine_mode mode;
10075 rtx size;
10076 unsigned int align;
10077 rtx if_false_label, if_true_label;
10079 rtx tem;
10080 int dummy_true_label = 0;
10082 /* Reverse the comparison if that is safe and we want to jump if it is
10083 false. */
10084 if (! if_true_label && ! FLOAT_MODE_P (mode))
10086 if_true_label = if_false_label;
10087 if_false_label = 0;
10088 code = reverse_condition (code);
10091 /* If one operand is constant, make it the second one. Only do this
10092 if the other operand is not constant as well. */
10094 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10095 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10097 tem = op0;
10098 op0 = op1;
10099 op1 = tem;
10100 code = swap_condition (code);
10103 if (flag_force_mem)
10105 op0 = force_not_mem (op0);
10106 op1 = force_not_mem (op1);
10109 do_pending_stack_adjust ();
10111 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10112 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10114 if (tem == const_true_rtx)
10116 if (if_true_label)
10117 emit_jump (if_true_label);
10119 else
10121 if (if_false_label)
10122 emit_jump (if_false_label);
10124 return;
10127 #if 0
10128 /* There's no need to do this now that combine.c can eliminate lots of
10129 sign extensions. This can be less efficient in certain cases on other
10130 machines. */
10132 /* If this is a signed equality comparison, we can do it as an
10133 unsigned comparison since zero-extension is cheaper than sign
10134 extension and comparisons with zero are done as unsigned. This is
10135 the case even on machines that can do fast sign extension, since
10136 zero-extension is easier to combine with other operations than
10137 sign-extension is. If we are comparing against a constant, we must
10138 convert it to what it would look like unsigned. */
10139 if ((code == EQ || code == NE) && ! unsignedp
10140 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10142 if (GET_CODE (op1) == CONST_INT
10143 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10144 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10145 unsignedp = 1;
10147 #endif
10149 if (! if_true_label)
10151 dummy_true_label = 1;
10152 if_true_label = gen_label_rtx ();
10155 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10156 if_true_label);
10158 if (if_false_label)
10159 emit_jump (if_false_label);
10160 if (dummy_true_label)
10161 emit_label (if_true_label);
10164 /* Generate code for a comparison expression EXP (including code to compute
10165 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10166 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10167 generated code will drop through.
10168 SIGNED_CODE should be the rtx operation for this comparison for
10169 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10171 We force a stack adjustment unless there are currently
10172 things pushed on the stack that aren't yet used. */
10174 static void
10175 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10176 if_true_label)
10177 register tree exp;
10178 enum rtx_code signed_code, unsigned_code;
10179 rtx if_false_label, if_true_label;
10181 unsigned int align0, align1;
10182 register rtx op0, op1;
10183 register tree type;
10184 register enum machine_mode mode;
10185 int unsignedp;
10186 enum rtx_code code;
10188 /* Don't crash if the comparison was erroneous. */
10189 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10190 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10191 return;
10193 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10194 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10195 return;
10197 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10198 mode = TYPE_MODE (type);
10199 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10200 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10201 || (GET_MODE_BITSIZE (mode)
10202 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10203 1)))))))
10205 /* op0 might have been replaced by a promoted constant, in which
10206 case the type of the second argument should be used. */
10207 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10208 mode = TYPE_MODE (type);
10210 unsignedp = TREE_UNSIGNED (type);
10211 code = unsignedp ? unsigned_code : signed_code;
10213 #ifdef HAVE_canonicalize_funcptr_for_compare
10214 /* If function pointers need to be "canonicalized" before they can
10215 be reliably compared, then canonicalize them. */
10216 if (HAVE_canonicalize_funcptr_for_compare
10217 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10218 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10219 == FUNCTION_TYPE))
10221 rtx new_op0 = gen_reg_rtx (mode);
10223 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10224 op0 = new_op0;
10227 if (HAVE_canonicalize_funcptr_for_compare
10228 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10229 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10230 == FUNCTION_TYPE))
10232 rtx new_op1 = gen_reg_rtx (mode);
10234 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10235 op1 = new_op1;
10237 #endif
10239 /* Do any postincrements in the expression that was tested. */
10240 emit_queue ();
10242 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10243 ((mode == BLKmode)
10244 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10245 MIN (align0, align1),
10246 if_false_label, if_true_label);
10249 /* Generate code to calculate EXP using a store-flag instruction
10250 and return an rtx for the result. EXP is either a comparison
10251 or a TRUTH_NOT_EXPR whose operand is a comparison.
10253 If TARGET is nonzero, store the result there if convenient.
10255 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10256 cheap.
10258 Return zero if there is no suitable set-flag instruction
10259 available on this machine.
10261 Once expand_expr has been called on the arguments of the comparison,
10262 we are committed to doing the store flag, since it is not safe to
10263 re-evaluate the expression. We emit the store-flag insn by calling
10264 emit_store_flag, but only expand the arguments if we have a reason
10265 to believe that emit_store_flag will be successful. If we think that
10266 it will, but it isn't, we have to simulate the store-flag with a
10267 set/jump/set sequence. */
10269 static rtx
10270 do_store_flag (exp, target, mode, only_cheap)
10271 tree exp;
10272 rtx target;
10273 enum machine_mode mode;
10274 int only_cheap;
10276 enum rtx_code code;
10277 tree arg0, arg1, type;
10278 tree tem;
10279 enum machine_mode operand_mode;
10280 int invert = 0;
10281 int unsignedp;
10282 rtx op0, op1;
10283 enum insn_code icode;
10284 rtx subtarget = target;
10285 rtx result, label;
10287 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10288 result at the end. We can't simply invert the test since it would
10289 have already been inverted if it were valid. This case occurs for
10290 some floating-point comparisons. */
10292 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10293 invert = 1, exp = TREE_OPERAND (exp, 0);
10295 arg0 = TREE_OPERAND (exp, 0);
10296 arg1 = TREE_OPERAND (exp, 1);
10298 /* Don't crash if the comparison was erroneous. */
10299 if (arg0 == error_mark_node || arg1 == error_mark_node)
10300 return const0_rtx;
10302 type = TREE_TYPE (arg0);
10303 operand_mode = TYPE_MODE (type);
10304 unsignedp = TREE_UNSIGNED (type);
10306 /* We won't bother with BLKmode store-flag operations because it would mean
10307 passing a lot of information to emit_store_flag. */
10308 if (operand_mode == BLKmode)
10309 return 0;
10311 /* We won't bother with store-flag operations involving function pointers
10312 when function pointers must be canonicalized before comparisons. */
10313 #ifdef HAVE_canonicalize_funcptr_for_compare
10314 if (HAVE_canonicalize_funcptr_for_compare
10315 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10316 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10317 == FUNCTION_TYPE))
10318 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10319 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10320 == FUNCTION_TYPE))))
10321 return 0;
10322 #endif
10324 STRIP_NOPS (arg0);
10325 STRIP_NOPS (arg1);
10327 /* Get the rtx comparison code to use. We know that EXP is a comparison
10328 operation of some type. Some comparisons against 1 and -1 can be
10329 converted to comparisons with zero. Do so here so that the tests
10330 below will be aware that we have a comparison with zero. These
10331 tests will not catch constants in the first operand, but constants
10332 are rarely passed as the first operand. */
10334 switch (TREE_CODE (exp))
10336 case EQ_EXPR:
10337 code = EQ;
10338 break;
10339 case NE_EXPR:
10340 code = NE;
10341 break;
10342 case LT_EXPR:
10343 if (integer_onep (arg1))
10344 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10345 else
10346 code = unsignedp ? LTU : LT;
10347 break;
10348 case LE_EXPR:
10349 if (! unsignedp && integer_all_onesp (arg1))
10350 arg1 = integer_zero_node, code = LT;
10351 else
10352 code = unsignedp ? LEU : LE;
10353 break;
10354 case GT_EXPR:
10355 if (! unsignedp && integer_all_onesp (arg1))
10356 arg1 = integer_zero_node, code = GE;
10357 else
10358 code = unsignedp ? GTU : GT;
10359 break;
10360 case GE_EXPR:
10361 if (integer_onep (arg1))
10362 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10363 else
10364 code = unsignedp ? GEU : GE;
10365 break;
10367 case UNORDERED_EXPR:
10368 code = UNORDERED;
10369 break;
10370 case ORDERED_EXPR:
10371 code = ORDERED;
10372 break;
10373 case UNLT_EXPR:
10374 code = UNLT;
10375 break;
10376 case UNLE_EXPR:
10377 code = UNLE;
10378 break;
10379 case UNGT_EXPR:
10380 code = UNGT;
10381 break;
10382 case UNGE_EXPR:
10383 code = UNGE;
10384 break;
10385 case UNEQ_EXPR:
10386 code = UNEQ;
10387 break;
10389 default:
10390 abort ();
10393 /* Put a constant second. */
10394 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10396 tem = arg0; arg0 = arg1; arg1 = tem;
10397 code = swap_condition (code);
10400 /* If this is an equality or inequality test of a single bit, we can
10401 do this by shifting the bit being tested to the low-order bit and
10402 masking the result with the constant 1. If the condition was EQ,
10403 we xor it with 1. This does not require an scc insn and is faster
10404 than an scc insn even if we have it. */
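/* E.g. `(x & 8) != 0' is computed as `(x >> 3) & 1', and the EQ form
   additionally XORs that result with 1.  */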
10406 if ((code == NE || code == EQ)
10407 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10408 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10410 tree inner = TREE_OPERAND (arg0, 0);
10411 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10412 int ops_unsignedp;
10414 /* If INNER is a right shift by a constant and that shift count plus BITNUM does
10415 not overflow, adjust BITNUM and INNER. */
10417 if (TREE_CODE (inner) == RSHIFT_EXPR
10418 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10419 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10420 && bitnum < TYPE_PRECISION (type)
10421 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10422 bitnum - TYPE_PRECISION (type)))
10424 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10425 inner = TREE_OPERAND (inner, 0);
10428 /* If we are going to be able to omit the AND below, we must do our
10429 operations as unsigned. If we must use the AND, we have a choice.
10430 Normally unsigned is faster, but for some machines signed is. */
10431 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10432 #ifdef LOAD_EXTEND_OP
10433 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10434 #else
10435 : 1
10436 #endif
10437 );
10439 if (! get_subtarget (subtarget)
10440 || GET_MODE (subtarget) != operand_mode
10441 || ! safe_from_p (subtarget, inner, 1))
10442 subtarget = 0;
10444 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10446 if (bitnum != 0)
10447 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10448 size_int (bitnum), subtarget, ops_unsignedp);
10450 if (GET_MODE (op0) != mode)
10451 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10453 if ((code == EQ && ! invert) || (code == NE && invert))
10454 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10455 ops_unsignedp, OPTAB_LIB_WIDEN);
10457 /* Put the AND last so it can combine with more things. */
10458 if (bitnum != TYPE_PRECISION (type) - 1)
10459 op0 = expand_and (op0, const1_rtx, subtarget);
10461 return op0;
10464 /* Now see if we are likely to be able to do this. Return if not. */
10465 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10466 return 0;
10468 icode = setcc_gen_code[(int) code];
10469 if (icode == CODE_FOR_nothing
10470 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10472 /* We can only do this if it is one of the special cases that
10473 can be handled without an scc insn. */
10474 if ((code == LT && integer_zerop (arg1))
10475 || (! only_cheap && code == GE && integer_zerop (arg1)))
10476 ;
10477 else if (BRANCH_COST >= 0
10478 && ! only_cheap && (code == NE || code == EQ)
10479 && TREE_CODE (type) != REAL_TYPE
10480 && ((abs_optab->handlers[(int) operand_mode].insn_code
10481 != CODE_FOR_nothing)
10482 || (ffs_optab->handlers[(int) operand_mode].insn_code
10483 != CODE_FOR_nothing)))
10484 ;
10485 else
10486 return 0;
10489 if (! get_subtarget (target)
10490 || GET_MODE (subtarget) != operand_mode
10491 || ! safe_from_p (subtarget, arg1, 1))
10492 subtarget = 0;
10494 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10495 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10497 if (target == 0)
10498 target = gen_reg_rtx (mode);
10500 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10501 because, if emit_store_flag does anything, it will succeed and
10502 OP0 and OP1 will not be used subsequently. */
10504 result = emit_store_flag (target, code,
10505 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10506 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10507 operand_mode, unsignedp, 1);
10509 if (result)
10511 if (invert)
10512 result = expand_binop (mode, xor_optab, result, const1_rtx,
10513 result, 0, OPTAB_LIB_WIDEN);
10514 return result;
10517 /* If this failed, we have to do this with set/compare/jump/set code. */
10518 if (GET_CODE (target) != REG
10519 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10520 target = gen_reg_rtx (GET_MODE (target));
10522 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10523 result = compare_from_rtx (op0, op1, code, unsignedp,
10524 operand_mode, NULL_RTX, 0);
10525 if (GET_CODE (result) == CONST_INT)
10526 return (((result == const0_rtx && ! invert)
10527 || (result != const0_rtx && invert))
10528 ? const0_rtx : const1_rtx);
10530 label = gen_label_rtx ();
10531 if (bcc_gen_fctn[(int) code] == 0)
10532 abort ();
10534 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10535 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10536 emit_label (label);
10538 return target;
10541 /* Generate a tablejump instruction (used for switch statements). */
10543 #ifdef HAVE_tablejump
10545 /* INDEX is the value being switched on, with the lowest value
10546 in the table already subtracted.
10547 MODE is its expected mode (needed if INDEX is constant).
10548 RANGE is the length of the jump table.
10549 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10551 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10552 index value is out of range. */
10554 void
10555 do_tablejump (index, mode, range, table_label, default_label)
10556 rtx index, range, table_label, default_label;
10557 enum machine_mode mode;
10559 register rtx temp, vector;
10561 /* Do an unsigned comparison (in the proper mode) between the index
10562 expression and the value which represents the length of the range.
10563 Since we just finished subtracting the lower bound of the range
10564 from the index expression, this comparison allows us to simultaneously
10565 check that the original index expression value is both greater than
10566 or equal to the minimum value of the range and less than or equal to
10567 the maximum value of the range. */
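/* For instance, for a switch whose cases span 5..12, INDEX arrives here
   with 5 already subtracted, so an original value below 5 wraps around to
   a huge unsigned number and the single GTU test against RANGE rejects it
   along with values above 12.  */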
10569 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10570 0, default_label);
10572 /* If index is in range, it must fit in Pmode.
10573 Convert to Pmode so we can index with it. */
10574 if (mode != Pmode)
10575 index = convert_to_mode (Pmode, index, 1);
10577 /* Don't let a MEM slip thru, because then INDEX that comes
10578 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10579 and break_out_memory_refs will go to work on it and mess it up. */
10580 #ifdef PIC_CASE_VECTOR_ADDRESS
10581 if (flag_pic && GET_CODE (index) != REG)
10582 index = copy_to_mode_reg (Pmode, index);
10583 #endif
10585 /* If flag_force_addr were to affect this address
10586 it could interfere with the tricky assumptions made
10587 about addresses that contain label-refs,
10588 which may be valid only very near the tablejump itself. */
10589 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10590 GET_MODE_SIZE, because this indicates how large insns are. The other
10591 uses should all be Pmode, because they are addresses. This code
10592 could fail if addresses and insns are not the same size. */
10593 index = gen_rtx_PLUS (Pmode,
10594 gen_rtx_MULT (Pmode, index,
10595 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10596 gen_rtx_LABEL_REF (Pmode, table_label));
10597 #ifdef PIC_CASE_VECTOR_ADDRESS
10598 if (flag_pic)
10599 index = PIC_CASE_VECTOR_ADDRESS (index);
10600 else
10601 #endif
10602 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10603 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10604 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10605 RTX_UNCHANGING_P (vector) = 1;
10606 convert_move (temp, vector, 0);
10608 emit_jump_insn (gen_tablejump (temp, table_label));
10610 /* If we are generating PIC code or if the table is PC-relative, the
10611 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10612 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10613 emit_barrier ();
10616 #endif /* HAVE_tablejump */