Import final gcc2 snapshot (990109)
[official-gcc.git] / gcc / config / i386 / i386.c
blob244d28a952120412d982f5b7d59a7e03ae9f26ad
1 /* Subroutines for insn-output.c for Intel X86.
2 Copyright (C) 1988, 92, 94, 95, 96, 97, 1998 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
21 #include "config.h"
22 #include "system.h"
23 #include <setjmp.h>
24 #include "rtl.h"
25 #include "regs.h"
26 #include "hard-reg-set.h"
27 #include "real.h"
28 #include "insn-config.h"
29 #include "conditions.h"
30 #include "insn-flags.h"
31 #include "output.h"
32 #include "insn-attr.h"
33 #include "tree.h"
34 #include "flags.h"
35 #include "except.h"
36 #include "function.h"
38 #ifdef EXTRA_CONSTRAINT
39 /* If EXTRA_CONSTRAINT is defined, then the 'S'
40 constraint in REG_CLASS_FROM_LETTER will no longer work, and various
41 asm statements that need 'S' for class SIREG will break. */
42 error EXTRA_CONSTRAINT conflicts with S constraint letter
43 /* The previous line used to be #error, but some compilers barf
44 even if the conditional was untrue. */
45 #endif
47 #ifndef CHECK_STACK_LIMIT
48 #define CHECK_STACK_LIMIT -1
49 #endif
51 /* Type of an operand for ix86_{binary,unary}_operator_ok */
52 enum reg_mem
54 reg_p,
55 mem_p,
56 imm_p
59 /* Processor costs (relative to an add) */
60 struct processor_costs i386_cost = { /* 386 specific costs */
61 1, /* cost of an add instruction */
62 1, /* cost of a lea instruction */
63 3, /* variable shift costs */
64 2, /* constant shift costs */
65 6, /* cost of starting a multiply */
66 1, /* cost of multiply per each bit set */
67 23 /* cost of a divide/mod */
70 struct processor_costs i486_cost = { /* 486 specific costs */
71 1, /* cost of an add instruction */
72 1, /* cost of a lea instruction */
73 3, /* variable shift costs */
74 2, /* constant shift costs */
75 12, /* cost of starting a multiply */
76 1, /* cost of multiply per each bit set */
77 40 /* cost of a divide/mod */
80 struct processor_costs pentium_cost = {
81 1, /* cost of an add instruction */
82 1, /* cost of a lea instruction */
83 4, /* variable shift costs */
84 1, /* constant shift costs */
85 11, /* cost of starting a multiply */
86 0, /* cost of multiply per each bit set */
87 25 /* cost of a divide/mod */
90 struct processor_costs pentiumpro_cost = {
91 1, /* cost of an add instruction */
92 1, /* cost of a lea instruction */
93 3, /* variable shift costs */
94 1, /* constant shift costs */
95 4, /* cost of starting a multiply */
96 0, /* cost of multiply per each bit set */
97 17 /* cost of a divide/mod */
100 struct processor_costs *ix86_cost = &pentium_cost;
102 #define AT_BP(mode) (gen_rtx_MEM ((mode), frame_pointer_rtx))
104 extern FILE *asm_out_file;
105 extern char *strcat ();
107 static void ix86_epilogue PROTO((int));
108 static void ix86_prologue PROTO((int));
110 char *singlemove_string ();
111 char *output_move_const_single ();
112 char *output_fp_cc0_set ();
114 char *hi_reg_name[] = HI_REGISTER_NAMES;
115 char *qi_reg_name[] = QI_REGISTER_NAMES;
116 char *qi_high_reg_name[] = QI_HIGH_REGISTER_NAMES;
118 /* Array of the smallest class containing reg number REGNO, indexed by
119 REGNO. Used by REGNO_REG_CLASS in i386.h. */
121 enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
123 /* ax, dx, cx, bx */
124 AREG, DREG, CREG, BREG,
125 /* si, di, bp, sp */
126 SIREG, DIREG, INDEX_REGS, GENERAL_REGS,
127 /* FP registers */
128 FP_TOP_REG, FP_SECOND_REG, FLOAT_REGS, FLOAT_REGS,
129 FLOAT_REGS, FLOAT_REGS, FLOAT_REGS, FLOAT_REGS,
130 /* arg pointer */
131 INDEX_REGS
134 /* Test and compare insns in i386.md store the information needed to
135 generate branch and scc insns here. */
137 struct rtx_def *i386_compare_op0 = NULL_RTX;
138 struct rtx_def *i386_compare_op1 = NULL_RTX;
139 struct rtx_def *(*i386_compare_gen)(), *(*i386_compare_gen_eq)();
141 /* which cpu are we scheduling for */
142 enum processor_type ix86_cpu;
144 /* which instruction set architecture to use. */
145 int ix86_arch;
147 /* Strings to hold which cpu and instruction set architecture to use. */
148 char *ix86_cpu_string; /* for -mcpu=<xxx> */
149 char *ix86_arch_string; /* for -march=<xxx> */
151 /* Register allocation order */
152 char *i386_reg_alloc_order;
153 static char regs_allocated[FIRST_PSEUDO_REGISTER];
155 /* # of registers to use to pass arguments. */
156 char *i386_regparm_string;
158 /* i386_regparm_string as a number */
159 int i386_regparm;
161 /* Alignment to use for loops and jumps: */
163 /* Power of two alignment for loops. */
164 char *i386_align_loops_string;
166 /* Power of two alignment for non-loop jumps. */
167 char *i386_align_jumps_string;
169 /* Values 1-5: see jump.c */
170 int i386_branch_cost;
171 char *i386_branch_cost_string;
173 /* Power of two alignment for functions. */
174 int i386_align_funcs;
175 char *i386_align_funcs_string;
177 /* Power of two alignment for loops. */
178 int i386_align_loops;
180 /* Power of two alignment for non-loop jumps. */
181 int i386_align_jumps;
183 /* Sometimes certain combinations of command options do not make
184 sense on a particular target machine. You can define a macro
185 `OVERRIDE_OPTIONS' to take account of this. This macro, if
186 defined, is executed once just after all the command options have
187 been parsed.
189 Don't use this macro to turn on various extra optimizations for
190 `-O'. That is what `OPTIMIZATION_OPTIONS' is for. */
192 void
193 override_options ()
195 int ch, i, j, regno;
196 char *p;
197 int def_align;
199 static struct ptt
201 char *name; /* Canonical processor name. */
202 enum processor_type processor; /* Processor type enum value. */
203 struct processor_costs *cost; /* Processor costs */
204 int target_enable; /* Target flags to enable. */
205 int target_disable; /* Target flags to disable. */
206 } processor_target_table[]
207 = {{PROCESSOR_I386_STRING, PROCESSOR_I386, &i386_cost, 0, 0},
208 {PROCESSOR_I486_STRING, PROCESSOR_I486, &i486_cost, 0, 0},
209 {PROCESSOR_I586_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
210 {PROCESSOR_PENTIUM_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
211 {PROCESSOR_I686_STRING, PROCESSOR_PENTIUMPRO, &pentiumpro_cost,
212 0, 0},
213 {PROCESSOR_PENTIUMPRO_STRING, PROCESSOR_PENTIUMPRO,
214 &pentiumpro_cost, 0, 0}};
216 int ptt_size = sizeof (processor_target_table) / sizeof (struct ptt);
218 #ifdef SUBTARGET_OVERRIDE_OPTIONS
219 SUBTARGET_OVERRIDE_OPTIONS;
220 #endif
222 /* Validate registers in register allocation order. */
223 if (i386_reg_alloc_order)
225 for (i = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
227 switch (ch)
229 case 'a': regno = 0; break;
230 case 'd': regno = 1; break;
231 case 'c': regno = 2; break;
232 case 'b': regno = 3; break;
233 case 'S': regno = 4; break;
234 case 'D': regno = 5; break;
235 case 'B': regno = 6; break;
237 default: fatal ("Register '%c' is unknown", ch);
240 if (regs_allocated[regno])
241 fatal ("Register '%c' already specified in allocation order", ch);
243 regs_allocated[regno] = 1;
247 if (ix86_arch_string == 0)
249 ix86_arch_string = PROCESSOR_PENTIUM_STRING;
250 if (ix86_cpu_string == 0)
251 ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
254 for (i = 0; i < ptt_size; i++)
255 if (! strcmp (ix86_arch_string, processor_target_table[i].name))
257 ix86_arch = processor_target_table[i].processor;
258 if (ix86_cpu_string == 0)
259 ix86_cpu_string = processor_target_table[i].name;
260 break;
263 if (i == ptt_size)
265 error ("bad value (%s) for -march= switch", ix86_arch_string);
266 ix86_arch_string = PROCESSOR_PENTIUM_STRING;
267 ix86_arch = PROCESSOR_DEFAULT;
270 if (ix86_cpu_string == 0)
271 ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
273 for (j = 0; j < ptt_size; j++)
274 if (! strcmp (ix86_cpu_string, processor_target_table[j].name))
276 ix86_cpu = processor_target_table[j].processor;
277 ix86_cost = processor_target_table[j].cost;
278 if (i > j && (int) ix86_arch >= (int) PROCESSOR_PENTIUMPRO)
279 error ("-mcpu=%s does not support -march=%s",
280 ix86_cpu_string, ix86_arch_string);
282 target_flags |= processor_target_table[j].target_enable;
283 target_flags &= ~processor_target_table[j].target_disable;
284 break;
287 if (j == ptt_size)
289 error ("bad value (%s) for -mcpu= switch", ix86_cpu_string);
290 ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
291 ix86_cpu = PROCESSOR_DEFAULT;
294 /* Validate -mregparm= value. */
295 if (i386_regparm_string)
297 i386_regparm = atoi (i386_regparm_string);
298 if (i386_regparm < 0 || i386_regparm > REGPARM_MAX)
299 fatal ("-mregparm=%d is not between 0 and %d",
300 i386_regparm, REGPARM_MAX);
303 /* The 486 suffers more from non-aligned cache line fills, and the
304 larger code size results in a larger cache foot-print and more misses.
305 The 486 has a 16 byte cache line, pentium and pentiumpro have a 32 byte
306 cache line. */
307 def_align = (TARGET_486) ? 4 : 2;
309 /* Validate -malign-loops= value, or provide default. */
310 if (i386_align_loops_string)
312 i386_align_loops = atoi (i386_align_loops_string);
313 if (i386_align_loops < 0 || i386_align_loops > MAX_CODE_ALIGN)
314 fatal ("-malign-loops=%d is not between 0 and %d",
315 i386_align_loops, MAX_CODE_ALIGN);
317 else
318 i386_align_loops = 2;
320 /* Validate -malign-jumps= value, or provide default. */
321 if (i386_align_jumps_string)
323 i386_align_jumps = atoi (i386_align_jumps_string);
324 if (i386_align_jumps < 0 || i386_align_jumps > MAX_CODE_ALIGN)
325 fatal ("-malign-jumps=%d is not between 0 and %d",
326 i386_align_jumps, MAX_CODE_ALIGN);
328 else
329 i386_align_jumps = def_align;
331 /* Validate -malign-functions= value, or provide default. */
332 if (i386_align_funcs_string)
334 i386_align_funcs = atoi (i386_align_funcs_string);
335 if (i386_align_funcs < 0 || i386_align_funcs > MAX_CODE_ALIGN)
336 fatal ("-malign-functions=%d is not between 0 and %d",
337 i386_align_funcs, MAX_CODE_ALIGN);
339 else
340 i386_align_funcs = def_align;
342 /* Validate -mbranch-cost= value, or provide default. */
343 if (i386_branch_cost_string)
345 i386_branch_cost = atoi (i386_branch_cost_string);
346 if (i386_branch_cost < 0 || i386_branch_cost > 5)
347 fatal ("-mbranch-cost=%d is not between 0 and 5",
348 i386_branch_cost);
350 else
351 i386_branch_cost = 1;
353 /* Keep nonleaf frame pointers. */
354 if (TARGET_OMIT_LEAF_FRAME_POINTER)
355 flag_omit_frame_pointer = 1;
358 /* A C statement (sans semicolon) to choose the order in which to
359 allocate hard registers for pseudo-registers local to a basic
360 block.
362 Store the desired register order in the array `reg_alloc_order'.
363 Element 0 should be the register to allocate first; element 1, the
364 next register; and so on.
366 The macro body should not assume anything about the contents of
367 `reg_alloc_order' before execution of the macro.
369 On most machines, it is not necessary to define this macro. */
371 void
372 order_regs_for_local_alloc ()
374 int i, ch, order, regno;
376 /* User specified the register allocation order. */
378 if (i386_reg_alloc_order)
380 for (i = order = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
382 switch (ch)
384 case 'a': regno = 0; break;
385 case 'd': regno = 1; break;
386 case 'c': regno = 2; break;
387 case 'b': regno = 3; break;
388 case 'S': regno = 4; break;
389 case 'D': regno = 5; break;
390 case 'B': regno = 6; break;
393 reg_alloc_order[order++] = regno;
396 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
398 if (! regs_allocated[i])
399 reg_alloc_order[order++] = i;
403 /* If user did not specify a register allocation order, use natural order. */
404 else
406 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
407 reg_alloc_order[i] = i;
/* Adjust default optimization flags for optimization LEVEL (-O LEVEL).  */

void
optimization_options (level)
     int level;
{
#ifdef INSN_SCHEDULING
  /* For -O2 and beyond, turn off -fschedule-insns by default.  It tends
     to make the register-pressure problem on the x86 even worse.  */
  if (level > 1)
    flag_schedule_insns = 0;
#endif
}
423 /* Sign-extend a 16-bit constant */
425 struct rtx_def *
426 i386_sext16_if_const (op)
427 struct rtx_def *op;
429 if (GET_CODE (op) == CONST_INT)
431 HOST_WIDE_INT val = INTVAL (op);
432 HOST_WIDE_INT sext_val;
433 if (val & 0x8000)
434 sext_val = val | ~0xffff;
435 else
436 sext_val = val & 0xffff;
437 if (sext_val != val)
438 op = GEN_INT (sext_val);
440 return op;
443 /* Return nonzero if the rtx is aligned */
445 static int
446 i386_aligned_reg_p (regno)
447 int regno;
449 return (regno == STACK_POINTER_REGNUM
450 || (! flag_omit_frame_pointer && regno == FRAME_POINTER_REGNUM));
454 i386_aligned_p (op)
455 rtx op;
457 /* Registers and immediate operands are always "aligned". */
458 if (GET_CODE (op) != MEM)
459 return 1;
461 /* Don't even try to do any aligned optimizations with volatiles. */
462 if (MEM_VOLATILE_P (op))
463 return 0;
465 /* Get address of memory operand. */
466 op = XEXP (op, 0);
468 switch (GET_CODE (op))
470 case CONST_INT:
471 if (INTVAL (op) & 3)
472 break;
473 return 1;
475 /* Match "reg + offset" */
476 case PLUS:
477 if (GET_CODE (XEXP (op, 1)) != CONST_INT)
478 break;
479 if (INTVAL (XEXP (op, 1)) & 3)
480 break;
482 op = XEXP (op, 0);
483 if (GET_CODE (op) != REG)
484 break;
486 /* ... fall through ... */
488 case REG:
489 return i386_aligned_reg_p (REGNO (op));
492 return 0;
495 /* Return nonzero if INSN looks like it won't compute useful cc bits
496 as a side effect. This information is only a hint. */
499 i386_cc_probably_useless_p (insn)
500 rtx insn;
502 return ! next_cc0_user (insn);
505 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
506 attribute for DECL. The attributes in ATTRIBUTES have previously been
507 assigned to DECL. */
510 i386_valid_decl_attribute_p (decl, attributes, identifier, args)
511 tree decl;
512 tree attributes;
513 tree identifier;
514 tree args;
516 return 0;
519 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
520 attribute for TYPE. The attributes in ATTRIBUTES have previously been
521 assigned to TYPE. */
524 i386_valid_type_attribute_p (type, attributes, identifier, args)
525 tree type;
526 tree attributes;
527 tree identifier;
528 tree args;
530 if (TREE_CODE (type) != FUNCTION_TYPE
531 && TREE_CODE (type) != FIELD_DECL
532 && TREE_CODE (type) != TYPE_DECL)
533 return 0;
535 /* Stdcall attribute says callee is responsible for popping arguments
536 if they are not variable. */
537 if (is_attribute_p ("stdcall", identifier))
538 return (args == NULL_TREE);
540 /* Cdecl attribute says the callee is a normal C declaration. */
541 if (is_attribute_p ("cdecl", identifier))
542 return (args == NULL_TREE);
544 /* Regparm attribute specifies how many integer arguments are to be
545 passed in registers. */
546 if (is_attribute_p ("regparm", identifier))
548 tree cst;
550 if (! args || TREE_CODE (args) != TREE_LIST
551 || TREE_CHAIN (args) != NULL_TREE
552 || TREE_VALUE (args) == NULL_TREE)
553 return 0;
555 cst = TREE_VALUE (args);
556 if (TREE_CODE (cst) != INTEGER_CST)
557 return 0;
559 if (TREE_INT_CST_HIGH (cst) != 0
560 || TREE_INT_CST_LOW (cst) < 0
561 || TREE_INT_CST_LOW (cst) > REGPARM_MAX)
562 return 0;
564 return 1;
567 return 0;
570 /* Return 0 if the attributes for two types are incompatible, 1 if they
571 are compatible, and 2 if they are nearly compatible (which causes a
572 warning to be generated). */
575 i386_comp_type_attributes (type1, type2)
576 tree type1;
577 tree type2;
579 return 1;
583 /* Value is the number of bytes of arguments automatically
584 popped when returning from a subroutine call.
585 FUNDECL is the declaration node of the function (as a tree),
586 FUNTYPE is the data type of the function (as a tree),
587 or for a library call it is an identifier node for the subroutine name.
588 SIZE is the number of bytes of arguments passed on the stack.
590 On the 80386, the RTD insn may be used to pop them if the number
591 of args is fixed, but if the number is variable then the caller
592 must pop them all. RTD can't be used for library calls now
593 because the library is compiled with the Unix compiler.
594 Use of RTD is a selectable option, since it is incompatible with
595 standard Unix calling sequences. If the option is not selected,
596 the caller must always pop the args.
598 The attribute stdcall is equivalent to RTD on a per module basis. */
601 i386_return_pops_args (fundecl, funtype, size)
602 tree fundecl;
603 tree funtype;
604 int size;
606 int rtd = TARGET_RTD && (!fundecl || TREE_CODE (fundecl) != IDENTIFIER_NODE);
608 /* Cdecl functions override -mrtd, and never pop the stack. */
609 if (! lookup_attribute ("cdecl", TYPE_ATTRIBUTES (funtype))) {
611 /* Stdcall functions will pop the stack if not variable args. */
612 if (lookup_attribute ("stdcall", TYPE_ATTRIBUTES (funtype)))
613 rtd = 1;
615 if (rtd
616 && (TYPE_ARG_TYPES (funtype) == NULL_TREE
617 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (funtype)))
618 == void_type_node)))
619 return size;
622 /* Lose any fake structure return argument. */
623 if (aggregate_value_p (TREE_TYPE (funtype)))
624 return GET_MODE_SIZE (Pmode);
626 return 0;
630 /* Argument support functions. */
632 /* Initialize a variable CUM of type CUMULATIVE_ARGS
633 for a call to a function whose data type is FNTYPE.
634 For a library call, FNTYPE is 0. */
636 void
637 init_cumulative_args (cum, fntype, libname)
638 CUMULATIVE_ARGS *cum; /* Argument info to initialize */
639 tree fntype; /* tree ptr for function decl */
640 rtx libname; /* SYMBOL_REF of library name or 0 */
642 static CUMULATIVE_ARGS zero_cum;
643 tree param, next_param;
645 if (TARGET_DEBUG_ARG)
647 fprintf (stderr, "\ninit_cumulative_args (");
648 if (fntype)
649 fprintf (stderr, "fntype code = %s, ret code = %s",
650 tree_code_name[(int) TREE_CODE (fntype)],
651 tree_code_name[(int) TREE_CODE (TREE_TYPE (fntype))]);
652 else
653 fprintf (stderr, "no fntype");
655 if (libname)
656 fprintf (stderr, ", libname = %s", XSTR (libname, 0));
659 *cum = zero_cum;
661 /* Set up the number of registers to use for passing arguments. */
662 cum->nregs = i386_regparm;
663 if (fntype)
665 tree attr = lookup_attribute ("regparm", TYPE_ATTRIBUTES (fntype));
667 if (attr)
668 cum->nregs = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr)));
671 /* Determine if this function has variable arguments. This is
672 indicated by the last argument being 'void_type_mode' if there
673 are no variable arguments. If there are variable arguments, then
674 we won't pass anything in registers */
676 if (cum->nregs)
678 for (param = (fntype) ? TYPE_ARG_TYPES (fntype) : 0;
679 param != 0; param = next_param)
681 next_param = TREE_CHAIN (param);
682 if (next_param == 0 && TREE_VALUE (param) != void_type_node)
683 cum->nregs = 0;
687 if (TARGET_DEBUG_ARG)
688 fprintf (stderr, ", nregs=%d )\n", cum->nregs);
690 return;
693 /* Update the data in CUM to advance over an argument
694 of mode MODE and data type TYPE.
695 (TYPE is null for libcalls where that information may not be available.) */
697 void
698 function_arg_advance (cum, mode, type, named)
699 CUMULATIVE_ARGS *cum; /* current arg information */
700 enum machine_mode mode; /* current arg mode */
701 tree type; /* type of the argument or 0 if lib support */
702 int named; /* whether or not the argument was named */
704 int bytes
705 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
706 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
708 if (TARGET_DEBUG_ARG)
709 fprintf (stderr,
710 "function_adv (sz=%d, wds=%2d, nregs=%d, mode=%s, named=%d)\n\n",
711 words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);
713 cum->words += words;
714 cum->nregs -= words;
715 cum->regno += words;
717 if (cum->nregs <= 0)
719 cum->nregs = 0;
720 cum->regno = 0;
723 return;
726 /* Define where to put the arguments to a function.
727 Value is zero to push the argument on the stack,
728 or a hard register in which to store the argument.
730 MODE is the argument's machine mode.
731 TYPE is the data type of the argument (as a tree).
732 This is null for libcalls where that information may
733 not be available.
734 CUM is a variable of type CUMULATIVE_ARGS which gives info about
735 the preceding args and about the function being called.
736 NAMED is nonzero if this argument is a named parameter
737 (otherwise it is an extra parameter matching an ellipsis). */
739 struct rtx_def *
740 function_arg (cum, mode, type, named)
741 CUMULATIVE_ARGS *cum; /* current arg information */
742 enum machine_mode mode; /* current arg mode */
743 tree type; /* type of the argument or 0 if lib support */
744 int named; /* != 0 for normal args, == 0 for ... args */
746 rtx ret = NULL_RTX;
747 int bytes
748 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
749 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
751 switch (mode)
753 /* For now, pass fp/complex values on the stack. */
754 default:
755 break;
757 case BLKmode:
758 case DImode:
759 case SImode:
760 case HImode:
761 case QImode:
762 if (words <= cum->nregs)
763 ret = gen_rtx_REG (mode, cum->regno);
764 break;
767 if (TARGET_DEBUG_ARG)
769 fprintf (stderr,
770 "function_arg (size=%d, wds=%2d, nregs=%d, mode=%4s, named=%d",
771 words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);
773 if (ret)
774 fprintf (stderr, ", reg=%%e%s", reg_names[ REGNO(ret) ]);
775 else
776 fprintf (stderr, ", stack");
778 fprintf (stderr, " )\n");
781 return ret;
784 /* For an arg passed partly in registers and partly in memory,
785 this is the number of registers used.
786 For args passed entirely in registers or entirely in memory, zero. */
789 function_arg_partial_nregs (cum, mode, type, named)
790 CUMULATIVE_ARGS *cum; /* current arg information */
791 enum machine_mode mode; /* current arg mode */
792 tree type; /* type of the argument or 0 if lib support */
793 int named; /* != 0 for normal args, == 0 for ... args */
795 return 0;
/* Output an insn whose source is a 386 integer register.  SRC is the
   rtx for the register, and TEMPLATE is the op-code template.  SRC may
   be either SImode or DImode.

   The template will be output with operands[0] as SRC, and operands[1]
   as a pointer to the top of the 386 stack.  So a call from floatsidf2
   would look like this:

      output_op_from_reg (operands[1], AS1 (fild%z0,%1));

   where %z0 corresponds to the caller's operands[1], and is used to
   emit the proper size suffix.

   ??? Extend this to handle HImode - a 387 can load and store HImode
   values directly.  */

void
output_op_from_reg (src, template)
     rtx src;
     char *template;
{
  rtx xops[4];
  /* Byte size of SRC; > UNITS_PER_WORD means a multi-word value.  */
  int size = GET_MODE_SIZE (GET_MODE (src));

  xops[0] = src;
  xops[1] = AT_SP (Pmode);      /* Memory operand at the stack top.  */
  xops[2] = GEN_INT (size);     /* Byte count to deallocate afterwards.  */
  xops[3] = stack_pointer_rtx;

  /* Push the higher words first so the value ends up little-endian
     in stack memory.  */
  if (size > UNITS_PER_WORD)
    {
      rtx high;

      if (size > 2 * UNITS_PER_WORD)
        {
          /* Third word (12-byte values).  */
          high = gen_rtx_REG (SImode, REGNO (src) + 2);
          output_asm_insn (AS1 (push%L0,%0), &high);
        }

      /* Second word.  */
      high = gen_rtx_REG (SImode, REGNO (src) + 1);
      output_asm_insn (AS1 (push%L0,%0), &high);
    }

  /* Push the low word, run the caller's template on the stacked copy,
     then pop the temporary by adding SIZE back to the stack pointer.  */
  output_asm_insn (AS1 (push%L0,%0), &src);
  output_asm_insn (template, xops);
  output_asm_insn (AS2 (add%L3,%2,%3), xops);
}
/* Output an insn to pop an value from the 387 top-of-stack to 386
   register DEST.  The 387 register stack is popped if DIES is true.  If
   the mode of DEST is an integer mode, a `fist' integer store is done,
   otherwise a `fst' float store is done.  If SCRATCH_MEM is given it is
   used as the staging memory; otherwise a temporary is carved out of
   the stack.  */

void
output_to_reg (dest, dies, scratch_mem)
     rtx dest;
     int dies;
     rtx scratch_mem;
{
  rtx xops[4];
  int size = GET_MODE_SIZE (GET_MODE (dest));

  /* xops[0] is the memory the 387 stores through: either the caller's
     scratch or a fresh hole at the stack top.  */
  if (! scratch_mem)
    xops[0] = AT_SP (Pmode);
  else
    xops[0] = scratch_mem;

  xops[1] = stack_pointer_rtx;
  xops[2] = GEN_INT (size);
  xops[3] = dest;

  /* Reserve SIZE bytes on the stack for the store.  */
  if (! scratch_mem)
    output_asm_insn (AS2 (sub%L1,%2,%1), xops);

  if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_INT)
    {
      /* Integer destination: fist/fistp.  */
      if (dies)
        output_asm_insn (AS1 (fistp%z3,%y0), xops);
      else
        output_asm_insn (AS1 (fist%z3,%y0), xops);
    }
  else if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_FLOAT)
    {
      if (dies)
        output_asm_insn (AS1 (fstp%z3,%y0), xops);
      else
        {
          if (GET_MODE (dest) == XFmode)
            {
              /* There is no non-popping XFmode store; pop and reload
                 to keep the 387 stack unchanged.  */
              output_asm_insn (AS1 (fstp%z3,%y0), xops);
              output_asm_insn (AS1 (fld%z3,%y0), xops);
            }
          else
            output_asm_insn (AS1 (fst%z3,%y0), xops);
        }
    }
  else
    abort ();

  /* Move the first word into DEST.  */
  if (! scratch_mem)
    output_asm_insn (AS1 (pop%L0,%0), &dest);
  else
    output_asm_insn (AS2 (mov%L0,%0,%3), xops);

  /* Move the remaining word(s) into the consecutive registers.  */
  if (size > UNITS_PER_WORD)
    {
      dest = gen_rtx_REG (SImode, REGNO (dest) + 1);
      if (! scratch_mem)
        output_asm_insn (AS1 (pop%L0,%0), &dest);
      else
        {
          xops[0] = adj_offsettable_operand (xops[0], 4);
          xops[3] = dest;
          output_asm_insn (AS2 (mov%L0,%0,%3), xops);
        }

      if (size > 2 * UNITS_PER_WORD)
        {
          dest = gen_rtx_REG (SImode, REGNO (dest) + 1);
          if (! scratch_mem)
            output_asm_insn (AS1 (pop%L0,%0), &dest);
          else
            {
              xops[0] = adj_offsettable_operand (xops[0], 4);
              output_asm_insn (AS2 (mov%L0,%0,%3), xops);
            }
        }
    }
}
931 char *
932 singlemove_string (operands)
933 rtx *operands;
935 rtx x;
936 if (GET_CODE (operands[0]) == MEM
937 && GET_CODE (x = XEXP (operands[0], 0)) == PRE_DEC)
939 if (XEXP (x, 0) != stack_pointer_rtx)
940 abort ();
941 return "push%L1 %1";
943 else if (GET_CODE (operands[1]) == CONST_DOUBLE)
944 return output_move_const_single (operands);
945 else if (GET_CODE (operands[0]) == REG || GET_CODE (operands[1]) == REG)
946 return AS2 (mov%L0,%1,%0);
947 else if (CONSTANT_P (operands[1]))
948 return AS2 (mov%L0,%1,%0);
949 else
951 output_asm_insn ("push%L1 %1", operands);
952 return "pop%L0 %0";
956 /* Return a REG that occurs in ADDR with coefficient 1.
957 ADDR can be effectively incremented by incrementing REG. */
959 static rtx
960 find_addr_reg (addr)
961 rtx addr;
963 while (GET_CODE (addr) == PLUS)
965 if (GET_CODE (XEXP (addr, 0)) == REG)
966 addr = XEXP (addr, 0);
967 else if (GET_CODE (XEXP (addr, 1)) == REG)
968 addr = XEXP (addr, 1);
969 else if (CONSTANT_P (XEXP (addr, 0)))
970 addr = XEXP (addr, 1);
971 else if (CONSTANT_P (XEXP (addr, 1)))
972 addr = XEXP (addr, 0);
973 else
974 abort ();
977 if (GET_CODE (addr) == REG)
978 return addr;
979 abort ();
982 /* Output an insn to add the constant N to the register X. */
984 static void
985 asm_add (n, x)
986 int n;
987 rtx x;
989 rtx xops[2];
990 xops[0] = x;
992 if (n == -1)
993 output_asm_insn (AS1 (dec%L0,%0), xops);
994 else if (n == 1)
995 output_asm_insn (AS1 (inc%L0,%0), xops);
996 else if (n < 0 || n == 128)
998 xops[1] = GEN_INT (-n);
999 output_asm_insn (AS2 (sub%L0,%1,%0), xops);
1001 else if (n > 0)
1003 xops[1] = GEN_INT (n);
1004 output_asm_insn (AS2 (add%L0,%1,%0), xops);
1008 /* Output assembler code to perform a doubleword move insn
1009 with operands OPERANDS. */
1011 char *
1012 output_move_double (operands)
1013 rtx *operands;
1015 enum {REGOP, OFFSOP, MEMOP, PUSHOP, POPOP, CNSTOP, RNDOP } optype0, optype1;
1016 rtx latehalf[2];
1017 rtx middlehalf[2];
1018 rtx xops[2];
1019 rtx addreg0 = 0, addreg1 = 0;
1020 int dest_overlapped_low = 0;
1021 int size = GET_MODE_SIZE (GET_MODE (operands[0]));
1023 middlehalf[0] = 0;
1024 middlehalf[1] = 0;
1026 /* First classify both operands. */
1028 if (REG_P (operands[0]))
1029 optype0 = REGOP;
1030 else if (offsettable_memref_p (operands[0]))
1031 optype0 = OFFSOP;
1032 else if (GET_CODE (XEXP (operands[0], 0)) == POST_INC)
1033 optype0 = POPOP;
1034 else if (GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
1035 optype0 = PUSHOP;
1036 else if (GET_CODE (operands[0]) == MEM)
1037 optype0 = MEMOP;
1038 else
1039 optype0 = RNDOP;
1041 if (REG_P (operands[1]))
1042 optype1 = REGOP;
1043 else if (CONSTANT_P (operands[1]))
1044 optype1 = CNSTOP;
1045 else if (offsettable_memref_p (operands[1]))
1046 optype1 = OFFSOP;
1047 else if (GET_CODE (XEXP (operands[1], 0)) == POST_INC)
1048 optype1 = POPOP;
1049 else if (GET_CODE (XEXP (operands[1], 0)) == PRE_DEC)
1050 optype1 = PUSHOP;
1051 else if (GET_CODE (operands[1]) == MEM)
1052 optype1 = MEMOP;
1053 else
1054 optype1 = RNDOP;
1056 /* Check for the cases that the operand constraints are not
1057 supposed to allow to happen. Abort if we get one,
1058 because generating code for these cases is painful. */
1060 if (optype0 == RNDOP || optype1 == RNDOP)
1061 abort ();
1063 /* If one operand is decrementing and one is incrementing
1064 decrement the former register explicitly
1065 and change that operand into ordinary indexing. */
1067 if (optype0 == PUSHOP && optype1 == POPOP)
1069 /* ??? Can this ever happen on i386? */
1070 operands[0] = XEXP (XEXP (operands[0], 0), 0);
1071 asm_add (-size, operands[0]);
1072 if (GET_MODE (operands[1]) == XFmode)
1073 operands[0] = gen_rtx_MEM (XFmode, operands[0]);
1074 else if (GET_MODE (operands[0]) == DFmode)
1075 operands[0] = gen_rtx_MEM (DFmode, operands[0]);
1076 else
1077 operands[0] = gen_rtx_MEM (DImode, operands[0]);
1078 optype0 = OFFSOP;
1081 if (optype0 == POPOP && optype1 == PUSHOP)
1083 /* ??? Can this ever happen on i386? */
1084 operands[1] = XEXP (XEXP (operands[1], 0), 0);
1085 asm_add (-size, operands[1]);
1086 if (GET_MODE (operands[1]) == XFmode)
1087 operands[1] = gen_rtx_MEM (XFmode, operands[1]);
1088 else if (GET_MODE (operands[1]) == DFmode)
1089 operands[1] = gen_rtx_MEM (DFmode, operands[1]);
1090 else
1091 operands[1] = gen_rtx_MEM (DImode, operands[1]);
1092 optype1 = OFFSOP;
1095 /* If an operand is an unoffsettable memory ref, find a register
1096 we can increment temporarily to make it refer to the second word. */
1098 if (optype0 == MEMOP)
1099 addreg0 = find_addr_reg (XEXP (operands[0], 0));
1101 if (optype1 == MEMOP)
1102 addreg1 = find_addr_reg (XEXP (operands[1], 0));
1104 /* Ok, we can do one word at a time.
1105 Normally we do the low-numbered word first,
1106 but if either operand is autodecrementing then we
1107 do the high-numbered word first.
1109 In either case, set up in LATEHALF the operands to use
1110 for the high-numbered word and in some cases alter the
1111 operands in OPERANDS to be suitable for the low-numbered word. */
1113 if (size == 12)
1115 if (optype0 == REGOP)
1117 middlehalf[0] = gen_rtx_REG (SImode, REGNO (operands[0]) + 1);
1118 latehalf[0] = gen_rtx_REG (SImode, REGNO (operands[0]) + 2);
1120 else if (optype0 == OFFSOP)
1122 middlehalf[0] = adj_offsettable_operand (operands[0], 4);
1123 latehalf[0] = adj_offsettable_operand (operands[0], 8);
1125 else
1127 middlehalf[0] = operands[0];
1128 latehalf[0] = operands[0];
1131 if (optype1 == REGOP)
1133 middlehalf[1] = gen_rtx_REG (SImode, REGNO (operands[1]) + 1);
1134 latehalf[1] = gen_rtx_REG (SImode, REGNO (operands[1]) + 2);
1136 else if (optype1 == OFFSOP)
1138 middlehalf[1] = adj_offsettable_operand (operands[1], 4);
1139 latehalf[1] = adj_offsettable_operand (operands[1], 8);
1141 else if (optype1 == CNSTOP)
1143 if (GET_CODE (operands[1]) == CONST_DOUBLE)
1145 REAL_VALUE_TYPE r; long l[3];
1147 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
1148 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, l);
1149 operands[1] = GEN_INT (l[0]);
1150 middlehalf[1] = GEN_INT (l[1]);
1151 latehalf[1] = GEN_INT (l[2]);
1153 else if (CONSTANT_P (operands[1]))
1154 /* No non-CONST_DOUBLE constant should ever appear here. */
1155 abort ();
1157 else
1159 middlehalf[1] = operands[1];
1160 latehalf[1] = operands[1];
1164 else
1166 /* Size is not 12. */
1168 if (optype0 == REGOP)
1169 latehalf[0] = gen_rtx_REG (SImode, REGNO (operands[0]) + 1);
1170 else if (optype0 == OFFSOP)
1171 latehalf[0] = adj_offsettable_operand (operands[0], 4);
1172 else
1173 latehalf[0] = operands[0];
1175 if (optype1 == REGOP)
1176 latehalf[1] = gen_rtx_REG (SImode, REGNO (operands[1]) + 1);
1177 else if (optype1 == OFFSOP)
1178 latehalf[1] = adj_offsettable_operand (operands[1], 4);
1179 else if (optype1 == CNSTOP)
1180 split_double (operands[1], &operands[1], &latehalf[1]);
1181 else
1182 latehalf[1] = operands[1];
1185 /* If insn is effectively movd N (sp),-(sp) then we will do the
1186 high word first. We should use the adjusted operand 1
1187 (which is N+4 (sp) or N+8 (sp))
1188 for the low word and middle word as well,
1189 to compensate for the first decrement of sp. */
1190 if (optype0 == PUSHOP
1191 && REGNO (XEXP (XEXP (operands[0], 0), 0)) == STACK_POINTER_REGNUM
1192 && reg_overlap_mentioned_p (stack_pointer_rtx, operands[1]))
1193 middlehalf[1] = operands[1] = latehalf[1];
1195 /* For (set (reg:DI N) (mem:DI ... (reg:SI N) ...)),
1196 if the upper part of reg N does not appear in the MEM, arrange to
1197 emit the move late-half first. Otherwise, compute the MEM address
1198 into the upper part of N and use that as a pointer to the memory
1199 operand. */
1200 if (optype0 == REGOP
1201 && (optype1 == OFFSOP || optype1 == MEMOP))
1203 if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
1204 && reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
1206 /* If both halves of dest are used in the src memory address,
1207 compute the address into latehalf of dest. */
1208 compadr:
1209 xops[0] = latehalf[0];
1210 xops[1] = XEXP (operands[1], 0);
1211 output_asm_insn (AS2 (lea%L0,%a1,%0), xops);
1212 if (GET_MODE (operands[1]) == XFmode)
1214 operands[1] = gen_rtx_MEM (XFmode, latehalf[0]);
1215 middlehalf[1] = adj_offsettable_operand (operands[1], size-8);
1216 latehalf[1] = adj_offsettable_operand (operands[1], size-4);
1218 else
1220 operands[1] = gen_rtx_MEM (DImode, latehalf[0]);
1221 latehalf[1] = adj_offsettable_operand (operands[1], size-4);
1225 else if (size == 12
1226 && reg_mentioned_p (middlehalf[0], XEXP (operands[1], 0)))
1228 /* Check for two regs used by both source and dest. */
1229 if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
1230 || reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
1231 goto compadr;
1233 /* JRV says this can't happen: */
1234 if (addreg0 || addreg1)
1235 abort ();
1237 /* Only the middle reg conflicts; simply put it last. */
1238 output_asm_insn (singlemove_string (operands), operands);
1239 output_asm_insn (singlemove_string (latehalf), latehalf);
1240 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1241 return "";
1244 else if (reg_mentioned_p (operands[0], XEXP (operands[1], 0)))
1245 /* If the low half of dest is mentioned in the source memory
1246 address, then arrange to emit the move late-half first. */
1247 dest_overlapped_low = 1;
1250 /* If one or both operands autodecrementing,
1251 do the two words, high-numbered first. */
1253 /* Likewise, the first move would clobber the source of the second one,
1254 do them in the other order. This happens only for registers;
1255 such overlap can't happen in memory unless the user explicitly
1256 sets it up, and that is an undefined circumstance. */
1258 #if 0
1259 if (optype0 == PUSHOP || optype1 == PUSHOP
1260 || (optype0 == REGOP && optype1 == REGOP
1261 && REGNO (operands[0]) == REGNO (latehalf[1]))
1262 || dest_overlapped_low)
1263 #endif
1265 if (optype0 == PUSHOP || optype1 == PUSHOP
1266 || (optype0 == REGOP && optype1 == REGOP
1267 && ((middlehalf[1] && REGNO (operands[0]) == REGNO (middlehalf[1]))
1268 || REGNO (operands[0]) == REGNO (latehalf[1])))
1269 || dest_overlapped_low)
1271 /* Make any unoffsettable addresses point at high-numbered word. */
1272 if (addreg0)
1273 asm_add (size-4, addreg0);
1274 if (addreg1)
1275 asm_add (size-4, addreg1);
1277 /* Do that word. */
1278 output_asm_insn (singlemove_string (latehalf), latehalf);
1280 /* Undo the adds we just did. */
1281 if (addreg0)
1282 asm_add (-4, addreg0);
1283 if (addreg1)
1284 asm_add (-4, addreg1);
1286 if (size == 12)
1288 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1289 if (addreg0)
1290 asm_add (-4, addreg0);
1291 if (addreg1)
1292 asm_add (-4, addreg1);
1295 /* Do low-numbered word. */
1296 return singlemove_string (operands);
1299 /* Normal case: do the two words, low-numbered first. */
1301 output_asm_insn (singlemove_string (operands), operands);
1303 /* Do the middle one of the three words for long double */
1304 if (size == 12)
1306 if (addreg0)
1307 asm_add (4, addreg0);
1308 if (addreg1)
1309 asm_add (4, addreg1);
1311 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1314 /* Make any unoffsettable addresses point at high-numbered word. */
1315 if (addreg0)
1316 asm_add (4, addreg0);
1317 if (addreg1)
1318 asm_add (4, addreg1);
1320 /* Do that word. */
1321 output_asm_insn (singlemove_string (latehalf), latehalf);
1323 /* Undo the adds we just did. */
1324 if (addreg0)
1325 asm_add (4-size, addreg0);
1326 if (addreg1)
1327 asm_add (4-size, addreg1);
1329 return "";
1332 #define MAX_TMPS 2 /* max temporary registers used */
1334 /* Output the appropriate code to move push memory on the stack */
/* Push LENGTH bytes of the memory source operands[1] onto the stack,
   highest-addressed word first.  Registers found in
   operands[tmp_start .. n_operands-1] that do not overlap the source
   may be used as temporaries (at most MAX_TMPS); if none is usable the
   words are pushed directly from memory.  LENGTH must be a multiple of
   4 and the source must be offsettable.  Emits assembly via
   output_asm_insn and returns "".  */
1336 char *
1337 output_move_pushmem (operands, insn, length, tmp_start, n_operands)
1338 rtx operands[];
1339 rtx insn;
1340 int length;
1341 int tmp_start;
1342 int n_operands;
1344 struct
1346 char *load;
1347 char *push;
1348 rtx xops[2];
1349 } tmp_info[MAX_TMPS];
1351 rtx src = operands[1];
1352 int max_tmps = 0;
1353 int offset = 0;
/* Non-zero when SRC is addressed relative to the stack pointer: each
   push then moves sp, so later source offsets must be compensated.  */
1354 int stack_p = reg_overlap_mentioned_p (stack_pointer_rtx, src);
1355 int stack_offset = 0;
1356 int i, num_tmps;
1357 rtx xops[1];
1359 if (! offsettable_memref_p (src))
1360 fatal_insn ("Source is not offsettable", insn);
1362 if ((length & 3) != 0)
1363 fatal_insn ("Pushing non-word aligned size", insn);
1365 /* Figure out which temporary registers we have available */
1366 for (i = tmp_start; i < n_operands; i++)
1368 if (GET_CODE (operands[i]) == REG)
/* A register that overlaps SRC would be clobbered before the source
   word it holds is read, so it cannot serve as a temporary.  */
1370 if (reg_overlap_mentioned_p (operands[i], src))
1371 continue;
1373 tmp_info[ max_tmps++ ].xops[1] = operands[i];
1374 if (max_tmps == MAX_TMPS)
1375 break;
/* No temporaries: push each word straight from memory.  */
1379 if (max_tmps == 0)
1380 for (offset = length - 4; offset >= 0; offset -= 4)
1382 xops[0] = adj_offsettable_operand (src, offset + stack_offset);
1383 output_asm_insn (AS1(push%L0,%0), xops);
1384 if (stack_p)
1385 stack_offset += 4;
/* Otherwise batch up to MAX_TMPS loads, then emit the matching
   pushes, repeating until all words are pushed.  */
1388 else
1389 for (offset = length - 4; offset >= 0; )
1391 for (num_tmps = 0; num_tmps < max_tmps && offset >= 0; num_tmps++)
1393 tmp_info[num_tmps].load = AS2(mov%L0,%0,%1);
1394 tmp_info[num_tmps].push = AS1(push%L0,%1);
1395 tmp_info[num_tmps].xops[0]
1396 = adj_offsettable_operand (src, offset + stack_offset);
1397 offset -= 4;
1400 for (i = 0; i < num_tmps; i++)
1401 output_asm_insn (tmp_info[i].load, tmp_info[i].xops);
1403 for (i = 0; i < num_tmps; i++)
1404 output_asm_insn (tmp_info[i].push, tmp_info[i].xops);
1406 if (stack_p)
1407 stack_offset += 4*num_tmps;
1410 return "";
1413 /* Output the appropriate code to move data between two memory locations */
/* Emit a memory-to-memory move of LENGTH bytes from operands[1] to
   operands[0], using registers from operands[tmp_start ..
   n_operands-1] as scratch.  Moves 4-byte chunks while possible, then
   a 2-byte chunk, then a final byte through a byte-addressable (QI)
   register.  Both operands must be offsettable.  Returns "".  */
1415 char *
1416 output_move_memory (operands, insn, length, tmp_start, n_operands)
1417 rtx operands[];
1418 rtx insn;
1419 int length;
1420 int tmp_start;
1421 int n_operands;
1423 struct
1425 char *load;
1426 char *store;
1427 rtx xops[3];
1428 } tmp_info[MAX_TMPS];
1430 rtx dest = operands[0];
1431 rtx src = operands[1];
1432 rtx qi_tmp = NULL_RTX;
1433 int max_tmps = 0;
1434 int offset = 0;
1435 int i, num_tmps;
1436 rtx xops[3];
/* A store through an auto-modified stack pointer is really a push
   sequence; delegate to output_move_pushmem.
   NOTE(review): x86 push pre-decrements sp, so the PRE_INC test here
   looks suspicious -- confirm against the insn patterns that call
   this routine.  */
1438 if (GET_CODE (dest) == MEM
1439 && GET_CODE (XEXP (dest, 0)) == PRE_INC
1440 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx)
1441 return output_move_pushmem (operands, insn, length, tmp_start, n_operands);
1443 if (! offsettable_memref_p (src))
1444 fatal_insn ("Source is not offsettable", insn);
1446 if (! offsettable_memref_p (dest))
1447 fatal_insn ("Destination is not offsettable", insn);
1449 /* Figure out which temporary registers we have available */
1450 for (i = tmp_start; i < n_operands; i++)
1452 if (GET_CODE (operands[i]) == REG)
/* Remember a QI-capable register in case a trailing odd byte must
   be moved.  */
1454 if ((length & 1) != 0 && qi_tmp == 0 && QI_REG_P (operands[i]))
1455 qi_tmp = operands[i];
1457 if (reg_overlap_mentioned_p (operands[i], dest))
1458 fatal_insn ("Temporary register overlaps the destination", insn);
1460 if (reg_overlap_mentioned_p (operands[i], src))
1461 fatal_insn ("Temporary register overlaps the source", insn);
1463 tmp_info[max_tmps++].xops[2] = operands[i];
1464 if (max_tmps == MAX_TMPS)
1465 break;
1469 if (max_tmps == 0)
1470 fatal_insn ("No scratch registers were found to do memory->memory moves",
1471 insn);
1473 if ((length & 1) != 0)
1475 if (qi_tmp == 0)
1476 fatal_insn ("No byte register found when moving odd # of bytes.",
1477 insn);
/* Main loop: fill up to MAX_TMPS temporaries with loads, then emit
   the matching stores, until at most one byte remains.  */
1480 while (length > 1)
1482 for (num_tmps = 0; num_tmps < max_tmps; num_tmps++)
1484 if (length >= 4)
1486 tmp_info[num_tmps].load = AS2(mov%L0,%1,%2);
1487 tmp_info[num_tmps].store = AS2(mov%L0,%2,%0);
1488 tmp_info[num_tmps].xops[0]
1489 = adj_offsettable_operand (dest, offset);
1490 tmp_info[num_tmps].xops[1]
1491 = adj_offsettable_operand (src, offset);
1493 offset += 4;
1494 length -= 4;
1497 else if (length >= 2)
1499 tmp_info[num_tmps].load = AS2(mov%W0,%1,%2);
1500 tmp_info[num_tmps].store = AS2(mov%W0,%2,%0);
1501 tmp_info[num_tmps].xops[0]
1502 = adj_offsettable_operand (dest, offset);
1503 tmp_info[num_tmps].xops[1]
1504 = adj_offsettable_operand (src, offset);
1506 offset += 2;
1507 length -= 2;
1509 else
1510 break;
1513 for (i = 0; i < num_tmps; i++)
1514 output_asm_insn (tmp_info[i].load, tmp_info[i].xops);
1516 for (i = 0; i < num_tmps; i++)
1517 output_asm_insn (tmp_info[i].store, tmp_info[i].xops);
/* Move the final odd byte through the QI temporary.  */
1520 if (length == 1)
1522 xops[0] = adj_offsettable_operand (dest, offset);
1523 xops[1] = adj_offsettable_operand (src, offset);
1524 xops[2] = qi_tmp;
1525 output_asm_insn (AS2(mov%B0,%1,%2), xops);
1526 output_asm_insn (AS2(mov%B0,%2,%0), xops);
1529 return "";
/* Classify the floating constant X for the 80387: return 1 if it is
   +0.0 (loadable with fldz), 2 if it is 1.0 (fld1), 0 otherwise.
   Returns 0 for every constant when the host cannot do target float
   arithmetic (the #if below).  */
1533 standard_80387_constant_p (x)
1534 rtx x;
1536 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1537 REAL_VALUE_TYPE d;
1538 jmp_buf handler;
1539 int is0, is1;
/* Guard the REAL_VALUE comparisons with the float trap handler; if a
   floating exception occurs, treat X as non-standard.  The handler
   must be installed after setjmp and removed before returning.  */
1541 if (setjmp (handler))
1542 return 0;
1544 set_float_handler (handler);
1545 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
/* -0.0 must not be treated as fldz: fldz produces +0.0.  */
1546 is0 = REAL_VALUES_EQUAL (d, dconst0) && !REAL_VALUE_MINUS_ZERO (d);
1547 is1 = REAL_VALUES_EQUAL (d, dconst1);
1548 set_float_handler (NULL_PTR);
1550 if (is0)
1551 return 1;
1553 if (is1)
1554 return 2;
1556 /* Note that on the 80387, other constants, such as pi,
1557 are much slower to load as standard constants
1558 than to load from doubles in memory! */
1559 #endif
1561 return 0;
1564 char *
1565 output_move_const_single (operands)
1566 rtx *operands;
1568 if (FP_REG_P (operands[0]))
1570 int conval = standard_80387_constant_p (operands[1]);
1572 if (conval == 1)
1573 return "fldz";
1575 if (conval == 2)
1576 return "fld1";
1579 if (GET_CODE (operands[1]) == CONST_DOUBLE)
1581 REAL_VALUE_TYPE r; long l;
1583 if (GET_MODE (operands[1]) == XFmode)
1584 abort ();
1586 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
1587 REAL_VALUE_TO_TARGET_SINGLE (r, l);
1588 operands[1] = GEN_INT (l);
1591 return singlemove_string (operands);
1594 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
1595 reference and a constant. */
1598 symbolic_operand (op, mode)
1599 register rtx op;
1600 enum machine_mode mode;
1602 switch (GET_CODE (op))
1604 case SYMBOL_REF:
1605 case LABEL_REF:
1606 return 1;
1608 case CONST:
1609 op = XEXP (op, 0);
1610 return ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
1611 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
1612 && GET_CODE (XEXP (op, 1)) == CONST_INT);
1614 default:
1615 return 0;
1619 /* Test for a valid operand for a call instruction.
1620 Don't allow the arg pointer register or virtual regs
1621 since they may change into reg + const, which the patterns
1622 can't handle yet. */
1625 call_insn_operand (op, mode)
1626 rtx op;
1627 enum machine_mode mode;
1629 if (GET_CODE (op) == MEM
1630 && ((CONSTANT_ADDRESS_P (XEXP (op, 0))
1631 /* This makes a difference for PIC. */
1632 && general_operand (XEXP (op, 0), Pmode))
1633 || (GET_CODE (XEXP (op, 0)) == REG
1634 && XEXP (op, 0) != arg_pointer_rtx
1635 && ! (REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
1636 && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
1637 return 1;
1639 return 0;
1642 /* Like call_insn_operand but allow (mem (symbol_ref ...))
1643 even if pic. */
1646 expander_call_insn_operand (op, mode)
1647 rtx op;
1648 enum machine_mode mode;
1650 if (GET_CODE (op) == MEM
1651 && (CONSTANT_ADDRESS_P (XEXP (op, 0))
1652 || (GET_CODE (XEXP (op, 0)) == REG
1653 && XEXP (op, 0) != arg_pointer_rtx
1654 && ! (REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
1655 && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
1656 return 1;
1658 return 0;
1661 /* Return 1 if OP is a comparison operator that can use the condition code
1662 generated by an arithmetic operation. */
1665 arithmetic_comparison_operator (op, mode)
1666 register rtx op;
1667 enum machine_mode mode;
1669 enum rtx_code code;
1671 if (mode != VOIDmode && mode != GET_MODE (op))
1672 return 0;
1674 code = GET_CODE (op);
1675 if (GET_RTX_CLASS (code) != '<')
1676 return 0;
1678 return (code != GT && code != LE);
1681 /* Returns 1 if OP contains a symbol reference */
1684 symbolic_reference_mentioned_p (op)
1685 rtx op;
1687 register char *fmt;
1688 register int i;
1690 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1691 return 1;
1693 fmt = GET_RTX_FORMAT (GET_CODE (op));
1694 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1696 if (fmt[i] == 'E')
1698 register int j;
1700 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1701 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1702 return 1;
1705 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1706 return 1;
1709 return 0;
1712 /* Attempt to expand a binary operator. Make the expansion closer to the
1713 actual machine, then just general_operand, which will allow 3 separate
1714 memory references (one output, two input) in a single insn. Return
1715 whether the insn fails, or succeeds. */
/* Expand a binary operator CODE in MODE on OPERANDS, forcing memory
   operands into registers where the machine constraints require it
   (at most one memory operand per insn).  Returns TRUE when a valid
   operand combination results, FALSE when the caller must give up.
   NOTE(review): locals `insn' and `i' appear unused in this body --
   candidates for removal.  */
1718 ix86_expand_binary_operator (code, mode, operands)
1719 enum rtx_code code;
1720 enum machine_mode mode;
1721 rtx operands[];
1723 rtx insn;
1724 int i;
1725 int modified;
1727 /* Recognize <var1> = <value> <op> <var1> for commutative operators */
1728 if (GET_RTX_CLASS (code) == 'c'
1729 && (rtx_equal_p (operands[0], operands[2])
1730 || immediate_operand (operands[1], mode)))
1732 rtx temp = operands[1];
1733 operands[1] = operands[2];
1734 operands[2] = temp;
1737 /* If optimizing, copy to regs to improve CSE */
1738 if (TARGET_PSEUDO && optimize
1739 && ((reload_in_progress | reload_completed) == 0)
/* Only before reload: force_reg creates pseudos.  */
1741 if (GET_CODE (operands[1]) == MEM
1742 && ! rtx_equal_p (operands[0], operands[1]))
1743 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1745 if (GET_CODE (operands[2]) == MEM)
1746 operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);
/* const - x cannot be expressed directly; load the constant into a
   register first.  */
1748 if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
1750 rtx temp = gen_reg_rtx (GET_MODE (operands[0]));
1752 emit_move_insn (temp, operands[1]);
1753 operands[1] = temp;
1754 return TRUE;
1758 if (!ix86_binary_operator_ok (code, mode, operands))
1760 /* If not optimizing, try to make a valid insn (optimize code
1761 previously did this above to improve chances of CSE) */
1763 if ((! TARGET_PSEUDO || !optimize)
1764 && ((reload_in_progress | reload_completed) == 0)
1765 && (GET_CODE (operands[1]) == MEM || GET_CODE (operands[2]) == MEM))
1767 modified = FALSE;
1768 if (GET_CODE (operands[1]) == MEM
1769 && ! rtx_equal_p (operands[0], operands[1]))
1771 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1772 modified = TRUE;
1775 if (GET_CODE (operands[2]) == MEM)
1777 operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);
1778 modified = TRUE;
1781 if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
1783 rtx temp = gen_reg_rtx (GET_MODE (operands[0]));
1785 emit_move_insn (temp, operands[1]);
1786 operands[1] = temp;
1787 return TRUE;
/* Re-check only when something changed; an unmodified failing
   combination is hopeless.  */
1790 if (modified && ! ix86_binary_operator_ok (code, mode, operands))
1791 return FALSE;
1793 else
1794 return FALSE;
1797 return TRUE;
1800 /* Return TRUE or FALSE depending on whether the binary operator meets the
1801 appropriate constraints. */
1804 ix86_binary_operator_ok (code, mode, operands)
1805 enum rtx_code code;
1806 enum machine_mode mode;
1807 rtx operands[3];
1809 return (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)
1810 && (GET_CODE (operands[1]) != CONST_INT || GET_RTX_CLASS (code) == 'c');
1813 /* Attempt to expand a unary operator. Make the expansion closer to the
1814 actual machine, then just general_operand, which will allow 2 separate
1815 memory references (one output, one input) in a single insn. Return
1816 whether the insn fails, or succeeds. */
1819 ix86_expand_unary_operator (code, mode, operands)
1820 enum rtx_code code;
1821 enum machine_mode mode;
1822 rtx operands[];
1824 rtx insn;
1826 /* If optimizing, copy to regs to improve CSE */
1827 if (TARGET_PSEUDO
1828 && optimize
1829 && ((reload_in_progress | reload_completed) == 0)
1830 && GET_CODE (operands[1]) == MEM)
1831 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1833 if (! ix86_unary_operator_ok (code, mode, operands))
1835 if ((! TARGET_PSEUDO || optimize == 0)
1836 && ((reload_in_progress | reload_completed) == 0)
1837 && GET_CODE (operands[1]) == MEM)
1839 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1840 if (! ix86_unary_operator_ok (code, mode, operands))
1841 return FALSE;
1843 else
1844 return FALSE;
1847 return TRUE;
1850 /* Return TRUE or FALSE depending on whether the unary operator meets the
1851 appropriate constraints. */
1854 ix86_unary_operator_ok (code, mode, operands)
1855 enum rtx_code code;
1856 enum machine_mode mode;
1857 rtx operands[2];
1859 return TRUE;
/* Deep-branch-prediction PIC support: the shared label whose stub
   loads the caller's return address (emitted by
   asm_output_function_prefix, called from load_pic_register).  */
1862 static rtx pic_label_rtx;
/* Assembler name of that label, "LPR<n>".  */
1863 static char pic_label_name [256];
/* Counter making each LPR label name unique.  */
1864 static int pic_label_no = 0;
1866 /* This function generates code for -fpic that loads %ebx with
1867 the return address of the caller and then returns. */
/* Emit, ahead of the function body, the LPR<n> stub used by
   TARGET_DEEP_BRANCH_PREDICTION PIC code: it copies the return
   address (at the top of the stack) into the PIC register and
   returns.  Creates pic_label_rtx/pic_label_name on first use; NAME
   is unused here.  */
1869 void
1870 asm_output_function_prefix (file, name)
1871 FILE *file;
1872 char *name;
1874 rtx xops[2];
1875 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1876 || current_function_uses_const_pool);
1877 xops[0] = pic_offset_table_rtx;
1878 xops[1] = stack_pointer_rtx;
1880 /* Deep branch prediction favors having a return for every call. */
1881 if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
1883 tree prologue_node;
/* Allocate the label lazily; load_pic_register uses the same one.  */
1885 if (pic_label_rtx == 0)
1887 pic_label_rtx = gen_label_rtx ();
1888 sprintf (pic_label_name, "LPR%d", pic_label_no++);
1889 LABEL_NAME (pic_label_rtx) = pic_label_name;
/* Declare the stub as a function so assemblers/debuggers treat it
   like one.  */
1892 prologue_node = make_node (FUNCTION_DECL);
1893 DECL_RESULT (prologue_node) = 0;
1894 #ifdef ASM_DECLARE_FUNCTION_NAME
1895 ASM_DECLARE_FUNCTION_NAME (file, pic_label_name, prologue_node);
1896 #endif
/* %1 is the stack pointer: fetch the return address left by the
   call into the PIC register, then return to the caller.  */
1897 output_asm_insn ("movl (%1),%0", xops);
1898 output_asm_insn ("ret", xops);
1902 /* Generate the assembly code for function entry.
1903 FILE is an stdio stream to output the code to.
1904 SIZE is an int: how many units of temporary storage to allocate. */
1906 void
1907 function_prologue (file, size)
1908 FILE *file;
1909 int size;
1911 if (TARGET_SCHEDULE_PROLOGUE)
1913 pic_label_rtx = 0;
1914 return;
1917 ix86_prologue (0);
1920 /* Expand the prologue into a bunch of separate insns. */
1922 void
1923 ix86_expand_prologue ()
1925 if (! TARGET_SCHEDULE_PROLOGUE)
1926 return;
1928 ix86_prologue (1);
/* Load the PIC register (%ebx) with the GOT address, either as RTL
   (DO_RTL non-zero) or as emitted assembly.  Two strategies: with
   TARGET_DEEP_BRANCH_PREDICTION, call the LPR stub emitted by
   asm_output_function_prefix (keeps call/return pairs balanced for
   the branch predictor); otherwise use the classic call-next-insn /
   pop trick.  */
1931 void
1932 load_pic_register (do_rtl)
1933 int do_rtl;
1935 rtx xops[4];
1937 if (TARGET_DEEP_BRANCH_PREDICTION)
1939 xops[0] = pic_offset_table_rtx;
/* Share the label with asm_output_function_prefix, creating it if
   the prefix has not been emitted yet.  */
1940 if (pic_label_rtx == 0)
1942 pic_label_rtx = gen_label_rtx ();
1943 sprintf (pic_label_name, "LPR%d", pic_label_no++);
1944 LABEL_NAME (pic_label_rtx) = pic_label_name;
1947 xops[1] = gen_rtx_MEM (QImode,
1948 gen_rtx_SYMBOL_REF (Pmode,
1949 LABEL_NAME (pic_label_rtx)));
1951 if (do_rtl)
1953 emit_insn (gen_prologue_get_pc (xops[0], xops[1]));
1954 emit_insn (gen_prologue_set_got
1955 (xops[0],
1956 gen_rtx_SYMBOL_REF (Pmode, "$_GLOBAL_OFFSET_TABLE_"),
1957 xops[1]));
1959 else
/* Text path: call the stub, then add the GOT displacement.  */
1961 output_asm_insn (AS1 (call,%P1), xops);
1962 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_,%0", xops);
1963 pic_label_rtx = 0;
1967 else
1969 xops[0] = pic_offset_table_rtx;
1970 xops[1] = gen_label_rtx ();
1972 if (do_rtl)
1974 /* We can't put a raw CODE_LABEL into the RTL, and we can't emit
1975 a new CODE_LABEL after reload, so we need a single pattern to
1976 emit the 3 necessary instructions. */
1977 emit_insn (gen_prologue_get_pc_and_set_got (xops[0]));
1979 else
/* call L; L: pop %ebx; add $_GLOBAL_OFFSET_TABLE_+[.-L], %ebx  */
1981 output_asm_insn (AS1 (call,%P1), xops);
1982 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
1983 CODE_LABEL_NUMBER (xops[1]));
1984 output_asm_insn (AS1 (pop%L0,%0), xops);
1985 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_+[.-%P1],%0", xops);
1989 /* When -fpic, we must emit a scheduling barrier, so that the instruction
1990 that restores %ebx (which is PIC_OFFSET_TABLE_REGNUM), does not get
1991 moved before any instruction which implicitly uses the got. */
1993 if (do_rtl)
1994 emit_insn (gen_blockage ());
/* Emit the function prologue, as RTL insns when DO_RTL is non-zero or
   as assembly text otherwise: save/set the frame pointer if needed,
   allocate the frame (via _alloca when stack probing is required),
   push the call-saved registers in use, and load the PIC register if
   the function needs it.  The text path also emits dwarf2 CFI
   directives when INCOMING_RETURN_ADDR_RTX is defined.  */
1997 static void
1998 ix86_prologue (do_rtl)
1999 int do_rtl;
2001 register int regno;
2002 int limit;
2003 rtx xops[4];
2004 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
2005 || current_function_uses_const_pool);
2006 long tsize = get_frame_size ();
2007 rtx insn;
/* CFA tracking for dwarf2: cfa_offset is the CFA's distance from the
   current frame reference, cfa_store_offset follows the stores.  */
2008 int cfa_offset = INCOMING_FRAME_SP_OFFSET, cfa_store_offset = cfa_offset;
2010 xops[0] = stack_pointer_rtx;
2011 xops[1] = frame_pointer_rtx;
2012 xops[2] = GEN_INT (tsize);
/* push %ebp; movl %esp,%ebp  */
2014 if (frame_pointer_needed)
2016 if (do_rtl)
2018 insn = emit_insn (gen_rtx_SET
2019 (VOIDmode,
2020 gen_rtx_MEM (SImode,
2021 gen_rtx_PRE_DEC (SImode,
2022 stack_pointer_rtx)),
2023 frame_pointer_rtx));
2025 RTX_FRAME_RELATED_P (insn) = 1;
2026 insn = emit_move_insn (xops[1], xops[0]);
2027 RTX_FRAME_RELATED_P (insn) = 1;
2030 else
2032 output_asm_insn ("push%L1 %1", xops);
2033 #ifdef INCOMING_RETURN_ADDR_RTX
2034 if (dwarf2out_do_frame ())
2036 char *l = dwarf2out_cfi_label ();
2038 cfa_store_offset += 4;
2039 cfa_offset = cfa_store_offset;
2040 dwarf2out_def_cfa (l, STACK_POINTER_REGNUM, cfa_offset);
2041 dwarf2out_reg_save (l, FRAME_POINTER_REGNUM, - cfa_store_offset);
2043 #endif
2045 output_asm_insn (AS2 (mov%L0,%0,%1), xops);
2046 #ifdef INCOMING_RETURN_ADDR_RTX
2047 if (dwarf2out_do_frame ())
2048 dwarf2out_def_cfa ("", FRAME_POINTER_REGNUM, cfa_offset);
2049 #endif
/* Allocate the local frame: nothing for size 0, a plain sub when no
   probing is needed, otherwise a call to _alloca which probes as it
   allocates.  */
2053 if (tsize == 0)
2055 else if (! TARGET_STACK_PROBE || tsize < CHECK_STACK_LIMIT)
2057 if (do_rtl)
2059 insn = emit_insn (gen_prologue_set_stack_ptr (xops[2]));
2060 RTX_FRAME_RELATED_P (insn) = 1;
2062 else
2064 output_asm_insn (AS2 (sub%L0,%2,%0), xops);
2065 #ifdef INCOMING_RETURN_ADDR_RTX
2066 if (dwarf2out_do_frame ())
2068 cfa_store_offset += tsize;
2069 if (! frame_pointer_needed)
2071 cfa_offset = cfa_store_offset;
2072 dwarf2out_def_cfa ("", STACK_POINTER_REGNUM, cfa_offset);
2075 #endif
2078 else
/* Frame size in %eax, then call _alloca to probe and allocate.  */
2080 xops[3] = gen_rtx_REG (SImode, 0);
2081 if (do_rtl)
2082 emit_move_insn (xops[3], xops[2]);
2083 else
2084 output_asm_insn (AS2 (mov%L0,%2,%3), xops);
2086 xops[3] = gen_rtx_MEM (FUNCTION_MODE,
2087 gen_rtx_SYMBOL_REF (Pmode, "_alloca"));
2089 if (do_rtl)
2090 emit_call_insn (gen_rtx_CALL (VOIDmode, xops[3], const0_rtx));
2091 else
2092 output_asm_insn (AS1 (call,%P3), xops);
2095 /* Note If use enter it is NOT reversed args.
2096 This one is not reversed from intel!!
2097 I think enter is slower. Also sdb doesn't like it.
2098 But if you want it the code is:
2100 xops[3] = const0_rtx;
2101 output_asm_insn ("enter %2,%3", xops);
/* Push each call-saved register that is live, plus the PIC register
   when the function uses it.  */
2105 limit = (frame_pointer_needed ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
2106 for (regno = limit - 1; regno >= 0; regno--)
2107 if ((regs_ever_live[regno] && ! call_used_regs[regno])
2108 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2110 xops[0] = gen_rtx_REG (SImode, regno);
2111 if (do_rtl)
2113 insn
2114 = emit_insn (gen_rtx_SET
2115 (VOIDmode,
2116 gen_rtx_MEM (SImode,
2117 gen_rtx_PRE_DEC (SImode,
2118 stack_pointer_rtx)),
2119 xops[0]));
2121 RTX_FRAME_RELATED_P (insn) = 1;
2123 else
2125 output_asm_insn ("push%L0 %0", xops);
2126 #ifdef INCOMING_RETURN_ADDR_RTX
2127 if (dwarf2out_do_frame ())
2129 char *l = dwarf2out_cfi_label ();
2131 cfa_store_offset += 4;
2132 if (! frame_pointer_needed)
2134 cfa_offset = cfa_store_offset;
2135 dwarf2out_def_cfa (l, STACK_POINTER_REGNUM, cfa_offset);
2138 dwarf2out_reg_save (l, regno, - cfa_store_offset);
2140 #endif
2144 if (pic_reg_used)
2145 load_pic_register (do_rtl);
2147 /* If we are profiling, make sure no instructions are scheduled before
2148 the call to mcount. However, if -fpic, the above call will have
2149 done that. */
2150 if ((profile_flag || profile_block_flag)
2151 && ! pic_reg_used && do_rtl)
2152 emit_insn (gen_blockage ());
2155 /* Return 1 if it is appropriate to emit `ret' instructions in the
2156 body of a function. Do this only if the epilogue is simple, needing a
2157 couple of insns. Prior to reloading, we can't tell how many registers
2158 must be saved, so return 0 then. Return 0 if there is no frame
2159 marker to de-allocate.
2161 If NON_SAVING_SETJMP is defined and true, then it is not possible
2162 for the epilogue to be simple, so return 0. This is a special case
2163 since NON_SAVING_SETJMP will not cause regs_ever_live to change
2164 until final, but jump_optimize may need to know sooner if a
2165 `return' is OK. */
2168 ix86_can_use_return_insn_p ()
2170 int regno;
2171 int nregs = 0;
2172 int reglimit = (frame_pointer_needed
2173 ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
2174 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
2175 || current_function_uses_const_pool);
2177 #ifdef NON_SAVING_SETJMP
2178 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
2179 return 0;
2180 #endif
2182 if (! reload_completed)
2183 return 0;
2185 for (regno = reglimit - 1; regno >= 0; regno--)
2186 if ((regs_ever_live[regno] && ! call_used_regs[regno])
2187 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2188 nregs++;
2190 return nregs == 0 || ! frame_pointer_needed;
2193 /* This function generates the assembly code for function exit.
2194 FILE is an stdio stream to output the code to.
2195 SIZE is an int: how many units of temporary storage to deallocate. */
/* FUNCTION_EPILOGUE hook.  Intentionally empty on this target: the
   epilogue is produced elsewhere (see ix86_expand_epilogue and
   ix86_epilogue).  FILE and SIZE are unused.  */
void
function_epilogue (file, size)
     FILE *file;
     int size;
{
}
2205 /* Restore function stack, frame, and registers. */
/* Expand the function epilogue as RTL insns (restore registers,
   release the frame, return); thin wrapper around ix86_epilogue with
   do_rtl == 1.  */
void
ix86_expand_epilogue ()
{
  ix86_epilogue (1);
}
/* Emit the function epilogue, as RTL (DO_RTL non-zero) or assembly
   text: restore the saved call-saved registers, tear down the frame
   (leave, or mov+pop, or an add to sp), and return — popping caller
   arguments when current_function_pops_args asks for it.  */
2213 static void
2214 ix86_epilogue (do_rtl)
2215 int do_rtl;
2217 register int regno;
2218 register int nregs, limit;
2219 int offset;
2220 rtx xops[3];
2221 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
2222 || current_function_uses_const_pool);
2223 long tsize = get_frame_size ();
2225 /* Compute the number of registers to pop */
2227 limit = (frame_pointer_needed ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
2229 nregs = 0;
2231 for (regno = limit - 1; regno >= 0; regno--)
2232 if ((regs_ever_live[regno] && ! call_used_regs[regno])
2233 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2234 nregs++;
2236 /* sp is often unreliable so we must go off the frame pointer.
2238 In reality, we may not care if sp is unreliable, because we can restore
2239 the register relative to the frame pointer. In theory, since each move
2240 is the same speed as a pop, and we don't need the leal, this is faster.
2241 For now restore multiple registers the old way. */
/* OFFSET is where the saved registers sit, relative to %ebp: below
   the locals (tsize) and the register save area itself.  */
2243 offset = - tsize - (nregs * UNITS_PER_WORD);
2245 xops[2] = stack_pointer_rtx;
2247 /* When -fpic, we must emit a scheduling barrier, so that the instruction
2248 that restores %ebx (which is PIC_OFFSET_TABLE_REGNUM), does not get
2249 moved before any instruction which implicitly uses the got. This
2250 includes any instruction which uses a SYMBOL_REF or a LABEL_REF.
2252 Alternatively, this could be fixed by making the dependence on the
2253 PIC_OFFSET_TABLE_REGNUM explicit in the RTL. */
2255 if (flag_pic || profile_flag || profile_block_flag)
2256 emit_insn (gen_blockage ())
2258 if (nregs > 1 || ! frame_pointer_needed)
/* Point sp at the register save area, then pop each saved reg.  */
2260 if (frame_pointer_needed)
2262 xops[0] = adj_offsettable_operand (AT_BP (QImode), offset);
2263 if (do_rtl)
2264 emit_insn (gen_movsi_lea (xops[2], XEXP (xops[0], 0)));
2265 else
2266 output_asm_insn (AS2 (lea%L2,%0,%2), xops);
2269 for (regno = 0; regno < limit; regno++)
2270 if ((regs_ever_live[regno] && ! call_used_regs[regno])
2271 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2273 xops[0] = gen_rtx_REG (SImode, regno);
2275 if (do_rtl)
2276 emit_insn (gen_pop (xops[0]));
2277 else
2278 output_asm_insn ("pop%L0 %0", xops);
/* Single saved register with a frame pointer: reload it directly
   from its slot off %ebp, no sp adjustment needed yet.  */
2282 else
2283 for (regno = 0; regno < limit; regno++)
2284 if ((regs_ever_live[regno] && ! call_used_regs[regno])
2285 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2287 xops[0] = gen_rtx_REG (SImode, regno);
2288 xops[1] = adj_offsettable_operand (AT_BP (Pmode), offset);
2290 if (do_rtl)
2291 emit_move_insn (xops[0], xops[1]);
2292 else
2293 output_asm_insn (AS2 (mov%L0,%1,%0), xops);
2295 offset += 4;
2298 if (frame_pointer_needed)
2300 /* If not an i386, mov & pop is faster than "leave". */
2302 if (TARGET_USE_LEAVE)
2304 if (do_rtl)
2305 emit_insn (gen_leave())
2306 else
2307 output_asm_insn ("leave", xops);
2309 else
2311 xops[0] = frame_pointer_rtx;
2312 xops[1] = stack_pointer_rtx;
2314 if (do_rtl)
2316 emit_insn (gen_epilogue_set_stack_ptr());
2317 emit_insn (gen_pop (xops[0]));
2319 else
2321 output_asm_insn (AS2 (mov%L2,%0,%2), xops);
2322 output_asm_insn ("pop%L0 %0", xops);
2327 else if (tsize)
2329 /* If there is no frame pointer, we must still release the frame. */
2330 xops[0] = GEN_INT (tsize);
2332 if (do_rtl)
2333 emit_insn (gen_rtx_SET (VOIDmode, xops[2],
2334 gen_rtx_PLUS (SImode, xops[2], xops[0])));
2335 else
2336 output_asm_insn (AS2 (add%L2,%0,%2), xops);
2339 #ifdef FUNCTION_BLOCK_PROFILER_EXIT
2340 if (profile_block_flag == 2)
2342 FUNCTION_BLOCK_PROFILER_EXIT(file);
2344 #endif
2346 if (current_function_pops_args && current_function_args_size)
2348 xops[1] = GEN_INT (current_function_pops_args);
2350 /* i386 can only pop 32K bytes (maybe 64K? Is it signed?). If
2351 asked to pop more, pop return address, do explicit add, and jump
2352 indirectly to the caller. */
2354 if (current_function_pops_args >= 32768)
2356 /* ??? Which register to use here? */
2357 xops[0] = gen_rtx_REG (SImode, 2);
2359 if (do_rtl)
2361 emit_insn (gen_pop (xops[0]));
2362 emit_insn (gen_rtx_SET (VOIDmode, xops[2],
2363 gen_rtx_PLUS (SImode, xops[1],
2364 xops[2])));
2365 emit_jump_insn (xops[0]);
2367 else
2369 output_asm_insn ("pop%L0 %0", xops);
2370 output_asm_insn (AS2 (add%L2,%1,%2), xops);
2371 output_asm_insn ("jmp %*%0", xops);
2374 else
2376 if (do_rtl)
2377 emit_jump_insn (gen_return_pop_internal (xops[1]));
2378 else
2379 output_asm_insn ("ret %1", xops);
2382 else
2384 if (do_rtl)
2385 emit_jump_insn (gen_return_internal ());
2386 else
2387 output_asm_insn ("ret", xops);
2391 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2392 that is a valid memory address for an instruction.
2393 The MODE argument is the machine mode for the MEM expression
2394 that wants to use this address.
2396 On x86, legitimate addresses are:
2397 base movl (base),reg
2398 displacement movl disp,reg
2399 base + displacement movl disp(base),reg
2400 index + base movl (base,index),reg
2401 (index + base) + displacement movl disp(base,index),reg
2402 index*scale movl (,index,scale),reg
2403 index*scale + disp movl disp(,index,scale),reg
2404 index*scale + base movl (base,index,scale),reg
2405 (index*scale + base) + disp movl disp(base,index,scale),reg
2407 In each case, scale can be 1, 2, 4, 8. */
2409 /* This is exactly the same as print_operand_addr, except that
2410 it recognizes addresses instead of printing them.
2412 It only recognizes address in canonical form. LEGITIMIZE_ADDRESS should
2413 convert common non-canonical forms to canonical form so that they will
2414 be recognized. */
/* Report an address rejected by legitimate_address_p: when -mdebug-addr
   (TARGET_DEBUG_ADDR) is enabled, print MSG and dump the offending rtx
   INSN to stderr.  Wrapped in do { } while (0) so it acts as a single
   statement in an unbraced `if'.  */
#define ADDR_INVALID(msg,insn)						\
do {									\
  if (TARGET_DEBUG_ADDR)						\
    {									\
      fprintf (stderr, msg);						\
      debug_rtx (insn);							\
    }									\
} while (0)
/* Return TRUE if ADDR is a valid memory address for MODE, else FALSE.
   The address is decomposed into base + index*scale + displacement and
   each component is validated.  STRICT nonzero selects the strict
   REG_OK_FOR_{BASE,INDEX}_STRICT_P register checks (used after reload);
   otherwise the nonstrict variants are used.  */
int
legitimate_address_p (mode, addr, strict)
     enum machine_mode mode;
     register rtx addr;
     int strict;
{
  rtx base  = NULL_RTX;
  rtx indx  = NULL_RTX;
  rtx scale = NULL_RTX;
  rtx disp  = NULL_RTX;

  if (TARGET_DEBUG_ADDR)
    {
      fprintf (stderr,
	       "\n======\nGO_IF_LEGITIMATE_ADDRESS, mode = %s, strict = %d\n",
	       GET_MODE_NAME (mode), strict);
      debug_rtx (addr);
    }

  /* Decompose ADDR into base/index/scale/disp.  Only canonical forms
     are recognized here (see the comment above about LEGITIMIZE_ADDRESS).  */
  if (GET_CODE (addr) == REG || GET_CODE (addr) == SUBREG)
    base = addr;

  else if (GET_CODE (addr) == PLUS)
    {
      rtx op0 = XEXP (addr, 0);
      rtx op1 = XEXP (addr, 1);
      enum rtx_code code0 = GET_CODE (op0);
      enum rtx_code code1 = GET_CODE (op1);

      if (code0 == REG || code0 == SUBREG)
	{
	  if (code1 == REG || code1 == SUBREG)
	    {
	      indx = op0;	/* index + base */
	      base = op1;
	    }
	  else
	    {
	      base = op0;	/* base + displacement */
	      disp = op1;
	    }
	}
      else if (code0 == MULT)
	{
	  indx  = XEXP (op0, 0);
	  scale = XEXP (op0, 1);

	  if (code1 == REG || code1 == SUBREG)
	    base = op1;		/* index*scale + base */
	  else
	    disp = op1;		/* index*scale + disp */
	}
      else if (code0 == PLUS && GET_CODE (XEXP (op0, 0)) == MULT)
	{
	  indx  = XEXP (XEXP (op0, 0), 0);	/* index*scale + base + disp */
	  scale = XEXP (XEXP (op0, 0), 1);
	  base  = XEXP (op0, 1);
	  disp  = op1;
	}
      else if (code0 == PLUS)
	{
	  indx = XEXP (op0, 0);	/* index + base + disp */
	  base = XEXP (op0, 1);
	  disp = op1;
	}
      else
	{
	  ADDR_INVALID ("PLUS subcode is not valid.\n", op0);
	  return FALSE;
	}
    }

  else if (GET_CODE (addr) == MULT)
    {
      indx  = XEXP (addr, 0);	/* index*scale */
      scale = XEXP (addr, 1);
    }

  else
    disp = addr;		/* displacement */

  /* Allow arg pointer and stack pointer as index if there is not scaling */
  if (base && indx && !scale
      && (indx == arg_pointer_rtx || indx == stack_pointer_rtx))
    {
      rtx tmp = base;
      base = indx;
      indx = tmp;
    }

  /* Validate base register:

     Don't allow SUBREG's here, it can lead to spill failures when the base
     is one word out of a two word structure, which is represented internally
     as a DImode int.  */

  if (base)
    {
      if (GET_CODE (base) != REG)
	{
	  ADDR_INVALID ("Base is not a register.\n", base);
	  return FALSE;
	}

      if ((strict && ! REG_OK_FOR_BASE_STRICT_P (base))
	  || (! strict && ! REG_OK_FOR_BASE_NONSTRICT_P (base)))
	{
	  ADDR_INVALID ("Base is not valid.\n", base);
	  return FALSE;
	}
    }

  /* Validate index register:

     Don't allow SUBREG's here, it can lead to spill failures when the index
     is one word out of a two word structure, which is represented internally
     as a DImode int.  */
  if (indx)
    {
      if (GET_CODE (indx) != REG)
	{
	  ADDR_INVALID ("Index is not a register.\n", indx);
	  return FALSE;
	}

      if ((strict && ! REG_OK_FOR_INDEX_STRICT_P (indx))
	  || (! strict && ! REG_OK_FOR_INDEX_NONSTRICT_P (indx)))
	{
	  ADDR_INVALID ("Index is not valid.\n", indx);
	  return FALSE;
	}
    }
  else if (scale)
    abort ();			/* scale w/o index invalid */

  /* Validate scale factor: must be a CONST_INT of 1, 2, 4 or 8.  */
  if (scale)
    {
      HOST_WIDE_INT value;

      if (GET_CODE (scale) != CONST_INT)
	{
	  ADDR_INVALID ("Scale is not valid.\n", scale);
	  return FALSE;
	}

      value = INTVAL (scale);
      if (value != 1 && value != 2 && value != 4 && value != 8)
	{
	  ADDR_INVALID ("Scale is not a good multiplier.\n", scale);
	  return FALSE;
	}
    }

  /* Validate displacement
     Constant pool addresses must be handled special.  They are
     considered legitimate addresses, but only if not used with regs.
     When printed, the output routines know to print the reference with the
     PIC reg, even though the PIC reg doesn't appear in the RTL.  */
  if (disp)
    {
      if (GET_CODE (disp) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (disp)
	  && base == 0
	  && indx == 0)
	;			/* constant-pool ref alone: OK */

      else if (!CONSTANT_ADDRESS_P (disp))
	{
	  ADDR_INVALID ("Displacement is not valid.\n", disp);
	  return FALSE;
	}

      else if (GET_CODE (disp) == CONST_DOUBLE)
	{
	  ADDR_INVALID ("Displacement is a const_double.\n", disp);
	  return FALSE;
	}

      else if (flag_pic && SYMBOLIC_CONST (disp)
	       && base != pic_offset_table_rtx
	       && (indx != pic_offset_table_rtx || scale != NULL_RTX))
	{
	  ADDR_INVALID ("Displacement is an invalid pic reference.\n", disp);
	  return FALSE;
	}

      else if (HALF_PIC_P () && HALF_PIC_ADDRESS_P (disp)
	       && (base != NULL_RTX || indx != NULL_RTX))
	{
	  ADDR_INVALID ("Displacement is an invalid half-pic reference.\n",
			disp);
	  return FALSE;
	}
    }

  if (TARGET_DEBUG_ADDR)
    fprintf (stderr, "Address is valid.\n");

  /* Everything looks valid, return true */
  return TRUE;
}
2635 /* Return a legitimate reference for ORIG (an address) using the
2636 register REG. If REG is 0, a new pseudo is generated.
2638 There are three types of references that must be handled:
2640 1. Global data references must load the address from the GOT, via
2641 the PIC reg. An insn is emitted to do this load, and the reg is
2642 returned.
2644 2. Static data references must compute the address as an offset
2645 from the GOT, whose base is in the PIC reg. An insn is emitted to
2646 compute the address into a reg, and the reg is returned. Static
2647 data objects have SYMBOL_REF_FLAG set to differentiate them from
2648 global data objects.
2650 3. Constant pool addresses must be handled special. They are
2651 considered legitimate addresses, but only if not used with regs.
2652 When printed, the output routines know to print the reference with the
2653 PIC reg, even though the PIC reg doesn't appear in the RTL.
2655 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2656 reg also appears in the address (except for constant pool references,
2657 noted above).
2659 "switch" statements also require special handling when generating
2660 PIC code. See comments by the `casesi' insn in i386.md for details. */
/* Legitimize ORIG (an address) for PIC as described in the block comment
   above: SYMBOL_REF/LABEL_REF become GOT/GOTOFF references through the
   PIC register, CONST/PLUS expressions are legitimized recursively.
   REG, if nonzero, is the register to load the result into; if zero a
   new pseudo is allocated.  Returns the legitimized address rtx.  */
rtx
legitimize_pic_address (orig, reg)
     rtx orig;
     rtx reg;
{
  rtx addr = orig;
  rtx new = orig;

  if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
    {
      /* Constant-pool references need no load; the output routines add
	 the PIC register when printing (see comment above).  */
      if (GET_CODE (addr) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (addr))
	reg = new = orig;
      else
	{
	  if (reg == 0)
	    reg = gen_reg_rtx (Pmode);

	  /* Static data (SYMBOL_REF_FLAG) and labels are pic-reg-relative
	     (GOTOFF); other symbols are loaded through the GOT.  */
	  if ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FLAG (addr))
	      || GET_CODE (addr) == LABEL_REF)
	    new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, orig);
	  else
	    new = gen_rtx_MEM (Pmode,
			       gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
					     orig));

	  emit_move_insn (reg, new);
	}
      current_function_uses_pic_offset_table = 1;
      return reg;
    }

  else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
    {
      rtx base;

      if (GET_CODE (addr) == CONST)
	{
	  addr = XEXP (addr, 0);
	  if (GET_CODE (addr) != PLUS)
	    abort ();
	}

      /* Already relative to the PIC register: nothing to do.  */
      if (XEXP (addr, 0) == pic_offset_table_rtx)
	return orig;

      if (reg == 0)
	reg = gen_reg_rtx (Pmode);

      /* Legitimize both operands; reuse REG for the second only when the
	 first did not consume it.  */
      base = legitimize_pic_address (XEXP (addr, 0), reg);
      addr = legitimize_pic_address (XEXP (addr, 1),
				     base == reg ? NULL_RTX : reg);

      if (GET_CODE (addr) == CONST_INT)
	return plus_constant (base, INTVAL (addr));

      /* Re-associate so any remaining constant ends up outermost.  */
      if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
	{
	  base = gen_rtx_PLUS (Pmode, base, XEXP (addr, 0));
	  addr = XEXP (addr, 1);
	}

      return gen_rtx_PLUS (Pmode, base, addr);
    }
  return new;
}
2728 /* Emit insns to move operands[1] into operands[0]. */
2730 void
2731 emit_pic_move (operands, mode)
2732 rtx *operands;
2733 enum machine_mode mode;
2735 rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
2737 if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
2738 operands[1] = force_reg (SImode, operands[1]);
2739 else
2740 operands[1] = legitimize_pic_address (operands[1], temp);
2743 /* Try machine-dependent ways of modifying an illegitimate address
2744 to be legitimate. If we find one, return the new, valid address.
2745 This macro is used in only one place: `memory_address' in explow.c.
2747 OLDX is the address as it was before break_out_memory_refs was called.
2748 In some cases it is useful to look at this to decide what needs to be done.
2750 MODE and WIN are passed so that this macro can use
2751 GO_IF_LEGITIMATE_ADDRESS.
2753 It is always safe for this macro to do nothing. It exists to recognize
2754 opportunities to optimize the output.
2756 For the 80386, we handle X+REG by loading X into a register R and
2757 using R+REG. R will go in a general reg and indexing will be used.
2758 However, if REG is a broken-out memory address or multiplication,
2759 nothing needs to be done because REG can certainly go in a general reg.
2761 When -fpic is used, special handling is needed for symbolic references.
2762 See comments by legitimize_pic_address in i386.c for details. */
/* Legitimize X (see the LEGITIMIZE_ADDRESS comment above).  OLDX is the
   address before break_out_memory_refs (unused here).  MODE is the mode
   of the containing MEM.  Returns a (possibly unchanged) address; note
   the PLUS transforms below rewrite X's operands in place via XEXP.  */
rtx
legitimize_address (x, oldx, mode)
     register rtx x;
     register rtx oldx;
     enum machine_mode mode;
{
  int changed = 0;
  unsigned log;

  if (TARGET_DEBUG_ADDR)
    {
      fprintf (stderr, "\n==========\nLEGITIMIZE_ADDRESS, mode = %s\n",
	       GET_MODE_NAME (mode));
      debug_rtx (x);
    }

  if (flag_pic && SYMBOLIC_CONST (x))
    return legitimize_pic_address (x, 0);

  /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
  if (GET_CODE (x) == ASHIFT
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (log = (unsigned)exact_log2 (INTVAL (XEXP (x, 1)))) < 4)
    {
      changed = 1;
      x = gen_rtx_MULT (Pmode, force_reg (Pmode, XEXP (x, 0)),
			GEN_INT (1 << log));
    }

  if (GET_CODE (x) == PLUS)
    {
      /* Canonicalize shifts by 0, 1, 2, 3 into multiply.  */

      if (GET_CODE (XEXP (x, 0)) == ASHIFT
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) < 4)
	{
	  changed = 1;
	  XEXP (x, 0) = gen_rtx_MULT (Pmode,
				      force_reg (Pmode, XEXP (XEXP (x, 0), 0)),
				      GEN_INT (1 << log));
	}

      if (GET_CODE (XEXP (x, 1)) == ASHIFT
	  && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
	  && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 1), 1)))) < 4)
	{
	  changed = 1;
	  XEXP (x, 1) = gen_rtx_MULT (Pmode,
				      force_reg (Pmode, XEXP (XEXP (x, 1), 0)),
				      GEN_INT (1 << log));
	}

      /* Put multiply first if it isn't already.  */
      if (GET_CODE (XEXP (x, 1)) == MULT)
	{
	  rtx tmp = XEXP (x, 0);
	  XEXP (x, 0) = XEXP (x, 1);
	  XEXP (x, 1) = tmp;
	  changed = 1;
	}

      /* Canonicalize (plus (mult (reg) (const)) (plus (reg) (const)))
	 into (plus (plus (mult (reg) (const)) (reg)) (const)).  This can be
	 created by virtual register instantiation, register elimination, and
	 similar optimizations.  */
      if (GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == PLUS)
	{
	  changed = 1;
	  x = gen_rtx_PLUS (Pmode,
			    gen_rtx_PLUS (Pmode, XEXP (x, 0),
					  XEXP (XEXP (x, 1), 0)),
			    XEXP (XEXP (x, 1), 1));
	}

      /* Canonicalize
	 (plus (plus (mult (reg) (const)) (plus (reg) (const))) const)
	 into (plus (plus (mult (reg) (const)) (reg)) (const)).  */
      else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == PLUS
	       && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == PLUS
	       && CONSTANT_P (XEXP (x, 1)))
	{
	  rtx constant, other;

	  if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	    {
	      constant = XEXP (x, 1);
	      other = XEXP (XEXP (XEXP (x, 0), 1), 1);
	    }
	  else if (GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 1)) == CONST_INT)
	    {
	      constant = XEXP (XEXP (XEXP (x, 0), 1), 1);
	      other = XEXP (x, 1);
	    }
	  else
	    constant = 0;

	  if (constant)
	    {
	      changed = 1;
	      x = gen_rtx_PLUS (Pmode,
				gen_rtx_PLUS (Pmode, XEXP (XEXP (x, 0), 0),
					      XEXP (XEXP (XEXP (x, 0), 1), 0)),
				plus_constant (other, INTVAL (constant)));
	    }
	}

      if (changed && legitimate_address_p (mode, x, FALSE))
	return x;

      /* Multiplies that survived the canonicalizations above cannot appear
	 directly in an address: force them into registers.  */
      if (GET_CODE (XEXP (x, 0)) == MULT)
	{
	  changed = 1;
	  XEXP (x, 0) = force_operand (XEXP (x, 0), 0);
	}

      if (GET_CODE (XEXP (x, 1)) == MULT)
	{
	  changed = 1;
	  XEXP (x, 1) = force_operand (XEXP (x, 1), 0);
	}

      if (changed
	  && GET_CODE (XEXP (x, 1)) == REG
	  && GET_CODE (XEXP (x, 0)) == REG)
	return x;

      if (flag_pic && SYMBOLIC_CONST (XEXP (x, 1)))
	{
	  changed = 1;
	  x = legitimize_pic_address (x, 0);
	}

      if (changed && legitimate_address_p (mode, x, FALSE))
	return x;

      /* Last resort: force the non-register operand into a register so
	 the result is reg + reg.  */
      if (GET_CODE (XEXP (x, 0)) == REG)
	{
	  register rtx temp = gen_reg_rtx (Pmode);
	  register rtx val = force_operand (XEXP (x, 1), temp);
	  if (val != temp)
	    emit_move_insn (temp, val);

	  XEXP (x, 1) = temp;
	  return x;
	}

      else if (GET_CODE (XEXP (x, 1)) == REG)
	{
	  register rtx temp = gen_reg_rtx (Pmode);
	  register rtx val = force_operand (XEXP (x, 0), temp);
	  if (val != temp)
	    emit_move_insn (temp, val);

	  XEXP (x, 0) = temp;
	  return x;
	}
    }

  return x;
}
2927 /* Print an integer constant expression in assembler syntax. Addition
2928 and subtraction are the only arithmetic that may appear in these
2929 expressions. FILE is the stdio stream to write to, X is the rtx, and
2930 CODE is the operand print code from the output string. */
static void
output_pic_addr_const (file, x, code)
     FILE *file;
     rtx x;
     int code;
{
  char buf[256];

  switch (GET_CODE (x))
    {
    case PC:
      if (flag_pic)
	putc ('.', file);
      else
	abort ();
      break;

    case SYMBOL_REF:
    case LABEL_REF:
      if (GET_CODE (x) == SYMBOL_REF)
	assemble_name (file, XSTR (x, 0));
      else
	{
	  ASM_GENERATE_INTERNAL_LABEL (buf, "L",
				       CODE_LABEL_NUMBER (XEXP (x, 0)));
	  assemble_name (asm_out_file, buf);
	}

      /* Pick the relocation suffix: constant-pool entries and static
	 data (SYMBOL_REF_FLAG) are pic-register-relative (@GOTOFF),
	 'P' requests a PLT call, everything else goes through the GOT.  */
      if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
	fprintf (file, "@GOTOFF(%%ebx)");
      else if (code == 'P')
	fprintf (file, "@PLT");
      else if (GET_CODE (x) == LABEL_REF)
	fprintf (file, "@GOTOFF");
      else if (! SYMBOL_REF_FLAG (x))
	fprintf (file, "@GOT");
      else
	fprintf (file, "@GOTOFF");

      break;

    case CODE_LABEL:
      ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
      assemble_name (asm_out_file, buf);
      break;

    case CONST_INT:
      fprintf (file, "%d", INTVAL (x));
      break;

    case CONST:
      /* This used to output parentheses around the expression,
	 but that does not work on the 386 (either ATT or BSD assembler). */
      output_pic_addr_const (file, XEXP (x, 0), code);
      break;

    case CONST_DOUBLE:
      if (GET_MODE (x) == VOIDmode)
	{
	  /* We can use %d if the number is <32 bits and positive.  */
	  if (CONST_DOUBLE_HIGH (x) || CONST_DOUBLE_LOW (x) < 0)
	    fprintf (file, "0x%x%08x",
		     CONST_DOUBLE_HIGH (x), CONST_DOUBLE_LOW (x));
	  else
	    fprintf (file, "%d", CONST_DOUBLE_LOW (x));
	}
      else
	/* We can't handle floating point constants;
	   PRINT_OPERAND must handle them. */
	output_operand_lossage ("floating constant misused");
      break;

    case PLUS:
      /* Some assemblers need integer constants to appear first.  */
      /* NOTE(review): in the first branch INTVAL is applied to XEXP (x, 1),
	 which is the NON-CONST_INT operand (and symmetrically in the else
	 branch) -- the sign test looks at the wrong operand's bits.
	 Confirm against later GCC revisions of this function.  */
      if (GET_CODE (XEXP (x, 0)) == CONST_INT)
	{
	  output_pic_addr_const (file, XEXP (x, 0), code);
	  if (INTVAL (XEXP (x, 1)) >= 0)
	    fprintf (file, "+");
	  output_pic_addr_const (file, XEXP (x, 1), code);
	}
      else
	{
	  output_pic_addr_const (file, XEXP (x, 1), code);
	  if (INTVAL (XEXP (x, 0)) >= 0)
	    fprintf (file, "+");
	  output_pic_addr_const (file, XEXP (x, 0), code);
	}
      break;

    case MINUS:
      output_pic_addr_const (file, XEXP (x, 0), code);
      fprintf (file, "-");
      output_pic_addr_const (file, XEXP (x, 1), code);
      break;

    default:
      output_operand_lossage ("invalid expression as operand");
    }
}
3033 /* Append the correct conditional move suffix which corresponds to CODE. */
/* Write to FILE the condition-code suffix (e.g. "e", "ne", "b") for
   rtx code CODE.  REVERSE_CC nonzero reverses the condition (skipped
   for IEEE 80387 compares, which are handled per-case below).  MODE
   selects integer (MODE_INT) or 80387 (MODE_FLOAT) comparison rules,
   using cc_prev_status flags recorded by notice_update_cc.  */
static void
put_condition_code (code, reverse_cc, mode, file)
     enum rtx_code code;
     int reverse_cc;
     enum mode_class mode;
     FILE * file;
{
  /* IEEE 80387 compares (without FCOMI) leave the result in flags that
     need special suffix handling instead of plain reversal.  */
  int ieee = (TARGET_IEEE_FP && (cc_prev_status.flags & CC_IN_80387)
	      && ! (cc_prev_status.flags & CC_FCOMI));
  if (reverse_cc && ! ieee)
    code = reverse_condition (code);

  if (mode == MODE_INT)
    switch (code)
      {
      case NE:
	if (cc_prev_status.flags & CC_Z_IN_NOT_C)
	  fputs ("b", file);
	else
	  fputs ("ne", file);
	return;

      case EQ:
	if (cc_prev_status.flags & CC_Z_IN_NOT_C)
	  fputs ("ae", file);
	else
	  fputs ("e", file);
	return;

      case GE:
	fputs ("ge", file);
	return;

      case GT:
	fputs ("g", file);
	return;

      case LE:
	fputs ("le", file);
	return;

      case LT:
	fputs ("l", file);
	return;

      case GEU:
	fputs ("ae", file);
	return;

      case GTU:
	fputs ("a", file);
	return;

      case LEU:
	fputs ("be", file);
	return;

      case LTU:
	fputs ("b", file);
	return;

      default:
	output_operand_lossage ("Invalid %%C operand");
      }

  else if (mode == MODE_FLOAT)
    switch (code)
      {
      case NE:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "ne", file);
	return;
      case EQ:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "e", file);
	return;
      case GE:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "nb", file);
	return;
      case GT:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "nbe", file);
	return;
      case LE:
	fputs (ieee ? (reverse_cc ? "nb" : "b") : "be", file);
	return;
      case LT:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "b", file);
	return;
      case GEU:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "nb", file);
	return;
      case GTU:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "nbe", file);
	return;
      case LEU:
	fputs (ieee ? (reverse_cc ? "nb" : "b") : "be", file);
	return;
      case LTU:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "b", file);
	return;
      default:
	output_operand_lossage ("Invalid %%C operand");
      }
}
3138 /* Meaning of CODE:
3139 L,W,B,Q,S,T -- print the opcode suffix for specified size of operand.
3140 C -- print opcode suffix for set/cmov insn.
3141 c -- like C, but print reversed condition
3142 F -- print opcode suffix for fcmov insn.
3143 f -- like C, but print reversed condition
3144 R -- print the prefix for register names.
3145 z -- print the opcode suffix for the size of the current operand.
3146 * -- print a star (in certain assembler syntax)
3147 w -- print the operand as if it's a "word" (HImode) even if it isn't.
3148 c -- don't print special prefixes before constant operands.
3149 J -- print the appropriate jump operand.
3150 s -- print a shift double count, followed by the assemblers argument
3151 delimiter.
3152 b -- print the QImode name of the register for the indicated operand.
3153 %b0 would print %al if operands[0] is reg 0.
3154 w -- likewise, print the HImode name of the register.
3155 k -- likewise, print the SImode name of the register.
3156 h -- print the QImode name for a "high" register, either ah, bh, ch or dh.
3157 y -- print "st(0)" instead of "st" as a register.
3158 P -- print as a PIC constant */
/* Output operand X to FILE, modified by print code CODE (see the table
   of codes in the comment above).  A zero CODE prints the operand
   unmodified.  */
void
print_operand (file, x, code)
     FILE *file;
     rtx x;
     int code;
{
  if (code)
    {
      switch (code)
	{
	case '*':
	  if (USE_STAR)
	    putc ('*', file);
	  return;

	case 'L':
	  PUT_OP_SIZE (code, 'l', file);
	  return;

	case 'W':
	  PUT_OP_SIZE (code, 'w', file);
	  return;

	case 'B':
	  PUT_OP_SIZE (code, 'b', file);
	  return;

	case 'Q':
	  PUT_OP_SIZE (code, 'l', file);
	  return;

	case 'S':
	  PUT_OP_SIZE (code, 's', file);
	  return;

	case 'T':
	  PUT_OP_SIZE (code, 't', file);
	  return;

	case 'z':
	  /* 387 opcodes don't get size suffixes if the operands are
	     registers. */

	  if (STACK_REG_P (x))
	    return;

	  /* this is the size of op from size of operand */
	  switch (GET_MODE_SIZE (GET_MODE (x)))
	    {
	    case 1:
	      PUT_OP_SIZE ('B', 'b', file);
	      return;

	    case 2:
	      PUT_OP_SIZE ('W', 'w', file);
	      return;

	    case 4:
	      if (GET_MODE (x) == SFmode)
		{
		  PUT_OP_SIZE ('S', 's', file);
		  return;
		}
	      else
		PUT_OP_SIZE ('L', 'l', file);
	      return;

	    case 12:
	      PUT_OP_SIZE ('T', 't', file);
	      return;

	    case 8:
	      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
		{
#ifdef GAS_MNEMONICS
		  PUT_OP_SIZE ('Q', 'q', file);
		  return;
#else
		  /* NOTE(review): without GAS_MNEMONICS this emits the
		     'l' suffix here AND again below after falling out of
		     the if -- confirm against PUT_OP_SIZE's definition
		     whether the double emission is intended.  */
		  PUT_OP_SIZE ('Q', 'l', file);	/* Fall through */
#endif
		}

	      PUT_OP_SIZE ('Q', 'l', file);
	      return;
	    }

	case 'b':
	case 'w':
	case 'k':
	case 'h':
	case 'y':
	case 'P':
	  /* Handled by PRINT_REG / the constant cases below.  */
	  break;

	case 'J':
	  switch (GET_CODE (x))
	    {
	      /* These conditions are appropriate for testing the result
		 of an arithmetic operation, not for a compare operation.
		 Cases GE, LT assume CC_NO_OVERFLOW true.  All cases assume
		 CC_Z_IN_NOT_C false and not floating point.  */
	    case NE:  fputs ("jne", file); return;
	    case EQ:  fputs ("je",  file); return;
	    case GE:  fputs ("jns", file); return;
	    case LT:  fputs ("js",  file); return;
	    case GEU: fputs ("jmp", file); return;
	    case GTU: fputs ("jne", file); return;
	    case LEU: fputs ("je",  file); return;
	    case LTU: fputs ("#branch never", file); return;

	    /* no matching branches for GT nor LE */
	    }
	  abort ();

	case 's':
	  if (GET_CODE (x) == CONST_INT || ! SHIFT_DOUBLE_OMITS_COUNT)
	    {
	      PRINT_OPERAND (file, x, 0);
	      fputs (AS2C (,) + 1, file);
	    }

	  return;

	  /* This is used by the conditional move instructions.  */
	case 'C':
	  put_condition_code (GET_CODE (x), 0, MODE_INT, file);
	  return;

	  /* Like above, but reverse condition */
	case 'c':
	  put_condition_code (GET_CODE (x), 1, MODE_INT, file); return;

	case 'F':
	  put_condition_code (GET_CODE (x), 0, MODE_FLOAT, file);
	  return;

	  /* Like above, but reverse condition */
	case 'f':
	  put_condition_code (GET_CODE (x), 1, MODE_FLOAT, file);
	  return;

	default:
	  {
	    char str[50];

	    sprintf (str, "invalid operand code `%c'", code);
	    output_operand_lossage (str);
	  }
	}
    }

  if (GET_CODE (x) == REG)
    {
      PRINT_REG (x, code, file);
    }

  else if (GET_CODE (x) == MEM)
    {
      PRINT_PTR (x, file);
      if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
	{
	  if (flag_pic)
	    output_pic_addr_const (file, XEXP (x, 0), code);
	  else
	    output_addr_const (file, XEXP (x, 0));
	}
      else
	output_address (XEXP (x, 0));
    }

  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      REAL_VALUE_TYPE r;
      long l;

      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      REAL_VALUE_TO_TARGET_SINGLE (r, l);
      PRINT_IMMED_PREFIX (file);
      fprintf (file, "0x%x", l);
    }

  /* These float cases don't actually occur as immediate operands.  */
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      REAL_VALUE_TYPE r;
      char dstr[30];

      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      REAL_VALUE_TO_DECIMAL (r, "%.22e", dstr);
      fprintf (file, "%s", dstr);
    }

  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == XFmode)
    {
      REAL_VALUE_TYPE r;
      char dstr[30];

      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      REAL_VALUE_TO_DECIMAL (r, "%.22e", dstr);
      fprintf (file, "%s", dstr);
    }

  else
    {
      /* 'P' means print a PIC constant with no prefix.  */
      if (code != 'P')
	{
	  if (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
	    PRINT_IMMED_PREFIX (file);
	  else if (GET_CODE (x) == CONST || GET_CODE (x) == SYMBOL_REF
		   || GET_CODE (x) == LABEL_REF)
	    PRINT_OFFSET_PREFIX (file);
	}
      if (flag_pic)
	output_pic_addr_const (file, x, code);
      else
	output_addr_const (file, x);
    }
}
3378 /* Print a memory operand whose address is ADDR. */
/* Print a memory operand whose address is ADDR to FILE, decomposing a
   PLUS/MULT address into displacement, base and index*scale parts and
   emitting them via the assembler-syntax macros (PRINT_B_I_S etc.).  */
void
print_operand_address (file, addr)
     FILE *file;
     register rtx addr;
{
  register rtx reg1, reg2, breg, ireg;
  rtx offset;

  switch (GET_CODE (addr))
    {
    case REG:
      ADDR_BEG (file);
      fprintf (file, "%se", RP);
      fputs (hi_reg_name[REGNO (addr)], file);
      ADDR_END (file);
      break;

    case PLUS:
      reg1 = 0;
      reg2 = 0;
      ireg = 0;
      breg = 0;
      offset = 0;

      /* Peel a constant displacement off either operand.  */
      if (CONSTANT_ADDRESS_P (XEXP (addr, 0)))
	{
	  offset = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else if (CONSTANT_ADDRESS_P (XEXP (addr, 1)))
	{
	  offset = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}

      /* Pick off the first register or mult operand, if any.  */
      if (GET_CODE (addr) != PLUS)
	;
      else if (GET_CODE (XEXP (addr, 0)) == MULT)
	reg1 = XEXP (addr, 0), addr = XEXP (addr, 1);
      else if (GET_CODE (XEXP (addr, 1)) == MULT)
	reg1 = XEXP (addr, 1), addr = XEXP (addr, 0);
      else if (GET_CODE (XEXP (addr, 0)) == REG)
	reg1 = XEXP (addr, 0), addr = XEXP (addr, 1);
      else if (GET_CODE (XEXP (addr, 1)) == REG)
	reg1 = XEXP (addr, 1), addr = XEXP (addr, 0);

      if (GET_CODE (addr) == REG || GET_CODE (addr) == MULT)
	{
	  if (reg1 == 0)
	    reg1 = addr;
	  else
	    reg2 = addr;

	  addr = 0;
	}

      if (offset != 0)
	{
	  if (addr != 0)
	    abort ();
	  addr = offset;
	}

      /* Decide which register is the base and which the index.  */
      if ((reg1 && GET_CODE (reg1) == MULT)
	  || (reg2 != 0 && REGNO_OK_FOR_BASE_P (REGNO (reg2))))
	{
	  breg = reg2;
	  ireg = reg1;
	}
      else if (reg1 != 0 && REGNO_OK_FOR_BASE_P (REGNO (reg1)))
	{
	  breg = reg1;
	  ireg = reg2;
	}

      if (ireg != 0 || breg != 0)
	{
	  int scale = 1;

	  if (addr != 0)
	    {
	      if (flag_pic)
		output_pic_addr_const (file, addr, 0);
	      else if (GET_CODE (addr) == LABEL_REF)
		output_asm_label (addr);
	      else
		output_addr_const (file, addr);
	    }

	  if (ireg != 0 && GET_CODE (ireg) == MULT)
	    {
	      scale = INTVAL (XEXP (ireg, 1));
	      ireg = XEXP (ireg, 0);
	    }

	  /* The stack pointer can only appear as a base register,
	     never an index register, so exchange the regs if it is wrong. */

	  if (scale == 1 && ireg && REGNO (ireg) == STACK_POINTER_REGNUM)
	    {
	      rtx tmp;

	      tmp = breg;
	      breg = ireg;
	      ireg = tmp;
	    }

	  /* output breg+ireg*scale */
	  PRINT_B_I_S (breg, ireg, scale, file);
	  break;
	}
      /* No base or index found: deliberately fall through to the MULT
	 case below.  */

    case MULT:
      {
	int scale;

	if (GET_CODE (XEXP (addr, 0)) == CONST_INT)
	  {
	    scale = INTVAL (XEXP (addr, 0));
	    ireg = XEXP (addr, 1);
	  }
	else
	  {
	    scale = INTVAL (XEXP (addr, 1));
	    ireg = XEXP (addr, 0);
	  }

	output_addr_const (file, const0_rtx);
	PRINT_B_I_S (NULL_RTX, ireg, scale, file);
      }
      break;

    default:
      if (GET_CODE (addr) == CONST_INT
	  && INTVAL (addr) < 0x8000
	  && INTVAL (addr) >= -0x8000)
	fprintf (file, "%d", INTVAL (addr));
      else
	{
	  if (flag_pic)
	    output_pic_addr_const (file, addr, 0);
	  else
	    output_addr_const (file, addr);
	}
    }
}
3526 /* Set the cc_status for the results of an insn whose pattern is EXP.
3527 On the 80386, we assume that only test and compare insns, as well
3528 as SI, HI, & DI mode ADD, SUB, NEG, AND, IOR, XOR, ASHIFT,
3529 ASHIFTRT, and LSHIFTRT instructions set the condition codes usefully.
3530 Also, we assume that jumps, moves and sCOND don't affect the condition
3531 codes. All else clobbers the condition codes, by assumption.
3533 We assume that ALL integer add, minus, etc. instructions effect the
3534 condition codes. This MUST be consistent with i386.md.
3536 We don't record any float test or compare - the redundant test &
3537 compare check in final.c does not handle stack-like regs correctly. */
void
notice_update_cc (exp)
     rtx exp;
{
  if (GET_CODE (exp) == SET)
    {
      /* Jumps do not alter the cc's.  */
      if (SET_DEST (exp) == pc_rtx)
	return;

      /* Moving register or memory into a register:
	 it doesn't alter the cc's, but it might invalidate
	 the RTX's which we remember the cc's came from.
	 (Note that moving a constant 0 or 1 MAY set the cc's). */
      if (REG_P (SET_DEST (exp))
	  && (REG_P (SET_SRC (exp)) || GET_CODE (SET_SRC (exp)) == MEM
	      || GET_RTX_CLASS (GET_CODE (SET_SRC (exp))) == '<'))
	{
	  if (cc_status.value1
	      && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value1))
	    cc_status.value1 = 0;

	  if (cc_status.value2
	      && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value2))
	    cc_status.value2 = 0;

	  return;
	}

      /* Moving register into memory doesn't alter the cc's.
	 It may invalidate the RTX's which we remember the cc's came from. */
      if (GET_CODE (SET_DEST (exp)) == MEM
	  && (REG_P (SET_SRC (exp))
	      || GET_RTX_CLASS (GET_CODE (SET_SRC (exp))) == '<'))
	{
	  if (cc_status.value1
	      && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value1))
	    cc_status.value1 = 0;
	  if (cc_status.value2
	      && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value2))
	    cc_status.value2 = 0;

	  return;
	}

      /* Function calls clobber the cc's.  */
      else if (GET_CODE (SET_SRC (exp)) == CALL)
	{
	  CC_STATUS_INIT;
	  return;
	}

      /* Tests and compares set the cc's in predictable ways.  */
      else if (SET_DEST (exp) == cc0_rtx)
	{
	  CC_STATUS_INIT;
	  cc_status.value1 = SET_SRC (exp);
	  return;
	}

      /* Certain instructions effect the condition codes. */
      else if (GET_MODE (SET_SRC (exp)) == SImode
	       || GET_MODE (SET_SRC (exp)) == HImode
	       || GET_MODE (SET_SRC (exp)) == QImode)
	switch (GET_CODE (SET_SRC (exp)))
	  {
	  case ASHIFTRT: case LSHIFTRT: case ASHIFT:
	    /* Shifts on the 386 don't set the condition codes if the
	       shift count is zero. */
	    if (GET_CODE (XEXP (SET_SRC (exp), 1)) != CONST_INT)
	      {
		CC_STATUS_INIT;
		break;
	      }

	    /* We assume that the CONST_INT is non-zero (this rtx would
	       have been deleted if it were zero.  Deliberate fallthrough
	       to the arithmetic cases below. */

	  case PLUS: case MINUS: case NEG:
	  case AND: case IOR: case XOR:
	    cc_status.flags = CC_NO_OVERFLOW;
	    cc_status.value1 = SET_SRC (exp);
	    cc_status.value2 = SET_DEST (exp);
	    break;

	  default:
	    CC_STATUS_INIT;
	  }
      else
	{
	  CC_STATUS_INIT;
	}
    }
  else if (GET_CODE (exp) == PARALLEL
	   && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
    {
      /* Only the first SET of a PARALLEL is examined.  */
      if (SET_DEST (XVECEXP (exp, 0, 0)) == pc_rtx)
	return;
      if (SET_DEST (XVECEXP (exp, 0, 0)) == cc0_rtx)
	{
	  CC_STATUS_INIT;
	  if (stack_regs_mentioned_p (SET_SRC (XVECEXP (exp, 0, 0))))
	    {
	      /* 80387 compare: record where the result flags live.  */
	      cc_status.flags |= CC_IN_80387;
	      if (TARGET_CMOVE && stack_regs_mentioned_p
		  (XEXP (SET_SRC (XVECEXP (exp, 0, 0)), 1)))
		cc_status.flags |= CC_FCOMI;
	    }
	  else
	    cc_status.value1 = SET_SRC (XVECEXP (exp, 0, 0));
	  return;
	}

      CC_STATUS_INIT;
    }
  else
    {
      CC_STATUS_INIT;
    }
}
3661 /* Split one or more DImode RTL references into pairs of SImode
3662 references. The RTL can be REG, offsettable MEM, integer constant, or
3663 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
3664 split and "num" is its length. lo_half and hi_half are output arrays
3665 that parallel "operands". */
/* Split each of the NUM DImode rtx's in OPERANDS into a pair of SImode
   rtx's, storing the low words in LO_HALF and high words in HI_HALF.
   Handles REG (low regno, regno+1), constants (via split_double), and
   offsettable MEMs (offset +4 for the high half); aborts otherwise.  */
void
split_di (operands, num, lo_half, hi_half)
     rtx operands[];
     int num;
     rtx lo_half[], hi_half[];
{
  while (num--)
    {
      if (GET_CODE (operands[num]) == REG)
	{
	  /* A DImode register pair is regno (low) and regno+1 (high).  */
	  lo_half[num] = gen_rtx_REG (SImode, REGNO (operands[num]));
	  hi_half[num] = gen_rtx_REG (SImode, REGNO (operands[num]) + 1);
	}
      else if (CONSTANT_P (operands[num]))
	split_double (operands[num], &lo_half[num], &hi_half[num]);
      else if (offsettable_memref_p (operands[num]))
	{
	  lo_half[num] = operands[num];
	  hi_half[num] = adj_offsettable_operand (operands[num], 4);
	}
      else
	abort();
    }
}
3692 /* Return 1 if this is a valid binary operation on a 387.
3693 OP is the expression matched, and MODE is its mode. */
3696 binary_387_op (op, mode)
3697 register rtx op;
3698 enum machine_mode mode;
3700 if (mode != VOIDmode && mode != GET_MODE (op))
3701 return 0;
3703 switch (GET_CODE (op))
3705 case PLUS:
3706 case MINUS:
3707 case MULT:
3708 case DIV:
3709 return GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT;
3711 default:
3712 return 0;
3716 /* Return 1 if this is a valid shift or rotate operation on a 386.
3717 OP is the expression matched, and MODE is its mode. */
3720 shift_op (op, mode)
3721 register rtx op;
3722 enum machine_mode mode;
3724 rtx operand = XEXP (op, 0);
3726 if (mode != VOIDmode && mode != GET_MODE (op))
3727 return 0;
3729 if (GET_MODE (operand) != GET_MODE (op)
3730 || GET_MODE_CLASS (GET_MODE (op)) != MODE_INT)
3731 return 0;
3733 return (GET_CODE (op) == ASHIFT
3734 || GET_CODE (op) == ASHIFTRT
3735 || GET_CODE (op) == LSHIFTRT
3736 || GET_CODE (op) == ROTATE
3737 || GET_CODE (op) == ROTATERT);
3740 /* Return 1 if OP is COMPARE rtx with mode VOIDmode.
3741 MODE is not used. */
3744 VOIDmode_compare_op (op, mode)
3745 register rtx op;
3746 enum machine_mode mode;
3748 return GET_CODE (op) == COMPARE && GET_MODE (op) == VOIDmode;
3751 /* Output code to perform a 387 binary operation in INSN, one of PLUS,
3752 MINUS, MULT or DIV. OPERANDS are the insn operands, where operands[3]
3753 is the expression of the binary operation. The output may either be
3754 emitted here, or returned to the caller, like all output_* functions.
3756 There is no guarantee that the operands are the same mode, as they
3757 might be within FLOAT or FLOAT_EXTEND expressions. */
3759 char *
3760 output_387_binary_op (insn, operands)
3761 rtx insn;
3762 rtx *operands;
3764 rtx temp;
3765 char *base_op;
3766 static char buf[100];
/* Step 1: pick the base mnemonic.  The "fi" variants take an integer
   memory operand, so they are chosen when either source operand has
   an integer mode.  */
3768 switch (GET_CODE (operands[3]))
3770 case PLUS:
3771 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3772 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3773 base_op = "fiadd";
3774 else
3775 base_op = "fadd";
3776 break;
3778 case MINUS:
3779 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3780 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3781 base_op = "fisub";
3782 else
3783 base_op = "fsub";
3784 break;
3786 case MULT:
3787 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3788 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3789 base_op = "fimul";
3790 else
3791 base_op = "fmul";
3792 break;
3794 case DIV:
3795 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3796 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3797 base_op = "fidiv";
3798 else
3799 base_op = "fdiv";
3800 break;
3802 default:
3803 abort ();
3806 strcpy (buf, base_op);
/* Step 2: append the operand part.  MULT and PLUS are commutative,
   so operands 1 and 2 may be swapped to put the one that matches the
   destination register first.  */
3808 switch (GET_CODE (operands[3]))
3810 case MULT:
3811 case PLUS:
3812 if (REG_P (operands[2]) && REGNO (operands[0]) == REGNO (operands[2]))
3814 temp = operands[2];
3815 operands[2] = operands[1];
3816 operands[1] = temp;
3819 if (GET_CODE (operands[2]) == MEM)
3820 return strcat (buf, AS1 (%z2,%2));
3822 if (NON_STACK_REG_P (operands[1]))
3824 output_op_from_reg (operands[1], strcat (buf, AS1 (%z0,%1)));
3825 return "";
3828 else if (NON_STACK_REG_P (operands[2]))
3830 output_op_from_reg (operands[2], strcat (buf, AS1 (%z0,%1)));
3831 return "";
/* If the second source register dies here, emit the popping form so
   the 387 stack does not grow.  NB: the `else' below binds to the
   inner `if (STACK_TOP_P ...)'.  */
3834 if (find_regno_note (insn, REG_DEAD, REGNO (operands[2])))
3835 if (STACK_TOP_P (operands[0]))
3836 return strcat (buf, AS2 (p,%0,%2));
3837 else
3838 return strcat (buf, AS2 (p,%2,%0));
3840 if (STACK_TOP_P (operands[0]))
3841 return strcat (buf, AS2C (%y2,%0));
3842 else
3843 return strcat (buf, AS2C (%2,%0));
3845 case MINUS:
3846 case DIV:
/* Non-commutative: the reversed ("r") opcode forms are used when the
   memory/second operand is really the minuend or dividend.  */
3847 if (GET_CODE (operands[1]) == MEM)
3848 return strcat (buf, AS1 (r%z1,%1));
3850 if (GET_CODE (operands[2]) == MEM)
3851 return strcat (buf, AS1 (%z2,%2));
3853 if (NON_STACK_REG_P (operands[1]))
3855 output_op_from_reg (operands[1], strcat (buf, AS1 (r%z0,%1)));
3856 return "";
3859 else if (NON_STACK_REG_P (operands[2]))
3861 output_op_from_reg (operands[2], strcat (buf, AS1 (%z0,%1)));
3862 return "";
3865 if (! STACK_REG_P (operands[1]) || ! STACK_REG_P (operands[2]))
3866 abort ();
3868 if (find_regno_note (insn, REG_DEAD, REGNO (operands[2])))
3869 if (STACK_TOP_P (operands[0]))
3870 return strcat (buf, AS2 (p,%0,%2));
3871 else
3872 return strcat (buf, AS2 (rp,%2,%0));
3874 if (find_regno_note (insn, REG_DEAD, REGNO (operands[1])))
3875 if (STACK_TOP_P (operands[0]))
3876 return strcat (buf, AS2 (rp,%0,%1));
3877 else
3878 return strcat (buf, AS2 (p,%1,%0));
3880 if (STACK_TOP_P (operands[0]))
3882 if (STACK_TOP_P (operands[1]))
3883 return strcat (buf, AS2C (%y2,%0));
3884 else
3885 return strcat (buf, AS2 (r,%y1,%0));
3887 else if (STACK_TOP_P (operands[1]))
3888 return strcat (buf, AS2C (%1,%0));
3889 else
3890 return strcat (buf, AS2 (r,%2,%0));
3892 default:
3893 abort ();
3897 /* Output code for INSN to convert a float to a signed int. OPERANDS
3898 are the insn operands. The output may be SFmode or DFmode and the
3899 input operand may be SImode or DImode. As a special case, make sure
3900 that the 387 stack top dies if the output mode is DImode, because the
3901 hardware requires this. */
3903 char *
3904 output_fix_trunc (insn, operands)
3905 rtx insn;
3906 rtx *operands;
3908 int stack_top_dies = find_regno_note (insn, REG_DEAD, FIRST_STACK_REG) != 0;
3909 rtx xops[2];
/* The value to truncate must be on the 387 stack top; a DImode store
   additionally requires the popping fistp, hence stack top must die.  */
3911 if (! STACK_TOP_P (operands[1])
3912 || (GET_MODE (operands[0]) == DImode && ! stack_top_dies))
3913 abort ();
/* NOTE(review): the 12 written into the high byte of the saved control
   word presumably sets the rounding-control bits to round-toward-zero
   (truncate) -- confirm against the fix_trunc patterns in i386.md.  */
3915 xops[0] = GEN_INT (12);
3916 xops[1] = operands[4];
/* Save the FPU control word, build a truncating copy of it in scratch
   operands, and load the modified word before the conversion.  */
3918 output_asm_insn (AS1 (fnstc%W2,%2), operands);
3919 output_asm_insn (AS2 (mov%L2,%2,%4), operands);
3920 output_asm_insn (AS2 (mov%B1,%0,%h1), xops);
3921 output_asm_insn (AS2 (mov%L4,%4,%3), operands);
3922 output_asm_insn (AS1 (fldc%W3,%3), operands);
3924 if (NON_STACK_REG_P (operands[0]))
3925 output_to_reg (operands[0], stack_top_dies, operands[3]);
3927 else if (GET_CODE (operands[0]) == MEM)
3929 if (stack_top_dies)
3930 output_asm_insn (AS1 (fistp%z0,%0), operands);
3931 else
3932 output_asm_insn (AS1 (fist%z0,%0), operands);
3934 else
3935 abort ();
/* Finally restore the original control word.  */
3937 return AS1 (fldc%W2,%2);
3940 /* Output code for INSN to compare OPERANDS. The two operands might
3941 not have the same mode: one might be within a FLOAT or FLOAT_EXTEND
3942 expression. If the compare is in mode CCFPEQmode, use an opcode that
3943 will not fault if a qNaN is present. */
3945 char *
3946 output_float_compare (insn, operands)
3947 rtx insn;
3948 rtx *operands;
3950 int stack_top_dies;
3951 rtx body = XVECEXP (PATTERN (insn), 0, 0);
3952 int unordered_compare = GET_MODE (SET_SRC (body)) == CCFPEQmode;
3953 rtx tmp;
/* On CPUs with conditional moves the FCOMI family can set the CPU
   flags directly, skipping the fnstsw/sahf dance.  */
3955 if (TARGET_CMOVE && STACK_REG_P (operands[1]))
3957 cc_status.flags |= CC_FCOMI;
3958 cc_prev_status.flags &= ~CC_TEST_AX;
/* The 387 compares against the stack top; if operand 0 is not already
   there, swap and remember that the condition is reversed.  */
3961 if (! STACK_TOP_P (operands[0]))
3963 tmp = operands[0];
3964 operands[0] = operands[1];
3965 operands[1] = tmp;
3966 cc_status.flags |= CC_REVERSED;
3969 if (! STACK_TOP_P (operands[0]))
3970 abort ();
3972 stack_top_dies = find_regno_note (insn, REG_DEAD, FIRST_STACK_REG) != 0;
3974 if (STACK_REG_P (operands[1])
3975 && stack_top_dies
3976 && find_regno_note (insn, REG_DEAD, REGNO (operands[1]))
3977 && REGNO (operands[1]) != FIRST_STACK_REG)
3979 /* If both the top of the 387 stack dies, and the other operand
3980 is also a stack register that dies, then this must be a
3981 `fcompp' float compare */
3983 if (unordered_compare)
3985 if (cc_status.flags & CC_FCOMI)
/* fucomip pops once; the explicit fstp pops the second dying reg.  */
3987 output_asm_insn (AS2 (fucomip,%y1,%0), operands);
3988 output_asm_insn (AS1 (fstp, %y0), operands);
3989 return "";
3991 else
3992 output_asm_insn ("fucompp", operands);
3994 else
3996 if (cc_status.flags & CC_FCOMI)
3998 output_asm_insn (AS2 (fcomip, %y1,%0), operands);
3999 output_asm_insn (AS1 (fstp, %y0), operands);
4000 return "";
4002 else
4003 output_asm_insn ("fcompp", operands);
4006 else
4008 static char buf[100];
4010 /* Decide if this is the integer or float compare opcode, or the
4011 unordered float compare. */
4013 if (unordered_compare)
4014 strcpy (buf, (cc_status.flags & CC_FCOMI) ? "fucomi" : "fucom");
4015 else if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_FLOAT)
4016 strcpy (buf, (cc_status.flags & CC_FCOMI) ? "fcomi" : "fcom");
4017 else
4018 strcpy (buf, "ficom");
4020 /* Modify the opcode if the 387 stack is to be popped. */
4022 if (stack_top_dies)
4023 strcat (buf, "p");
4025 if (NON_STACK_REG_P (operands[1]))
4026 output_op_from_reg (operands[1], strcat (buf, AS1 (%z0,%1)));
4027 else if (cc_status.flags & CC_FCOMI)
4029 output_asm_insn (strcat (buf, AS2 (%z1,%y1,%0)), operands);
4030 return "";
4032 else
4033 output_asm_insn (strcat (buf, AS1 (%z1,%y1)), operands);
4036 /* Now retrieve the condition code. */
/* The non-FCOMI paths left the result in the 387 status word;
   output_fp_cc0_set emits the code to move it into the CPU flags.  */
4038 return output_fp_cc0_set (insn);
4041 /* Output opcodes to transfer the results of FP compare or test INSN
4042 from the FPU to the CPU flags. If TARGET_IEEE_FP, ensure that if the
4043 result of the compare or test is unordered, no comparison operator
4044 succeeds except NE. Return an output template, if any. */
4046 char *
4047 output_fp_cc0_set (insn)
4048 rtx insn;
4050 rtx xops[3];
4051 rtx unordered_label;
4052 rtx next;
4053 enum rtx_code code;
/* fnstsw: store the 387 status word into AX (register 0, HImode).
   The condition bits land in AH.  */
4055 xops[0] = gen_rtx_REG (HImode, 0);
4056 output_asm_insn (AS1 (fnsts%W0,%0), xops);
/* Without IEEE conformance a plain sahf suffices; if the following
   cc0 user is a simple signed comparison we can even test AH/AX
   directly and emit nothing here.  */
4058 if (! TARGET_IEEE_FP)
4060 if (!(cc_status.flags & CC_REVERSED))
4062 next = next_cc0_user (insn);
4064 if (GET_CODE (next) == JUMP_INSN
4065 && GET_CODE (PATTERN (next)) == SET
4066 && SET_DEST (PATTERN (next)) == pc_rtx
4067 && GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
4068 code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
4069 else if (GET_CODE (PATTERN (next)) == SET)
4070 code = GET_CODE (SET_SRC (PATTERN (next)));
4071 else
4072 return "sahf";
4074 if (code == GT || code == LT || code == EQ || code == NE
4075 || code == LE || code == GE)
4077 /* We will test eax directly. */
4078 cc_status.flags |= CC_TEST_AX;
4079 return "";
4083 return "sahf";
/* IEEE path: find the comparison code the next cc0 user applies, so
   the condition bits can be massaged to make unordered results fail
   every operator except NE.  */
4086 next = next_cc0_user (insn);
4087 if (next == NULL_RTX)
4088 abort ();
4090 if (GET_CODE (next) == JUMP_INSN
4091 && GET_CODE (PATTERN (next)) == SET
4092 && SET_DEST (PATTERN (next)) == pc_rtx
4093 && GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
4094 code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
4095 else if (GET_CODE (PATTERN (next)) == SET)
4097 if (GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
4098 code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
4099 else
4100 code = GET_CODE (SET_SRC (PATTERN (next)));
4103 else if (GET_CODE (PATTERN (next)) == PARALLEL
4104 && GET_CODE (XVECEXP (PATTERN (next), 0, 0)) == SET)
4106 if (GET_CODE (SET_SRC (XVECEXP (PATTERN (next), 0, 0))) == IF_THEN_ELSE)
4107 code = GET_CODE (XEXP (SET_SRC (XVECEXP (PATTERN (next), 0, 0)), 0));
4108 else
4109 code = GET_CODE (SET_SRC (XVECEXP (PATTERN (next), 0, 0)));
4111 else
4112 abort ();
/* Work on AH (QImode register 0 via %h0).  The masks below select
   387 condition bits in AH: presumably C0=0x01, C2=0x04, C3=0x40
   per the fnstsw layout -- verify against the 387 status word spec.  */
4114 xops[0] = gen_rtx_REG (QImode, 0);
4116 switch (code)
4118 case GT:
4119 xops[1] = GEN_INT (0x45);
4120 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
4121 /* je label */
4122 break;
4124 case LT:
4125 xops[1] = GEN_INT (0x45);
4126 xops[2] = GEN_INT (0x01);
4127 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
4128 output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
4129 /* je label */
4130 break;
4132 case GE:
4133 xops[1] = GEN_INT (0x05);
4134 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
4135 /* je label */
4136 break;
4138 case LE:
4139 xops[1] = GEN_INT (0x45);
4140 xops[2] = GEN_INT (0x40);
4141 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
4142 output_asm_insn (AS1 (dec%B0,%h0), xops);
4143 output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
4144 /* jb label */
4145 break;
4147 case EQ:
4148 xops[1] = GEN_INT (0x45);
4149 xops[2] = GEN_INT (0x40);
4150 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
4151 output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
4152 /* je label */
4153 break;
4155 case NE:
4156 xops[1] = GEN_INT (0x44);
4157 xops[2] = GEN_INT (0x40);
4158 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
4159 output_asm_insn (AS2 (xor%B0,%2,%h0), xops);
4160 /* jne label */
4161 break;
4163 case GTU:
4164 case LTU:
4165 case GEU:
4166 case LEU:
/* Unsigned comparisons are never produced for FP compares here.  */
4167 default:
4168 abort ();
4171 return "";
/* Number of reusable per-function stack slots available per machine mode.  */
4174 #define MAX_386_STACK_LOCALS 2
/* Slot cache for the function currently being compiled; indexed by
   machine mode and slot number, entries are created lazily by
   assign_386_stack_local and cleared by clear_386_stack_locals.  */
4176 static rtx i386_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];
4178 /* Define the structure for the machine field in struct function. */
4179 struct machine_function
/* Saved copy of the slot cache, used across nested-function contexts.  */
4181 rtx i386_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];
4184 /* Functions to save and restore i386_stack_locals.
4185 These will be called, via pointer variables,
4186 from push_function_context and pop_function_context. */
4188 void
4189 save_386_machine_status (p)
4190 struct function *p;
4192 p->machine = (struct machine_function *) xmalloc (sizeof i386_stack_locals);
4193 bcopy ((char *) i386_stack_locals, (char *) p->machine->i386_stack_locals,
4194 sizeof i386_stack_locals);
4197 void
4198 restore_386_machine_status (p)
4199 struct function *p;
4201 bcopy ((char *) p->machine->i386_stack_locals, (char *) i386_stack_locals,
4202 sizeof i386_stack_locals);
4203 free (p->machine);
4206 /* Clear stack slot assignments remembered from previous functions.
4207 This is called from INIT_EXPANDERS once before RTL is emitted for each
4208 function. */
4210 void
4211 clear_386_stack_locals ()
4213 enum machine_mode mode;
4214 int n;
4216 for (mode = VOIDmode; (int) mode < (int) MAX_MACHINE_MODE;
4217 mode = (enum machine_mode) ((int) mode + 1))
4218 for (n = 0; n < MAX_386_STACK_LOCALS; n++)
4219 i386_stack_locals[(int) mode][n] = NULL_RTX;
4221 /* Arrange to save and restore i386_stack_locals around nested functions. */
4222 save_machine_status = save_386_machine_status;
4223 restore_machine_status = restore_386_machine_status;
4226 /* Return a MEM corresponding to a stack slot with mode MODE.
4227 Allocate a new slot if necessary.
4229 The RTL for a function can have several slots available: N is
4230 which slot to use. */
4233 assign_386_stack_local (mode, n)
4234 enum machine_mode mode;
4235 int n;
4237 if (n < 0 || n >= MAX_386_STACK_LOCALS)
4238 abort ();
4240 if (i386_stack_locals[(int) mode][n] == NULL_RTX)
4241 i386_stack_locals[(int) mode][n]
4242 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
4244 return i386_stack_locals[(int) mode][n];
4247 int is_mul(op,mode)
4248 register rtx op;
4249 enum machine_mode mode;
4251 return (GET_CODE (op) == MULT);
4254 int is_div(op,mode)
4255 register rtx op;
4256 enum machine_mode mode;
4258 return (GET_CODE (op) == DIV);
4261 #ifdef NOTYET
4262 /* Create a new copy of an rtx.
4263 Recursively copies the operands of the rtx,
4264 except for those few rtx codes that are sharable.
4265 Doesn't share CONST */
4268 copy_all_rtx (orig)
4269 register rtx orig;
4271 register rtx copy;
4272 register int i, j;
4273 register RTX_CODE code;
4274 register char *format_ptr;
4276 code = GET_CODE (orig);
/* Codes that represent unique/shared objects are returned as-is
   rather than copied.  */
4278 switch (code)
4280 case REG:
4281 case QUEUED:
4282 case CONST_INT:
4283 case CONST_DOUBLE:
4284 case SYMBOL_REF:
4285 case CODE_LABEL:
4286 case PC:
4287 case CC0:
4288 case SCRATCH:
4289 /* SCRATCH must be shared because they represent distinct values. */
4290 return orig;
4292 #if 0
4293 case CONST:
4294 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
4295 a LABEL_REF, it isn't sharable. */
4296 if (GET_CODE (XEXP (orig, 0)) == PLUS
4297 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
4298 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
4299 return orig;
4300 break;
4301 #endif
4302 /* A MEM with a constant address is not sharable. The problem is that
4303 the constant address may need to be reloaded. If the mem is shared,
4304 then reloading one copy of this mem will cause all copies to appear
4305 to have been reloaded. */
/* Allocate the copy and transfer the mode plus all status bits.  */
4308 copy = rtx_alloc (code);
4309 PUT_MODE (copy, GET_MODE (orig));
4310 copy->in_struct = orig->in_struct;
4311 copy->volatil = orig->volatil;
4312 copy->unchanging = orig->unchanging;
4313 copy->integrated = orig->integrated;
4314 /* intel1 */
4315 copy->is_spill_rtx = orig->is_spill_rtx;
/* Copy each operand according to its slot type in the rtx format
   string ('e' = expression, 'E'/'V' = vector, etc.).  Note that the
   recursion below goes through copy_rtx, not copy_all_rtx.  */
4317 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
4319 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
4321 switch (*format_ptr++)
4323 case 'e':
4324 XEXP (copy, i) = XEXP (orig, i);
4325 if (XEXP (orig, i) != NULL)
4326 XEXP (copy, i) = copy_rtx (XEXP (orig, i));
4327 break;
4329 case '0':
4330 case 'u':
4331 XEXP (copy, i) = XEXP (orig, i);
4332 break;
4334 case 'E':
4335 case 'V':
4336 XVEC (copy, i) = XVEC (orig, i);
4337 if (XVEC (orig, i) != NULL)
4339 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
4340 for (j = 0; j < XVECLEN (copy, i); j++)
4341 XVECEXP (copy, i, j) = copy_rtx (XVECEXP (orig, i, j));
4343 break;
4345 case 'w':
4346 XWINT (copy, i) = XWINT (orig, i);
4347 break;
4349 case 'i':
4350 XINT (copy, i) = XINT (orig, i);
4351 break;
4353 case 's':
4354 case 'S':
4355 XSTR (copy, i) = XSTR (orig, i);
4356 break;
4358 default:
4359 abort ();
4362 return copy;
4366 /* Try to rewrite a memory address to make it valid */
4368 void
4369 rewrite_address (mem_rtx)
4370 rtx mem_rtx;
4372 rtx index_rtx, base_rtx, offset_rtx, scale_rtx, ret_rtx;
4373 int scale = 1;
4374 int offset_adjust = 0;
4375 int was_only_offset = 0;
4376 rtx mem_addr = XEXP (mem_rtx, 0);
4377 char *storage = oballoc (0);
4378 int in_struct = 0;
4379 int is_spill_rtx = 0;
4381 in_struct = MEM_IN_STRUCT_P (mem_rtx);
4382 is_spill_rtx = RTX_IS_SPILL_P (mem_rtx);
/* First attempt: re-associate (a + (r + c)) into ((a + r) + c),
   which is what the combiner tends to produce.  */
4384 if (GET_CODE (mem_addr) == PLUS
4385 && GET_CODE (XEXP (mem_addr, 1)) == PLUS
4386 && GET_CODE (XEXP (XEXP (mem_addr, 1), 0)) == REG)
4388 /* This part is utilized by the combiner. */
4389 ret_rtx
4390 = gen_rtx_PLUS (GET_MODE (mem_addr),
4391 gen_rtx_PLUS (GET_MODE (XEXP (mem_addr, 1)),
4392 XEXP (mem_addr, 0),
4393 XEXP (XEXP (mem_addr, 1), 0)),
4394 XEXP (XEXP (mem_addr, 1), 1));
4396 if (memory_address_p (GET_MODE (mem_rtx), ret_rtx))
4398 XEXP (mem_rtx, 0) = ret_rtx;
4399 RTX_IS_SPILL_P (ret_rtx) = is_spill_rtx;
4400 return;
/* The re-associated form was invalid; discard the obstack garbage.  */
4403 obfree (storage);
4406 /* This part is utilized by loop.c.
4407 If the address contains PLUS (reg,const) and this pattern is invalid
4408 in this case - try to rewrite the address to make it valid. */
4409 storage = oballoc (0);
4410 index_rtx = base_rtx = offset_rtx = NULL;
4412 /* Find the base index and offset elements of the memory address. */
4413 if (GET_CODE (mem_addr) == PLUS)
4415 if (GET_CODE (XEXP (mem_addr, 0)) == REG)
4417 if (GET_CODE (XEXP (mem_addr, 1)) == REG)
4418 base_rtx = XEXP (mem_addr, 1), index_rtx = XEXP (mem_addr, 0);
4419 else
4420 base_rtx = XEXP (mem_addr, 0), offset_rtx = XEXP (mem_addr, 1);
4423 else if (GET_CODE (XEXP (mem_addr, 0)) == MULT)
4425 index_rtx = XEXP (mem_addr, 0);
4426 if (GET_CODE (XEXP (mem_addr, 1)) == REG)
4427 base_rtx = XEXP (mem_addr, 1);
4428 else
4429 offset_rtx = XEXP (mem_addr, 1);
4432 else if (GET_CODE (XEXP (mem_addr, 0)) == PLUS
/* Nested form: (((r * c1) + c2) + r2) + sym.  Pull out index, base,
   symbolic offset and the constant adjustment.  */
4434 if (GET_CODE (XEXP (XEXP (mem_addr, 0), 0)) == PLUS
4435 && GET_CODE (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0)) == MULT
4436 && (GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0), 0))
4437 == REG)
4438 && (GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0), 1))
4439 == CONST_INT)
4440 && (GET_CODE (XEXP (XEXP (XEXP (mem_addr, 0), 0), 1))
4441 == CONST_INT)
4442 && GET_CODE (XEXP (XEXP (mem_addr, 0), 1)) == REG
4443 && GET_CODE (XEXP (mem_addr, 1)) == SYMBOL_REF
4445 index_rtx = XEXP (XEXP (XEXP (mem_addr, 0), 0), 0);
4446 offset_rtx = XEXP (mem_addr, 1);
4447 base_rtx = XEXP (XEXP (mem_addr, 0), 1);
4448 offset_adjust = INTVAL (XEXP (XEXP (XEXP (mem_addr, 0), 0), 1));
4450 else
4452 offset_rtx = XEXP (mem_addr, 1);
4453 index_rtx = XEXP (XEXP (mem_addr, 0), 0);
4454 base_rtx = XEXP (XEXP (mem_addr, 0), 1);
4458 else if (GET_CODE (XEXP (mem_addr, 0)) == CONST_INT
4460 was_only_offset = 1;
4461 index_rtx = NULL;
4462 base_rtx = NULL;
4463 offset_rtx = XEXP (mem_addr, 1);
4464 offset_adjust = INTVAL (XEXP (mem_addr, 0));
4465 if (offset_adjust == 0)
4467 XEXP (mem_rtx, 0) = offset_rtx;
4468 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
4469 return;
4472 else
4474 obfree (storage);
4475 return;
4478 else if (GET_CODE (mem_addr) == MULT)
4479 index_rtx = mem_addr;
4480 else
4482 obfree (storage);
4483 return;
/* Extract the scale factor from a (mult reg const) index.  */
4486 if (index_rtx != 0 && GET_CODE (index_rtx) == MULT)
4488 if (GET_CODE (XEXP (index_rtx, 1)) != CONST_INT)
4490 obfree (storage);
4491 return;
4494 scale_rtx = XEXP (index_rtx, 1);
4495 scale = INTVAL (scale_rtx);
4496 index_rtx = copy_all_rtx (XEXP (index_rtx, 0));
4499 /* Now find which of the elements are invalid and try to fix them. */
4500 if (index_rtx && GET_CODE (index_rtx) == CONST_INT && base_rtx == NULL)
4502 offset_adjust = INTVAL (index_rtx) * scale;
4504 if (offset_rtx != 0 && CONSTANT_P (offset_rtx))
4505 offset_rtx = plus_constant (offset_rtx, offset_adjust);
4506 else if (offset_rtx == 0)
4507 offset_rtx = const0_rtx;
4509 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
4510 XEXP (mem_rtx, 0) = offset_rtx;
4511 return;
/* Fold constant displacements found inside base/index into
   offset_adjust, leaving bare registers behind.  */
4514 if (base_rtx && GET_CODE (base_rtx) == PLUS
4515 && GET_CODE (XEXP (base_rtx, 0)) == REG
4516 && GET_CODE (XEXP (base_rtx, 1)) == CONST_INT)
4518 offset_adjust += INTVAL (XEXP (base_rtx, 1));
4519 base_rtx = copy_all_rtx (XEXP (base_rtx, 0));
4522 else if (base_rtx && GET_CODE (base_rtx) == CONST_INT)
4524 offset_adjust += INTVAL (base_rtx);
4525 base_rtx = NULL;
4528 if (index_rtx && GET_CODE (index_rtx) == PLUS
4529 && GET_CODE (XEXP (index_rtx, 0)) == REG
4530 && GET_CODE (XEXP (index_rtx, 1)) == CONST_INT)
4532 offset_adjust += INTVAL (XEXP (index_rtx, 1)) * scale;
4533 index_rtx = copy_all_rtx (XEXP (index_rtx, 0));
/* Give up if the index or base register cannot legally appear in an
   i386 address (e.g. esp as a scaled index).  */
4536 if (index_rtx)
4538 if (! LEGITIMATE_INDEX_P (index_rtx)
4539 && ! (index_rtx == stack_pointer_rtx && scale == 1
4540 && base_rtx == NULL))
4542 obfree (storage);
4543 return;
4547 if (base_rtx)
4549 if (! LEGITIMATE_INDEX_P (base_rtx) && GET_CODE (base_rtx) != REG)
4551 obfree (storage);
4552 return;
/* Reassemble the address from index*scale + base + offset, omitting
   zero offsets, and install it in the MEM.  */
4556 if (offset_adjust != 0)
4558 if (offset_rtx != 0 && CONSTANT_P (offset_rtx))
4559 offset_rtx = plus_constant (offset_rtx, offset_adjust)
4560 else
4561 offset_rtx = const0_rtx;
4563 if (index_rtx)
4565 if (base_rtx)
4567 if (scale != 1)
4569 ret_rtx = gen_rtx_PLUS (GET_MODE (base_rtx),
4570 gen_rtx_MULT (GET_MODE (index_rtx),
4571 index_rtx, scale_rtx),
4572 base_rtx);
4574 if (GET_CODE (offset_rtx) != CONST_INT
4575 || INTVAL (offset_rtx) != 0)
4576 ret_rtx = gen_rtx_PLUS (GET_MODE (ret_rtx),
4577 ret_rtx, offset_rtx);
4579 else
4581 ret_rtx = gen_rtx_PLUS (GET_MODE (index_rtx),
4582 index_rtx, base_rtx);
4584 if (GET_CODE (offset_rtx) != CONST_INT
4585 || INTVAL (offset_rtx) != 0)
4586 ret_rtx = gen_rtx_PLUS (GET_MODE (ret_rtx),
4587 ret_rtx, offset_rtx);
4590 else
4592 if (scale != 1)
4594 ret_rtx = gen_rtx_MULT (GET_MODE (index_rtx),
4595 index_rtx, scale_rtx);
4597 if (GET_CODE (offset_rtx) != CONST_INT
4598 || INTVAL (offset_rtx) != 0)
4599 ret_rtx = gen_rtx_PLUS (GET_MODE (ret_rtx),
4600 ret_rtx, offset_rtx);
4602 else
4604 if (GET_CODE (offset_rtx) == CONST_INT
4605 && INTVAL (offset_rtx) == 0)
4606 ret_rtx = index_rtx;
4607 else
4608 ret_rtx = gen_rtx (PLUS, GET_MODE (index_rtx),
4609 index_rtx, offset_rtx);
4613 else
4615 if (base_rtx)
4617 if (GET_CODE (offset_rtx) == CONST_INT
4618 && INTVAL (offset_rtx) == 0)
4619 ret_rtx = base_rtx;
4620 else
4621 ret_rtx = gen_rtx (PLUS, GET_MODE (base_rtx), base_rtx,
4622 offset_rtx);
4624 else if (was_only_offset)
4625 ret_rtx = offset_rtx;
4626 else
4628 obfree (storage);
4629 return;
4633 XEXP (mem_rtx, 0) = ret_rtx;
4634 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
4635 return;
4637 else
4639 obfree (storage);
4640 return;
4643 #endif /* NOTYET */
4645 /* Return 1 if the first insn to set cc before INSN also sets the register
4646 REG_RTX; otherwise return 0. */
4648 last_to_set_cc (reg_rtx, insn)
4649 rtx reg_rtx, insn;
4651 rtx prev_insn = PREV_INSN (insn);
/* Scan backwards; NOTEs are skipped, and the first real INSN found
   decides the answer.  Anything else (label, jump, call) stops the
   scan with 0.  */
4653 while (prev_insn)
4655 if (GET_CODE (prev_insn) == NOTE)
4658 else if (GET_CODE (prev_insn) == INSN)
4660 if (GET_CODE (PATTERN (prev_insn)) != SET)
4661 return (0);
4663 if (rtx_equal_p (SET_DEST (PATTERN (prev_insn)), reg_rtx))
/* The previous insn sets REG_RTX: succeed only if its source
   expression is one that sets the condition code.  */
4665 if (sets_condition_code (SET_SRC (PATTERN (prev_insn))))
4666 return (1);
4668 return (0);
/* An intervening insn that may clobber cc ends the search.  */
4671 else if (! doesnt_set_condition_code (SET_SRC (PATTERN (prev_insn))))
4672 return (0);
4675 else
4676 return (0);
4678 prev_insn = PREV_INSN (prev_insn);
4681 return (0);
4685 doesnt_set_condition_code (pat)
4686 rtx pat;
4688 switch (GET_CODE (pat))
4690 case MEM:
4691 case REG:
4692 return 1;
4694 default:
4695 return 0;
4701 sets_condition_code (pat)
4702 rtx pat;
4704 switch (GET_CODE (pat))
4706 case PLUS:
4707 case MINUS:
4708 case AND:
4709 case IOR:
4710 case XOR:
4711 case NOT:
4712 case NEG:
4713 case MULT:
4714 case DIV:
4715 case MOD:
4716 case UDIV:
4717 case UMOD:
4718 return 1;
4720 default:
4721 return (0);
4726 str_immediate_operand (op, mode)
4727 register rtx op;
4728 enum machine_mode mode;
4730 if (GET_CODE (op) == CONST_INT && INTVAL (op) <= 32 && INTVAL (op) >= 0)
4731 return 1;
4733 return 0;
4737 is_fp_insn (insn)
4738 rtx insn;
4740 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4741 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4742 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4743 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode))
4744 return 1;
4746 return 0;
4749 /* Return 1 if the mode of the SET_DEST of insn is floating point
4750 and it is not an fld or a move from memory to memory.
4751 Otherwise return 0 */
4754 is_fp_dest (insn)
4755 rtx insn;
4757 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4758 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4759 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4760 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode)
4761 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
4762 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_FLOAT_REG
4763 && GET_CODE (SET_SRC (insn)) != MEM)
4764 return 1;
4766 return 0;
4769 /* Return 1 if the mode of the SET_DEST of INSN is floating point and is
4770 memory and the source is a register. */
4773 is_fp_store (insn)
4774 rtx insn;
4776 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4777 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4778 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4779 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode)
4780 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM
4781 && GET_CODE (SET_SRC (PATTERN (insn))) == REG)
4782 return 1;
4784 return 0;
4787 /* Return 1 if DEP_INSN sets a register which INSN uses as a base
4788 or index to reference memory.
4789 otherwise return 0 */
4792 agi_dependent (insn, dep_insn)
4793 rtx insn, dep_insn;
4795 if (GET_CODE (dep_insn) == INSN
4796 && GET_CODE (PATTERN (dep_insn)) == SET
4797 && GET_CODE (SET_DEST (PATTERN (dep_insn))) == REG)
4798 return reg_mentioned_in_mem (SET_DEST (PATTERN (dep_insn)), insn);
4800 if (GET_CODE (dep_insn) == INSN && GET_CODE (PATTERN (dep_insn)) == SET
4801 && GET_CODE (SET_DEST (PATTERN (dep_insn))) == MEM
4802 && push_operand (SET_DEST (PATTERN (dep_insn)),
4803 GET_MODE (SET_DEST (PATTERN (dep_insn)))))
4804 return reg_mentioned_in_mem (stack_pointer_rtx, insn);
4806 return 0;
4809 /* Return 1 if reg is used in rtl as a base or index for a memory ref
4810 otherwise return 0. */
4813 reg_mentioned_in_mem (reg, rtl)
4814 rtx reg, rtl;
4816 register char *fmt;
4817 register int i, j;
4818 register enum rtx_code code;
4820 if (rtl == NULL)
4821 return 0;
4823 code = GET_CODE (rtl);
4825 switch (code)
4827 case HIGH:
4828 case CONST_INT:
4829 case CONST:
4830 case CONST_DOUBLE:
4831 case SYMBOL_REF:
4832 case LABEL_REF:
4833 case PC:
4834 case CC0:
4835 case SUBREG:
4836 return 0;
4839 if (code == MEM && reg_mentioned_p (reg, rtl))
4840 return 1;
4842 fmt = GET_RTX_FORMAT (code);
4843 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4845 if (fmt[i] == 'E')
4846 for (j = XVECLEN (rtl, i) - 1; j >= 0; j--)
4847 if (reg_mentioned_in_mem (reg, XVECEXP (rtl, i, j)))
4848 return 1;
4850 else if (fmt[i] == 'e' && reg_mentioned_in_mem (reg, XEXP (rtl, i)))
4851 return 1;
4854 return 0;
4857 /* Output the appropriate insns for doing strlen if not just doing repnz; scasb
4859 operands[0] = result, initialized with the startaddress
4860 operands[1] = alignment of the address.
4861 operands[2] = scratch register, initialized with the startaddress when
4862 not aligned, otherwise undefined
4864 This is just the body. It needs the initialisations mentioned above and
4865 some address computing at the end. These things are done in i386.md. */
4867 char *
4868 output_strlen_unroll (operands)
4869 rtx operands[];
4871 rtx xops[18];
4873 xops[0] = operands[0]; /* Result */
4874 /* operands[1]; * Alignment */
4875 xops[1] = operands[2]; /* Scratch */
4876 xops[2] = GEN_INT (0);
4877 xops[3] = GEN_INT (2);
4878 xops[4] = GEN_INT (3);
4879 xops[5] = GEN_INT (4);
4880 /* xops[6] = gen_label_rtx (); * label when aligned to 3-byte */
4881 /* xops[7] = gen_label_rtx (); * label when aligned to 2-byte */
4882 xops[8] = gen_label_rtx (); /* label of main loop */
4884 if (TARGET_USE_Q_REG && QI_REG_P (xops[1]))
4885 xops[9] = gen_label_rtx (); /* pentium optimisation */
4887 xops[10] = gen_label_rtx (); /* end label 2 */
4888 xops[11] = gen_label_rtx (); /* end label 1 */
4889 xops[12] = gen_label_rtx (); /* end label */
4890 /* xops[13] * Temporary used */
4891 xops[14] = GEN_INT (0xff);
4892 xops[15] = GEN_INT (0xff00);
4893 xops[16] = GEN_INT (0xff0000);
4894 xops[17] = GEN_INT (0xff000000);
4896 /* Loop to check 1..3 bytes for null to get an aligned pointer. */
4898 /* Is there a known alignment and is it less than 4? */
4899 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) < 4)
4901 /* Is there a known alignment and is it not 2? */
4902 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) != 2)
4904 xops[6] = gen_label_rtx (); /* Label when aligned to 3-byte */
4905 xops[7] = gen_label_rtx (); /* Label when aligned to 2-byte */
4907 /* Leave just the 3 lower bits.
4908 If this is a q-register, then the high part is used later
4909 therefore use andl rather than andb. */
4910 output_asm_insn (AS2 (and%L1,%4,%1), xops);
4912 /* Is aligned to 4-byte address when zero */
4913 output_asm_insn (AS1 (je,%l8), xops);
4915 /* Side-effect even Parity when %eax == 3 */
4916 output_asm_insn (AS1 (jp,%6), xops);
4918 /* Is it aligned to 2 bytes ? */
4919 if (QI_REG_P (xops[1]))
4920 output_asm_insn (AS2 (cmp%L1,%3,%1), xops);
4921 else
4922 output_asm_insn (AS2 (cmp%L1,%3,%1), xops);
4924 output_asm_insn (AS1 (je,%7), xops);
4926 else
4928 /* Since the alignment is 2, we have to check 2 or 0 bytes;
4929 check if is aligned to 4 - byte. */
4930 output_asm_insn (AS2 (and%L1,%3,%1), xops);
4932 /* Is aligned to 4-byte address when zero */
4933 output_asm_insn (AS1 (je,%l8), xops);
4936 xops[13] = gen_rtx (MEM, QImode, xops[0]);
4938 /* Now compare the bytes; compare with the high part of a q-reg
4939 gives shorter code. */
4940 if (QI_REG_P (xops[1]))
4942 /* Compare the first n unaligned byte on a byte per byte basis. */
4943 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
4945 /* When zero we reached the end. */
4946 output_asm_insn (AS1 (je,%l12), xops);
4948 /* Increment the address. */
4949 output_asm_insn (AS1 (inc%L0,%0), xops);
4951 /* Not needed with an alignment of 2 */
4952 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) != 2)
4954 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
4955 CODE_LABEL_NUMBER (xops[7]));
4956 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
4957 output_asm_insn (AS1 (je,%l12), xops);
4958 output_asm_insn (AS1 (inc%L0,%0), xops);
4960 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
4961 CODE_LABEL_NUMBER (xops[6]));
4964 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
4966 else
4968 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
4969 output_asm_insn (AS1 (je,%l12), xops);
4970 output_asm_insn (AS1 (inc%L0,%0), xops);
4972 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
4973 CODE_LABEL_NUMBER (xops[7]));
4974 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
4975 output_asm_insn (AS1 (je,%l12), xops);
4976 output_asm_insn (AS1 (inc%L0,%0), xops);
4978 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
4979 CODE_LABEL_NUMBER (xops[6]));
4980 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
4983 output_asm_insn (AS1 (je,%l12), xops);
4984 output_asm_insn (AS1 (inc%L0,%0), xops);
4987 /* Generate loop to check 4 bytes at a time. It is not a good idea to
4988 align this loop. It gives only huge programs, but does not help to
4989 speed up. */
4990 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[8]));
4992 xops[13] = gen_rtx (MEM, SImode, xops[0]);
4993 output_asm_insn (AS2 (mov%L1,%13,%1), xops);
4995 if (QI_REG_P (xops[1]))
 4997       /* On the i586 it is faster to combine the hi- and lo- parts as
 4998          a kind of lookahead.  If ANDing both yields zero, then one
 4999          of the two *could* be zero; otherwise neither of them is zero.
 5000          This saves one instruction; on the i486 it is slower
 5001          (tested with P-90, i486DX2-66, AMD486DX2-66).  */
5002 if (TARGET_PENTIUM)
5004 output_asm_insn (AS2 (test%B1,%h1,%b1), xops);
5005 output_asm_insn (AS1 (jne,%l9), xops);
5008 /* Check first byte. */
5009 output_asm_insn (AS2 (test%B1,%b1,%b1), xops);
5010 output_asm_insn (AS1 (je,%l12), xops);
5012 /* Check second byte. */
5013 output_asm_insn (AS2 (test%B1,%h1,%h1), xops);
5014 output_asm_insn (AS1 (je,%l11), xops);
5016 if (TARGET_PENTIUM)
5017 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
5018 CODE_LABEL_NUMBER (xops[9]));
5021 else
5023 /* Check first byte. */
5024 output_asm_insn (AS2 (test%L1,%14,%1), xops);
5025 output_asm_insn (AS1 (je,%l12), xops);
5027 /* Check second byte. */
5028 output_asm_insn (AS2 (test%L1,%15,%1), xops);
5029 output_asm_insn (AS1 (je,%l11), xops);
5032 /* Check third byte. */
5033 output_asm_insn (AS2 (test%L1,%16,%1), xops);
5034 output_asm_insn (AS1 (je,%l10), xops);
5036 /* Check fourth byte and increment address. */
5037 output_asm_insn (AS2 (add%L0,%5,%0), xops);
5038 output_asm_insn (AS2 (test%L1,%17,%1), xops);
5039 output_asm_insn (AS1 (jne,%l8), xops);
5041 /* Now generate fixups when the compare stops within a 4-byte word. */
5042 output_asm_insn (AS2 (sub%L0,%4,%0), xops);
5044 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[10]));
5045 output_asm_insn (AS1 (inc%L0,%0), xops);
5047 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[11]));
5048 output_asm_insn (AS1 (inc%L0,%0), xops);
5050 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[12]));
5052 return "";