1 /* Subroutines used for code generation on Ubicom IP2022
2 Communications Controller.
3 Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005
4 Free Software Foundation, Inc.
5 Contributed by Red Hat, Inc and Ubicom, Inc.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify
10 it under the terms of the GNU General Public License as published by
11 the Free Software Foundation; either version 2, or (at your option)
any later version.
14 GCC is distributed in the hope that it will be useful,
15 but WITHOUT ANY WARRANTY; without even the implied warranty of
16 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 GNU General Public License for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to
21 the Free Software Foundation, 59 Temple Place - Suite 330,
22 Boston, MA 02111-1307, USA. */
26 #include "coretypes.h"
30 #include "hard-reg-set.h"
32 #include "insn-config.h"
33 #include "conditions.h"
34 #include "insn-flags.h"
36 #include "insn-attr.h"
37 #include "insn-addr.h"
49 #include "target-def.h"
50 #include "basic-block.h"
52 /* There are problems with 'frame_pointer_needed'. If we force it
53 on, we either end up not eliminating uses of FP, which results in
54 SPILL register failures or we may end up with calculation errors in
55 the stack offsets. Isolate the decision process into a simple macro. */
56 #define CHAIN_FRAMES (frame_pointer_needed || FRAME_POINTER_REQUIRED)
58 static int ip2k_naked_function_p (tree
);
59 #ifdef IP2K_MD_REORG_PASS
60 static void mdr_resequence_xy_yx (rtx
);
61 static void mdr_pres_replace_and_recurse (rtx
, rtx
, rtx
);
62 static void mdr_propagate_reg_equivs_sequence (rtx
, rtx
, rtx
);
63 static void mdr_propagate_reg_equivs (rtx
);
64 static int track_dp_reload (rtx
, rtx
*, int , int);
65 static void mdr_try_dp_reload_elim (rtx
);
66 static void mdr_try_move_dp_reload (rtx
);
67 static void mdr_try_move_pushes (rtx
);
68 static void mdr_try_propagate_clr_sequence (rtx
, unsigned int);
69 static void mdr_try_propagate_clr (rtx
);
70 static void mdr_try_propagate_move_sequence (rtx
, rtx
, rtx
);
71 static void mdr_try_propagate_move (rtx
);
72 static void mdr_try_remove_redundant_insns (rtx
);
73 static int track_w_reload (rtx
, rtx
*, int , int);
74 static void mdr_try_wreg_elim (rtx
);
75 #endif /* IP2K_MD_REORG_PASS */
76 static void ip2k_reorg (void);
77 static int ip2k_check_can_adjust_stack_ref (rtx
, int);
78 static void ip2k_adjust_stack_ref (rtx
*, int);
79 static int ip2k_xexp_not_uses_reg_for_mem (rtx
, unsigned int);
80 static tree
ip2k_handle_progmem_attribute (tree
*, tree
, tree
, int, bool *);
81 static tree
ip2k_handle_fndecl_attribute (tree
*, tree
, tree
, int, bool *);
82 static bool ip2k_rtx_costs (rtx
, int, int, int *);
83 static int ip2k_address_cost (rtx
);
84 static void ip2k_init_libfuncs (void);
85 static bool ip2k_return_in_memory (tree
, tree
);
86 static void ip2k_setup_incoming_varargs (CUMULATIVE_ARGS
*, enum machine_mode
,
89 const struct attribute_spec ip2k_attribute_table
[];
92 /* Initialize the GCC target structure. */
93 #undef TARGET_ASM_ALIGNED_HI_OP
94 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
96 #undef TARGET_ASM_FUNCTION_PROLOGUE
97 #define TARGET_ASM_FUNCTION_PROLOGUE function_prologue
99 #undef TARGET_ASM_FUNCTION_EPILOGUE
100 #define TARGET_ASM_FUNCTION_EPILOGUE function_epilogue
102 #undef TARGET_ASM_UNIQUE_SECTION
103 #define TARGET_ASM_UNIQUE_SECTION unique_section
105 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
106 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
108 #undef TARGET_ATTRIBUTE_TABLE
109 #define TARGET_ATTRIBUTE_TABLE ip2k_attribute_table
111 #undef TARGET_RTX_COSTS
112 #define TARGET_RTX_COSTS ip2k_rtx_costs
113 #undef TARGET_ADDRESS_COST
114 #define TARGET_ADDRESS_COST ip2k_address_cost
116 #undef TARGET_MACHINE_DEPENDENT_REORG
117 #define TARGET_MACHINE_DEPENDENT_REORG ip2k_reorg
119 #undef TARGET_INIT_LIBFUNCS
120 #define TARGET_INIT_LIBFUNCS ip2k_init_libfuncs
122 #undef TARGET_RETURN_IN_MEMORY
123 #define TARGET_RETURN_IN_MEMORY ip2k_return_in_memory
125 #undef TARGET_SETUP_INCOMING_VARARGS
126 #define TARGET_SETUP_INCOMING_VARARGS ip2k_setup_incoming_varargs
128 struct gcc_target targetm
= TARGET_INITIALIZER
;
130 /* Prologue/Epilogue size in words. */
131 static int prologue_size
;
132 static int epilogue_size
;
134 /* compare and test instructions for the IP2K are materialized by
135 the conditional branch that uses them. This is because conditional
136 branches are skips over unconditional branches. */
137 rtx ip2k_compare_operands
[3]; /* Additional operands for condition code. */
138 int ip2k_test_flag
; /* Indicates Z, WREG contain condition code
141 /* Some ip2k patterns push a byte onto the stack and then access
142 SP-relative addresses. Since reload doesn't know about these
143 pushes, we must track them internally with a %< (push) or %> (pop)
145 static int ip2k_stack_delta
;
147 /* Track if or how far our ip2k reorganization pass has run. */
148 int ip2k_reorg_in_progress
= 0;
149 int ip2k_reorg_completed
= 0;
150 int ip2k_reorg_split_dimode
= 0;
151 int ip2k_reorg_split_simode
= 0;
152 int ip2k_reorg_split_himode
= 0;
153 int ip2k_reorg_split_qimode
= 0;
154 int ip2k_reorg_merge_qimode
= 0;
156 /* Set up local allocation order. */
159 ip2k_init_local_alloc (int *rao
)
161 static const int alloc_order
[] = REG_ALLOC_ORDER
;
163 memcpy (rao
, alloc_order
, sizeof (alloc_order
));
166 /* Returns the number of bytes of arguments automatically
167 popped when returning from a subroutine call.
168 FUNDECL is the declaration node of the function (as a tree),
169 FUNTYPE is the data type of the function (as a tree),
170 or for a library call it is an identifier node for the subroutine name.
171 SIZE is the number of bytes of arguments passed on the stack. */
174 ip2k_return_pops_args (tree fundecl ATTRIBUTE_UNUSED
, tree funtype
, int size
)
176 if (TREE_CODE (funtype
) == IDENTIFIER_NODE
)
179 if (TYPE_ARG_TYPES (funtype
) == NULL_TREE
180 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (funtype
))) == void_type_node
))
186 /* Return nonzero if FUNC is a naked function. */
189 ip2k_naked_function_p (tree func
)
193 if (TREE_CODE (func
) != FUNCTION_DECL
)
196 a
= lookup_attribute ("naked", DECL_ATTRIBUTES (func
));
197 return a
!= NULL_TREE
;
200 /* Output function prologue. */
202 function_prologue (FILE *file
, HOST_WIDE_INT size
)
209 prologue_size
= epilogue_size
= 0;
211 if (ip2k_naked_function_p (current_function_decl
))
213 fprintf (file
, "/* prologue: naked */\n");
217 leaf_func_p
= leaf_function_p ();
218 main_p
= MAIN_NAME_P (DECL_NAME (current_function_decl
));
220 /* For now, we compute all these facts about the function, but don't
221 take any action based on the information. */
224 fprintf (file
, "/* prologue: frame size=" HOST_WIDE_INT_PRINT_DEC
" */\n",
227 /* Unless we're a leaf we need to save the return PC. */
231 OUT_AS1 (push
, calll
);
232 OUT_AS1 (push
, callh
);
236 /* We need to save the old FP and set the new FP pointing at the
237 stack location where the old one is saved. Note that because of
238 post-decrement addressing, the SP is off-by-one after the
239 push, so we harvest the SP address BEFORE we push the MSBs of
243 OUT_AS1 (push
, REG_FP
+1); /* Save old LSBs. */
244 OUT_AS2 (mov
, w
, spl
);
245 OUT_AS2 (mov
, REG_FP
+1, w
); /* SPL -> FPL */
247 OUT_AS2 (mov
, w
, sph
); /* Freeze SP MSBs */
248 OUT_AS1 (push
, REG_FP
); /* Save old MSBs */
249 OUT_AS2 (mov
, REG_FP
, w
); /* SPH -> FPH */
253 for (reg
= (CHAIN_FRAMES
) ? (REG_FP
- 1) : (REG_FP
+ 1);
256 if (regs_ever_live
[reg
] && ! call_used_regs
[reg
])
258 fprintf (file
, "\t" AS1 (push
,%s
) "\n", reg_names
[reg
]);
265 operands
[0] = GEN_INT (size
);
276 OUT_AS2 (mov
, w
, %L0
);
277 OUT_AS2 (sub
, spl
, w
);
281 switch (size
& 0xff00)
290 if ((size
& 0xff) != ((size
>> 8) & 0xff))
291 OUT_AS2 (mov
, w
, %H0
); /* Otherwise W has value we want. */
292 OUT_AS2 (sub
, sph
, w
);
297 /* XXX - change this to use the carry-propagating subtract trick. */
298 if (flag_stack_check
)
300 OUT_AS2 (mov
, w
, sph
);
301 OUT_AS2 (cmp
, w
, #%%hi8data(_end));
302 OUT_AS1 (sc
, ); /* C == 0 -> hi8(edata) < sph */
305 OUT_AS1 (sz
, ); /* Z == 1 -> look at low byte */
307 OUT_AS1 (jmp
,0f
); /* sp < edata, so raise stack fault */
308 OUT_AS2 (mov
, w
, spl
);
309 OUT_AS2 (cmp
, w
, #%%lo8data(_end));
310 OUT_AS1 (sc
,); /* C==1 -> lo8(edata) >= spl */
314 output_asm_insn ("push\t$ff", operands
);
321 /* Output function epilogue. */
323 function_epilogue (FILE *file
, HOST_WIDE_INT size
)
327 rtx operands
[2]; /* Dummy used by OUT_ASn */
328 int args_locals_size
= current_function_args_size
;
329 int saved_regs_p
= 0;
332 /* Use this opportunity to reset the reorg flags! */
333 ip2k_reorg_in_progress
= 0;
334 ip2k_reorg_completed
= 0;
335 ip2k_reorg_split_dimode
= 0;
336 ip2k_reorg_split_simode
= 0;
337 ip2k_reorg_split_himode
= 0;
338 ip2k_reorg_split_qimode
= 0;
339 ip2k_reorg_merge_qimode
= 0;
341 if (ip2k_naked_function_p (current_function_decl
))
343 fprintf (file
, "/* epilogue: naked */\n");
347 leaf_func_p
= leaf_function_p ();
349 fprintf (file
, "/* epilogue: frame size=" HOST_WIDE_INT_PRINT_DEC
" */\n",
352 savelimit
= (CHAIN_FRAMES
) ? REG_FP
: (REG_FP
+ 2);
353 for (reg
= 0; reg
< savelimit
; reg
++)
354 if (regs_ever_live
[reg
] && ! call_used_regs
[reg
])
362 if (leaf_func_p
&& !CHAIN_FRAMES
&& !saved_regs_p
363 && current_function_pops_args
)
364 args_locals_size
= current_function_args_size
+ size
;
367 operands
[0] = GEN_INT (size
);
372 OUT_AS2 (mov
, w
, %L0
);
373 OUT_AS2 (add
, spl
, w
);
383 switch (size
& 0xff00)
386 if ((size
& 0xff) != ((size
>> 8) & 0xff))
387 OUT_AS2 (mov
, w
, %H0
);
388 OUT_AS2 (add
, sph
, w
);
400 for (reg
= 0; reg
< savelimit
; reg
++)
402 if (regs_ever_live
[reg
] && ! call_used_regs
[reg
])
404 fprintf (file
, "\t" AS1 (pop
,%s
) "\n", reg_names
[reg
]);
410 && ! (current_function_pops_args
411 && current_function_args_size
>= 2
412 && current_function_args_size
< 0x100))
414 OUT_AS1 (pop
, REG_FP
);
415 OUT_AS1 (pop
, REG_FP
+1);
421 if (current_function_pops_args
422 && current_function_args_size
>= 2
423 && current_function_args_size
< 0x100)
425 if (current_function_args_size
== 2)
429 OUT_AS1 (page
, __fp_pop2_args_ret
);
430 OUT_AS1 (jmp
, __fp_pop2_args_ret
);
434 OUT_AS1 (page
, __pop2_args_ret
);
435 OUT_AS1 (jmp
, __pop2_args_ret
);
441 operands
[0] = GEN_INT (current_function_args_size
);
442 OUT_AS2 (mov
, w
, %L0
);
445 OUT_AS1 (page
, __fp_pop_args_ret
);
446 OUT_AS1 (jmp
, __fp_pop_args_ret
);
450 OUT_AS1 (page
, __pop_args_ret
);
451 OUT_AS1 (jmp
, __pop_args_ret
);
459 OUT_AS1 (pop
, callh
);
460 OUT_AS1 (pop
, calll
);
466 if (current_function_pops_args
467 && args_locals_size
>= 2
468 && args_locals_size
< 0x100)
470 if (args_locals_size
== 2)
474 OUT_AS1 (page
, __leaf_fp_pop2_args_ret
);
475 OUT_AS1 (jmp
, __leaf_fp_pop2_args_ret
);
482 operands
[0] = GEN_INT (args_locals_size
);
485 OUT_AS2 (mov
, w
, %L0
);
486 OUT_AS1 (page
, __leaf_fp_pop_args_ret
);
487 OUT_AS1 (jmp
, __leaf_fp_pop_args_ret
);
495 if (current_function_pops_args
&& args_locals_size
&& need_ret
)
497 operands
[0] = GEN_INT (args_locals_size
);
499 switch (args_locals_size
& 0xff)
502 OUT_AS2 (mov
, w
, %L0
);
503 OUT_AS2 (add
, spl
, w
);
515 switch (args_locals_size
& 0xff00)
518 if ((args_locals_size
& 0xff) != ((args_locals_size
>> 8) & 0xff))
519 OUT_AS2 (mov
, w
, %H0
);
520 OUT_AS2 (add
, sph
, w
);
539 fprintf (file
, "/* epilogue end (size=%d) */\n", epilogue_size
);
542 /* Return the difference between the registers after the function
545 Stack Frame grows down:
548 <------ AP ($102:$103)
549 RETURN PC (unless leaf function)
551 <------ FP [HARD_FRAME_POINTER] ($FD:$FE)
553 <------ VFP [$100:$101]
555 <------ SP ($6:$7) */
557 ip2k_init_elim_offset (int from
, int to
)
559 int leaf_func_p
= leaf_function_p ();
560 int no_saved_pc
= leaf_func_p
561 || ip2k_naked_function_p (current_function_decl
);
566 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
567 return get_frame_size () + 1;
569 if (from
== ARG_POINTER_REGNUM
&& to
== HARD_FRAME_POINTER_REGNUM
)
570 return (CHAIN_FRAMES
? 2 : 0) + (no_saved_pc
? 0 : 2);
572 /* Count all the registers we had to preserve. */
574 reglimit
= CHAIN_FRAMES
? REG_FP
: (REG_FP
+ 2);
575 for (offset
= 0,reg
= 0; reg
< reglimit
; ++reg
)
577 if ((regs_ever_live
[reg
] && ! call_used_regs
[reg
]))
583 if (from
== FRAME_POINTER_REGNUM
&& to
== HARD_FRAME_POINTER_REGNUM
)
586 if (from
== HARD_FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
587 /* Add in the stack-local variables. */
588 return offset
+ get_frame_size () + 1;
590 if (from
== ARG_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
591 /* Add stack-locals plus saved FP and PC. */
592 return offset
+ get_frame_size () + 1
593 + (CHAIN_FRAMES
? 2 : 0) + (no_saved_pc
? 0 : 2);
595 abort (); /* Unanticipated elimination. */
598 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
599 machine for a memory operand of mode MODE. */
602 legitimate_address_p (enum machine_mode mode
, rtx x
, int strict
)
606 if (GET_CODE (x
) == SUBREG
)
609 switch (GET_CODE (x
))
612 /* IP allows indirection without offset - only okay if
613 we don't require access to multiple bytes. */
614 if (REGNO (x
) == REG_IP
)
615 return (GET_MODE_SIZE (mode
) == 1) ? 'R' : 0;
617 /* We can indirect through DP or SP register. */
618 if (strict
? REG_OK_FOR_BASE_STRICT_P (x
)
619 : REG_OK_FOR_BASE_NOSTRICT_P (x
))
624 /* Offsets from DP or SP are legal in the range 0..127 */
631 if (REG_P (op2
) && ! REG_P (op1
))
638 /* Don't let anything but R+I through.. */
641 || GET_CODE (op2
) != CONST_INT
)
646 case REG_DP
: /* only 0..127 displacement */
648 off
= 2 * GET_MODE_SIZE (mode
);
652 if (INTVAL (op2
) < 0 || INTVAL (op2
) > (128 - off
))
653 return 0; /* Positive must be small enough that after
654 splitting all pieces are addressed. */
655 return 'S'; /* Safe displacement. */
658 if (GET_MODE_SIZE (mode
) <= 1 && INTVAL (op2
) == 0)
659 return (GET_MODE_SIZE (mode
) == 1) ? 'R' : 0;
666 if (strict
|| ! REG_OK_FOR_BASE_NOSTRICT_P (op1
))
667 return 0; /* Allow until reload. */
676 /* We always allow references to things in code space. */
677 return is_regfile_address (x
) ? 0 : 'C';
689 /* Is ADDR mode dependent? */
691 ip2k_mode_dependent_address (rtx addr
)
693 switch (GET_CODE (addr
))
702 return (REGNO (addr
) == REG_IP
); /* Can't do IP displaced addresses. */
705 return 0; /* Assume no dependency. */
709 /* Attempts to replace X with a valid
710 memory address for an operand of mode MODE. */
713 legitimize_address (rtx x
, rtx oldx ATTRIBUTE_UNUSED
,
714 enum machine_mode mode ATTRIBUTE_UNUSED
, rtx scratch
)
718 /* You might think that we could split up a symbolic address by
719 adding the HIGH 8 bits and doing a displacement off the dp. But
720 because we only have 7 bits of offset, that doesn't actually
721 help. So only constant displacements are likely to obtain an
724 if (GET_CODE (x
) == PLUS
&& REG_P (XEXP (x
, 0))
725 && GET_CODE (XEXP (x
, 1)) == CONST_INT
726 && ! CONST_OK_FOR_LETTER_P (INTVAL (XEXP (x
, 1)), 'K'))
728 int offset
= INTVAL (XEXP (x
, 1));
730 reg
= scratch
? scratch
: gen_reg_rtx (Pmode
);
732 emit_insn (gen_rtx_SET (VOIDmode
, reg
,
733 gen_rtx_PLUS (Pmode
, XEXP (x
, 0),
734 GEN_INT (offset
& 0xffc0))));
735 x
= gen_rtx_PLUS (Pmode
, reg
, GEN_INT (offset
& 0x3f));
738 return x
; /* We don't have any other tricks. */
741 /* Determine if X is a 'data' address or a code address. All static
742 data and stack variables reside in data memory. Only code is believed
743 to be in PRAM or FLASH. */
745 is_regfile_address (rtx x
)
748 switch (GET_CODE (x
))
751 return ! SYMBOL_REF_FUNCTION_P (x
); /* Declared as function. */
769 /* Output ADDR to FILE as address. */
772 print_operand_address (FILE *file
, rtx addr
)
774 switch (GET_CODE (addr
))
777 addr
= alter_subreg (&addr
);
781 fprintf (file
, "(%s)",
782 REGNO (addr
) == REG_DP
? "DP"
783 : REGNO (addr
) == REG_SP
? "SP"
784 : REGNO (addr
) == REG_IP
? "IP"
785 : REGNO (addr
) == REG_VFP
? "VFP" /* Should never see this */
786 : REGNO (addr
) == REG_AP
? "AP" /* or this, either. */
787 : reg_names
[REGNO (addr
)]);
796 addr
= XEXP (addr
, 0);
797 print_operand_address (file
, XEXP (addr
, 0));
799 print_operand_address (file
, XEXP (addr
, 1));
803 if (is_regfile_address (XEXP (addr
, 1)))
804 fprintf (file
, "%%lo8data(");
806 fprintf (file
, "%%lo8insn(");
807 print_operand_address (file
, XEXP (addr
, 1));
809 print_operand_address (file
, XEXP (addr
, 0));
812 case PLUS
: /* Ought to be stack or dp references. */
813 if (XEXP (addr
, 1) == const0_rtx
814 && GET_CODE (XEXP (addr
, 0)) == PLUS
)
816 print_operand_address (file
, XEXP (addr
, 0));
820 if (! REG_P (XEXP (addr
, 0)) || REGNO (XEXP (addr
, 0)) != REG_IP
)
821 print_operand_address (file
, XEXP (addr
, 1)); /* const */
822 print_operand_address (file
, XEXP (addr
, 0)); /* (reg) */
826 if (is_regfile_address (XEXP (addr
, 0)))
827 fprintf (file
, "%%hi8data(");
829 fprintf (file
, "%%hi8insn(");
830 output_addr_const (file
, XEXP (addr
, 0));
835 output_addr_const (file
, addr
);
840 /* Output X as assembler operand to file FILE. */
843 print_operand (FILE *file
, rtx x
, int code
)
887 if (ip2k_short_operand (x
, GET_MODE (x
))
888 && ip2k_address_uses_reg_p (x
, REG_SP
))
889 /* An SP-relative address needs to account for interior stack
890 pushes that reload didn't know about when it calculated the
892 abcd
+= ip2k_stack_delta
;
894 switch (GET_CODE (x
))
897 x
= alter_subreg (&x
);
901 fprintf (file
, reg_names
[true_regnum (x
) + abcd
]);
908 fprintf (file
, "$%x", (int)(INTVAL (x
) & 0xffff));
912 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (x
)); /* bit selector */
915 case 'e': /* "1 << n" - e.g. "exp" */
916 fprintf (file
, "#%d", 1 << INTVAL (x
));
924 value
>>= 8 * (3 - abcd
);
927 fprintf (file
, "#%ld", value
);
931 fprintf (file
, "#%d", (int)((INTVAL (x
) >> 8) & 0xff));
935 fprintf (file
, "#%d", (int)(INTVAL (x
) & 0xff));
946 value
= ((unsigned long long)INTVAL (x
)) >> (8 * (7 - abcd
)) & 0xff;
947 fprintf (file
, "#%ld", value
);
951 fprintf (file
, "#" HOST_WIDE_INT_PRINT_DEC
, INTVAL (x
));
973 abort (); /* Probably an error. */
977 fprintf (file
, "#%s(",
978 is_regfile_address (x
) ? "%hi8data"
980 print_operand_address (file
, x
);
985 fprintf (file
, "#%s(",
986 is_regfile_address (x
) ? "%lo8data"
988 print_operand_address (file
, x
);
993 print_operand_address (file
, x
);
999 rtx addr
= XEXP (x
, 0);
1001 if (GET_CODE (addr
) == SUBREG
)
1002 addr
= alter_subreg (&x
);
1004 if (CONSTANT_P (addr
) && abcd
)
1007 print_operand_address (file
, addr
);
1008 fprintf (file
, ")+%d", abcd
);
1012 switch (GET_CODE (addr
))
1015 abcd
+= INTVAL (XEXP (addr
, 1));
1017 /* Worry about (plus (plus (reg DP) (const_int 10))
1019 if (GET_CODE (XEXP (addr
, 0)) == PLUS
)
1021 addr
= XEXP (addr
, 0);
1022 abcd
+= INTVAL (XEXP (addr
, 1));
1025 fprintf (file
, "%d", abcd
);
1026 print_operand_address (file
, XEXP (addr
, 0));
1031 fprintf (file
, "%d", abcd
);
1032 print_operand_address (file
, addr
);
1035 else if (GET_CODE (addr
) == REG
1036 && (REGNO (addr
) == REG_DP
|| REGNO (addr
) == REG_SP
))
1038 fprintf (file
, "0");
1039 print_operand_address (file
, addr
);
1042 print_operand_address (file
, addr
);
1047 /* Is this an integer or a floating point value? */
1048 if (GET_MODE (x
) == VOIDmode
)
1056 value
= CONST_DOUBLE_HIGH (x
);
1057 value
>>= 8 * (3 - abcd
);
1060 fprintf (file
, "#%ld", value
);
1067 value
= CONST_DOUBLE_LOW (x
);
1068 value
>>= 8 * (7 - abcd
);
1071 fprintf (file
, "#%ld", value
);
1080 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
1081 REAL_VALUE_TO_TARGET_SINGLE (rv
, value
);
1082 fprintf (file
, "0x%lx", value
);
1087 fatal_insn ("bad operand", x
);
1091 /* Remember the operands for the compare. */
1093 ip2k_set_compare (rtx x
, rtx y
)
1095 ip2k_compare_operands
[0] = x
;
1096 ip2k_compare_operands
[1] = y
;
1100 /* Emit the code for sCOND instructions. */
1102 ip2k_gen_sCOND (rtx insn ATTRIBUTE_UNUSED
, enum rtx_code code
, rtx dest
)
1104 #define operands ip2k_compare_operands
1105 enum machine_mode mode
;
1109 mode
= GET_MODE (operands
[0]);
1110 if ((mode
!= QImode
) && (mode
!= HImode
)
1111 && (mode
!= SImode
) && (mode
!= DImode
))
1112 mode
= GET_MODE (operands
[1]);
1114 /* We have a fast path for a specific type of QImode compare. We ought
1115 to extend this for larger cases too but that wins less frequently and
1116 introduces a lot of complexity. */
1118 && !rtx_equal_p (operands
[0], operands
[2])
1119 && !rtx_equal_p (operands
[1], operands
[2])
1120 && (! REG_P (operands
[2])
1121 || (ip2k_xexp_not_uses_reg_p (operands
[0], REGNO (operands
[2]), 1)
1122 && ip2k_xexp_not_uses_reg_p (operands
[1],
1123 REGNO (operands
[2]), 1))))
1126 if (immediate_operand (operands
[1], QImode
)
1127 && ((INTVAL (operands
[1]) & 0xff) == 0xff))
1130 OUT_AS2 (incsnz
, w
, %0);
1132 OUT_AS2 (incsz
, w
, %0);
1134 else if (immediate_operand (operands
[1], QImode
)
1135 && ((INTVAL (operands
[1]) & 0xff) == 0x01))
1138 OUT_AS2 (decsnz
, w
, %0);
1140 OUT_AS2 (decsz
, w
, %0);
1142 else if (ip2k_compare_operands
[1] == const0_rtx
)
1144 OUT_AS2 (mov
, w
, %0);
1152 OUT_AS2 (mov
, w
, %0);
1154 OUT_AS2 (csne
, w
, %1);
1156 OUT_AS2 (cse
, w
, %1);
1162 if (ip2k_compare_operands
[1] == const0_rtx
)
1167 OUT_AS2 (mov
, w
, %0);
1171 OUT_AS2 (mov
, w
, %H0
);
1172 OUT_AS2 (or, w
, %L0
);
1176 OUT_AS2 (mov
, w
, %A0
);
1177 OUT_AS2 (or, w
, %B0
);
1178 OUT_AS2 (or, w
, %C0
);
1179 OUT_AS2 (or, w
, %D0
);
1183 OUT_AS2 (mov
, w
, %S0
);
1184 OUT_AS2 (or, w
, %T0
);
1185 OUT_AS2 (or, w
, %U0
);
1186 OUT_AS2 (or, w
, %V0
);
1187 OUT_AS2 (or, w
, %W0
);
1188 OUT_AS2 (or, w
, %X0
);
1189 OUT_AS2 (or, w
, %Y0
);
1190 OUT_AS2 (or, w
, %Z0
);
1202 OUT_AS2 (mov
, w
, %1);
1203 OUT_AS2 (cmp
, w
, %0);
1207 OUT_AS2 (mov
, w
, %H1
);
1208 OUT_AS2 (cmp
, w
, %H0
);
1212 OUT_AS2 (mov
, w
, %L1
);
1213 OUT_AS2 (cmp
, w
, %L0
);
1220 OUT_AS2 (mov
, w
, #1);
1221 OUT_AS2 (mov
, mulh
, w
);
1224 OUT_AS1 (clr
, mulh
);
1225 OUT_AS2 (mov
, w
, %A1
);
1226 OUT_AS2 (cse
, w
, %A0
);
1229 OUT_AS2 (mov
, w
, %B1
);
1230 OUT_AS2 (cse
, w
, %B0
);
1233 OUT_AS2 (mov
, w
, %C1
);
1234 OUT_AS2 (cse
, w
, %C0
);
1237 OUT_AS2 (mov
, w
, %D1
);
1238 OUT_AS2 (cse
, w
, %D0
);
1241 OUT_AS1 (dec
, mulh
);
1243 OUT_AS1 (inc
, mulh
);
1244 OUT_AS2 (mov
, w
, mulh
);
1245 OUT_AS2 (mov
, %2, w
);
1251 OUT_AS2 (mov
, w
, #1);
1252 OUT_AS2 (mov
, mulh
, w
);
1255 OUT_AS1 (clr
, mulh
);
1256 OUT_AS2 (mov
, w
, %S1
);
1257 OUT_AS2 (cse
, w
, %S0
);
1260 OUT_AS2 (mov
, w
, %T1
);
1261 OUT_AS2 (cse
, w
, %T0
);
1264 OUT_AS2 (mov
, w
, %U1
);
1265 OUT_AS2 (cse
, w
, %U0
);
1268 OUT_AS2 (mov
, w
, %V1
);
1269 OUT_AS2 (cse
, w
, %V0
);
1272 OUT_AS2 (mov
, w
, %W1
);
1273 OUT_AS2 (cse
, w
, %W0
);
1276 OUT_AS2 (mov
, w
, %X1
);
1277 OUT_AS2 (cse
, w
, %X0
);
1280 OUT_AS2 (mov
, w
, %Y1
);
1281 OUT_AS2 (cse
, w
, %Y0
);
1284 OUT_AS2 (mov
, w
, %Z1
);
1285 OUT_AS2 (cse
, w
, %Z0
);
1288 OUT_AS1 (dec
, mulh
);
1290 OUT_AS1 (inc
, mulh
);
1291 OUT_AS2 (mov
, w
, mulh
);
1292 OUT_AS2 (mov
, %2, w
);
1299 OUT_AS2 (mov
, w
, #0);
1304 OUT_AS1 (inc
, wreg
);
1305 OUT_AS2 (mov
, %2, w
);
1313 ip2k_gen_signed_comp_branch (rtx insn
, enum rtx_code code
, rtx label
)
1315 #define operands ip2k_compare_operands
1316 enum machine_mode mode
;
1317 int can_use_skip
= 0;
1320 operands
[2] = label
;
1322 mode
= GET_MODE (operands
[0]);
1323 if ((mode
!= QImode
) && (mode
!= HImode
)
1324 && (mode
!= SImode
) && (mode
!= DImode
))
1325 mode
= GET_MODE (operands
[1]);
1327 /* Look for situations where we can just skip the next instruction instead
1328 of skipping and then branching! */
1329 ninsn
= next_real_insn (insn
);
1331 && (recog_memoized (ninsn
) >= 0)
1332 && get_attr_skip (ninsn
) == SKIP_YES
)
1334 rtx skip_tgt
= next_nonnote_insn (next_real_insn (insn
));
1336 /* The first situation is where the target of the jump is one insn
1337 after the jump insn and the insn being jumped is only one machine
1339 if (label
== skip_tgt
)
1343 /* If our skip target is in fact a code label then we ignore the
1344 label and move onto the next useful instruction. Nothing we do
1345 here has any effect on the use of skipping instructions. */
1346 if (GET_CODE (skip_tgt
) == CODE_LABEL
)
1347 skip_tgt
= next_nonnote_insn (skip_tgt
);
1349 /* The second situation is where we have something of the form:
1355 optional_label (this may or may not exist):
1359 In this case we can eliminate the first "page/jump label". */
1360 if (GET_CODE (skip_tgt
) == JUMP_INSN
)
1362 rtx set
= single_set (skip_tgt
);
1363 if (GET_CODE (XEXP (set
, 0)) == PC
1364 && GET_CODE (XEXP (set
, 1)) == LABEL_REF
1365 && label
== JUMP_LABEL (skip_tgt
))
1371 /* gcc is a little braindead and does some rather stateful things while
1372 inspecting attributes - we have to put this state back to what it's
1374 extract_constrain_insn_cached (insn
);
1376 if (ip2k_compare_operands
[1] == const0_rtx
) /* These are easier. */
1383 OUT_AS2 (sb
, %0, 7);
1387 OUT_AS2 (snb
, %0, 7);
1397 OUT_AS2 (rl
, w
, %S0
);
1398 OUT_AS2 (mov
, w
, %S0
);
1399 OUT_AS2 (or, w
, %T0
);
1400 OUT_AS2 (or, w
, %U0
);
1401 OUT_AS2 (or, w
, %V0
);
1402 OUT_AS2 (or, w
, %W0
);
1403 OUT_AS2 (or, w
, %X0
);
1404 OUT_AS2 (or, w
, %Y0
);
1405 OUT_AS2 (or, w
, %Z0
);
1407 OUT_AS2 (setb
, status
, 0);
1408 OUT_AS2 (sb
, status
, 0);
1414 OUT_AS2 (rl
, w
, %A0
);
1415 OUT_AS2 (mov
, w
, %A0
);
1416 OUT_AS2 (or, w
, %B0
);
1417 OUT_AS2 (or, w
, %C0
);
1418 OUT_AS2 (or, w
, %D0
);
1420 OUT_AS2 (setb
, status
, 0);
1421 OUT_AS2 (sb
, status
, 0);
1427 OUT_AS2 (rl
, w
, %H0
);
1428 OUT_AS2 (mov
, w
, %H0
);
1429 OUT_AS2 (or, w
, %L0
);
1431 OUT_AS2 (setb
, status
, 0);
1432 OUT_AS2 (sb
, status
, 0);
1438 OUT_AS2 (mov
, w
, %0); /* Will just do "sb w, 7". */
1440 OUT_AS2 (setb
, wreg
, 7);
1441 OUT_AS2 (sb
, wreg
, 7);
1455 OUT_AS2 (mov
, w
, %S0
);
1456 OUT_AS2 (or, w
, %T0
);
1457 OUT_AS2 (or, w
, %U0
);
1458 OUT_AS2 (or, w
, %V0
);
1459 OUT_AS2 (or, w
, %W0
);
1460 OUT_AS2 (or, w
, %X0
);
1461 OUT_AS2 (or, w
, %Y0
);
1462 OUT_AS2 (or, w
, %Z0
); /* Z is correct. */
1464 OUT_AS2 (snb
, %S0
, 7);
1470 OUT_AS2 (mov
, w
, %A0
);
1471 OUT_AS2 (or, w
, %B0
);
1472 OUT_AS2 (or, w
, %C0
);
1473 OUT_AS2 (or, w
, %D0
); /* Z is correct. */
1475 OUT_AS2 (snb
, %A0
, 7);
1481 OUT_AS2 (mov
, w
, %H0
);
1482 OUT_AS2 (or, w
, %L0
);
1484 OUT_AS2 (snb
, %H0
, 7);
1490 OUT_AS2 (mov
, w
, %0); /* Will just do "sb w, 7". */
1492 OUT_AS2 (snb
, wreg
, 7);
1505 OUT_AS2 (snb
, %0, 7);
1509 OUT_AS2 (sb
, %0, 7);
1521 /* signed compares are out of line because we can't get
1522 the hardware to compute the overflow for us. */
1527 OUT_AS1 (push
, %1%<);
1528 OUT_AS1 (push
, %0%>);
1529 OUT_AS1 (page
, __cmpqi2
);
1530 OUT_AS1 (call
, __cmpqi2
);
1534 OUT_AS1 (push
, %L1
%<);
1535 OUT_AS1 (push
, %H1
%<);
1536 OUT_AS1 (push
, %L0
%<);
1537 OUT_AS1 (push
, %H0
%>%>%>);
1538 OUT_AS1 (page
, __cmphi2
);
1539 OUT_AS1 (call
, __cmphi2
);
1543 OUT_AS1 (push
, %D1
%<);
1544 OUT_AS1 (push
, %C1
%<);
1545 OUT_AS1 (push
, %B1
%<);
1546 OUT_AS1 (push
, %A1
%<);
1547 OUT_AS1 (push
, %D0
%<);
1548 OUT_AS1 (push
, %C0
%<);
1549 OUT_AS1 (push
, %B0
%<);
1550 OUT_AS1 (push
, %A0
%>%>%>%>%>%>%>);
1551 OUT_AS1 (page
, __cmpsi2
);
1552 OUT_AS1 (call
, __cmpsi2
);
1556 if (GET_CODE (operands
[0]) == MEM
1557 && true_regnum (XEXP (operands
[0], 0)) == REG_DP
)
1559 OUT_AS1 (push
, %Z1
%<);
1560 OUT_AS1 (push
, %Y1
%<);
1561 OUT_AS1 (push
, %X1
%<);
1562 OUT_AS1 (push
, %W1
%<);
1563 OUT_AS1 (push
, %V1
%<);
1564 OUT_AS1 (push
, %U1
%<);
1565 OUT_AS1 (push
, %T1
%<);
1566 OUT_AS1 (push
, %S1
%>%>%>%>%>%>%>);
1567 OUT_AS1 (page
, __cmpdi2_dp
);
1568 OUT_AS1 (call
, __cmpdi2_dp
);
1572 OUT_AS1 (push
, %Z1
%<);
1573 OUT_AS1 (push
, %Y1
%<);
1574 OUT_AS1 (push
, %X1
%<);
1575 OUT_AS1 (push
, %W1
%<);
1576 OUT_AS1 (push
, %V1
%<);
1577 OUT_AS1 (push
, %U1
%<);
1578 OUT_AS1 (push
, %T1
%<);
1579 OUT_AS1 (push
, %S1
%<);
1580 OUT_AS1 (push
, %Z0
%<);
1581 OUT_AS1 (push
, %Y0
%<);
1582 OUT_AS1 (push
, %X0
%<);
1583 OUT_AS1 (push
, %W0
%<);
1584 OUT_AS1 (push
, %V0
%<);
1585 OUT_AS1 (push
, %U0
%<);
1586 OUT_AS1 (push
, %T0
%<);
1587 OUT_AS1 (push
, %S0
%>%>%>%>%>%>%>%>%>%>%>%>%>%>%>);
1588 OUT_AS1 (page
, __cmpdi2
);
1589 OUT_AS1 (call
, __cmpdi2
);
1602 OUT_AS2 (cse
, w
, #0);
1606 OUT_AS2 (csne
, w
, #0);
1615 OUT_AS2 (cse
, w
, #2);
1619 OUT_AS2 (csne
, w
, #2);
1628 OUT_AS2 (snb
, wreg
, 1);
1632 OUT_AS2 (sb
, wreg
, 1);
1641 OUT_AS2 (csne
, w
, #0);
1645 OUT_AS2 (cse
, w
, #0);
1659 ip2k_gen_unsigned_comp_branch (rtx insn
, enum rtx_code code
, rtx label
)
1661 #define operands ip2k_compare_operands
1662 enum machine_mode mode
;
1665 int can_use_skip
= 0;
1667 HOST_WIDE_INT const_low
;
1668 HOST_WIDE_INT const_high
;
1670 operands
[2] = label
;
1672 mode
= GET_MODE (operands
[0]);
1673 if ((mode
!= QImode
) && (mode
!= HImode
) && (mode
!= SImode
)
1674 && (mode
!= DImode
))
1676 mode
= GET_MODE (operands
[1]);
1679 /* Look for situations where we can just skip the next instruction instead
1680 of skipping and then branching! */
1681 ninsn
= next_real_insn (insn
);
1683 && (recog_memoized (ninsn
) >= 0)
1684 && get_attr_skip (ninsn
) == SKIP_YES
)
1686 rtx skip_tgt
= next_nonnote_insn (next_real_insn (insn
));
1688 /* The first situation is where the target of the jump is one insn
1689 after the jump insn and the insn being jumped is only one machine
1691 if (label
== skip_tgt
)
1695 /* If our skip target is in fact a code label then we ignore the
1696 label and move onto the next useful instruction. Nothing we do
1697 here has any effect on the use of skipping instructions. */
1698 if (GET_CODE (skip_tgt
) == CODE_LABEL
)
1699 skip_tgt
= next_nonnote_insn (skip_tgt
);
1701 /* The second situation is where we have something of the form:
1707 optional_label (this may or may not exist):
1711 In this case we can eliminate the first "page/jump label". */
1712 if (GET_CODE (skip_tgt
) == JUMP_INSN
)
1714 rtx set
= single_set (skip_tgt
);
1715 if (GET_CODE (XEXP (set
, 0)) == PC
1716 && GET_CODE (XEXP (set
, 1)) == LABEL_REF
1717 && label
== JUMP_LABEL (skip_tgt
))
1723 /* gcc is a little braindead and does some rather stateful things while
1724 inspecting attributes - we have to put this state back to what it's
1726 extract_constrain_insn_cached (insn
);
1728 if (ip2k_compare_operands
[1] == const0_rtx
)
1733 code
= EQ
; /* Nothing is LTU 0. */
1737 code
= NE
; /* Anything nonzero is GTU. */
1741 case NE
: /* Test all the bits, result in
1747 OUT_AS2 (mov
, w
, %S0
);
1748 OUT_AS2 (or, w
, %T0
);
1749 OUT_AS2 (or, w
, %U0
);
1750 OUT_AS2 (or, w
, %V0
);
1751 OUT_AS2 (or, w
, %W0
);
1752 OUT_AS2 (or, w
, %X0
);
1753 OUT_AS2 (or, w
, %Y0
);
1754 OUT_AS2 (or, w
, %Z0
);
1758 OUT_AS2 (mov
, w
, %A0
);
1759 OUT_AS2 (or, w
, %B0
);
1760 OUT_AS2 (or, w
, %C0
);
1761 OUT_AS2 (or, w
, %D0
);
1765 OUT_AS2 (mov
, w
, %H0
);
1766 OUT_AS2 (or, w
, %L0
);
1770 OUT_AS2 (mov
, w
, %0);
1796 /* Always succeed. */
1811 /* Look at whether we have a constant as one of our operands. If we do
1812 and it's in the position that we use to subtract from during our
1813 normal optimized comparison concept then we have to shuffle things
1817 if ((immediate_operand (operands
[1], GET_MODE (operands
[1]))
1818 && ((code
== LEU
) || (code
== GTU
)))
1819 || (immediate_operand (operands
[0], GET_MODE (operands
[0]))
1820 && ((code
== LTU
) || (code
== GEU
))))
1826 /* Same as above - look if we have a constant that we can compare
1827 for equality or non-equality. If we know this then we can look
1828 for common value eliminations. Note that we want to ensure that
1829 any immediate value is operand 1 to simplify the code later! */
1830 if ((code
== EQ
) || (code
== NE
))
1832 imm_cmp
= immediate_operand (operands
[1], GET_MODE (operands
[1]));
1835 imm_cmp
= immediate_operand (operands
[0], GET_MODE (operands
[0]));
1838 rtx tmp
= operands
[1];
1839 operands
[1] = operands
[0];
1851 if (imm_cmp
&& ((INTVAL (operands
[1]) & 0xff) == 0xff))
1852 OUT_AS2 (incsnz
, w
, %0);
1853 else if (imm_cmp
&& ((INTVAL (operands
[1]) & 0xff) == 0x01))
1854 OUT_AS2 (decsnz
, w
, %0);
1857 OUT_AS2 (mov
, w
, %1);
1858 OUT_AS2 (csne
, w
, %0);
1865 if (imm_cmp
&& ((INTVAL (operands
[1]) & 0xff) == 0xff))
1866 OUT_AS2 (incsz
, w
, %0);
1867 else if (imm_cmp
&& ((INTVAL (operands
[1]) & 0xff) == 0x01))
1868 OUT_AS2 (decsz
, w
, %0);
1871 OUT_AS2 (mov
, w
, %1);
1872 OUT_AS2 (cse
, w
, %0);
1879 OUT_AS2 (mov
, w
, %0);
1880 OUT_AS2 (cmp
, w
, %1);
1887 OUT_AS2 (mov
, w
, %1);
1888 OUT_AS2 (cmp
, w
, %0);
1895 OUT_AS2 (mov
, w
, %1);
1896 OUT_AS2 (cmp
, w
, %0);
1903 OUT_AS2 (mov
, w
, %0);
1904 OUT_AS2 (cmp
, w
, %1);
1920 unsigned char h
= 0, l
= 1;
1924 h
= (INTVAL (operands
[1]) >> 8) & 0xff;
1925 l
= INTVAL (operands
[1]) & 0xff;
1927 if ((h
== 0xff) && (l
== 0xff))
1929 /* We should be able to do the following, but the
1930 IP2k simulator doesn't like it and we get a load
1931 of failures in gcc-c-torture. */
1932 OUT_AS2 (incsnz
, w
, %L0
);
1933 OUT_AS2 (incsz
, w
, %H0
);
1934 /* OUT_AS1 (skip,); Should have this */
1935 OUT_AS1 (page
, 1f
);/* Shouldn't need this! */
1936 OUT_AS1 (jmp
, 1f
); /* Shouldn't need this either. */
1945 OUT_AS2 (dec
, w
, %L0
);
1948 OUT_AS2 (mov
, w
, %L0
);
1949 OUT_AS2 (sub
, w
, %L1
);
1951 OUT_AS2 (or, w
, %H0
);
1960 OUT_AS2 (dec
, w
, %H0
);
1963 OUT_AS2 (mov
, w
, %H0
);
1964 OUT_AS2 (sub
, w
, %H1
);
1966 OUT_AS2 (or, w
, %L0
);
1974 OUT_AS2 (mov
, w
, %H1
);
1975 OUT_AS2 (cse
, w
, %H0
);
1978 if (! imm_cmp
|| (h
!= l
))
1979 OUT_AS2 (mov
, w
, %L1
);
1980 OUT_AS2 (csne
, w
, %L0
);
1989 unsigned char h
= 0, l
= 1;
1993 h
= (INTVAL (operands
[1]) >> 8) & 0xff;
1994 l
= INTVAL (operands
[1]) & 0xff;
1996 if ((h
== 0xff) && (l
== 0xff))
1998 OUT_AS2 (incsnz
, w
, %L0
);
1999 OUT_AS2 (incsz
, w
, %H0
);
2007 OUT_AS2 (dec
, w
, %L0
);
2010 OUT_AS2 (mov
, w
, %L0
);
2011 OUT_AS2 (sub
, w
, %L1
);
2013 OUT_AS2 (or, w
, %H0
);
2022 OUT_AS2 (dec
, w
, %H0
);
2025 OUT_AS2 (mov
, w
, %H0
);
2026 OUT_AS2 (sub
, w
, %H1
);
2028 OUT_AS2 (or, w
, %L0
);
2036 OUT_AS2 (mov
, w
, %H1
);
2037 if (imm_cmp
&& (h
== l
))
2039 OUT_AS2 (csne
, w
, %H0
);
2040 OUT_AS2 (cse
, w
, %L0
);
2044 OUT_AS2 (cse
, w
, %H0
);
2047 OUT_AS2 (mov
, w
, %L1
);
2048 OUT_AS2 (cse
, w
, %L0
);
2058 /* > 0xffff never succeeds! */
2059 if ((INTVAL (operands
[1]) & 0xffff) != 0xffff)
2061 operands
[3] = GEN_INT (INTVAL (operands
[1]) + 1);
2062 OUT_AS2 (mov
, w
, %L3
);
2063 OUT_AS2 (sub
, w
, %L0
);
2064 OUT_AS2 (mov
, w
, %H3
);
2065 OUT_AS2 (subc
, w
, %H0
);
2073 OUT_AS2 (mov
, w
, %L0
);
2074 OUT_AS2 (sub
, w
, %L1
);
2075 OUT_AS2 (mov
, w
, %H0
);
2076 OUT_AS2 (subc
, w
, %H1
);
2086 if (INTVAL (operands
[0]) == 0)
2088 OUT_AS2 (mov
, w
, %H1
);
2089 OUT_AS2 (or, w
, %L1
);
2096 operands
[3] = GEN_INT (INTVAL (operands
[0]) - 1);
2097 OUT_AS2 (mov
, w
, %L3
);
2098 OUT_AS2 (sub
, w
, %L1
);
2099 OUT_AS2 (mov
, w
, %H3
);
2100 OUT_AS2 (subc
, w
, %H1
);
2108 OUT_AS2 (mov
, w
, %L1
);
2109 OUT_AS2 (sub
, w
, %L0
);
2110 OUT_AS2 (mov
, w
, %H1
);
2111 OUT_AS2 (subc
, w
, %H0
);
2121 if (INTVAL (operands
[0]) == 0)
2123 OUT_AS2 (mov
, w
, %H1
);
2124 OUT_AS2 (or, w
, %L1
);
2131 operands
[3] = GEN_INT (INTVAL (operands
[0]) - 1);
2132 OUT_AS2 (mov
, w
, %L3
);
2133 OUT_AS2 (sub
, w
, %L1
);
2134 OUT_AS2 (mov
, w
, %H3
);
2135 OUT_AS2 (subc
, w
, %H1
);
2143 OUT_AS2 (mov
, w
, %L1
);
2144 OUT_AS2 (sub
, w
, %L0
);
2145 OUT_AS2 (mov
, w
, %H1
);
2146 OUT_AS2 (subc
, w
, %H0
);
2156 if ((INTVAL (operands
[1]) & 0xffff) == 0xffff)
2158 /* <= 0xffff always succeeds. */
2164 operands
[3] = GEN_INT (INTVAL (operands
[1]) + 1);
2165 OUT_AS2 (mov
, w
, %L3
);
2166 OUT_AS2 (sub
, w
, %L0
);
2167 OUT_AS2 (mov
, w
, %H3
);
2168 OUT_AS2 (subc
, w
, %H0
);
2176 OUT_AS2 (mov
, w
, %L0
);
2177 OUT_AS2 (sub
, w
, %L1
);
2178 OUT_AS2 (mov
, w
, %H0
);
2179 OUT_AS2 (subc
, w
, %H1
);
2196 unsigned char a
= 0, b
= 1, c
= 2, d
= 3;
2200 a
= (INTVAL (operands
[1]) >> 24) & 0xff;
2201 b
= (INTVAL (operands
[1]) >> 16) & 0xff;
2202 c
= (INTVAL (operands
[1]) >> 8) & 0xff;
2203 d
= INTVAL (operands
[1]) & 0xff;
2206 OUT_AS2 (mov
, w
, %A1
);
2207 if (imm_cmp
&& (b
== a
))
2209 OUT_AS2 (csne
, w
, %A0
);
2210 OUT_AS2 (cse
, w
, %B0
);
2214 OUT_AS2 (cse
, w
, %A0
);
2217 OUT_AS2 (mov
, w
, %B1
);
2218 OUT_AS2 (cse
, w
, %B0
);
2222 if (! imm_cmp
|| (c
!= b
))
2223 OUT_AS2 (mov
, w
, %C1
);
2224 OUT_AS2 (cse
, w
, %C0
);
2227 if (! imm_cmp
|| (d
!= c
))
2228 OUT_AS2 (mov
, w
, %D1
);
2229 OUT_AS2 (csne
, w
, %D0
);
2238 unsigned char a
= 0, b
= 1, c
= 2, d
= 3;
2242 a
= (INTVAL (operands
[1]) >> 24) & 0xff;
2243 b
= (INTVAL (operands
[1]) >> 16) & 0xff;
2244 c
= (INTVAL (operands
[1]) >> 8) & 0xff;
2245 d
= INTVAL (operands
[1]) & 0xff;
2248 OUT_AS2 (mov
, w
, %A1
);
2249 if (imm_cmp
&& (b
== a
))
2251 OUT_AS2 (csne
, w
, %A0
);
2252 OUT_AS2 (cse
, w
, %B0
);
2256 OUT_AS2 (cse
, w
, %A0
);
2259 OUT_AS2 (mov
, w
, %B1
);
2260 OUT_AS2 (cse
, w
, %B0
);
2264 if (! imm_cmp
|| (c
!= b
))
2265 OUT_AS2 (mov
, w
, %C1
);
2266 if (imm_cmp
&& (d
== c
))
2268 OUT_AS2 (csne
, w
, %C0
);
2269 OUT_AS2 (cse
, w
, %D0
);
2273 OUT_AS2 (cse
, w
, %C0
);
2276 OUT_AS2 (mov
, w
, %D1
);
2277 OUT_AS2 (cse
, w
, %D0
);
2287 /* > 0xffffffff never succeeds! */
2288 if ((unsigned HOST_WIDE_INT
)(INTVAL (operands
[1]) & 0xffffffff)
2291 operands
[3] = GEN_INT (INTVAL (operands
[1]) + 1);
2292 OUT_AS2 (mov
, w
, %D3
);
2293 OUT_AS2 (sub
, w
, %D0
);
2294 OUT_AS2 (mov
, w
, %C3
);
2295 OUT_AS2 (subc
, w
, %C0
);
2296 OUT_AS2 (mov
, w
, %B3
);
2297 OUT_AS2 (subc
, w
, %B0
);
2298 OUT_AS2 (mov
, w
, %A3
);
2299 OUT_AS2 (subc
, w
, %A0
);
2307 OUT_AS2 (mov
, w
, %D0
);
2308 OUT_AS2 (sub
, w
, %D1
);
2309 OUT_AS2 (mov
, w
, %C0
);
2310 OUT_AS2 (subc
, w
, %C1
);
2311 OUT_AS2 (mov
, w
, %B0
);
2312 OUT_AS2 (subc
, w
, %B1
);
2313 OUT_AS2 (mov
, w
, %A0
);
2314 OUT_AS2 (subc
, w
, %A1
);
2324 if (INTVAL (operands
[0]) == 0)
2326 OUT_AS2 (mov
, w
, %A1
);
2327 OUT_AS2 (or, w
, %B1
);
2328 OUT_AS2 (or, w
, %C1
);
2329 OUT_AS2 (or, w
, %D1
);
2336 operands
[3] = GEN_INT (INTVAL (operands
[0]) - 1);
2337 OUT_AS2 (mov
, w
, %D3
);
2338 OUT_AS2 (sub
, w
, %D1
);
2339 OUT_AS2 (mov
, w
, %C3
);
2340 OUT_AS2 (subc
, w
, %C1
);
2341 OUT_AS2 (mov
, w
, %B3
);
2342 OUT_AS2 (subc
, w
, %B1
);
2343 OUT_AS2 (mov
, w
, %A3
);
2344 OUT_AS2 (subc
, w
, %A1
);
2352 OUT_AS2 (mov
, w
, %D1
);
2353 OUT_AS2 (sub
, w
, %D0
);
2354 OUT_AS2 (mov
, w
, %C1
);
2355 OUT_AS2 (subc
, w
, %C0
);
2356 OUT_AS2 (mov
, w
, %B1
);
2357 OUT_AS2 (subc
, w
, %B0
);
2358 OUT_AS2 (mov
, w
, %A1
);
2359 OUT_AS2 (subc
, w
, %A0
);
2369 if (INTVAL (operands
[0]) == 0)
2371 OUT_AS2 (mov
, w
, %A1
);
2372 OUT_AS2 (or, w
, %B1
);
2373 OUT_AS2 (or, w
, %C1
);
2374 OUT_AS2 (or, w
, %D1
);
2381 operands
[3] = GEN_INT (INTVAL (operands
[0]) - 1);
2382 OUT_AS2 (mov
, w
, %D3
);
2383 OUT_AS2 (sub
, w
, %D1
);
2384 OUT_AS2 (mov
, w
, %C3
);
2385 OUT_AS2 (subc
, w
, %C1
);
2386 OUT_AS2 (mov
, w
, %B3
);
2387 OUT_AS2 (subc
, w
, %B1
);
2388 OUT_AS2 (mov
, w
, %A3
);
2389 OUT_AS2 (subc
, w
, %A1
);
2397 OUT_AS2 (mov
, w
, %D1
);
2398 OUT_AS2 (sub
, w
, %D0
);
2399 OUT_AS2 (mov
, w
, %C1
);
2400 OUT_AS2 (subc
, w
, %C0
);
2401 OUT_AS2 (mov
, w
, %B1
);
2402 OUT_AS2 (subc
, w
, %B0
);
2403 OUT_AS2 (mov
, w
, %A1
);
2404 OUT_AS2 (subc
, w
, %A0
);
2414 if ((unsigned HOST_WIDE_INT
)(INTVAL (operands
[1]) & 0xffffffff)
2417 /* <= 0xffffffff always succeeds. */
2423 operands
[3] = GEN_INT (INTVAL (operands
[1]) + 1);
2424 OUT_AS2 (mov
, w
, %D3
);
2425 OUT_AS2 (sub
, w
, %D0
);
2426 OUT_AS2 (mov
, w
, %C3
);
2427 OUT_AS2 (subc
, w
, %C0
);
2428 OUT_AS2 (mov
, w
, %B3
);
2429 OUT_AS2 (subc
, w
, %B0
);
2430 OUT_AS2 (mov
, w
, %A3
);
2431 OUT_AS2 (subc
, w
, %A0
);
2439 OUT_AS2 (mov
, w
, %D0
);
2440 OUT_AS2 (sub
, w
, %D1
);
2441 OUT_AS2 (mov
, w
, %C0
);
2442 OUT_AS2 (subc
, w
, %C1
);
2443 OUT_AS2 (mov
, w
, %B0
);
2444 OUT_AS2 (subc
, w
, %B1
);
2445 OUT_AS2 (mov
, w
, %A0
);
2446 OUT_AS2 (subc
, w
, %A1
);
2459 if (GET_CODE (operands
[1]) == CONST_INT
)
2461 const_low
= INTVAL (operands
[1]);
2462 const_high
= (const_low
>= 0) - 1;
2464 else if (GET_CODE (operands
[1]) == CONST_DOUBLE
)
2466 const_low
= CONST_DOUBLE_LOW (operands
[1]);
2467 const_high
= CONST_DOUBLE_HIGH (operands
[1]);
2473 unsigned char s
= 0, t
= 1, u
= 2, v
= 3;
2474 unsigned char w
= 4, x
= 5, y
= 6, z
= 7;
2477 if (GET_CODE (operands
[0]) == MEM
2478 && true_regnum (XEXP (operands
[0], 0)) == REG_DP
)
2480 OUT_AS1 (push
, %Z1
%<);
2481 OUT_AS1 (push
, %Y1
%<);
2482 OUT_AS1 (push
, %X1
%<);
2483 OUT_AS1 (push
, %W1
%<);
2484 OUT_AS1 (push
, %V1
%<);
2485 OUT_AS1 (push
, %U1
%<);
2486 OUT_AS1 (push
, %T1
%<);
2487 OUT_AS1 (push
, %S1
%>%>%>%>%>%>%>);
2488 OUT_AS1 (page
, __cmpdi2_dp
);
2489 OUT_AS1 (call
, __cmpdi2_dp
);
2490 OUT_AS2 (csne
, w
, #1);
2496 OUT_AS1 (push
, %Z1
%<);
2497 OUT_AS1 (push
, %Y1
%<);
2498 OUT_AS1 (push
, %X1
%<);
2499 OUT_AS1 (push
, %W1
%<);
2500 OUT_AS1 (push
, %V1
%<);
2501 OUT_AS1 (push
, %U1
%<);
2502 OUT_AS1 (push
, %T1
%<);
2503 OUT_AS1 (push
, %S1
%<);
2504 OUT_AS1 (push
, %Z0
%<);
2505 OUT_AS1 (push
, %Y0
%<);
2506 OUT_AS1 (push
, %X0
%<);
2507 OUT_AS1 (push
, %W0
%<);
2508 OUT_AS1 (push
, %V0
%<);
2509 OUT_AS1 (push
, %U0
%<);
2510 OUT_AS1 (push
, %T0
%<);
2511 OUT_AS1 (push
, %S0
%>%>%>%>%>%>%>%>%>%>%>%>%>%>%>);
2512 OUT_AS1 (page
, __cmpdi2
);
2513 OUT_AS1 (call
, __cmpdi2
);
2514 OUT_AS2 (csne
, w
, #1);
2523 s
= (const_high
>> 24) & 0xff;
2524 t
= (const_high
>> 16) & 0xff;
2525 u
= (const_high
>> 8) & 0xff;
2526 v
= const_high
& 0xff;
2527 w
= (const_low
>> 24) & 0xff;
2528 x
= (const_low
>> 16) & 0xff;
2529 y
= (const_low
>> 8) & 0xff;
2530 z
= const_low
& 0xff;
2533 OUT_AS2 (mov
, w
, %S1
);
2534 if (imm_cmp
&& (s
== t
))
2536 OUT_AS2 (csne
, w
, %S0
);
2537 OUT_AS2 (cse
, w
, %T0
);
2541 OUT_AS2 (cse
, w
, %S0
);
2544 OUT_AS2 (mov
, w
, %T1
);
2545 OUT_AS2 (cse
, w
, %T0
);
2550 OUT_AS2 (mov
, w
, %U1
);
2551 if (imm_cmp
&& (u
== v
))
2553 OUT_AS2 (csne
, w
, %U0
);
2554 OUT_AS2 (cse
, w
, %V0
);
2558 OUT_AS2 (cse
, w
, %U0
);
2561 OUT_AS2 (mov
, w
, %V1
);
2562 OUT_AS2 (cse
, w
, %V0
);
2567 OUT_AS2 (mov
, w
, %W1
);
2568 if (imm_cmp
&& (w
== x
))
2570 OUT_AS2 (csne
, w
, %W0
);
2571 OUT_AS2 (cse
, w
, %X0
);
2575 OUT_AS2 (cse
, w
, %W0
);
2578 OUT_AS2 (mov
, w
, %X1
);
2579 OUT_AS2 (cse
, w
, %X0
);
2584 if (! imm_cmp
|| (x
!= y
))
2585 OUT_AS2 (mov
, w
, %Y1
);
2586 OUT_AS2 (cse
, w
, %Y0
);
2589 if (! imm_cmp
|| (z
!= y
))
2590 OUT_AS2 (mov
, w
, %Z1
);
2591 OUT_AS2 (csne
, w
, %Z0
);
2601 unsigned char s
= 0, t
= 1, u
= 2, v
= 3;
2602 unsigned char w
= 4, x
= 5, y
= 6, z
= 7;
2606 if (GET_CODE (operands
[0]) == MEM
2607 && true_regnum (XEXP (operands
[0], 0)) == REG_DP
)
2609 OUT_AS1 (push
, %Z1
%<);
2610 OUT_AS1 (push
, %Y1
%<);
2611 OUT_AS1 (push
, %X1
%<);
2612 OUT_AS1 (push
, %W1
%<);
2613 OUT_AS1 (push
, %V1
%<);
2614 OUT_AS1 (push
, %U1
%<);
2615 OUT_AS1 (push
, %T1
%<);
2616 OUT_AS1 (push
, %S1
%>%>%>%>%>%>%>);
2617 OUT_AS1 (page
, __cmpdi2_dp
);
2618 OUT_AS1 (call
, __cmpdi2_dp
);
2619 OUT_AS2 (cse
, w
, #1);
2625 OUT_AS1 (push
, %Z1
%<);
2626 OUT_AS1 (push
, %Y1
%<);
2627 OUT_AS1 (push
, %X1
%<);
2628 OUT_AS1 (push
, %W1
%<);
2629 OUT_AS1 (push
, %V1
%<);
2630 OUT_AS1 (push
, %U1
%<);
2631 OUT_AS1 (push
, %T1
%<);
2632 OUT_AS1 (push
, %S1
%<);
2633 OUT_AS1 (push
, %Z0
%<);
2634 OUT_AS1 (push
, %Y0
%<);
2635 OUT_AS1 (push
, %X0
%<);
2636 OUT_AS1 (push
, %W0
%<);
2637 OUT_AS1 (push
, %V0
%<);
2638 OUT_AS1 (push
, %U0
%<);
2639 OUT_AS1 (push
, %T0
%<);
2640 OUT_AS1 (push
, %S0
%>%>%>%>%>%>%>%>%>%>%>%>%>%>%>);
2641 OUT_AS1 (page
, __cmpdi2
);
2642 OUT_AS1 (call
, __cmpdi2
);
2643 OUT_AS2 (cse
, w
, #1);
2652 s
= (const_high
>> 24) & 0xff;
2653 t
= (const_high
>> 16) & 0xff;
2654 u
= (const_high
>> 8) & 0xff;
2655 v
= const_high
& 0xff;
2656 w
= (const_low
>> 24) & 0xff;
2657 x
= (const_low
>> 16) & 0xff;
2658 y
= (const_low
>> 8) & 0xff;
2659 z
= const_low
& 0xff;
2662 OUT_AS2 (mov
, w
, %S1
);
2663 if (imm_cmp
&& (s
== t
))
2665 OUT_AS2 (csne
, w
, %S0
);
2666 OUT_AS2 (cse
, w
, %T0
);
2670 OUT_AS2 (cse
, w
, %S0
);
2673 OUT_AS2 (mov
, w
, %T1
);
2674 OUT_AS2 (cse
, w
, %T0
);
2679 OUT_AS2 (mov
, w
, %U1
);
2680 if (imm_cmp
&& (u
== v
))
2682 OUT_AS2 (csne
, w
, %U0
);
2683 OUT_AS2 (cse
, w
, %V0
);
2687 OUT_AS2 (cse
, w
, %U0
);
2690 OUT_AS2 (mov
, w
, %V1
);
2691 OUT_AS2 (cse
, w
, %V0
);
2696 OUT_AS2 (mov
, w
, %W1
);
2697 if (imm_cmp
&& (w
== x
))
2699 OUT_AS2 (csne
, w
, %W0
);
2700 OUT_AS2 (cse
, w
, %X0
);
2704 OUT_AS2 (cse
, w
, %W0
);
2707 OUT_AS2 (mov
, w
, %X1
);
2708 OUT_AS2 (cse
, w
, %X0
);
2713 if (! imm_cmp
|| (y
!= x
))
2714 OUT_AS2 (mov
, w
, %Y1
);
2715 if (imm_cmp
&& (z
== y
))
2717 OUT_AS2 (csne
, w
, %Y0
);
2718 OUT_AS2 (cse
, w
, %Z0
);
2722 OUT_AS2 (cse
, w
, %Y0
);
2725 OUT_AS2 (mov
, w
, %Z1
);
2726 OUT_AS2 (cse
, w
, %Z0
);
2737 /* > 0xffffffffffffffff never succeeds! */
2738 if (((const_high
& 0xffffffff) != 0xffffffff)
2739 || ((const_low
& 0xffffffff) != 0xffffffff))
2741 operands
[3] = GEN_INT (const_low
+ 1);
2742 operands
[4] = GEN_INT (const_high
2743 + (INTVAL (operands
[3]) ? 0 : 1));
2744 OUT_AS2 (mov
, w
, %D3
);
2745 OUT_AS2 (sub
, w
, %Z0
);
2746 OUT_AS2 (mov
, w
, %C3
);
2747 OUT_AS2 (subc
, w
, %Y0
);
2748 OUT_AS2 (mov
, w
, %B3
);
2749 OUT_AS2 (subc
, w
, %X0
);
2750 OUT_AS2 (mov
, w
, %A3
);
2751 OUT_AS2 (subc
, w
, %W0
);
2752 OUT_AS2 (mov
, w
, %D4
);
2753 OUT_AS2 (subc
, w
, %V0
);
2754 OUT_AS2 (mov
, w
, %C4
);
2755 OUT_AS2 (subc
, w
, %U0
);
2756 OUT_AS2 (mov
, w
, %B4
);
2757 OUT_AS2 (subc
, w
, %T0
);
2758 OUT_AS2 (mov
, w
, %A4
);
2759 OUT_AS2 (subc
, w
, %S0
);
2767 OUT_AS2 (mov
, w
, %Z0
);
2768 OUT_AS2 (sub
, w
, %Z1
);
2769 OUT_AS2 (mov
, w
, %Y0
);
2770 OUT_AS2 (subc
, w
, %Y1
);
2771 OUT_AS2 (mov
, w
, %X0
);
2772 OUT_AS2 (subc
, w
, %X1
);
2773 OUT_AS2 (mov
, w
, %W0
);
2774 OUT_AS2 (subc
, w
, %W1
);
2775 OUT_AS2 (mov
, w
, %V0
);
2776 OUT_AS2 (subc
, w
, %V1
);
2777 OUT_AS2 (mov
, w
, %U0
);
2778 OUT_AS2 (subc
, w
, %U1
);
2779 OUT_AS2 (mov
, w
, %T0
);
2780 OUT_AS2 (subc
, w
, %T1
);
2781 OUT_AS2 (mov
, w
, %S0
);
2782 OUT_AS2 (subc
, w
, %S1
);
2792 HOST_WIDE_INT const_low0
;
2793 HOST_WIDE_INT const_high0
;
2795 if (GET_CODE (operands
[0]) == CONST_INT
)
2797 const_low0
= INTVAL (operands
[0]);
2798 const_high0
= (const_low
>= 0) - 1;
2800 else if (GET_CODE (operands
[0]) == CONST_DOUBLE
)
2802 const_low0
= CONST_DOUBLE_LOW (operands
[0]);
2803 const_high0
= CONST_DOUBLE_HIGH (operands
[0]);
2806 if (const_high0
== 0 && const_low0
== 0)
2808 OUT_AS2 (mov
, w
, %S1
);
2809 OUT_AS2 (or, w
, %T1
);
2810 OUT_AS2 (or, w
, %U1
);
2811 OUT_AS2 (or, w
, %V1
);
2812 OUT_AS2 (or, w
, %W1
);
2813 OUT_AS2 (or, w
, %X1
);
2814 OUT_AS2 (or, w
, %Y1
);
2815 OUT_AS2 (or, w
, %Z1
);
2822 operands
[3] = GEN_INT (const_low0
- 1);
2823 operands
[4] = GEN_INT (const_high0
- (const_low0
? 1 : 0));
2824 OUT_AS2 (mov
, w
, %D3
);
2825 OUT_AS2 (sub
, w
, %Z1
);
2826 OUT_AS2 (mov
, w
, %C3
);
2827 OUT_AS2 (subc
, w
, %Y1
);
2828 OUT_AS2 (mov
, w
, %B3
);
2829 OUT_AS2 (subc
, w
, %X1
);
2830 OUT_AS2 (mov
, w
, %A3
);
2831 OUT_AS2 (subc
, w
, %W1
);
2832 OUT_AS2 (mov
, w
, %D4
);
2833 OUT_AS2 (subc
, w
, %V1
);
2834 OUT_AS2 (mov
, w
, %C4
);
2835 OUT_AS2 (subc
, w
, %U1
);
2836 OUT_AS2 (mov
, w
, %B4
);
2837 OUT_AS2 (subc
, w
, %T1
);
2838 OUT_AS2 (mov
, w
, %A4
);
2839 OUT_AS2 (subc
, w
, %S1
);
2847 OUT_AS2 (mov
, w
, %Z1
);
2848 OUT_AS2 (sub
, w
, %Z0
);
2849 OUT_AS2 (mov
, w
, %Y1
);
2850 OUT_AS2 (subc
, w
, %Y0
);
2851 OUT_AS2 (mov
, w
, %X1
);
2852 OUT_AS2 (subc
, w
, %X0
);
2853 OUT_AS2 (mov
, w
, %W1
);
2854 OUT_AS2 (subc
, w
, %W0
);
2855 OUT_AS2 (mov
, w
, %V1
);
2856 OUT_AS2 (subc
, w
, %V0
);
2857 OUT_AS2 (mov
, w
, %U1
);
2858 OUT_AS2 (subc
, w
, %U0
);
2859 OUT_AS2 (mov
, w
, %T1
);
2860 OUT_AS2 (subc
, w
, %T0
);
2861 OUT_AS2 (mov
, w
, %S1
);
2862 OUT_AS2 (subc
, w
, %S0
);
2872 HOST_WIDE_INT const_low0
;
2873 HOST_WIDE_INT const_high0
;
2875 if (GET_CODE (operands
[0]) == CONST_INT
)
2877 const_low0
= INTVAL (operands
[0]);
2878 const_high0
= (const_low
>= 0) - 1;
2880 else if (GET_CODE (operands
[0]) == CONST_DOUBLE
)
2882 const_low0
= CONST_DOUBLE_LOW (operands
[0]);
2883 const_high0
= CONST_DOUBLE_HIGH (operands
[0]);
2886 if (const_high0
== 0 && const_low0
== 0)
2888 OUT_AS2 (mov
, w
, %S1
);
2889 OUT_AS2 (or, w
, %T1
);
2890 OUT_AS2 (or, w
, %U1
);
2891 OUT_AS2 (or, w
, %V1
);
2892 OUT_AS2 (or, w
, %W1
);
2893 OUT_AS2 (or, w
, %X1
);
2894 OUT_AS2 (or, w
, %Y1
);
2895 OUT_AS2 (or, w
, %Z1
);
2902 operands
[3] = GEN_INT (const_low0
- 1);
2903 operands
[4] = GEN_INT (const_high0
- (const_low0
? 1 : 0));
2904 OUT_AS2 (mov
, w
, %D3
);
2905 OUT_AS2 (sub
, w
, %Z1
);
2906 OUT_AS2 (mov
, w
, %C3
);
2907 OUT_AS2 (subc
, w
, %Y1
);
2908 OUT_AS2 (mov
, w
, %B3
);
2909 OUT_AS2 (subc
, w
, %X1
);
2910 OUT_AS2 (mov
, w
, %A3
);
2911 OUT_AS2 (subc
, w
, %W1
);
2912 OUT_AS2 (mov
, w
, %D4
);
2913 OUT_AS2 (subc
, w
, %V1
);
2914 OUT_AS2 (mov
, w
, %C4
);
2915 OUT_AS2 (subc
, w
, %U1
);
2916 OUT_AS2 (mov
, w
, %B4
);
2917 OUT_AS2 (subc
, w
, %T1
);
2918 OUT_AS2 (mov
, w
, %A4
);
2919 OUT_AS2 (subc
, w
, %S1
);
2927 OUT_AS2 (mov
, w
, %Z1
);
2928 OUT_AS2 (sub
, w
, %Z0
);
2929 OUT_AS2 (mov
, w
, %Y1
);
2930 OUT_AS2 (subc
, w
, %Y0
);
2931 OUT_AS2 (mov
, w
, %X1
);
2932 OUT_AS2 (subc
, w
, %X0
);
2933 OUT_AS2 (mov
, w
, %W1
);
2934 OUT_AS2 (subc
, w
, %W0
);
2935 OUT_AS2 (mov
, w
, %V1
);
2936 OUT_AS2 (subc
, w
, %V0
);
2937 OUT_AS2 (mov
, w
, %U1
);
2938 OUT_AS2 (subc
, w
, %U0
);
2939 OUT_AS2 (mov
, w
, %T1
);
2940 OUT_AS2 (subc
, w
, %T0
);
2941 OUT_AS2 (mov
, w
, %S1
);
2942 OUT_AS2 (subc
, w
, %S0
);
2952 if (((const_high
& 0xffffffff) == 0xffffffff)
2953 && ((const_low
& 0xffffffff) == 0xffffffff))
2955 /* <= 0xffffffffffffffff always succeeds. */
2961 operands
[3] = GEN_INT (const_low
+ 1);
2962 operands
[4] = GEN_INT (const_high
2963 + (INTVAL (operands
[3]) ? 0 : 1));
2964 OUT_AS2 (mov
, w
, %D3
);
2965 OUT_AS2 (sub
, w
, %Z0
);
2966 OUT_AS2 (mov
, w
, %C3
);
2967 OUT_AS2 (subc
, w
, %Y0
);
2968 OUT_AS2 (mov
, w
, %B3
);
2969 OUT_AS2 (subc
, w
, %X0
);
2970 OUT_AS2 (mov
, w
, %A3
);
2971 OUT_AS2 (subc
, w
, %W0
);
2972 OUT_AS2 (mov
, w
, %D4
);
2973 OUT_AS2 (subc
, w
, %V0
);
2974 OUT_AS2 (mov
, w
, %C4
);
2975 OUT_AS2 (subc
, w
, %U0
);
2976 OUT_AS2 (mov
, w
, %B4
);
2977 OUT_AS2 (subc
, w
, %T0
);
2978 OUT_AS2 (mov
, w
, %A4
);
2979 OUT_AS2 (subc
, w
, %S0
);
2987 OUT_AS2 (mov
, w
, %Z0
);
2988 OUT_AS2 (sub
, w
, %Z1
);
2989 OUT_AS2 (mov
, w
, %Y0
);
2990 OUT_AS2 (subc
, w
, %Y1
);
2991 OUT_AS2 (mov
, w
, %X0
);
2992 OUT_AS2 (subc
, w
, %X1
);
2993 OUT_AS2 (mov
, w
, %W0
);
2994 OUT_AS2 (subc
, w
, %W1
);
2995 OUT_AS2 (mov
, w
, %V0
);
2996 OUT_AS2 (subc
, w
, %V1
);
2997 OUT_AS2 (mov
, w
, %U0
);
2998 OUT_AS2 (subc
, w
, %U1
);
2999 OUT_AS2 (mov
, w
, %T0
);
3000 OUT_AS2 (subc
, w
, %T1
);
3001 OUT_AS2 (mov
, w
, %S0
);
3002 OUT_AS2 (subc
, w
, %S1
);
3021 /* Output rtx VALUE as .byte to file FILE. */
3024 asm_output_char (FILE *file
, rtx value
)
3026 fprintf (file
, "\t.byte ");
3027 output_addr_const (file
, value
);
3028 fprintf (file
, "\n");
3032 /* Output VALUE as .byte to file FILE. */
3035 asm_output_byte (FILE *file
, int value
)
3037 fprintf (file
, "\t.byte 0x%x\n",value
& 0xff);
3041 /* Output rtx VALUE as .word to file FILE. */
3044 asm_output_short (FILE *file
, rtx value
)
3046 fprintf (file
, "\t.word ");
3047 output_addr_const (file
, (value
));
3048 fprintf (file
, "\n");
3052 /* Output real N to file FILE. */
3055 asm_output_float (FILE *file
, REAL_VALUE_TYPE n
)
3060 REAL_VALUE_TO_TARGET_SINGLE (n
, val
);
3061 real_to_decimal (dstr
, &n
, sizeof (dstr
), 0, 1);
3063 fprintf (file
, "\t.long 0x%08lx\t/* %s */\n", val
, dstr
);
3066 /* Sets section name for declaration DECL. */
3069 unique_section (tree decl
, int reloc ATTRIBUTE_UNUSED
)
3075 name
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
));
3076 /* Strip off any encoding in name. */
3077 name
= (* targetm
.strip_name_encoding
) (name
);
3079 if (TREE_CODE (decl
) == FUNCTION_DECL
)
3081 if (flag_function_sections
)
3089 if (flag_function_sections
)
3091 len
= strlen (name
) + strlen (prefix
);
3092 string
= alloca (len
+ 1);
3093 sprintf (string
, "%s%s", prefix
, name
);
3094 DECL_SECTION_NAME (decl
) = build_string (len
, string
);
3098 /* Return value is nonzero if pseudos that have been
3099 assigned to registers of class CLASS would likely be spilled
3100 because registers of CLASS are needed for spill registers. */
3103 class_likely_spilled_p (int c
)
3105 return (c
== IP_REGS
3116 /* Valid attributes:
3117 progmem - put data to program memory;
3118 naked - don't generate function prologue/epilogue and `ret' command.
3120 Only `progmem' attribute valid for type. */
3122 const struct attribute_spec ip2k_attribute_table
[] =
3124 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
3125 { "progmem", 0, 0, false, false, false, ip2k_handle_progmem_attribute
},
3126 { "naked", 0, 0, true, false, false, ip2k_handle_fndecl_attribute
},
3127 { NULL
, 0, 0, false, false, false, NULL
}
3130 /* Handle a "progmem" attribute; arguments as in
3131 struct attribute_spec.handler. */
3133 ip2k_handle_progmem_attribute (tree
*node
, tree name
,
3134 tree args ATTRIBUTE_UNUSED
,
3135 int flags ATTRIBUTE_UNUSED
,
3140 if (TREE_CODE (*node
) == TYPE_DECL
)
3142 /* This is really a decl attribute, not a type attribute,
3143 but try to handle it for GCC 3.0 backwards compatibility. */
3145 tree type
= TREE_TYPE (*node
);
3146 tree attr
= tree_cons (name
, args
, TYPE_ATTRIBUTES (type
));
3147 tree newtype
= build_type_attribute_variant (type
, attr
);
3149 TYPE_MAIN_VARIANT (newtype
) = TYPE_MAIN_VARIANT (type
);
3150 TREE_TYPE (*node
) = newtype
;
3151 *no_add_attrs
= true;
3153 else if (TREE_STATIC (*node
) || DECL_EXTERNAL (*node
))
3155 if (DECL_INITIAL (*node
) == NULL_TREE
&& !DECL_EXTERNAL (*node
))
3157 warning (0, "only initialized variables can be placed into "
3158 "program memory area");
3159 *no_add_attrs
= true;
3164 warning (0, "%qs attribute ignored", IDENTIFIER_POINTER (name
));
3165 *no_add_attrs
= true;
3172 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
3173 struct attribute_spec.handler. */
3175 ip2k_handle_fndecl_attribute (tree
*node
, tree name
,
3176 tree args ATTRIBUTE_UNUSED
,
3177 int flags ATTRIBUTE_UNUSED
,
3180 if (TREE_CODE (*node
) != FUNCTION_DECL
)
3182 warning (0, "%qs attribute only applies to functions",
3183 IDENTIFIER_POINTER (name
));
3184 *no_add_attrs
= true;
3190 /* Cost functions. */
3192 /* Compute a (partial) cost for rtx X. Return true if the complete
3193 cost has been computed, and false if subexpressions should be
3194 scanned. In either case, *TOTAL contains the cost result. */
3197 ip2k_rtx_costs (rtx x
, int code
, int outer_code
, int *total
)
3199 enum machine_mode mode
= GET_MODE (x
);
3215 *total
= ip2k_address_cost (XEXP (x
, 0));
3223 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
3225 int val
= INTVAL (XEXP (x
, 1));
3228 /* Shift by const instructions are proportional to
3229 the shift count modulus 8. Note that we increase the mode
3230 size multiplier by 1 to account for clearing the carry flag. */
3231 cost
= COSTS_N_INSNS (abs (val
) % 8);
3232 cost
+= rtx_cost (XEXP (x
, 0), code
);
3233 cost
*= (GET_MODE_SIZE (mode
) + 1);
3235 /* Sign-preserving shifts require 2 extra instructions. */
3237 cost
+= COSTS_N_INSNS (2);
3242 *total
= rtx_cost (XEXP (x
, 0), code
);
3243 *total
+= COSTS_N_INSNS (GET_MODE_SIZE (mode
) * 8);
3251 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
) * 3);
3257 *total
= COSTS_N_INSNS (20);
3258 else if (mode
== HImode
)
3259 *total
= COSTS_N_INSNS (60);
3260 else if (mode
== SImode
)
3261 *total
= COSTS_N_INSNS (180);
3263 *total
= COSTS_N_INSNS (540);
3267 /* These costs are OK, but should really handle subtle cases
3268 where we're using sign or zero extended args as these are
3269 *much* cheaper than those given below! */
3271 *total
= COSTS_N_INSNS (4);
3272 else if (mode
== HImode
)
3273 *total
= COSTS_N_INSNS (12);
3274 else if (mode
== SImode
)
3275 *total
= COSTS_N_INSNS (36);
3277 *total
= COSTS_N_INSNS (108);
3282 extra_cost
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
3288 *total
= extra_cost
+ COSTS_N_INSNS (GET_MODE_SIZE (mode
) * 2);
3293 if (outer_code
== SET
)
3295 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
) * 3 / 2);
3300 *total
= -(COSTS_N_INSNS (GET_MODE_SIZE (mode
)) / 2);
3305 *total
= rtx_cost (XEXP (x
, 0), code
) + COSTS_N_INSNS (2);
3322 *total
= COSTS_N_INSNS (4);
3327 /* Calculate the cost of a memory address. */
3330 ip2k_address_cost (rtx x
)
3332 switch (legitimate_address_p (VOIDmode
, x
, 0))
3334 case 'S': /* Very low cost - (IP), (SP+N) or (DP+N) */
3337 case 'R': /* Indirected through IP. */
3340 case 'L': /* Label references. */
3343 case 'C': /* Constants and symbol references. */
3347 return 1000; /* Must reload. */
3351 /* As part of the machine-dependent reorg we look for opcode sequences where
3352 we do some operation and then move the results back to one of the original
3353 source operands. With working on the source operand directly is probably
3354 much cheaper and the move from this to the original source operand will be
3355 no more expensive than the original move. */
3357 #ifdef IP2K_MD_REORG_PASS
3359 mdr_resequence_xy_yx (first_insn
)
3364 for (insn
= first_insn
; insn
; insn
= next_nonnote_insn (insn
))
3368 if (GET_CODE (insn
) != INSN
)
3371 set
= (GET_CODE (PATTERN (insn
)) == SET
) ? PATTERN (insn
) : NULL_RTX
;
3372 if (set
== NULL_RTX
)
3375 /* Look for operations that tend to be very cheap to run when the source
3376 * and dest args are the same because the IP2022 has opcodes that can
3377 operate on the source directly. If we have to spill through the W
3378 register then we've possibly not got a good case for doing this. */
3379 if ((GET_CODE (XEXP (set
, 0)) == REG
3380 || GET_CODE (XEXP (set
, 0)) == MEM
)
3381 && (GET_CODE (XEXP (set
, 1)) == ASHIFT
3382 || GET_CODE (XEXP (set
, 1)) == ASHIFTRT
3383 || GET_CODE (XEXP (set
, 1)) == LSHIFTRT
3384 || GET_CODE (XEXP (set
, 1)) == XOR
3385 || GET_CODE (XEXP (set
, 1)) == IOR
3386 || GET_CODE (XEXP (set
, 1)) == AND
3387 || GET_CODE (XEXP (set
, 1)) == PLUS
3388 || GET_CODE (XEXP (set
, 1)) == MINUS
3389 || GET_CODE (XEXP (set
, 1)) == MULT
))
3394 next_insn
= next_nonnote_insn (insn
);
3398 if (GET_CODE (next_insn
) != INSN
)
3401 set2
= ((GET_CODE (PATTERN (next_insn
)) == SET
)
3402 ? PATTERN (next_insn
) : NULL_RTX
);
3403 if (set2
== NULL_RTX
)
3406 if ((GET_CODE (XEXP (XEXP (set
, 1), 0)) == REG
3407 || GET_CODE (XEXP (XEXP (set
, 1), 0)) == MEM
)
3408 && rtx_equal_p (XEXP (set2
, 0), XEXP (XEXP (set
, 1), 0))
3409 && rtx_equal_p (XEXP (set2
, 1), XEXP (set
, 0)))
3414 b_insn
= gen_rtx_SET (VOIDmode
,
3415 XEXP (XEXP (set
, 1), 0),
3416 gen_rtx_fmt_ee (GET_CODE (XEXP (set
, 1)),
3417 GET_MODE (XEXP (set
, 0)),
3418 XEXP (XEXP (set
, 1), 0),
3419 XEXP (XEXP (set
, 1), 1)));
3421 emit_insn_before (b_insn
, insn
);
3422 b_insn
= gen_rtx_SET (GET_MODE (XEXP (set
, 0)), XEXP (set
, 0),
3423 XEXP (XEXP (set
, 1), 0));
3424 next2_insn
= emit_insn_before (b_insn
, insn
);
3426 delete_insn (next_insn
);
3431 /* Having tried with one operand of the expression, now, if
3432 appropriate, try to do the same thing with the second operand.
3433 Of course there are fewer operations that can match here
3434 because they must be commutative. */
3435 if (GET_RTX_CLASS (GET_CODE (XEXP (set
, 1))) == RTX_COMM_ARITH
3436 && (GET_CODE (XEXP (XEXP (set
, 1), 1)) == REG
3437 || GET_CODE (XEXP (XEXP (set
, 1), 1)) == MEM
)
3438 && rtx_equal_p (XEXP (set2
, 0), XEXP (XEXP (set
, 1), 1))
3439 && rtx_equal_p (XEXP (set2
, 1), XEXP (set
, 0)))
3445 /* Try to ensure that we put things in a canonical form. */
3446 swap_args
= (GET_CODE (XEXP (XEXP (set
, 1), 0)) == REG
3447 || GET_CODE (XEXP (XEXP (set
, 1), 0)) == MEM
);
3448 rtx_ee
= gen_rtx_fmt_ee (GET_CODE (XEXP (set
, 1)),
3449 GET_MODE (XEXP (set
, 0)),
3450 XEXP (XEXP (set
, 1), swap_args
? 1 : 0),
3451 XEXP (XEXP (set
, 1),
3452 swap_args
? 0 : 1));
3454 emit_insn_before (gen_rtx_SET (VOIDmode
,
3455 XEXP (XEXP (set
, 1), 1),
3458 next2_insn
= emit_insn_before (gen_rtx_SET
3459 (GET_MODE (XEXP (set
, 0)),
3461 XEXP (XEXP (set
, 1), 1)),
3464 delete_insn (next_insn
);
3471 /* Replace and recurse until we've tried QImode pieces! */
3474 mdr_pres_replace_and_recurse (orig
, with
, insn
)
3479 enum machine_mode new_mode
;
3481 validate_replace_rtx (orig
, with
, insn
);
3483 switch (GET_MODE (orig
))
3503 mdr_pres_replace_and_recurse (ip2k_get_low_half (orig
, new_mode
),
3504 ip2k_get_low_half (with
, new_mode
),
3506 mdr_pres_replace_and_recurse (ip2k_get_high_half (orig
, new_mode
),
3507 ip2k_get_high_half (with
, new_mode
),
3511 /* Assist the following function, mdr_propagate_reg_equivs(). */
3514 mdr_propagate_reg_equivs_sequence (first_insn
, orig
, equiv
)
3520 rtx try_equiv
= equiv
;
3522 /* First scan the RTL looking for anything else that might clobber what
3523 we're doing. If we find anything then we can't do the replacement. */
3524 for (try_insn
= next_nonnote_insn (first_insn
);
3525 try_insn
; try_insn
= next_nonnote_insn (try_insn
))
3529 if (GET_CODE (try_insn
) != JUMP_INSN
&& GET_CODE (try_insn
) != INSN
)
3532 pattern
= PATTERN (try_insn
);
3533 if (GET_CODE (pattern
) == PARALLEL
)
3537 for (j
= 0; j
< XVECLEN (pattern
, 0); j
++)
3539 rtx px
= XVECEXP (pattern
, 0, j
);
3541 if (GET_CODE (px
) == SET
)
3542 if (! ip2k_composite_xexp_not_uses_reg_p (XEXP (px
, 0),
3544 GET_MODE_SIZE (GET_MODE (orig
))))
3548 else if (GET_CODE (pattern
) == SET
)
3550 if (! ip2k_composite_xexp_not_uses_reg_p (XEXP (pattern
, 0),
3552 GET_MODE_SIZE (GET_MODE (orig
))))
3557 /* Once we've decided that we're safe to do the replacement then make the
3559 for (try_insn
= next_nonnote_insn (first_insn
); try_insn
;
3560 try_insn
= next_nonnote_insn (try_insn
))
3563 rtx new_equiv
= NULL_RTX
;
3565 if (GET_CODE (try_insn
) != JUMP_INSN
&& GET_CODE (try_insn
) != INSN
)
3571 set
= ((GET_CODE (PATTERN (try_insn
)) == SET
)
3572 ? PATTERN (try_insn
) : NULL_RTX
);
3573 if (set
== NULL_RTX
)
3576 /* We look for a special case of "push" operations screwing our
3577 register equivalence when it's based on a stack slot. We can
3578 track this one and replace the old equivalence expression with
3580 if (GET_CODE (XEXP (set
, 0)) == MEM
3581 && GET_CODE (XEXP (XEXP (set
, 0), 0)) == POST_DEC
3582 && REG_P (XEXP (XEXP (XEXP (set
, 0), 0), 0))
3583 && REGNO (XEXP (XEXP (XEXP (set
, 0), 0), 0)) == REG_SP
)
3585 /* XXX - need to ensure that we can track this without going
3587 HOST_WIDE_INT disp
= (INTVAL (XEXP (XEXP (try_equiv
, 0), 1))
3588 + GET_MODE_SIZE (GET_MODE (XEXP (set
, 0))));
3589 new_equiv
= gen_rtx_MEM (GET_MODE (try_equiv
),
3590 gen_rtx_PLUS (Pmode
,
3591 gen_rtx_REG (HImode
, REG_SP
),
3595 /* The replacement process is somewhat complicated by the fact that we
3596 might be dealing with what were originally subregs and thus we have
3597 to replace parts of our original expression! */
3598 mdr_pres_replace_and_recurse (orig
, try_equiv
, try_insn
);
3600 if (new_equiv
!= NULL_RTX
)
3601 try_equiv
= new_equiv
;
3605 /* Try propagating register equivalences forwards. It may be that we can
3606 replace a register use with an equivalent expression that already
3607 holds the same value and thus allow one or more register loads to
3611 mdr_propagate_reg_equivs (first_insn
)
3617 for (insn
= first_insn
; insn
; insn
= next_nonnote_insn (insn
))
3619 if (GET_CODE (insn
) != INSN
)
3622 set
= (GET_CODE (PATTERN (insn
)) == SET
) ? PATTERN (insn
) : NULL_RTX
;
3623 if (set
== NULL_RTX
)
3626 /* Have we found a stack slot equivalence for a register? */
3627 if (REG_P (XEXP (set
, 0))
3628 && REGNO (XEXP (set
, 0)) >= 0x88
3629 && GET_CODE (XEXP (set
, 1)) == MEM
3630 && GET_CODE (XEXP (XEXP (set
, 1), 0)) == PLUS
3631 && REG_P (XEXP (XEXP (XEXP (set
, 1), 0), 0))
3632 && REGNO (XEXP (XEXP (XEXP (set
, 1), 0), 0)) == REG_SP
3633 && find_reg_note (insn
, REG_EQUIV
, NULL_RTX
))
3635 mdr_propagate_reg_equivs_sequence (insn
, XEXP (set
, 0),
3641 /* Structure used to track jump targets. */
3643 struct dpre_jump_targets
3645 int target
; /* Is this a jump target? */
3646 int reach_count
; /* Number of ways we can reach this insn. */
3647 int touch_count
; /* Number of times we've touched this
3648 insns during scanning. */
3649 rtx dp_equiv
; /* DP-equivalence at this point. */
3652 struct dpre_jump_targets
*ip2k_dpre_jump_targets
;
3654 /* DP equivalence tracking used within DP reload elimination. */
3657 track_dp_reload (insn
, dp_current
, dp_current_ok
, modifying
)
3665 if (GET_CODE (insn
) != INSN
)
3667 *dp_current
= NULL_RTX
;
3671 set
= (GET_CODE (PATTERN (insn
)) == SET
) ? PATTERN (insn
) : NULL_RTX
;
3672 if (set
== NULL_RTX
)
3674 *dp_current
= NULL_RTX
;
3678 /* If we're pushing a PLUS or MINUS then it's a win if we can replace
3679 an expression for which DP is equivalent with DP. This happens
3680 surprisingly often when we pass a pointer to a structure embedded
3681 within another structure. */
3682 if (*dp_current
!= NULL_RTX
3683 && GET_CODE (XEXP (set
, 0)) == MEM
3684 && GET_CODE (XEXP (XEXP (set
, 0), 0)) == POST_DEC
3685 && GET_CODE (XEXP (XEXP (XEXP (set
, 0), 0), 0)) == REG
3686 && REGNO (XEXP (XEXP (XEXP (set
, 0), 0), 0)) == REG_SP
3687 && (GET_CODE (XEXP (set
, 1)) == PLUS
3688 || GET_CODE (XEXP (set
, 1)) == MINUS
)
3689 && GET_CODE (*dp_current
) != SYMBOL_REF
3690 && GET_CODE (*dp_current
) != LABEL_REF
3691 && GET_CODE (*dp_current
) != CONST
)
3694 validate_replace_rtx (*dp_current
, gen_rtx_REG (HImode
, REG_DP
), insn
);
3697 /* Look for DP being modified. If it is, see if it's being changed
3698 to what it already is! */
3699 if (GET_CODE (XEXP (set
, 0)) == REG
3700 && REGNO (XEXP (set
, 0)) == REG_DP
3701 && GET_MODE (XEXP (set
, 0)) == HImode
)
3703 /* If this is an equivalence we can delete the new set operation. */
3704 if (*dp_current
!= NULL_RTX
3705 && rtx_equal_p (XEXP (set
, 1), *dp_current
))
3712 /* If we've not found an equivalence we can look for a special
3713 case where an operand of the expression that sets DP is
3714 already equivalent to DP and in that circumstance we simplify
3715 by replacing that expression with DP. */
3716 if (*dp_current
!= NULL_RTX
3717 && GET_CODE (*dp_current
) != SYMBOL_REF
3718 && GET_CODE (*dp_current
) != LABEL_REF
3719 && GET_CODE (*dp_current
) != CONST
3721 validate_replace_rtx (*dp_current
, XEXP (set
, 0), insn
);
3723 /* Assuming that we're not loading DP from something that uses DP
3724 itself then we mark the new equivalence for DP. If we did match
3725 DP then we can't re-use this one. */
3726 if (ip2k_xexp_not_uses_reg_p (XEXP (set
, 1), REG_DP
, 2))
3728 *dp_current
= XEXP (set
, 1);
3733 *dp_current
= NULL_RTX
;
3738 else if (GET_CODE (XEXP (set
, 0)) == REG
3739 && (REGNO (XEXP (set
, 0)) == REG_DPL
3740 || REGNO (XEXP (set
, 0)) == REG_DPH
))
3742 /* If we clobber part of DP then we've clobbered any equivalences! */
3743 *dp_current
= NULL_RTX
;
3746 else if (! ip2k_xexp_not_uses_reg_p (XEXP (set
, 0), REG_SP
, 2)
3747 && *dp_current
!= NULL_RTX
3748 && !ip2k_xexp_not_uses_reg_p (*dp_current
, REG_SP
, 2))
3750 /* We look for a special case of "push" operations screwing up the
3751 setting of DP when it's based on the stack. We can track this one
3752 and replace the old expression for DP with a new one. */
3753 if (GET_CODE (XEXP (set
, 0)) == MEM
3754 && GET_CODE (XEXP (XEXP (set
, 0), 0)) == POST_DEC
3755 && GET_CODE (XEXP (XEXP (XEXP (set
, 0), 0), 0)) == REG
3756 && REGNO (XEXP (XEXP (XEXP (set
, 0), 0), 0)) == REG_SP
3757 && GET_CODE (*dp_current
) == MEM
3758 && GET_CODE (XEXP (*dp_current
, 0)) == PLUS
)
3760 /* XXX - need to ensure that we can track this without going
3762 HOST_WIDE_INT disp
= (INTVAL (XEXP (XEXP (*dp_current
, 0), 1))
3763 + GET_MODE_SIZE (GET_MODE (XEXP (set
, 0))));
3764 *dp_current
= gen_rtx_MEM (HImode
,
3765 gen_rtx_PLUS (Pmode
,
3766 gen_rtx_REG (HImode
, REG_SP
),
3771 /* Now we look for writes to the stack. We can determine if these will
3772 affect the equivalence we're tracking for DP and if not then we can
3773 keep tracking it. */
3774 if (GET_CODE (XEXP (set
, 0)) == MEM
3775 && GET_CODE (*dp_current
) == MEM
)
3777 /* Look at the SP offsets and look for any overlaps. */
3778 int dp_cur_sp_offs
= INTVAL (XEXP (XEXP (*dp_current
, 0), 1));
3779 int set_sp_offs
= INTVAL (XEXP (XEXP (XEXP (set
, 0), 0), 1));
3781 if (abs (dp_cur_sp_offs
- set_sp_offs
) < 2)
3783 *dp_current
= NULL_RTX
;
3788 else if (GET_CODE (XEXP (set
, 0)) == REG
3789 && *dp_current
!= NULL_RTX
3790 && !ip2k_xexp_not_uses_reg_p (*dp_current
, REGNO (XEXP (set
, 0)),
3791 GET_MODE_SIZE (GET_MODE (XEXP (set
,
3794 /* If we've just clobbered all or part of a register reference that we
3795 were sharing for DP then we can't share it any more! */
3796 *dp_current
= NULL_RTX
;
3799 return dp_current_ok
;
3802 /* As part of the machine-dependent reorg we scan loads and reloads of
3803 DP to see where any are redundant. This does happens because we
3804 are able to subsequently transform things in interesting ways. Sometimes
3805 gcc also does unnecessary reloads too so we try to eliminate these too. */
3808 mdr_try_dp_reload_elim (first_insn
)
3812 struct dpre_jump_targets
*djt
;
3814 int incomplete_scan
;
3815 int last_incomplete_scan
;
3817 ip2k_dpre_jump_targets
3818 = (struct dpre_jump_targets
*) xcalloc (get_max_uid (),
3819 sizeof (struct dpre_jump_targets
));
3821 /* First we scan to build up a list of all CODE_LABEL insns and we work out
3822 how many different ways we can reach them. */
3823 for (insn
= first_insn
; insn
; insn
= next_nonnote_insn (insn
))
3825 if (GET_CODE (insn
) == CODE_LABEL
)
3827 djt
= &ip2k_dpre_jump_targets
[INSN_UID (insn
)];
3829 djt
->reach_count
= LABEL_NUSES (insn
);
3830 djt
->touch_count
= 0;
3831 djt
->dp_equiv
= NULL_RTX
;
3832 if (! prev_nonnote_insn (insn
)
3833 || (prev_nonnote_insn (insn
)
3834 && GET_CODE (prev_nonnote_insn (insn
)) != BARRIER
))
3839 /* Next we scan all of the ways of reaching the code labels to see
3840 what the DP register is equivalent to as we reach them. If we find
3841 that they're the same then we keep noting the matched value. We
3842 iterate around this until we reach a convergence on DP equivalences
3843 at all code labels - we have to be very careful not to be too
3845 incomplete_scan
= -1;
3848 int dp_current_ok
= 0;
3849 last_incomplete_scan
= incomplete_scan
;
3850 dp_current
= NULL_RTX
;
3852 for (insn
= first_insn
; insn
; insn
= next_nonnote_insn (insn
))
3854 /* If we have a code label then we need to see if we already know
3855 what the equivalence is at this point. If we do then we use it
3856 immediately, but if we don't then we have a special case to track
3857 when we hit a fallthrough-edge (label with no barrier preceding
3858 it). Any other accesses to the label must be from jump insns
3859 and so they're handled elsewhere. */
3860 if (GET_CODE (insn
) == CODE_LABEL
)
3862 djt
= &ip2k_dpre_jump_targets
[INSN_UID (insn
)];
3864 /* If we're fully characterized the use the equivalence. */
3865 if (djt
->touch_count
== djt
->reach_count
)
3867 dp_current
= djt
->dp_equiv
;
3872 /* If we have a known equivalence for DP as we reach the
3873 fallthrough-edge then track this into the code label. */
3875 && (! prev_nonnote_insn (insn
)
3876 || (prev_nonnote_insn (insn
)
3877 && GET_CODE (prev_nonnote_insn (insn
)) != BARRIER
)))
3879 if (djt
->touch_count
== 0)
3880 djt
->dp_equiv
= dp_current
;
3882 if (djt
->touch_count
< djt
->reach_count
)
3885 if (! rtx_equal_p (djt
->dp_equiv
, dp_current
))
3887 /* When we definitely know that we can't form an
3888 equivalence for DP here we must clobber anything
3889 that we'd started to track too. */
3890 djt
->dp_equiv
= NULL_RTX
;
3891 dp_current
= NULL_RTX
;
3897 /* If we've not completely characterized this code label then
3898 be cautious and assume that we don't know what DP is
3900 if (djt
->touch_count
< djt
->reach_count
)
3902 dp_current
= NULL_RTX
;
3909 /* If we've hit a jump insn then we look for either an address
3910 vector (jump table) or for jump label references. */
3911 if (GET_CODE (insn
) == JUMP_INSN
)
3913 /* Don't attempt to track here if we don't have a known
3914 equivalence for DP at this point. */
3917 rtx pat
= PATTERN (insn
);
3918 if (GET_CODE (pat
) == ADDR_VEC
)
3921 int len
= XVECLEN (pat
, 0);
3923 for (i
= 0; i
< len
; i
++)
3925 rtx vec_insn
= XEXP (XVECEXP (pat
, 0, i
), 0);
3926 djt
= &ip2k_dpre_jump_targets
[INSN_UID (vec_insn
)];
3928 if (djt
->touch_count
== 0)
3929 djt
->dp_equiv
= dp_current
;
3931 if (djt
->touch_count
< djt
->reach_count
)
3934 if (! rtx_equal_p (djt
->dp_equiv
, dp_current
))
3935 djt
->dp_equiv
= NULL_RTX
;
3939 else if (JUMP_LABEL (insn
))
3941 rtx j_insn
= JUMP_LABEL (insn
);
3942 djt
= &ip2k_dpre_jump_targets
[INSN_UID (j_insn
)];
3944 if (djt
->touch_count
== 0)
3945 djt
->dp_equiv
= dp_current
;
3947 if (djt
->touch_count
< djt
->reach_count
)
3950 if (! rtx_equal_p (djt
->dp_equiv
, dp_current
))
3951 djt
->dp_equiv
= NULL_RTX
;
3959 /* Anything other than a code labal or jump arrives here.
3960 We try and track DP, but sometimes we might not be able to. */
3961 dp_current_ok
= track_dp_reload (insn
, &dp_current
,
3965 /* When we're looking to see if we've finished we count the number of
3966 paths through the code labels where we weren't able to definitively
3968 This number is used to see if we're converging on a solution.
3969 If this hits zero then we've fully converged, but if this stays the
3970 same as last time then we probably can't make any further
3972 incomplete_scan
= 0;
3973 for (insn
= first_insn
; insn
; insn
= next_nonnote_insn (insn
))
3975 if (GET_CODE (insn
) == CODE_LABEL
)
3977 djt
= &ip2k_dpre_jump_targets
[INSN_UID (insn
)];
3978 if (djt
->touch_count
!= djt
->reach_count
)
3980 incomplete_scan
+= (djt
->reach_count
- djt
->touch_count
);
3981 djt
->dp_equiv
= NULL_RTX
;
3982 djt
->touch_count
= 0;
3987 while (incomplete_scan
&& incomplete_scan
!= last_incomplete_scan
);
3989 /* Finally we scan the whole function and run DP elimination. When we hit
3990 a CODE_LABEL we pick up any stored equivalence since we now know that
3991 every path to this point entered with DP holding the same thing! If
3992 we subsequently have a reload that matches then we can eliminate it. */
3993 dp_current
= NULL_RTX
;
3994 for (insn
= first_insn
; insn
; insn
= next_nonnote_insn (insn
))
3996 if (GET_CODE (insn
) == JUMP_INSN
)
3999 if (GET_CODE (insn
) == CODE_LABEL
)
4001 djt
= &ip2k_dpre_jump_targets
[INSN_UID (insn
)];
4002 dp_current
= djt
->dp_equiv
;
4006 track_dp_reload (insn
, &dp_current
, 1, 1);
4009 free (ip2k_dpre_jump_targets
);
4012 /* As part of the machine-dependent reorg we look for reloads of DP
4013 that we can move to earlier points within the file.
4014 Moving these out of the way allows more peepholes to match. */
4017 mdr_try_move_dp_reload (first_insn
)
4024 /* Don't try to match the first instruction because we can't move it
4026 orig_first
= first_insn
;
4027 first_insn
= next_nonnote_insn (first_insn
);
4029 for (insn
= first_insn
; insn
; insn
= next_nonnote_insn (insn
))
4031 if (GET_CODE (insn
) != INSN
)
4034 set
= (GET_CODE (PATTERN (insn
)) == SET
) ? PATTERN (insn
) : NULL_RTX
;
4035 if (set
== NULL_RTX
)
4038 /* Look for DP being loaded. When we find this we start a rewind
4039 scan looking for possible positions to move this to. */
4040 if (GET_CODE (XEXP (set
, 0)) == REG
4041 && REGNO (XEXP (set
, 0)) == REG_DP
4042 && GET_MODE (XEXP (set
, 0)) == HImode
)
4045 rtx try_insn
= insn
;
4054 /* For now we do the *really* simple version of things and only
4055 attempt to move the load of DP if it's very safe to do so. */
4056 rewind
= prev_nonnote_insn (try_insn
);
4057 if (rewind
!= orig_first
&& rewind
!= NULL_RTX
4058 && GET_CODE (rewind
) == INSN
)
4060 check
= ((GET_CODE (PATTERN (rewind
)) == SET
)
4061 ? PATTERN (rewind
) : NULL_RTX
);
4062 if (check
!= NULL_RTX
4063 && ip2k_composite_xexp_not_uses_cc0_p (XEXP (check
, 0))
4064 && ip2k_composite_xexp_not_uses_cc0_p (XEXP (check
, 1)))
4066 if (GET_CODE (XEXP (check
, 0)) == REG
4067 && REGNO (XEXP (check
, 0)) != REG_DPH
4068 && REGNO (XEXP (check
, 0)) != REG_DPL
4069 && (ip2k_composite_xexp_not_uses_reg_p
4070 (XEXP (check
, 1), REG_DP
, 2))
4071 && (ip2k_composite_xexp_not_uses_reg_p
4073 REGNO (XEXP (check
, 0)),
4074 GET_MODE_SIZE (GET_MODE (XEXP (check
, 0))))))
4076 emit_insn_before (set
, rewind
);
4077 if (try_insn
== insn
)
4078 insn
= prev_nonnote_insn (insn
);
4079 delete_insn (try_insn
);
4080 try_insn
= prev_nonnote_insn (rewind
);
4083 else if (GET_CODE (XEXP (set
, 1)) == REG
4084 && ip2k_composite_xexp_not_uses_reg_p (XEXP (check
, 1), REG_DP
, 2)
4085 && ip2k_composite_xexp_not_uses_reg_p (XEXP (check
, 0), REG_DP
, 2)
4086 && ip2k_composite_xexp_not_uses_reg_p (XEXP (check
, 0), REGNO (XEXP (set
, 1)),
4087 GET_MODE_SIZE (GET_MODE (XEXP (set
, 1)))))
4089 emit_insn_before (set
, rewind
);
4090 if (try_insn
== insn
)
4091 insn
= prev_nonnote_insn (insn
);
4092 delete_insn (try_insn
);
4093 try_insn
= prev_nonnote_insn (rewind
);
4099 while (try_again
&& try_insn
);
4103 #endif /* IP2K_MD_REORG_PASS */
4105 /* Look to see if the expression, x, can have any stack references offset by
4106 a fixed constant, offset. If it definitely can then returns nonzero. */
4109 ip2k_check_can_adjust_stack_ref (rtx x
, int offset
)
4111 if (ARITHMETIC_P (x
))
4112 return (ip2k_check_can_adjust_stack_ref (XEXP (x
, 0), offset
)
4113 && ip2k_check_can_adjust_stack_ref (XEXP (x
, 1), offset
));
4116 return ip2k_check_can_adjust_stack_ref (XEXP (x
, 0), offset
);
4118 switch (GET_CODE (x
))
4121 return (REGNO (x
) != REG_SPH
&& REGNO (x
) != REG_SPL
);
4124 if (GET_CODE (XEXP (x
, 0)) != PLUS
)
4127 if (GET_CODE (XEXP (XEXP (x
, 0), 0)) != REG
)
4130 if (REGNO (XEXP (XEXP (x
, 0), 0)) != REG_SP
)
4133 /* We can't allow this if the adjustment will create an
4135 return (INTVAL (XEXP (XEXP (x
, 0), 1))
4136 + offset
<= (128 - 2 * GET_MODE_SIZE (GET_MODE (x
))));
4150 /* Adjusts all of the stack references in the expression pointed to by x by
4154 ip2k_adjust_stack_ref (rtx
*x
, int offset
)
4156 if (ARITHMETIC_P (*x
))
4158 ip2k_adjust_stack_ref (&XEXP (*x
, 0), offset
);
4159 ip2k_adjust_stack_ref (&XEXP (*x
, 1), offset
);
4165 ip2k_adjust_stack_ref (&XEXP (*x
, 0), offset
);
4169 switch (GET_CODE (*x
))
4172 if (GET_CODE (XEXP (*x
, 0)) != PLUS
)
4175 if (GET_CODE (XEXP (XEXP (*x
, 0), 0)) != REG
)
4178 if (REGNO (XEXP (XEXP (*x
, 0), 0)) != REG_SP
)
4182 XEXP (XEXP (*x
, 0), 1) = GEN_INT (INTVAL (XEXP (XEXP (*x
, 0), 1))
4191 #ifdef IP2K_MD_REORG_PASS
4192 /* As part of the machine-dependent reorg we look to move push instructions
4193 to earlier points within the file. Moving these out of the way allows more
4194 peepholes to match. */
4197 mdr_try_move_pushes (first_insn
)
4204 /* Don't try to match the first instruction because we can't move
4206 orig_first
= first_insn
;
4207 first_insn
= next_nonnote_insn (first_insn
);
4209 for (insn
= first_insn
; insn
; insn
= next_nonnote_insn (insn
))
4211 if (GET_CODE (insn
) != INSN
)
4214 set
= (GET_CODE (PATTERN (insn
)) == SET
) ? PATTERN (insn
) : NULL_RTX
;
4215 if (set
== NULL_RTX
)
4218 /* Have we found a push instruction? */
4219 if (GET_CODE (XEXP (set
, 0)) == MEM
4220 && GET_CODE (XEXP (XEXP (set
, 0), 0)) == POST_DEC
4221 && GET_CODE (XEXP (XEXP (XEXP (set
, 0), 0), 0)) == REG
4222 && REGNO (XEXP (XEXP (XEXP (set
, 0), 0), 0)) == REG_SP
4223 && GET_CODE (XEXP (set
, 1)) == REG
)
4225 rtx try_insn
= insn
;
4226 unsigned int regno
= REGNO (XEXP (set
, 1));
4227 int reg_range
= GET_MODE_SIZE (GET_MODE (XEXP (set
, 1)));
4234 rewind
= prev_nonnote_insn (try_insn
);
4235 if (rewind
== orig_first
|| rewind
== NULL_RTX
4236 || GET_CODE (rewind
) != INSN
)
4239 check
= (GET_CODE (PATTERN (rewind
)) == SET
) ? PATTERN (rewind
) : NULL_RTX
;
4240 if (check
== NULL_RTX
)
4243 if (! ip2k_check_can_adjust_stack_ref (XEXP (check
, 0),
4245 || ! ip2k_check_can_adjust_stack_ref (XEXP (check
, 1),
4249 /* If we've hit another push instruction we can't go any
4251 if (GET_CODE (XEXP (check
, 0)) == MEM
4252 && GET_CODE (XEXP (XEXP (check
, 0), 0)) == POST_DEC
4253 && GET_CODE (XEXP (XEXP (XEXP (check
, 0), 0), 0)) == REG
4254 && REGNO (XEXP (XEXP (XEXP (check
, 0), 0), 0)) == REG_SP
)
4257 /* If this is a register move then check that it doesn't clobber
4258 SP or any part of the instruction we're trying to move. */
4259 if (GET_CODE (XEXP (check
, 0)) == REG
)
4261 unsigned int check_reg
= REGNO (XEXP (check
, 0));
4262 int check_reg_range
= GET_MODE_SIZE (GET_MODE (XEXP (check
,
4265 /* If we have a special case where what we want to push is
4266 being loaded by this "clobbering" insn then we can just
4267 push what is being used to load us and then do the load.
4268 This may seem a little odd, but we may subsequently be
4269 able to merge the load with another instruction as it
4270 may only be used once now! Note though that we
4271 specifically don't try this if the expression being
4272 loaded is an HImode MEM using IP. */
4273 if (check_reg
== regno
4274 && check_reg_range
== reg_range
4275 && ((GET_CODE (XEXP (check
, 1)) == REG
4276 || (GET_CODE (XEXP (check
, 1)) == MEM
4277 && (GET_MODE (XEXP (check
, 1)) != HImode
4278 || ip2k_xexp_not_uses_reg_for_mem (XEXP (check
, 1), REG_IP
))))))
4280 switch (check_reg_range
)
4283 emit_insn_before (gen_movqi (XEXP (set
, 0),
4286 delete_insn (try_insn
);
4290 emit_insn_before (gen_movhi (XEXP (set
, 0),
4293 delete_insn (try_insn
);
4297 emit_insn_before (gen_movsi (XEXP (set
, 0),
4300 delete_insn (try_insn
);
4304 emit_insn_before (gen_movdi (XEXP (set
, 0),
4307 delete_insn (try_insn
);
4311 ip2k_adjust_stack_ref (&XEXP (check
, 0), reg_range
);
4312 ip2k_adjust_stack_ref (&XEXP (check
, 1), reg_range
);
4313 try_insn
= prev_nonnote_insn (rewind
);
4314 /* XXX - should be a continue? */
4318 if ((check_reg
== REG_SPL
)
4319 || (check_reg
== REG_SPH
)
4320 || (((regno
<= check_reg
)
4321 && (regno
+ reg_range
- 1) >= check_reg
)
4322 || ((regno
<= (check_reg
+ check_reg_range
- 1))
4323 && ((regno
+ reg_range
- 1)
4324 >= (check_reg
+ check_reg_range
- 1)))))
4328 emit_insn_before (set
, rewind
);
4329 delete_insn (try_insn
);
4330 ip2k_adjust_stack_ref (&XEXP (check
, 0), reg_range
);
4331 ip2k_adjust_stack_ref (&XEXP (check
, 1), reg_range
);
4332 try_insn
= prev_nonnote_insn (rewind
);
4338 /* Assist the following function, mdr_try_propagate_clr(). */
4341 mdr_try_propagate_clr_sequence (first_insn
, regno
)
4347 for (try_insn
= next_nonnote_insn (first_insn
); try_insn
;
4348 try_insn
= next_nonnote_insn (try_insn
))
4350 rtx new_insn
= NULL_RTX
;
4353 if (GET_CODE (try_insn
) == JUMP_INSN
)
4356 if (GET_CODE (try_insn
) != INSN
)
4359 set2
= ((GET_CODE (PATTERN (try_insn
)) == SET
)
4360 ? PATTERN (try_insn
) : NULL_RTX
);
4361 if (set2
== NULL_RTX
)
4364 if (GET_CODE (XEXP (set2
, 1)) == AND
4365 && ((GET_CODE (XEXP (XEXP (set2
, 1), 0)) == REG
4366 && REGNO (XEXP (XEXP (set2
, 1), 0)) == regno
)
4367 || (GET_CODE (XEXP (XEXP (set2
, 1), 1)) == REG
4368 && REGNO (XEXP (XEXP (set2
, 1), 1)) == regno
)))
4370 rtx remove_insn
= try_insn
;
4371 try_insn
= emit_insn_before (gen_rtx_SET (QImode
, XEXP (set2
, 0),
4372 const0_rtx
), try_insn
);
4373 delete_insn (remove_insn
);
4375 else if (GET_CODE (XEXP (set2
, 1)) == IOR
4376 && GET_CODE (XEXP (XEXP (set2
, 1), 0)) == REG
4377 && REGNO (XEXP (XEXP (set2
, 1), 0)) == regno
)
4379 rtx remove_insn
= try_insn
;
4380 try_insn
= emit_insn_before (gen_rtx_SET (QImode
, XEXP (set2
, 0),
4381 XEXP (XEXP (set2
, 1), 1)),
4383 delete_insn (remove_insn
);
4385 else if (GET_CODE (XEXP (set2
, 1)) == IOR
4386 && GET_CODE (XEXP (XEXP (set2
, 1), 1)) == REG
4387 && REGNO (XEXP (XEXP (set2
, 1), 1)) == regno
)
4389 rtx remove_insn
= try_insn
;
4390 try_insn
= emit_insn_before (gen_rtx_SET (QImode
, XEXP (set2
, 0),
4391 XEXP (XEXP (set2
, 1), 0)),
4393 delete_insn (remove_insn
);
4395 else if (GET_CODE (XEXP (set2
, 1)) == XOR
4396 && GET_CODE (XEXP (XEXP (set2
, 1), 0)) == REG
4397 && REGNO (XEXP (XEXP (set2
, 1), 0)) == regno
)
4399 rtx remove_insn
= try_insn
;
4400 try_insn
= emit_insn_before (gen_rtx_SET (QImode
, XEXP (set2
, 0),
4401 XEXP (XEXP (set2
, 1), 1)),
4403 delete_insn (remove_insn
);
4405 else if (GET_CODE (XEXP (set2
, 1)) == XOR
4406 && GET_CODE (XEXP (XEXP (set2
, 1), 1)) == REG
4407 && REGNO (XEXP (XEXP (set2
, 1), 1)) == regno
)
4409 rtx remove_insn
= try_insn
;
4410 try_insn
= emit_insn_before (gen_rtx_SET (QImode
, XEXP (set2
, 0),
4411 XEXP (XEXP (set2
, 1), 0)),
4413 delete_insn (remove_insn
);
4416 if (GET_CODE (XEXP (set2
, 0)) == REG
)
4418 int reg2_range
= GET_MODE_SIZE (GET_MODE (XEXP (set2
, 0)));
4419 unsigned int regno2
= REGNO (XEXP (set2
, 0));
4423 && GET_CODE (XEXP (set2
, 1)) == CONST_INT
)
4425 int iv
= INTVAL (XEXP (set2
, 1));
4428 if (iv
== 1 || iv
== -1)
4430 new_insn
= gen_rtx_SET (QImode
, XEXP (set2
, 0),
4431 gen_rtx_PLUS (QImode
, XEXP (set2
, 0),
4433 new_insn
= emit_insn_before (new_insn
, try_insn
);
4434 delete_insn (try_insn
);
4435 try_insn
= new_insn
;
4440 if ((regno
>= regno2
) && (regno
<= regno2
+ reg2_range
- 1))
4443 if (GET_CODE (XEXP (set2
, 1)) == REG
4444 && REGNO (XEXP (set2
, 1)) == regno
)
4446 new_insn
= emit_insn_before (gen_rtx_SET (QImode
,
4450 delete_insn (try_insn
);
4451 try_insn
= new_insn
;
4455 if (GET_CODE (XEXP (set2
, 0)) == CC0
)
4457 if (GET_CODE (XEXP (set2
, 1)) == REG
4458 && GET_MODE_SIZE (GET_MODE (XEXP (set2
, 1))) == 2
4459 && REGNO (XEXP (set2
, 1)) == regno
)
4461 new_insn
= gen_rtx_SET (VOIDmode
, gen_rtx_CC0 (VOIDmode
),
4462 gen_rtx_REG(QImode
, regno
+ 1));
4463 new_insn
= emit_insn_before (new_insn
, try_insn
);
4465 else if (GET_CODE (XEXP (set2
, 1)) == COMPARE
4466 && GET_CODE (XEXP (XEXP (set2
, 1), 0)) == REG
4467 && GET_MODE_SIZE (GET_MODE (XEXP (XEXP (set2
, 1), 0))) == 2
4468 && REGNO (XEXP (XEXP (set2
, 1), 0)) == regno
4469 && GET_CODE (XEXP (XEXP (set2
, 1), 1)) == CONST_INT
4470 && INTVAL (XEXP (XEXP (set2
, 1), 1)) >= 0
4471 && INTVAL (XEXP (XEXP (set2
, 1), 1)) < 256)
4473 new_insn
= gen_rtx_SET (VOIDmode
, cc0_rtx
,
4474 gen_rtx_COMPARE(QImode
,
4475 gen_rtx_REG (QImode
,
4477 XEXP (XEXP (set2
, 1),
4479 new_insn
= emit_insn_before (new_insn
, try_insn
);
4482 /* If we have inserted a replacement for a CC0 setter operation
4483 then we need to delete the old one. */
4484 if (new_insn
!= NULL_RTX
)
4486 delete_insn (try_insn
);
4487 try_insn
= new_insn
;
4489 /* Now as we know that we have just done an unsigned compare
4490 (remember we were zero-extended by the clr!) we also know
4491 that we don't need a signed jump insn. If we find that
4492 our next isns is a signed jump then make it unsigned! */
4493 if (GET_CODE (next_nonnote_insn (try_insn
)) == JUMP_INSN
)
4497 try_insn
= next_nonnote_insn (try_insn
);
4498 set3
= ((GET_CODE (PATTERN (try_insn
)) == SET
)
4499 ? PATTERN (try_insn
) : NULL_RTX
);
4500 if (set3
== NULL_RTX
)
4503 /* If we discover that our jump target is only accessible
4504 from here then we can continue our "clr" propagation to
4506 if (LABEL_NUSES (JUMP_LABEL (try_insn
)) == 1)
4507 mdr_try_propagate_clr_sequence (JUMP_LABEL (try_insn
),
4510 if (GET_CODE (XEXP (set3
, 0)) == PC
4511 && GET_CODE (XEXP (set3
, 1)) == IF_THEN_ELSE
4512 && (GET_CODE (XEXP (XEXP (set3
, 1), 0)) == GT
4513 || GET_CODE (XEXP (XEXP (set3
, 1), 0)) == GE
4514 || GET_CODE (XEXP (XEXP (set3
, 1), 0)) == LT
4515 || GET_CODE (XEXP (XEXP (set3
, 1), 0)) == LE
)
4516 && GET_CODE (XEXP (XEXP (XEXP (set3
, 1), 0), 0)) == CC0
4517 && (GET_CODE (XEXP (XEXP (XEXP (set3
, 1), 0), 1))
4519 && GET_CODE (XEXP (XEXP (set3
, 1), 1)) == LABEL_REF
4520 && GET_CODE (XEXP (XEXP (set3
, 1), 2)) == PC
)
4526 /* Replace our old conditional jump with a new one that
4527 does the unsigned form of what was previously a
4528 signed comparison. */
4529 code
= GET_CODE (XEXP (XEXP (set3
, 1), 0));
4530 cmp
= gen_rtx_fmt_ee ((code
== GT
4534 : (code
== LT
? LTU
: LEU
))),
4536 XEXP (XEXP (XEXP (set3
, 1), 0), 0),
4537 XEXP (XEXP (XEXP (set3
, 1), 0),
4540 = gen_rtx_SET (GET_MODE (set3
),
4542 gen_rtx_IF_THEN_ELSE
4543 (GET_MODE (XEXP (set3
, 1)), cmp
,
4544 XEXP (XEXP (set3
, 1), 1),
4545 XEXP (XEXP (set3
, 1), 2)));
4546 new_insn
= emit_jump_insn_before (new_if
, try_insn
);
4547 LABEL_NUSES (JUMP_LABEL (try_insn
))++;
4548 delete_insn (try_insn
);
4549 try_insn
= new_insn
;
4554 else if (GET_CODE (XEXP (set2
, 1)) == PLUS
4555 && GET_CODE (XEXP (XEXP (set2
, 1), 0)) == REG
4556 && GET_MODE_SIZE (GET_MODE (XEXP (XEXP (set2
, 1), 0))) == 2
4557 && REGNO (XEXP (XEXP (set2
, 1), 0)) == regno
4558 && (GET_CODE (XEXP (XEXP (set2
, 1), 1)) == REG
4559 || GET_CODE (XEXP (XEXP (set2
, 1), 1)) == MEM
4560 || GET_CODE (XEXP (XEXP (set2
, 1), 1)) == CONST_INT
4561 || GET_CODE (XEXP (XEXP (set2
, 1), 1)) == CONST
4562 || GET_CODE (XEXP (XEXP (set2
, 1), 1)) == SYMBOL_REF
))
4564 rtx extend
= gen_rtx_ZERO_EXTEND (HImode
,
4565 gen_rtx_REG (QImode
, regno
+ 1));
4566 new_insn
= gen_rtx_SET (HImode
, XEXP (set2
, 0),
4567 gen_rtx_PLUS (HImode
, extend
,
4568 XEXP (XEXP (set2
, 1), 1)));
4569 new_insn
= emit_insn_before (new_insn
, try_insn
);
4570 delete_insn (try_insn
);
4571 try_insn
= new_insn
;
4573 else if (GET_CODE (XEXP (set2
, 1)) == PLUS
4574 && GET_CODE (XEXP (XEXP (set2
, 1), 1)) == REG
4575 && GET_MODE_SIZE (GET_MODE (XEXP (XEXP (set2
, 1), 1))) == 2
4576 && REGNO (XEXP (XEXP (set2
, 1), 1)) == regno
4577 && (GET_CODE (XEXP (XEXP (set2
, 1), 0)) == REG
4578 || GET_CODE (XEXP (XEXP (set2
, 1), 0)) == MEM
4579 || GET_CODE (XEXP (XEXP (set2
, 1), 0)) == CONST_INT
4580 || GET_CODE (XEXP (XEXP (set2
, 1), 0)) == CONST
4581 || GET_CODE (XEXP (XEXP (set2
, 1), 0)) == SYMBOL_REF
))
4583 rtx t_src
= gen_rtx_PLUS (HImode
,
4584 gen_rtx_ZERO_EXTEND (HImode
,
4585 gen_rtx_REG (QImode
,
4588 XEXP (XEXP (set2
, 1), 0));
4589 new_insn
= emit_insn_before (gen_rtx_SET (HImode
, XEXP (set2
, 0),
4592 delete_insn (try_insn
);
4593 try_insn
= new_insn
;
4598 /* One of the things that can quite often happen with an 8-bit CPU is that
4599 we end up clearing the MSByte of a 16-bit value. Unfortunately, all too
4600 often gcc doesn't have any way to realize that only half of the value is
4601 useful and ends up doing more work than it should. We scan for such
4602 occurrences here, track them and reduce compare operations to a smaller
4603 size where possible.
4605 Note that this is somewhat different to move propagation as we may
4606 actually change some instruction patterns when we're doing this whereas
4607 move propagation is just about doing a search and replace. */
4610 mdr_try_propagate_clr (first_insn
)
4616 for (insn
= first_insn
; insn
; insn
= next_nonnote_insn (insn
))
4618 if (GET_CODE (insn
) != INSN
)
4621 set
= (GET_CODE (PATTERN (insn
)) == SET
) ? PATTERN (insn
) : NULL_RTX
;
4622 if (set
== NULL_RTX
)
4625 /* Have we found a "clr" instruction? */
4626 if (GET_CODE (XEXP (set
, 0)) == REG
4627 && GET_CODE (XEXP (set
, 1)) == CONST_INT
4628 && GET_MODE_SIZE (GET_MODE (XEXP (set
, 0))) == 1
4629 && INTVAL (XEXP (set
, 1)) == 0)
4631 mdr_try_propagate_clr_sequence (insn
, REGNO (XEXP (set
, 0)));
4635 #endif /* IP2K_MD_REORG_PASS */
4637 /* Look to see if the expression, x, does not make any memory references
4638 via the specified register. This is very conservative and only returns
4639 nonzero if we definitely don't have such a memory ref. */
4642 ip2k_xexp_not_uses_reg_for_mem (rtx x
, unsigned int regno
)
4645 regno
&= 0xfffffffe;
4647 switch (GET_CODE (x
))
4653 if ((GET_CODE (XEXP (x
, 0)) == PLUS
4654 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == REG
4655 && REGNO (XEXP (XEXP (x
, 0), 0)) == regno
)
4656 || (GET_CODE (XEXP (x
, 0)) == REG
4657 && REGNO (XEXP (x
, 0)) == regno
))
4672 if (GET_RTX_CLASS (GET_CODE (x
)) == RTX_BITFIELD_OPS
)
4673 return (ip2k_xexp_not_uses_reg_for_mem (XEXP (x
, 0), regno
)
4674 && ip2k_xexp_not_uses_reg_for_mem (XEXP (x
, 1), regno
)
4675 && ip2k_xexp_not_uses_reg_for_mem (XEXP (x
, 2), regno
));
4678 return (ip2k_xexp_not_uses_reg_for_mem (XEXP (x
, 0), regno
)
4679 && ip2k_xexp_not_uses_reg_for_mem (XEXP (x
, 1), regno
));
4682 || GET_RTX_CLASS (GET_CODE (x
)) == '3')
4683 return ip2k_xexp_not_uses_reg_for_mem (XEXP (x
, 0), regno
);
4689 #ifdef IP2K_MD_REORG_PASS
4690 /* Assist the following function, mdr_try_propagate_move(). */
4693 mdr_try_propagate_move_sequence (first_insn
, orig
, equiv
)
4700 for (try_insn
= next_nonnote_insn (first_insn
); try_insn
;
4701 try_insn
= next_nonnote_insn (try_insn
))
4705 rtx new_equiv
= NULL_RTX
;
4707 if (GET_CODE (try_insn
) != JUMP_INSN
&& GET_CODE (try_insn
) != INSN
)
4710 set
= single_set (try_insn
);
4711 if (set
== NULL_RTX
)
4714 range
= MAX (GET_MODE_SIZE (GET_MODE (equiv
)),
4715 GET_MODE_SIZE (GET_MODE (XEXP (set
, 0))));
4717 if (GET_CODE (equiv
) == REG
4718 && REGNO (equiv
) == REG_W
4719 && (recog_memoized (try_insn
) < 0
4720 || get_attr_clobberw (try_insn
) != CLOBBERW_NO
)
4721 && (! (GET_CODE (XEXP (set
, 0)) == REG
4722 && REGNO (XEXP (set
, 0)) == REG_W
4723 && rtx_equal_p (XEXP (set
, 1), orig
))))
4725 else if (GET_CODE (XEXP (set
, 0)) == REG
4726 && (REGNO (XEXP (set
, 0)) == REG_SP
4727 || ! ip2k_xexp_not_uses_reg_p (equiv
, REGNO (XEXP (set
, 0)),
4729 || ! ip2k_xexp_not_uses_reg_p (orig
, REGNO (XEXP (set
, 0)),
4731 && ! rtx_equal_p (equiv
, XEXP (set
, 0))
4732 && ! rtx_equal_p (orig
, XEXP (set
, 0)))
4734 else if (GET_CODE (orig
) == REG
4735 && (REGNO (orig
) == REG_IPL
4736 || REGNO (orig
) == REG_IPH
4737 || REGNO (orig
) == REG_DPL
4738 || REGNO (orig
) == REG_DPH
)
4739 && (! ip2k_xexp_not_uses_reg_for_mem (XEXP (set
, 0),
4741 || ! ip2k_xexp_not_uses_reg_for_mem (XEXP (set
, 1),
4744 else if (GET_CODE (XEXP (set
, 0)) == MEM
4745 && GET_CODE (equiv
) == MEM
)
4747 if (! ip2k_xexp_not_uses_reg_p (equiv
, REG_SP
, 2))
4749 if (! ip2k_xexp_not_uses_reg_p (XEXP (set
, 0), REG_SP
, 2))
4751 /* We look for a special case of "push" operations screwing
4752 our register equivalence when it's based on a stack slot.
4753 We can track this one and replace the old equivalence
4754 expression with a new one. */
4755 if (GET_CODE (XEXP (XEXP (set
, 0), 0)) == POST_DEC
4756 && GET_CODE (XEXP (XEXP (XEXP (set
, 0), 0), 0)) == REG
4757 && REGNO (XEXP (XEXP (XEXP (set
, 0), 0), 0)) == REG_SP
4758 && GET_CODE (XEXP (equiv
, 0)) == PLUS
4759 && REGNO (XEXP (XEXP (equiv
, 0), 0)) == REG_SP
)
4761 int md_size
= GET_MODE_SIZE (GET_MODE (XEXP (set
, 0)));
4762 int new_sp_offs
= INTVAL (XEXP (XEXP (equiv
, 0), 1))
4765 /* Don't allow an invalid stack pointer offset to be
4767 if (new_sp_offs
> (128 - 2 * md_size
))
4771 = gen_rtx_MEM (GET_MODE (equiv
),
4772 gen_rtx_PLUS (Pmode
,
4773 gen_rtx_REG (HImode
,
4775 GEN_INT (new_sp_offs
)));
4777 else if (! rtx_equal_p (equiv
, XEXP (set
, 0)))
4779 /* Look at the SP offsets and look for any overlaps. */
4780 int equiv_offs
= GET_CODE (XEXP (equiv
, 0)) == PLUS
4781 ? INTVAL (XEXP (XEXP (equiv
, 0), 1))
4784 = (GET_CODE (XEXP (XEXP (set
, 0), 0)) == PLUS
4785 ? INTVAL (XEXP (XEXP (XEXP (set
, 0), 0), 1))
4788 if (abs (equiv_offs
- set_offs
) < range
)
4794 if (! ip2k_xexp_not_uses_reg_p (equiv
, REG_IP
, 2))
4797 if (! ip2k_xexp_not_uses_reg_p (XEXP (set
, 0), REG_DP
, 2)
4798 && ! ip2k_xexp_not_uses_reg_p (equiv
, REG_DP
, 2)
4799 && ! rtx_equal_p (equiv
, XEXP (set
, 0)))
4801 /* Look at the DP offsets and look for any overlaps. */
4802 int equiv_offs
= GET_CODE (XEXP (equiv
, 0)) == PLUS
4803 ? INTVAL (XEXP (XEXP (equiv
, 0), 1))
4805 int set_offs
= GET_CODE (XEXP (XEXP (set
, 0), 0)) == PLUS
4806 ? INTVAL (XEXP (XEXP (XEXP (set
, 0), 0), 1))
4809 if (abs (equiv_offs
- set_offs
) < range
)
4814 validate_replace_rtx_subexp (orig
, equiv
, try_insn
, &XEXP (set
, 1));
4816 if (rtx_equal_p (equiv
, XEXP (set
, 0))
4817 || rtx_equal_p (orig
, XEXP (set
, 0)))
4820 if (new_equiv
!= NULL_RTX
)
4825 /* Try propagating move instructions forwards. It may be that we can
4826 replace a register use with an equivalent expression that already
4827 holds the same value and thus allow one or more register loads to
4831 mdr_try_propagate_move (first_insn
)
4837 for (insn
= first_insn
; insn
; insn
= next_nonnote_insn (insn
))
4839 if (GET_CODE (insn
) != INSN
)
4842 set
= (GET_CODE (PATTERN (insn
)) == SET
) ? PATTERN (insn
) : NULL_RTX
;
4843 if (set
== NULL_RTX
)
4846 /* Have we found a simple move instruction? */
4847 if (GET_CODE (XEXP (set
, 0)) == REG
4848 && (REGNO (XEXP (set
, 0)) >= 0x80
4849 || REGNO (XEXP (set
, 0)) == REG_DPL
4850 || REGNO (XEXP (set
, 0)) == REG_DPH
4851 || REGNO (XEXP (set
, 0)) == REG_IPL
4852 || REGNO (XEXP (set
, 0)) == REG_IPH
)
4853 && ((GET_CODE (XEXP (set
, 1)) == REG
4854 && REGNO (XEXP (set
, 1)) != REG_SP
4855 && ip2k_xexp_not_uses_reg_p (XEXP (set
, 0),
4856 REGNO (XEXP (set
, 1)),
4857 GET_MODE_SIZE (GET_MODE (XEXP (set
,
4859 || (GET_CODE (XEXP (set
, 1)) == MEM
4860 && (ip2k_xexp_not_uses_reg_p (XEXP (set
, 1), REG_IP
, 2)
4861 || GET_MODE (XEXP (set
, 1)) == QImode
)
4862 && ((REGNO (XEXP (set
, 0)) != REG_DPH
4863 && REGNO (XEXP (set
, 0)) != REG_DPL
)
4864 || ip2k_xexp_not_uses_reg_p (XEXP (set
, 1), REG_DP
, 2)))
4865 || (GET_CODE (XEXP (set
, 1)) == CONST_INT
4866 && (GET_MODE (XEXP (set
, 0)) != QImode
4867 || INTVAL (XEXP (set
, 1)) != 0))
4868 || GET_CODE (XEXP (set
, 1)) == CONST_DOUBLE
4869 || GET_CODE (XEXP (set
, 1)) == CONST
4870 || GET_CODE (XEXP (set
, 1)) == SYMBOL_REF
))
4872 mdr_try_propagate_move_sequence (insn
, XEXP (set
, 0), XEXP (set
, 1));
4877 /* Try to remove redundant instructions. */
4880 mdr_try_remove_redundant_insns (first_insn
)
4885 for (insn
= first_insn
; insn
; insn
= next_nonnote_insn (insn
))
4888 enum machine_mode mode
;
4890 HOST_WIDE_INT pattern
;
4893 if (GET_CODE (insn
) != INSN
)
4896 if (GET_CODE (PATTERN (insn
)) == CONST_INT
)
4898 /* We've found a dummy expression. */
4899 rtx remove_insn
= insn
;
4900 insn
= prev_nonnote_insn (insn
);
4901 delete_insn (remove_insn
);
4905 set
= (GET_CODE (PATTERN (insn
)) == SET
) ? PATTERN (insn
) : NULL_RTX
;
4906 if (set
== NULL_RTX
)
4909 mode
= GET_MODE (XEXP (set
, 0));
4910 md_size
= GET_MODE_SIZE (mode
);
4911 if ((md_size
< 1) || (md_size
> 4))
4915 for (i
= 0; i
< md_size
; i
++)
4921 if ((GET_CODE (XEXP (set
, 1)) == AND
4922 && GET_CODE (XEXP (XEXP (set
, 1), 1)) == CONST_INT
4923 && INTVAL (XEXP (XEXP (set
, 1), 1)) == pattern
)
4924 || ((GET_CODE (XEXP (set
, 1)) == IOR
4925 || GET_CODE (XEXP (set
, 1)) == XOR
)
4926 && GET_CODE (XEXP (XEXP (set
, 1), 1)) == CONST_INT
4927 && INTVAL (XEXP (XEXP (set
, 1), 1)) == 0x00))
4929 /* We've found an AND with all 1's, an XOR with all 0's or an
4931 rtx remove_insn
= insn
;
4933 /* Is it completely redundant or should it become a move insn? */
4934 if (! rtx_equal_p (XEXP (set
, 0), XEXP (XEXP (set
, 1), 0)))
4936 emit_insn_before (gen_rtx_SET (mode
,
4938 XEXP (XEXP (set
, 1), 0)),
4942 insn
= prev_nonnote_insn(insn
);
4943 delete_insn (remove_insn
);
4945 else if (GET_CODE (XEXP (set
, 1)) == AND
4946 && GET_CODE (XEXP (XEXP (set
, 1), 1)) == CONST_INT
4947 && INTVAL (XEXP (XEXP (set
, 1), 1)) == 0)
4949 /* We've found an AND with all 0's. */
4950 rtx remove_insn
= insn
;
4951 insn
= emit_insn_before (gen_rtx_SET (mode
,
4953 XEXP (XEXP (set
, 1), 1)),
4955 delete_insn (remove_insn
);
4960 /* Structure used to track jump targets. */
4962 struct we_jump_targets
4964 int target
; /* Is this a jump target? */
4965 int reach_count
; /* Number of ways we can reach this insn. */
4966 int touch_count
; /* Number of times we've touched this insn
4968 rtx w_equiv
; /* WREG-equivalence at this point. */
4971 struct we_jump_targets
*ip2k_we_jump_targets
;
4973 /* WREG equivalence tracking used within DP reload elimination. */
4976 track_w_reload (insn
, w_current
, w_current_ok
, modifying
)
4984 if (GET_CODE (insn
) != INSN
)
4986 *w_current
= NULL_RTX
;
4990 set
= (GET_CODE (PATTERN (insn
)) == SET
) ? PATTERN (insn
) : NULL_RTX
;
4991 if (set
== NULL_RTX
)
4993 *w_current
= NULL_RTX
;
4997 /* Look for W being modified. If it is, see if it's being changed
4998 to what it already is! */
4999 if (GET_CODE (XEXP (set
, 0)) == REG
5000 && REGNO (XEXP (set
, 0)) == REG_W
5001 && GET_MODE (XEXP (set
, 0)) == QImode
)
5003 /* If this is an equivalence we can delete the new set operation. */
5004 if (*w_current
!= NULL_RTX
5005 && rtx_equal_p (XEXP (set
, 1), *w_current
))
5012 *w_current
= XEXP (set
, 1);
5016 else if (recog_memoized (insn
) < 0
5017 || get_attr_clobberw (insn
) != CLOBBERW_NO
)
5019 /* If we clobber W then we've clobbered any equivalences ! */
5020 *w_current
= NULL_RTX
;
5023 else if (! ip2k_xexp_not_uses_reg_p (XEXP (set
, 0), REG_SP
, 2)
5024 && *w_current
!= NULL_RTX
5025 && !ip2k_xexp_not_uses_reg_p (*w_current
, REG_SP
, 2))
5027 /* We look for a special case of "push" operations screwing up the
5028 setting of DP when it's based on the stack. We can track this one
5029 and replace the old expression for DP with a new one. */
5030 if (GET_CODE (XEXP (set
, 0)) == MEM
5031 && GET_CODE (XEXP (XEXP (set
, 0), 0)) == POST_DEC
5032 && GET_CODE (XEXP (XEXP (XEXP (set
, 0), 0), 0)) == REG
5033 && REGNO (XEXP (XEXP (XEXP (set
, 0), 0), 0)) == REG_SP
5034 && GET_CODE (*w_current
) == MEM
5035 && GET_CODE (XEXP (*w_current
, 0)) == PLUS
)
5037 /* XXX - need to ensure that we can track this without going
5039 rtx val
= GEN_INT (INTVAL (XEXP (XEXP (*w_current
, 0), 1))
5040 + GET_MODE_SIZE (GET_MODE (XEXP (set
, 0))));
5042 = gen_rtx_MEM (HImode
, gen_rtx_PLUS (Pmode
,
5043 gen_rtx_REG(HImode
, REG_SP
),
5048 else if (GET_CODE (XEXP (set
, 0)) == REG
5049 && *w_current
!= NULL_RTX
5050 && !ip2k_xexp_not_uses_reg_p (*w_current
, REGNO (XEXP (set
, 0)),
5051 GET_MODE_SIZE (GET_MODE (XEXP (set
5054 /* If we've just clobbered all or part of a register reference that we
5055 were sharing for W then we can't share it any more! */
5056 *w_current
= NULL_RTX
;
5059 return w_current_ok
;
5062 /* As part of the machine-dependent reorg we scan moves into w and track them
5063 to see where any are redundant. */
5066 mdr_try_wreg_elim (first_insn
)
5070 struct we_jump_targets
*wjt
;
5072 int incomplete_scan
;
5073 int last_incomplete_scan
;
5075 ip2k_we_jump_targets
5076 = (struct we_jump_targets
*) xcalloc (get_max_uid (),
5077 sizeof (struct we_jump_targets
));
5079 /* First we scan to build up a list of all CODE_LABEL insns and we work out
5080 how many different ways we can reach them. */
5081 for (insn
= first_insn
; insn
; insn
= next_nonnote_insn (insn
))
5083 if (GET_CODE (insn
) == CODE_LABEL
)
5085 wjt
= &ip2k_we_jump_targets
[INSN_UID (insn
)];
5087 wjt
->reach_count
= LABEL_NUSES (insn
);
5088 wjt
->touch_count
= 0;
5089 wjt
->w_equiv
= NULL_RTX
;
5090 if (! prev_nonnote_insn (insn
)
5091 || (prev_nonnote_insn (insn
)
5092 && GET_CODE (prev_nonnote_insn (insn
)) != BARRIER
))
5097 /* Next we scan all of the ways of reaching the code labels to see
5098 what the WREG register is equivalent to as we reach them. If we find
5099 that they're the same then we keep noting the matched value. We
5100 iterate around this until we reach a convergence on WREG equivalences
5101 at all code labels - we have to be very careful not to be too
5103 incomplete_scan
= -1;
5106 int w_current_ok
= 0;
5107 last_incomplete_scan
= incomplete_scan
;
5108 w_current
= NULL_RTX
;
5110 for (insn
= first_insn
; insn
; insn
= next_nonnote_insn (insn
))
5112 /* If we have a code label then we need to see if we already know
5113 what the equivalence is at this point. If we do then we use it
5114 immediately, but if we don't then we have a special case to track
5115 when we hit a fallthrough-edge (label with no barrier preceding
5116 it). Any other accesses to the label must be from jump insns
5117 and so they're handled elsewhere. */
5118 if (GET_CODE (insn
) == CODE_LABEL
)
5120 wjt
= &ip2k_we_jump_targets
[INSN_UID (insn
)];
5122 /* If we're fully characterized the use the equivalence. */
5123 if (wjt
->touch_count
== wjt
->reach_count
)
5125 w_current
= wjt
->w_equiv
;
5130 /* If we have a known equivalence for WREG as we reach the
5131 fallthrough-edge then track this into the code label. */
5133 && (! prev_nonnote_insn (insn
)
5134 || (prev_nonnote_insn (insn
)
5135 && GET_CODE (prev_nonnote_insn (insn
)) != BARRIER
)))
5137 if (wjt
->touch_count
== 0)
5138 wjt
->w_equiv
= w_current
;
5140 if (wjt
->touch_count
< wjt
->reach_count
)
5143 if (! rtx_equal_p (wjt
->w_equiv
, w_current
))
5145 /* When we definitely know that we can't form an
5146 equivalence for WREG here we must clobber anything
5147 that we'd started to track too. */
5148 wjt
->w_equiv
= NULL_RTX
;
5149 w_current
= NULL_RTX
;
5155 /* If we've not completely characterized this code label then
5156 be cautious and assume that we don't know what WREG is
5158 if (wjt
->touch_count
< wjt
->reach_count
)
5160 w_current
= NULL_RTX
;
5167 /* If we've hit a jump insn then we look for either an address
5168 vector (jump table) or for jump label references. */
5169 if (GET_CODE (insn
) == JUMP_INSN
)
5171 /* Don't attempt to track here if we don't have a known
5172 equivalence for WREG at this point. */
5175 if (JUMP_LABEL (insn
))
5178 = &ip2k_we_jump_targets
[INSN_UID (JUMP_LABEL (insn
))];
5180 if (wjt
->touch_count
== 0)
5181 wjt
->w_equiv
= w_current
;
5183 if (wjt
->touch_count
< wjt
->reach_count
)
5186 if (! rtx_equal_p (wjt
->w_equiv
, w_current
))
5187 wjt
->w_equiv
= NULL_RTX
;
5195 /* Anything other than a code labal or jump arrives here. We try and
5196 track WREG, but sometimes we might not be able to. */
5197 w_current_ok
= track_w_reload (insn
, &w_current
, w_current_ok
, 0);
5200 /* When we're looking to see if we've finished we count the number of
5201 paths through the code labels where we weren't able to definitively
5202 track WREG. This number is used to see if we're converging on a
5204 If this hits zero then we've fully converged, but if this stays the
5205 same as last time then we probably can't make any further
5207 incomplete_scan
= 0;
5208 for (insn
= first_insn
; insn
; insn
= next_nonnote_insn (insn
))
5210 if (GET_CODE (insn
) == CODE_LABEL
)
5212 wjt
= &ip2k_we_jump_targets
[INSN_UID (insn
)];
5213 if (wjt
->touch_count
!= wjt
->reach_count
)
5215 incomplete_scan
+= (wjt
->reach_count
- wjt
->touch_count
);
5216 wjt
->w_equiv
= NULL_RTX
;
5217 wjt
->touch_count
= 0;
5222 while (incomplete_scan
&& incomplete_scan
!= last_incomplete_scan
);
5224 /* Finally we scan the whole function and run WREG elimination. When we hit
5225 a CODE_LABEL we pick up any stored equivalence since we now know that
5226 every path to this point entered with WREG holding the same thing! If
5227 we subsequently have a reload that matches then we can eliminate it. */
5228 w_current
= NULL_RTX
;
5229 for (insn
= first_insn
; insn
; insn
= next_nonnote_insn (insn
))
5231 if (GET_CODE (insn
) == JUMP_INSN
)
5234 if (GET_CODE (insn
) == CODE_LABEL
)
5236 wjt
= &ip2k_we_jump_targets
[INSN_UID (insn
)];
5237 w_current
= wjt
->w_equiv
;
5241 track_w_reload (insn
, &w_current
, 1, 1);
5244 free (ip2k_we_jump_targets
);
5246 #endif /* IP2K_MD_REORG_PASS */
5248 /* We perform a lot of untangling of the RTL within the reorg pass since
5249 the IP2k requires some really bizarre (and really undesirable) things
5250 to happen in order to guarantee not aborting. This pass causes several
5251 earlier passes to be re-run as it progressively transforms things,
5252 making the subsequent runs continue to win. */
5257 #ifdef IP2K_MD_REORG_PASS
5258 rtx first_insn
, insn
, set
;
5265 ip2k_reorg_completed
= 1;
5266 ip2k_reorg_split_dimode
= 1;
5267 ip2k_reorg_split_simode
= 1;
5268 ip2k_reorg_split_himode
= 1;
5269 ip2k_reorg_split_qimode
= 1;
5270 ip2k_reorg_merge_qimode
= 1;
5273 #ifndef IP2K_MD_REORG_PASS
5274 ip2k_reorg_completed
= 1;
5275 ip2k_reorg_split_dimode
= 1;
5276 ip2k_reorg_split_simode
= 1;
5277 ip2k_reorg_split_himode
= 1;
5278 ip2k_reorg_split_qimode
= 1;
5279 ip2k_reorg_merge_qimode
= 1;
5281 /* All optimizations below must be debugged and enabled one by one.
5282 All of them commented now because of abort in GCC core. */
5284 ip2k_reorg_in_progress
= 1;
5286 first_insn
= get_insns ();
5288 /* Look for size effects of earlier optimizations - in particular look for
5289 situations where we're saying "use" a register on one hand but immediately
5290 tagging it as "REG_DEAD" at the same time! Seems like a bug in core-gcc
5291 somewhere really but this is what we have to live with! */
5292 for (insn
= first_insn
; insn
; insn
= NEXT_INSN (insn
))
5296 if (GET_CODE (insn
) == CODE_LABEL
5297 || GET_CODE (insn
) == NOTE
5298 || GET_CODE (insn
) == BARRIER
)
5304 body
= PATTERN (insn
);
5305 if (GET_CODE (body
) == USE
)
5306 if (GET_CODE (XEXP (body
, 0)) == REG
)
5310 reg
= REGNO (XEXP (body
, 0));
5311 if (find_regno_note (insn
, REG_DEAD
, reg
))
5318 /* There's a good chance that since we last did CSE that we've rearranged
5319 things in such a way that another go will win. Do so now! */
5320 reload_cse_regs (first_insn
);
5321 find_basic_blocks (first_insn
);
5322 life_analysis (0, PROP_REG_INFO
| PROP_DEATH_NOTES
);
5324 /* Look for where absurd things are happening with DP. */
5325 mdr_try_dp_reload_elim (first_insn
);
5327 ip2k_reorg_in_progress
= 0;
5328 ip2k_reorg_completed
= 1;
5330 split_all_insns (0);
5332 reload_cse_regs (first_insn
);
5333 find_basic_blocks (first_insn
);
5334 life_analysis (0, PROP_REG_INFO
| PROP_DEATH_NOTES
);
5336 peephole2_optimize (NULL
);
5338 mdr_resequence_xy_yx (first_insn
);
5339 mdr_propagate_reg_equivs (first_insn
);
5341 /* Look for redundant set instructions. These can occur when we split
5342 instruction patterns and end up with the second half merging with
5343 or being replaced by something that clobbers the first half. */
5344 for (insn
= first_insn
; insn
; insn
= next_nonnote_insn (insn
))
5346 if (GET_CODE (insn
) == INSN
)
5348 set
= (GET_CODE (PATTERN (insn
)) == SET
) ? PATTERN (insn
) : NULL_RTX
;
5349 if ((set
!= NULL_RTX
)
5350 && (GET_CODE (XEXP (set
, 0)) == REG
)
5351 && (GET_MODE (XEXP (set
, 0)) == QImode
)
5352 && (find_regno_note (insn
, REG_UNUSED
, REGNO (XEXP (set
, 0)))))
5357 mdr_try_move_dp_reload (first_insn
);
5358 mdr_try_move_pushes (first_insn
);
5360 find_basic_blocks (first_insn
);
5361 life_analysis (0, PROP_FINAL
);
5363 mdr_try_propagate_move (first_insn
);
5364 mdr_resequence_xy_yx (first_insn
);
5366 ip2k_reorg_split_dimode
= 1;
5367 split_all_insns (0);
5369 mdr_try_remove_redundant_insns (first_insn
);
5371 mdr_try_propagate_move (first_insn
);
5373 reload_cse_regs (first_insn
);
5374 find_basic_blocks (first_insn
);
5375 life_analysis (0, PROP_FINAL
);
5377 peephole2_optimize (NULL
);
5379 mdr_try_propagate_move (first_insn
);
5381 find_basic_blocks (first_insn
);
5382 life_analysis (0, PROP_FINAL
);
5384 ip2k_reorg_split_simode
= 1;
5385 split_all_insns (0);
5387 mdr_try_remove_redundant_insns (first_insn
);
5389 mdr_try_propagate_move (first_insn
);
5391 reload_cse_regs (first_insn
);
5392 find_basic_blocks (first_insn
);
5393 life_analysis (0, PROP_FINAL
);
5395 peephole2_optimize (NULL
);
5397 mdr_try_propagate_move (first_insn
);
5399 find_basic_blocks (first_insn
);
5400 life_analysis (0, PROP_FINAL
);
5402 ip2k_reorg_split_himode
= 1;
5403 ip2k_reorg_merge_qimode
= 1;
5404 split_all_insns (0);
5406 mdr_try_remove_redundant_insns (first_insn
);
5407 mdr_try_propagate_clr (first_insn
);
5408 mdr_try_propagate_move (first_insn
);
5410 mdr_try_dp_reload_elim (first_insn
);
5411 mdr_try_move_dp_reload (first_insn
);
5413 rebuild_jump_labels (first_insn
);
5415 /* Call to jump_optimize (...) was here, but now I removed it. */
5417 find_basic_blocks (first_insn
);
5418 life_analysis (0, PROP_FINAL
);
5420 peephole2_optimize (NULL
);
5422 mdr_try_propagate_move (first_insn
);
5424 find_basic_blocks (first_insn
);
5425 life_analysis (0, PROP_FINAL
);
5426 mdr_try_remove_redundant_insns (first_insn
);
5428 mdr_try_propagate_clr (first_insn
);
5429 mdr_try_propagate_move (first_insn
);
5431 find_basic_blocks (first_insn
);
5432 life_analysis (0, PROP_FINAL
);
5434 ip2k_reorg_split_qimode
= 1;
5435 split_all_insns (0);
5437 mdr_try_wreg_elim (first_insn
);
5438 mdr_try_propagate_move (first_insn
);
5440 find_basic_blocks (first_insn
);
5441 life_analysis (0, PROP_FINAL
);
5446 ip2k_init_libfuncs (void)
5448 set_optab_libfunc (smul_optab
, SImode
, "_mulsi3");
5449 set_optab_libfunc (smul_optab
, DImode
, "_muldi3");
5450 set_optab_libfunc (cmp_optab
, HImode
, "_cmphi2");
5451 set_optab_libfunc (cmp_optab
, SImode
, "_cmpsi2");
5454 /* Returns a bit position if mask contains only a single bit. Returns -1 if
5455 there were zero or more than one set bits. */
5457 find_one_set_bit_p (HOST_WIDE_INT mask
)
5460 unsigned HOST_WIDE_INT n
= mask
;
5461 for (i
= 0; i
< 32; i
++)
5463 if (n
& 0x80000000UL
)
5465 if (n
& 0x7fffffffUL
)
5475 /* Returns a bit position if mask contains only a single clear bit.
5476 Returns -1 if there were zero or more than one clear bits. */
5478 find_one_clear_bit_p (HOST_WIDE_INT mask
)
5481 unsigned HOST_WIDE_INT n
= mask
;
5482 for (i
= 0; i
< 32; i
++)
5484 if ((n
& 0x80000000UL
) == 0UL)
5486 if ((n
& 0x7fffffffUL
) != 0x7fffffffUL
)
5498 /* Split a move into two smaller pieces.
5499 MODE indicates the reduced mode. OPERANDS[0] is the original destination
5500 OPERANDS[1] is the original src. The new destinations are
5501 OPERANDS[2] and OPERANDS[4], while the new sources are OPERANDS[3]
5505 ip2k_split_words (enum machine_mode nmode
, enum machine_mode omode
,
5508 rtx dl
, dh
; /* src/dest pieces. */
5510 int move_high_first
= 0; /* Assume no overlap. */
5513 switch (GET_CODE (operands
[0])) /* DEST */
5517 if ((GET_CODE (operands
[1]) == REG
5518 || GET_CODE (operands
[1]) == SUBREG
)
5519 && (true_regnum (operands
[0]) <= true_regnum (operands
[1])
5520 || (true_regnum (operands
[1])
5521 + GET_MODE_SIZE (omode
) - 1 < true_regnum (operands
[0]))))
5522 move_high_first
= 1;
5524 if (GET_CODE (operands
[0]) == SUBREG
)
5526 dl
= simplify_gen_subreg (nmode
, operands
[0], omode
,
5527 GET_MODE_SIZE (nmode
));
5528 dh
= simplify_gen_subreg (nmode
, operands
[0], omode
, 0);
5530 else if (GET_CODE (operands
[0]) == REG
&& ! IS_PSEUDO_P (operands
[0]))
5532 int r
= REGNO (operands
[0]);
5533 dh
= gen_rtx_REG (nmode
, r
);
5534 dl
= gen_rtx_REG (nmode
, r
+ HARD_REGNO_NREGS (r
, nmode
));
5538 dh
= gen_rtx_SUBREG (nmode
, operands
[0], 0);
5539 dl
= gen_rtx_SUBREG (nmode
, operands
[0], GET_MODE_SIZE (nmode
));
5544 switch (GET_CODE (XEXP (operands
[0], 0)))
5549 dl
= dh
= gen_rtx_MEM (nmode
, XEXP (operands
[0], 0));
5553 dl
= change_address (operands
[0], nmode
,
5554 plus_constant (XEXP (operands
[0], 0),
5555 GET_MODE_SIZE (nmode
)));
5556 dh
= gen_rtx_MEM (nmode
, XEXP (operands
[0], 0));
5563 switch (GET_CODE (operands
[1]))
5566 if (! IS_PSEUDO_P (operands
[1]))
5568 int r
= REGNO (operands
[1]);
5570 sh
= gen_rtx_REG (nmode
, r
);
5571 sl
= gen_rtx_REG (nmode
, r
+ HARD_REGNO_NREGS (r
, nmode
));
5575 sh
= gen_rtx_SUBREG (nmode
, operands
[1], 0);
5576 sl
= gen_rtx_SUBREG (nmode
, operands
[1], GET_MODE_SIZE (nmode
));
5581 if (operands
[1] == const0_rtx
)
5582 sh
= sl
= const0_rtx
;
5585 if (GET_MODE (operands
[0]) != DImode
)
5590 REAL_VALUE_FROM_CONST_DOUBLE (rv
, operands
[1]);
5591 REAL_VALUE_TO_TARGET_SINGLE (rv
, value
);
5593 sh
= gen_int_mode ((value
>> 16) & 0xffff, nmode
);
5594 sl
= gen_int_mode (value
& 0xffff, nmode
);
5598 sh
= gen_int_mode (CONST_DOUBLE_HIGH (operands
[1]), nmode
);
5599 sl
= gen_int_mode (CONST_DOUBLE_LOW (operands
[1]), nmode
);
5605 if (operands
[1] == const0_rtx
)
5606 sh
= sl
= const0_rtx
;
5609 int val
= INTVAL (operands
[1]);
5615 vh
= (val
>> 8) & 0xff;
5620 vh
= (val
>> 16) & 0xffff;
5625 if (val
< 0) /* sign extend */
5629 vl
= val
; /* Give low 32 bits back. */
5636 sl
= gen_int_mode (vl
, nmode
);
5637 sh
= gen_int_mode (vh
, nmode
);
5642 sl
= simplify_gen_subreg (nmode
, operands
[1], omode
,
5643 GET_MODE_SIZE (nmode
));
5644 sh
= simplify_gen_subreg (nmode
, operands
[1], omode
, 0);
5648 switch (GET_CODE (XEXP (operands
[1], 0)))
5656 /* Worry about splitting stack pushes. */
5657 if (pushflag
&& ip2k_address_uses_reg_p (operands
[1], REG_SP
))
5658 sl
= sh
= change_address (operands
[1], nmode
,
5659 plus_constant (XEXP (operands
[1], 0),
5660 GET_MODE_SIZE (nmode
)));
5663 sl
= change_address (operands
[1], nmode
,
5664 plus_constant (XEXP (operands
[1], 0),
5665 GET_MODE_SIZE (nmode
)));
5666 sh
= gen_rtx_MEM (nmode
, XEXP (operands
[1], 0));
5675 if (move_high_first
)
5692 /* Get the low half of an operand. */
5694 ip2k_get_low_half (rtx x
, enum machine_mode mode
)
5696 switch (GET_CODE (x
))
5699 if (! IS_PSEUDO_P (x
))
5701 unsigned int r
= REGNO (x
);
5703 return gen_rtx_REG (mode
, r
+ HARD_REGNO_NREGS (r
, mode
));
5707 return gen_rtx_SUBREG (mode
, x
, GET_MODE_SIZE (mode
));
5712 if (x
== const0_rtx
)
5721 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
5722 REAL_VALUE_TO_TARGET_SINGLE (rv
, value
);
5724 return gen_int_mode (value
& 0xffff, mode
);
5727 return gen_int_mode (CONST_DOUBLE_LOW (x
), mode
);
5732 if (x
== const0_rtx
)
5736 int val
= INTVAL (x
);
5742 vh
= (val
>> 8) & 0xff;
5747 vh
= (val
>> 16) & 0xffff;
5752 if (val
< 0) /* sign extend */
5756 vl
= val
; /* Give low 32 bits back. */
5763 return gen_int_mode (vl
, mode
);
5768 return simplify_gen_subreg (mode
, x
, GET_MODE (x
), GET_MODE_SIZE (mode
));
5771 switch (GET_CODE (XEXP (x
, 0)))
5779 return change_address (x
, mode
,
5780 plus_constant (XEXP (x
, 0),
5781 GET_MODE_SIZE (mode
)));
5791 /* Get the high half of an operand. */
5793 ip2k_get_high_half (rtx x
, enum machine_mode mode
)
5795 switch (GET_CODE (x
))
5798 if (! IS_PSEUDO_P (x
))
5800 unsigned int r
= REGNO (x
);
5802 return gen_rtx_REG (mode
, r
);
5806 return gen_rtx_SUBREG (mode
, x
, 0);
5811 if (x
== const0_rtx
)
5820 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
5821 REAL_VALUE_TO_TARGET_SINGLE (rv
, value
);
5823 return gen_int_mode ((value
>> 16) & 0xffff, mode
);
5826 return gen_int_mode (CONST_DOUBLE_HIGH (x
), mode
);
5831 if (x
== const0_rtx
)
5835 int val
= INTVAL (x
);
5841 vh
= (val
>> 8) & 0xff;
5846 vh
= (val
>> 16) & 0xffff;
5851 if (val
< 0) /* sign extend */
5855 vl
= val
; /* Give low 32 bits back. */
5862 return gen_int_mode (vh
, mode
);
5867 return simplify_gen_subreg (mode
, x
, GET_MODE (x
), 0);
5871 switch (GET_CODE (XEXP (x
, 0)))
5879 return change_address (x
, mode
, plus_constant (XEXP (x
, 0), 0));
5889 /* Does address X use register R. Only valid for REG_SP, REG_DP, REG_IP
5893 ip2k_address_uses_reg_p (rtx x
, unsigned int r
)
5895 if (GET_CODE (x
) != MEM
)
5901 switch (GET_CODE (x
))
5911 if (ip2k_address_uses_reg_p (XEXP (x
, 1), r
))
5918 /* Ignore subwords. */
5923 /* Have to consider that r might be LSB of a pointer reg. */
5924 return ((REGNO (x
) == r
) || (REGNO (x
) == (r
- 1))) ? 1 : 0;
5927 /* We might be looking at a (mem:BLK (mem (...))) */
5936 /* Does the queried XEXP not use a particular register? If we're certain
5937 that it doesn't then we return TRUE otherwise we assume FALSE. */
5940 ip2k_xexp_not_uses_reg_p (rtx x
, unsigned int r
, int rsz
)
5942 switch (GET_CODE (x
))
5946 int msz
= GET_MODE_SIZE (GET_MODE (x
));
5948 return (((REGNO (x
) + msz
- 1) < r
)
5949 || (REGNO (x
) > (r
+ rsz
- 1)));
5953 return !ip2k_address_uses_reg_p (x
, r
);
5969 /* Does the queried XEXP not use a particular register? If we're certain
5970 that it doesn't then we return TRUE otherwise we assume FALSE. */
5973 ip2k_composite_xexp_not_uses_reg_p (rtx x
, unsigned int r
, int rsz
)
5975 if (GET_RTX_CLASS (GET_CODE (x
)) == RTX_BITFIELD_OPS
)
5976 return (ip2k_composite_xexp_not_uses_reg_p (XEXP (x
, 0), r
, rsz
)
5977 && ip2k_composite_xexp_not_uses_reg_p (XEXP (x
, 1), r
, rsz
)
5978 && ip2k_composite_xexp_not_uses_reg_p (XEXP (x
, 2), r
, rsz
));
5981 return (ip2k_composite_xexp_not_uses_reg_p (XEXP (x
, 0), r
, rsz
)
5982 && ip2k_composite_xexp_not_uses_reg_p (XEXP (x
, 1), r
, rsz
));
5985 || GET_RTX_CLASS (GET_CODE (x
)) == RTX_TERNARY
)
5986 return ip2k_composite_xexp_not_uses_reg_p (XEXP (x
, 0), r
, rsz
);
5988 return ip2k_xexp_not_uses_reg_p (x
, r
, rsz
);
5991 /* Does the queried XEXP not use CC0? If we're certain that
5992 it doesn't then we return TRUE otherwise we assume FALSE. */
5995 ip2k_composite_xexp_not_uses_cc0_p (rtx x
)
5997 if (GET_RTX_CLASS (GET_CODE (x
)) == RTX_BITFIELD_OPS
)
5998 return (ip2k_composite_xexp_not_uses_cc0_p (XEXP (x
, 0))
5999 && ip2k_composite_xexp_not_uses_cc0_p (XEXP (x
, 1))
6000 && ip2k_composite_xexp_not_uses_cc0_p (XEXP (x
, 2)));
6003 return (ip2k_composite_xexp_not_uses_cc0_p (XEXP (x
, 0))
6004 && ip2k_composite_xexp_not_uses_cc0_p (XEXP (x
, 1)));
6007 || GET_RTX_CLASS (GET_CODE (x
)) == RTX_TERNARY
)
6008 return ip2k_composite_xexp_not_uses_cc0_p (XEXP (x
, 0));
6010 return GET_CODE (x
) != CC0
;
6014 ip2k_split_dest_operand (rtx x
, enum machine_mode mode
)
6016 return nonimmediate_operand (x
, mode
) || push_operand (x
, mode
);
6020 ip2k_nonptr_operand (rtx x
, enum machine_mode mode
)
6022 return register_operand (x
, mode
) && !ip2k_ptr_operand (x
, mode
);
6025 /* Is X a reference to IP or DP or SP? */
6028 ip2k_ptr_operand (rtx x
, enum machine_mode mode
)
6031 if (GET_CODE (x
) == SUBREG
)
6035 && (mode
== HImode
|| mode
== VOIDmode
)
6036 && (REGNO (x
) == REG_IP
6037 || REGNO (x
) == REG_DP
6038 || REGNO (x
) == REG_SP
));
6042 ip2k_sp_operand (rtx x
, enum machine_mode mode ATTRIBUTE_UNUSED
)
6045 return REG_P (x
) && REGNO (x
) == REG_SP
;
6049 ip2k_ip_operand (rtx x
, enum machine_mode mode
)
6052 if (GET_CODE (x
) != MEM
)
6057 if (GET_CODE (x
) == PLUS
&& XEXP (x
, 1) == const0_rtx
)
6063 if (GET_MODE_SIZE (mode
) > 1)
6064 return 0; /* Can't access offset bytes. */
6066 return REGNO (x
) == REG_IP
;
6069 /* Is X a memory address suitable for SP or DP relative addressing? */
6071 ip2k_short_operand (rtx x
, enum machine_mode mode
)
6074 unsigned int offs
= 0;
6076 if (! memory_operand (x
, mode
))
6077 return 0; /* Got to be a memory address. */
6080 switch (GET_CODE (x
))
6086 if (! REG_P (XEXP (x
, 0))
6087 || GET_CODE (XEXP (x
, 1)) != CONST_INT
)
6090 offs
= INTVAL (XEXP (x
, 1));
6100 if (IS_PSEUDO_P (x
))
6101 return 0; /* Optimistic - doesn't work. */
6105 /* For 'S' constraint, we presume that no IP adjustment
6106 simulation is performed - so only QI mode allows IP to be a
6107 short offset address. All other IP references must be
6108 handled by 'R' constraints. */
6109 if (r
== REG_IP
&& offs
== 0 && GET_MODE_SIZE (mode
) <= 1)
6112 return (r
== REG_SP
|| r
== REG_DP
);
6117 ip2k_nonsp_reg_operand (rtx x
, enum machine_mode mode ATTRIBUTE_UNUSED
)
6119 if (GET_CODE (x
) == SUBREG
)
6122 return (REG_P (x
) && REGNO (x
) != REG_SP
);
6126 ip2k_gen_operand (rtx x
, enum machine_mode mode
)
6128 return ip2k_short_operand (x
, mode
)
6129 || (GET_CODE (x
) == SUBREG
6130 && REG_P (SUBREG_REG (x
)))
6131 || (ip2k_nonsp_reg_operand (x
, mode
));
6135 ip2k_extra_constraint (rtx x
, int c
)
6139 case 'S': /* Allow offset in stack frame... */
6140 return ip2k_short_operand (x
, GET_MODE (x
));
6143 return ip2k_ip_operand (x
, GET_MODE (x
));
6145 case 'T': /* Constant int or .data address. */
6146 return CONSTANT_P (x
) && is_regfile_address (x
);
6154 ip2k_unary_operator (rtx op
, enum machine_mode mode
)
6156 return ((mode
== VOIDmode
|| GET_MODE (op
) == mode
)
6161 ip2k_binary_operator (rtx op
, enum machine_mode mode
)
6163 return ((mode
== VOIDmode
|| GET_MODE (op
) == mode
)
6164 && ARITHMETIC_P (op
));
6168 ip2k_symbol_ref_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
6170 /* We define an IP2k symbol ref to be either a direct reference or one
6171 with a constant offset. */
6172 return (GET_CODE (op
) == SYMBOL_REF
)
6173 || (GET_CODE (op
) == CONST
6174 && GET_CODE (XEXP (op
, 0)) == PLUS
6175 && GET_CODE (XEXP (XEXP (op
, 0), 0)) == SYMBOL_REF
);
6179 ip2k_signed_comparison_operator (rtx op
, enum machine_mode mode
)
6181 return (comparison_operator (op
, mode
)
6182 && signed_condition (GET_CODE (op
)) == GET_CODE (op
));
6186 ip2k_unsigned_comparison_operator (rtx op
, enum machine_mode mode
)
6188 return (comparison_operator (op
, mode
)
6189 && unsigned_condition (GET_CODE (op
)) == GET_CODE (op
));
6192 /* Worker function for TARGET_RETURN_IN_MEMORY. */
6195 ip2k_return_in_memory (tree type
, tree fntype ATTRIBUTE_UNUSED
)
6197 if (TYPE_MODE (type
) == BLKmode
)
6199 HOST_WIDE_INT size
= int_size_in_bytes (type
);
6200 return (size
== -1 || size
> 8);
6206 /* Worker function for TARGET_SETUP_INCOMING_VARARGS. */
6209 ip2k_setup_incoming_varargs (CUMULATIVE_ARGS
*ca ATTRIBUTE_UNUSED
,
6210 enum machine_mode mode ATTRIBUTE_UNUSED
,
6211 tree type ATTRIBUTE_UNUSED
,
6212 int *pretend_arg_size
,
6213 int second_time ATTRIBUTE_UNUSED
)
6215 *pretend_arg_size
= 0;