From 58d6528bb02daa2bcd69edb055cea217a2f5ded1 Mon Sep 17 00:00:00 2001
From: rearnsha
Date: Fri, 17 Oct 2003 10:58:17 +0000
Subject: [PATCH] * arm-modes.def (CC_Nmode): New condition code mode.

* arm.c (thumb_condition_code): Delete.
(arm_select_cc_mode): Handle single-bit test for Thumb.
(arm_print_operand, cases 'd' and 'D'): Don't special case the
condition code logic for Thumb.
(get_arm_condition_code): Handle CC_Nmode.
(thumb_cbrch_target_operand): New function.
* arm.h (PREDICATE_CODES): Add thumb_cbrch_target_operand.
* arm-protos.h (thumb_cbrch_target_operand): Add prototype.
* arm.md: Add Thumb split patterns for zero_extract and
sign_extract.
(tbit_cbranch, andsi3_cbranch_scratch, andsi3_cbranch)
(orrsi3_cbranch_scratch, orrsi3_cbranch, xorsi3_cbranch_scratch)
(xorsi3_cbranch, addsi3_cbranch, addsi3_cbranch_scratch)
(subsi3_cbranch, subsi3_cbranch_scratch): New Thumb patterns.
(cbranchne_decr1): Re-work to use CC_Nmode.

* arm.c (thumb_expand_epilogue): Add clobbers of registers restored
by the return instruction.  Add a use of the link register if it
wasn't stored.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@72595 138bc75d-0d04-0410-961f-82ee72b054a4
---
 gcc/ChangeLog                |  23 ++
 gcc/config/arm/arm-modes.def |   2 +
 gcc/config/arm/arm-protos.h  |   1 +
 gcc/config/arm/arm.c         |  92 +++---
 gcc/config/arm/arm.h         |   1 +
 gcc/config/arm/arm.md        | 660 ++++++++++++++++++++++++++++++++++++++++++-
 6 files changed, 736 insertions(+), 43 deletions(-)

diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 9882de6a5a8..a096fda0333 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,5 +1,28 @@
 2003-10-17  Richard Earnshaw
 
+	* arm-modes.def (CC_Nmode): New condition code mode.
+	* arm.c (thumb_condition_code): Delete.
+	(arm_select_cc_mode): Handle single-bit test for Thumb.
+	(arm_print_operand, cases 'd' and 'D'): Don't special case the
+	condition code logic for Thumb.
+	(get_arm_condition_code): Handle CC_Nmode.
+	(thumb_cbrch_target_operand): New function.
+	* arm.h (PREDICATE_CODES): Add thumb_cbrch_target_operand.
+	* arm-protos.h (thumb_cbrch_target_operand): Add prototype.
+	* arm.md: Add Thumb split patterns for zero_extract and
+	sign_extract.
+	(tbit_cbranch, andsi3_cbranch_scratch, andsi3_cbranch)
+	(orrsi3_cbranch_scratch, orrsi3_cbranch, xorsi3_cbranch_scratch)
+	(xorsi3_cbranch, addsi3_cbranch, addsi3_cbranch_scratch)
+	(subsi3_cbranch, subsi3_cbranch_scratch): New Thumb patterns.
+	(cbranchne_decr1): Re-work to use CC_Nmode.
+
+	* arm.c (thumb_expand_epilogue): Add clobbers of registers restored
+	by the return instruction.  Add a use of the link register if it
+	wasn't stored.
+
+2003-10-17  Richard Earnshaw
+
 	* flow.c (init_propagate_block_info): Don't abort if a conditional
 	jump is not a comparison of a register.  Instead, just don't record
 	conditional life information.
diff --git a/gcc/config/arm/arm-modes.def b/gcc/config/arm/arm-modes.def
index 154d3220689..1d58b18bb23 100644
--- a/gcc/config/arm/arm-modes.def
+++ b/gcc/config/arm/arm-modes.def
@@ -30,6 +30,7 @@ FLOAT_MODE (XF, 12, 0);
    CCFPmode should be used with floating equalities.
    CC_NOOVmode should be used with SImode integer equalities.
    CC_Zmode should be used if only the Z flag is set correctly
+   CC_Nmode should be used if only the N (sign) flag is set correctly
    CCmode should be used otherwise.  */
 
 CC_MODE (CC_NOOV);
@@ -48,3 +49,4 @@ CC_MODE (CC_DLTU);
 CC_MODE (CC_DGEU);
 CC_MODE (CC_DGTU);
 CC_MODE (CC_C);
+CC_MODE (CC_N);
diff --git a/gcc/config/arm/arm-protos.h b/gcc/config/arm/arm-protos.h
index c16deb63a24..61c28be8629 100644
--- a/gcc/config/arm/arm-protos.h
+++ b/gcc/config/arm/arm-protos.h
@@ -178,6 +178,7 @@ extern const char *thumb_load_double_from_address (rtx *);
 extern const char *thumb_output_move_mem_multiple (int, rtx *);
 extern void thumb_expand_movstrqi (rtx *);
 extern int thumb_cmp_operand (rtx, enum machine_mode);
+extern int thumb_cbrch_target_operand (rtx, enum machine_mode);
 extern rtx *thumb_legitimize_pic_address (rtx, enum machine_mode, rtx);
 extern int thumb_go_if_legitimate_address (enum machine_mode, rtx);
 extern rtx arm_return_addr (int, rtx);
diff --git a/gcc/config/arm/arm.c b/gcc/config/arm/arm.c
index d338e00895a..dc8c5366536 100644
--- a/gcc/config/arm/arm.c
+++ b/gcc/config/arm/arm.c
@@ -86,7 +86,6 @@ static int number_of_first_bit_set (int);
 static void replace_symbols_in_block (tree, rtx, rtx);
 static void thumb_exit (FILE *, int, rtx);
 static void thumb_pushpop (FILE *, int, int);
-static const char *thumb_condition_code (rtx, int);
 static rtx is_jump_table (rtx);
 static HOST_WIDE_INT get_jump_table_size (rtx);
 static Mnode *move_minipool_fix_forward_ref (Mnode *, Mnode *, HOST_WIDE_INT);
@@ -5575,10 +5574,19 @@ arm_select_cc_mode (enum rtx_code op, rtx x, rtx y)
       return arm_select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
                                            DOM_CC_X_OR_Y);
+  /* An operation (on Thumb) where we want to test for a single bit.
+     This is done by shifting that bit up into the top bit of a
+     scratch register; we can then branch on the sign bit.  */
+  if (TARGET_THUMB
+      && GET_MODE (x) == SImode
+      && (op == EQ || op == NE)
+      && (GET_CODE (x) == ZERO_EXTRACT))
+    return CC_Nmode;
+
   /* An operation that sets the condition codes as a side-effect, the
      V flag is not set correctly, so we can only use comparisons where
     this doesn't matter.  (For LT and GE we can use "mi" and "pl"
-     instead.  */
+     instead.)  */
   if (GET_MODE (x) == SImode
       && y == const0_rtx
       && (op == EQ || op == NE || op == LT || op == GE)
       && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS || GET_CODE (x) == AND
@@ -5588,7 +5596,8 @@ arm_select_cc_mode (enum rtx_code op, rtx x, rtx y)
           || GET_CODE (x) == IOR || GET_CODE (x) == XOR || GET_CODE (x) == MULT
           || GET_CODE (x) == NOT || GET_CODE (x) == NEG
           || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
-          || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
+          || GET_CODE (x) == ROTATERT
+          || (TARGET_ARM && GET_CODE (x) == ZERO_EXTRACT)))
     return CC_NOOVmode;
 
   if (GET_MODE (x) == QImode && (op == EQ || op == NE))
@@ -9557,11 +9566,8 @@ arm_print_operand (FILE *stream, rtx x, int code)
       if (x == const_true_rtx)
         return;
 
-      if (TARGET_ARM)
-        fputs (arm_condition_codes[get_arm_condition_code (x)],
-               stream);
-      else
-        fputs (thumb_condition_code (x, 0), stream);
+      fputs (arm_condition_codes[get_arm_condition_code (x)],
+             stream);
       return;
 
     case 'D':
@@ -9570,12 +9576,9 @@ arm_print_operand (FILE *stream, rtx x, int code)
       if (x == const_true_rtx)
         abort ();
 
-      if (TARGET_ARM)
-        fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
-                                   (get_arm_condition_code (x))],
-               stream);
-      else
-        fputs (thumb_condition_code (x, 1), stream);
+      fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
+                                 (get_arm_condition_code (x))],
+             stream);
       return;
 
     /* Cirrus registers can be accessed in a variety of ways:
@@ -9815,6 +9818,14 @@ get_arm_condition_code (rtx comparison)
         default: abort ();
         }
 
+    case CC_Nmode:
+      switch (comp_code)
+        {
+        case NE: return ARM_MI;
+        case EQ: return ARM_PL;
+        default: abort ();
+        }
+
     case CCFPEmode:
     case CCFPmode:
       /* These encodings assume that AC=1 in the FPA system control
@@ -12083,7 +12094,8 @@ thumb_expand_epilogue (void)
 {
   HOST_WIDE_INT amount = (thumb_get_frame_size ()
                           + current_function_outgoing_args_size);
-  
+  int regno;
+  
   /* Naked functions don't have prologues.  */
   if (IS_NAKED (arm_current_func_type ()))
     return;
@@ -12113,6 +12125,15 @@ thumb_expand_epilogue (void)
 
   if (current_function_profile || TARGET_NO_SCHED_PRO)
     emit_insn (gen_blockage ());
+
+  /* Emit a clobber for each insn that will be restored in the epilogue,
+     so that flow2 will get register lifetimes correct.  */
+  for (regno = 0; regno < 13; regno++)
+    if (regs_ever_live[regno] && !call_used_regs[regno])
+      emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, regno)));
+
+  if (! regs_ever_live[LR_REGNUM])
+    emit_insn (gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, LR_REGNUM)));
 }
 
 static void
@@ -12571,36 +12592,25 @@ thumb_cmp_operand (rtx op, enum machine_mode mode)
 {
   return ((GET_CODE (op) == CONST_INT
            && (unsigned HOST_WIDE_INT) (INTVAL (op)) < 256)
-          || register_operand (op, mode));
+          || s_register_operand (op, mode));
 }
 
-static const char *
-thumb_condition_code (rtx x, int invert)
-{
-  static const char * const conds[] =
-  {
-    "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
-    "hi", "ls", "ge", "lt", "gt", "le"
-  };
-  int val;
+/* Return TRUE if a result can be stored in OP without clobbering the
+   condition code register.  Prior to reload we only accept a
+   register.  After reload we have to be able to handle memory as
+   well, since a pseudo may not get a hard reg and reload cannot
+   handle output-reloads on jump insns.
 
-  switch (GET_CODE (x))
-    {
-    case EQ: val = 0; break;
-    case NE: val = 1; break;
-    case GEU: val = 2; break;
-    case LTU: val = 3; break;
-    case GTU: val = 8; break;
-    case LEU: val = 9; break;
-    case GE: val = 10; break;
-    case LT: val = 11; break;
-    case GT: val = 12; break;
-    case LE: val = 13; break;
-    default:
-      abort ();
-    }
+   We could possibly handle mem before reload as well, but that might
+   complicate things with the need to handle increment
+   side-effects.  */
 
-  return conds[val ^ invert];
+int
+thumb_cbrch_target_operand (rtx op, enum machine_mode mode)
+{
+  return (s_register_operand (op, mode)
+          || ((reload_in_progress || reload_completed)
+              && memory_operand (op, mode)));
 }
 
 /* Handle storing a half-word to memory during reload.  */
diff --git a/gcc/config/arm/arm.h b/gcc/config/arm/arm.h
index 77debec618d..bfdfb3b8e20 100644
--- a/gcc/config/arm/arm.h
+++ b/gcc/config/arm/arm.h
@@ -2689,6 +2689,7 @@ extern int making_const_table;
   {"reg_or_int_operand", {SUBREG, REG, CONST_INT}},			\
   {"index_operand", {SUBREG, REG, CONST_INT}},				\
   {"thumb_cmp_operand", {SUBREG, REG, CONST_INT}},			\
+  {"thumb_cbrch_target_operand", {SUBREG, REG, MEM}},			\
   {"offsettable_memory_operand", {MEM}},				\
   {"bad_signed_byte_operand", {MEM}},					\
   {"alignable_memory_operand", {MEM}},					\
diff --git a/gcc/config/arm/arm.md b/gcc/config/arm/arm.md
index 502b8d20578..c4ef68d937e 100644
--- a/gcc/config/arm/arm.md
+++ b/gcc/config/arm/arm.md
@@ -1721,6 +1721,39 @@
    (set_attr "length" "8")]
 )
 
+(define_split
+  [(set (match_operand:SI 0 "s_register_operand" "")
+	(zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
+			 (match_operand:SI 2 "const_int_operand" "")
+			 (match_operand:SI 3 "const_int_operand" "")))
+   (clobber (match_operand:SI 4 "s_register_operand" ""))]
+  "TARGET_THUMB"
+  [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
+   (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
+  "{
+     HOST_WIDE_INT temp = INTVAL (operands[2]);
+
+     operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
+     operands[3] = GEN_INT (32 - temp);
+  }"
+)
+
+(define_split
+  [(set (match_operand:SI 0 "s_register_operand" "")
+	(sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
+			 (match_operand:SI 2 "const_int_operand" "")
+			 (match_operand:SI 3 "const_int_operand" "")))]
+  "TARGET_THUMB"
+  [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
+   (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
+  "{
+     HOST_WIDE_INT temp = INTVAL (operands[2]);
+
+     operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
+     operands[3] = GEN_INT (32 - temp);
+  }"
+)
+
 ;;; ??? This pattern is bogus.  If operand3 has bits outside the range
 ;;; represented by the bitfield, then this will produce incorrect results.
 ;;; Somewhere, the value needs to be truncated.
On targets like the m68k, @@ -5246,6 +5279,365 @@ (const_int 8))))] ) +(define_insn "*tbit_cbranch" + [(set (pc) + (if_then_else + (match_operator 0 "equality_operator" + [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l") + (const_int 1) + (match_operand:SI 2 "const_int_operand" "i")) + (const_int 0)]) + (label_ref (match_operand 3 "" "")) + (pc))) + (clobber (match_scratch:SI 4 "=l"))] + "TARGET_THUMB" + "* + { + rtx op[3]; + op[0] = operands[4]; + op[1] = operands[1]; + op[2] = GEN_INT (32 - 1 - INTVAL (operands[2])); + + output_asm_insn (\"lsl\\t%0, %1, %2\", op); + switch (get_attr_length (insn)) + { + case 4: return \"b%d0\\t%l3\"; + case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\"; + default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\"; + } + }" + [(set (attr "far_jump") + (if_then_else + (eq_attr "length" "8") + (const_string "yes") + (const_string "no"))) + (set (attr "length") + (if_then_else + (and (ge (minus (match_dup 3) (pc)) (const_int -250)) + (le (minus (match_dup 3) (pc)) (const_int 256))) + (const_int 4) + (if_then_else + (and (ge (minus (match_dup 3) (pc)) (const_int -2040)) + (le (minus (match_dup 3) (pc)) (const_int 2048))) + (const_int 6) + (const_int 8))))] +) + +(define_insn "*andsi3_cbranch_scratch" + [(set (pc) + (if_then_else + (match_operator 4 "equality_operator" + [(and:SI (match_operand:SI 1 "s_register_operand" "%0") + (match_operand:SI 2 "s_register_operand" "l")) + (const_int 0)]) + (label_ref (match_operand 3 "" "")) + (pc))) + (clobber (match_scratch:SI 0 "=l"))] + "TARGET_THUMB" + "* + { + output_asm_insn (\"and\\t%0, %2\", operands); + switch (get_attr_length (insn)) + { + case 4: return \"b%d4\\t%l3\"; + case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\"; + default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\"; + } + }" + [(set (attr "far_jump") + (if_then_else + (eq_attr "length" "8") + (const_string "yes") + (const_string "no"))) + (set (attr "length") + (if_then_else + (and (ge (minus (match_dup 3) (pc)) (const_int -250)) + (le (minus (match_dup 3) (pc)) (const_int 256))) + (const_int 4) + (if_then_else + (and (ge (minus (match_dup 3) (pc)) (const_int -2040)) + (le (minus (match_dup 3) (pc)) (const_int 2048))) + (const_int 6) + (const_int 8))))] +) + +(define_insn "*andsi3_cbranch" + [(set (pc) + (if_then_else + (match_operator 5 "equality_operator" + [(and:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1") + (match_operand:SI 3 "s_register_operand" "l,l,l,l")) + (const_int 0)]) + (label_ref (match_operand 4 "" "")) + (pc))) + (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,?h,?m,?m") + (and:SI (match_dup 2) (match_dup 3))) + (clobber (match_scratch:SI 1 "=X,l,&l,&l"))] + "TARGET_THUMB" + "* + { + if (which_alternative == 0) + output_asm_insn (\"and\\t%0, %3\", operands); + else if (which_alternative == 1) + { + output_asm_insn (\"and\\t%1, %3\", operands); + output_asm_insn (\"mov\\t%0, %1\", operands); + } + else + { + output_asm_insn (\"and\\t%1, %3\", operands); + output_asm_insn (\"str\\t%1, %0\", operands); + } + + switch (get_attr_length (insn) - (which_alternative ? 
2 : 0)) + { + case 4: return \"b%d5\\t%l4\"; + case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\"; + default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\"; + } + }" + [(set (attr "far_jump") + (if_then_else + (ior (and (eq (symbol_ref ("which_alternative")) + (const_int 0)) + (eq_attr "length" "8")) + (eq_attr "length" "10")) + (const_string "yes") + (const_string "no"))) + (set (attr "length") + (if_then_else + (eq (symbol_ref ("which_alternative")) + (const_int 0)) + (if_then_else + (and (ge (minus (match_dup 4) (pc)) (const_int -250)) + (le (minus (match_dup 4) (pc)) (const_int 256))) + (const_int 4) + (if_then_else + (and (ge (minus (match_dup 4) (pc)) (const_int -2040)) + (le (minus (match_dup 4) (pc)) (const_int 2048))) + (const_int 6) + (const_int 8))) + (if_then_else + (and (ge (minus (match_dup 4) (pc)) (const_int -248)) + (le (minus (match_dup 4) (pc)) (const_int 256))) + (const_int 6) + (if_then_else + (and (ge (minus (match_dup 4) (pc)) (const_int -2038)) + (le (minus (match_dup 4) (pc)) (const_int 2048))) + (const_int 8) + (const_int 10)))))] +) + +(define_insn "*orrsi3_cbranch_scratch" + [(set (pc) + (if_then_else + (match_operator 4 "equality_operator" + [(ior:SI (match_operand:SI 1 "s_register_operand" "%0") + (match_operand:SI 2 "s_register_operand" "l")) + (const_int 0)]) + (label_ref (match_operand 3 "" "")) + (pc))) + (clobber (match_scratch:SI 0 "=l"))] + "TARGET_THUMB" + "* + { + output_asm_insn (\"orr\\t%0, %2\", operands); + switch (get_attr_length (insn)) + { + case 4: return \"b%d4\\t%l3\"; + case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\"; + default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\"; + } + }" + [(set (attr "far_jump") + (if_then_else + (eq_attr "length" "8") + (const_string "yes") + (const_string "no"))) + (set (attr "length") + (if_then_else + (and (ge (minus (match_dup 3) (pc)) (const_int -250)) + (le (minus (match_dup 3) (pc)) (const_int 256))) + (const_int 4) + (if_then_else + (and (ge (minus (match_dup 3) (pc)) (const_int -2040)) + (le (minus (match_dup 3) (pc)) (const_int 2048))) + (const_int 6) + (const_int 8))))] +) + +(define_insn "*orrsi3_cbranch" + [(set (pc) + (if_then_else + (match_operator 5 "equality_operator" + [(ior:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1") + (match_operand:SI 3 "s_register_operand" "l,l,l,l")) + (const_int 0)]) + (label_ref (match_operand 4 "" "")) + (pc))) + (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,?h,?m,?m") + (ior:SI (match_dup 2) (match_dup 3))) + (clobber (match_scratch:SI 1 "=X,l,&l,&l"))] + "TARGET_THUMB" + "* + { + if (which_alternative == 0) + output_asm_insn (\"orr\\t%0, %3\", operands); + else if (which_alternative == 1) + { + output_asm_insn (\"orr\\t%1, %3\", operands); + output_asm_insn (\"mov\\t%0, %1\", operands); + } + else + { + output_asm_insn (\"orr\\t%1, %3\", operands); + output_asm_insn (\"str\\t%1, %0\", operands); + } + + switch (get_attr_length (insn) - (which_alternative ? 
2 : 0)) + { + case 4: return \"b%d5\\t%l4\"; + case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\"; + default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\"; + } + }" + [(set (attr "far_jump") + (if_then_else + (ior (and (eq (symbol_ref ("which_alternative")) + (const_int 0)) + (eq_attr "length" "8")) + (eq_attr "length" "10")) + (const_string "yes") + (const_string "no"))) + (set (attr "length") + (if_then_else + (eq (symbol_ref ("which_alternative")) + (const_int 0)) + (if_then_else + (and (ge (minus (match_dup 4) (pc)) (const_int -250)) + (le (minus (match_dup 4) (pc)) (const_int 256))) + (const_int 4) + (if_then_else + (and (ge (minus (match_dup 4) (pc)) (const_int -2040)) + (le (minus (match_dup 4) (pc)) (const_int 2048))) + (const_int 6) + (const_int 8))) + (if_then_else + (and (ge (minus (match_dup 4) (pc)) (const_int -248)) + (le (minus (match_dup 4) (pc)) (const_int 256))) + (const_int 6) + (if_then_else + (and (ge (minus (match_dup 4) (pc)) (const_int -2038)) + (le (minus (match_dup 4) (pc)) (const_int 2048))) + (const_int 8) + (const_int 10)))))] +) + +(define_insn "*xorsi3_cbranch_scratch" + [(set (pc) + (if_then_else + (match_operator 4 "equality_operator" + [(xor:SI (match_operand:SI 1 "s_register_operand" "%0") + (match_operand:SI 2 "s_register_operand" "l")) + (const_int 0)]) + (label_ref (match_operand 3 "" "")) + (pc))) + (clobber (match_scratch:SI 0 "=l"))] + "TARGET_THUMB" + "* + { + output_asm_insn (\"eor\\t%0, %2\", operands); + switch (get_attr_length (insn)) + { + case 4: return \"b%d4\\t%l3\"; + case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\"; + default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\"; + } + }" + [(set (attr "far_jump") + (if_then_else + (eq_attr "length" "8") + (const_string "yes") + (const_string "no"))) + (set (attr "length") + (if_then_else + (and (ge (minus (match_dup 3) (pc)) (const_int -250)) + (le (minus (match_dup 3) (pc)) (const_int 256))) + (const_int 4) + (if_then_else + (and (ge (minus (match_dup 3) (pc)) (const_int -2040)) + (le (minus (match_dup 3) (pc)) (const_int 2048))) + (const_int 6) + (const_int 8))))] +) + +(define_insn "*xorsi3_cbranch" + [(set (pc) + (if_then_else + (match_operator 5 "equality_operator" + [(xor:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1") + (match_operand:SI 3 "s_register_operand" "l,l,l,l")) + (const_int 0)]) + (label_ref (match_operand 4 "" "")) + (pc))) + (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,?h,?m,?m") + (xor:SI (match_dup 2) (match_dup 3))) + (clobber (match_scratch:SI 1 "=X,l,&l,&l"))] + "TARGET_THUMB" + "* + { + if (which_alternative == 0) + output_asm_insn (\"eor\\t%0, %3\", operands); + else if (which_alternative == 1) + { + output_asm_insn (\"eor\\t%1, %3\", operands); + output_asm_insn (\"mov\\t%0, %1\", operands); + } + else + { + output_asm_insn (\"eor\\t%1, %3\", operands); + output_asm_insn (\"str\\t%1, %0\", operands); + } + + switch (get_attr_length (insn) - (which_alternative ? 
2 : 0)) + { + case 4: return \"b%d5\\t%l4\"; + case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\"; + default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\"; + } + }" + [(set (attr "far_jump") + (if_then_else + (ior (and (eq (symbol_ref ("which_alternative")) + (const_int 0)) + (eq_attr "length" "8")) + (eq_attr "length" "10")) + (const_string "yes") + (const_string "no"))) + (set (attr "length") + (if_then_else + (eq (symbol_ref ("which_alternative")) + (const_int 0)) + (if_then_else + (and (ge (minus (match_dup 4) (pc)) (const_int -250)) + (le (minus (match_dup 4) (pc)) (const_int 256))) + (const_int 4) + (if_then_else + (and (ge (minus (match_dup 4) (pc)) (const_int -2040)) + (le (minus (match_dup 4) (pc)) (const_int 2048))) + (const_int 6) + (const_int 8))) + (if_then_else + (and (ge (minus (match_dup 4) (pc)) (const_int -248)) + (le (minus (match_dup 4) (pc)) (const_int 256))) + (const_int 6) + (if_then_else + (and (ge (minus (match_dup 4) (pc)) (const_int -2038)) + (le (minus (match_dup 4) (pc)) (const_int 2048))) + (const_int 8) + (const_int 10)))))] +) + (define_insn "*cbranchne_decr1" [(set (pc) (if_then_else (match_operator 3 "equality_operator" @@ -5253,7 +5645,7 @@ (const_int 0)]) (label_ref (match_operand 4 "" "")) (pc))) - (set (match_operand:SI 0 "s_register_operand" "=l,?h,?m,?m") + (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,?h,?m,?m") (plus:SI (match_dup 2) (const_int -1))) (clobber (match_scratch:SI 1 "=X,l,&l,&l"))] "TARGET_THUMB" @@ -5262,7 +5654,7 @@ rtx cond[2]; cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE ? GEU : LTU), - VOIDmode, NULL, NULL); + VOIDmode, operands[2], const1_rtx); cond[1] = operands[4]; if (which_alternative == 0) @@ -5349,6 +5741,270 @@ (const_int 10)))])] ) +(define_insn "*addsi3_cbranch" + [(set (pc) + (if_then_else + (match_operator 4 "comparison_operator" + [(plus:SI + (match_operand:SI 2 "s_register_operand" "%l,0,*0,1,1,1") + (match_operand:SI 3 "reg_or_int_operand" "lL,IJ,*r,lIJ,lIJ,lIJ")) + (const_int 0)]) + (label_ref (match_operand 5 "" "")) + (pc))) + (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,?h,?m,?m") + (plus:SI (match_dup 2) (match_dup 3))) + (clobber (match_scratch:SI 1 "=X,X,X,l,&l,&l"))] + "TARGET_THUMB + && (GET_CODE (operands[4]) == EQ + || GET_CODE (operands[4]) == NE + || GET_CODE (operands[4]) == GE + || GET_CODE (operands[4]) == LT)" + "* + { + rtx cond[3]; + + + cond[0] = (which_alternative < 3) ? operands[0] : operands[1]; + cond[1] = operands[2]; + cond[2] = operands[3]; + + if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0) + output_asm_insn (\"sub\\t%0, %1, #%n2\", cond); + else + output_asm_insn (\"add\\t%0, %1, %2\", cond); + + if (which_alternative >= 3 + && which_alternative < 4) + output_asm_insn (\"mov\\t%0, %1\", operands); + else if (which_alternative >= 4) + output_asm_insn (\"str\\t%1, %0\", operands); + + switch (get_attr_length (insn) - ((which_alternative >= 3) ? 
2 : 0)) + { + case 4: + return \"b%d4\\t%l5\"; + case 6: + return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\"; + default: + return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\"; + } + } + " + [(set (attr "far_jump") + (if_then_else + (ior (and (lt (symbol_ref ("which_alternative")) + (const_int 3)) + (eq_attr "length" "8")) + (eq_attr "length" "10")) + (const_string "yes") + (const_string "no"))) + (set (attr "length") + (if_then_else + (lt (symbol_ref ("which_alternative")) + (const_int 3)) + (if_then_else + (and (ge (minus (match_dup 5) (pc)) (const_int -250)) + (le (minus (match_dup 5) (pc)) (const_int 256))) + (const_int 4) + (if_then_else + (and (ge (minus (match_dup 5) (pc)) (const_int -2040)) + (le (minus (match_dup 5) (pc)) (const_int 2048))) + (const_int 6) + (const_int 8))) + (if_then_else + (and (ge (minus (match_dup 5) (pc)) (const_int -248)) + (le (minus (match_dup 5) (pc)) (const_int 256))) + (const_int 6) + (if_then_else + (and (ge (minus (match_dup 5) (pc)) (const_int -2038)) + (le (minus (match_dup 5) (pc)) (const_int 2048))) + (const_int 8) + (const_int 10)))))] +) + +(define_insn "*addsi3_cbranch_scratch" + [(set (pc) + (if_then_else + (match_operator 3 "comparison_operator" + [(plus:SI + (match_operand:SI 1 "s_register_operand" "%l,l,l,0") + (match_operand:SI 2 "reg_or_int_operand" "J,l,I,L")) + (const_int 0)]) + (label_ref (match_operand 4 "" "")) + (pc))) + (clobber (match_scratch:SI 0 "=X,X,l,l"))] + "TARGET_THUMB + && (GET_CODE (operands[3]) == EQ + || GET_CODE (operands[3]) == NE + || GET_CODE (operands[3]) == GE + || GET_CODE (operands[3]) == LT)" + "* + { + switch (which_alternative) + { + case 0: + output_asm_insn (\"cmp\t%1, #%n2\", operands); + break; + case 1: + output_asm_insn (\"cmn\t%1, %2\", operands); + break; + case 3: + output_asm_insn (\"add\t%0, %1, %2\", operands); + break; + case 4: + output_asm_insn (\"add\t%0, %0, %2\", operands); + break; + } + + switch (get_attr_length (insn)) + { + case 4: + return \"b%d3\\t%l4\"; + case 6: + return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\"; + default: + return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\"; + } + } + " + [(set (attr "far_jump") + (if_then_else + (eq_attr "length" "8") + (const_string "yes") + (const_string "no"))) + (set (attr "length") + (if_then_else + (and (ge (minus (match_dup 4) (pc)) (const_int -250)) + (le (minus (match_dup 4) (pc)) (const_int 256))) + (const_int 4) + (if_then_else + (and (ge (minus (match_dup 4) (pc)) (const_int -2040)) + (le (minus (match_dup 4) (pc)) (const_int 2048))) + (const_int 6) + (const_int 8))))] +) + +(define_insn "*subsi3_cbranch" + [(set (pc) + (if_then_else + (match_operator 4 "comparison_operator" + [(minus:SI + (match_operand:SI 2 "s_register_operand" "l,l,1,l") + (match_operand:SI 3 "s_register_operand" "l,l,l,l")) + (const_int 0)]) + (label_ref (match_operand 5 "" "")) + (pc))) + (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,?h,?m,?m") + (minus:SI (match_dup 2) (match_dup 3))) + (clobber (match_scratch:SI 1 "=X,l,&l,&l"))] + "TARGET_THUMB + && (GET_CODE (operands[4]) == EQ + || GET_CODE (operands[4]) == NE + || GET_CODE (operands[4]) == GE + || GET_CODE (operands[4]) == LT)" + "* + { + if (which_alternative == 0) + output_asm_insn (\"sub\\t%0, %2, %3\", operands); + else if (which_alternative == 1) + { + /* We must provide an alternative for a hi reg because reload + cannot handle output reloads on a jump instruction, but we + can't subtract into that. 
Fortunately a mov from lo to hi + does not clobber the condition codes. */ + output_asm_insn (\"sub\\t%1, %2, %3\", operands); + output_asm_insn (\"mov\\t%0, %1\", operands); + } + else + { + /* Similarly, but the target is memory. */ + output_asm_insn (\"sub\\t%1, %2, %3\", operands); + output_asm_insn (\"str\\t%1, %0\", operands); + } + + switch (get_attr_length (insn) - ((which_alternative != 0) ? 2 : 0)) + { + case 4: + return \"b%d4\\t%l5\"; + case 6: + return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\"; + default: + return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\"; + } + } + " + [(set (attr "far_jump") + (if_then_else + (ior (and (eq (symbol_ref ("which_alternative")) + (const_int 0)) + (eq_attr "length" "8")) + (eq_attr "length" "10")) + (const_string "yes") + (const_string "no"))) + (set (attr "length") + (if_then_else + (eq (symbol_ref ("which_alternative")) + (const_int 0)) + (if_then_else + (and (ge (minus (match_dup 5) (pc)) (const_int -250)) + (le (minus (match_dup 5) (pc)) (const_int 256))) + (const_int 4) + (if_then_else + (and (ge (minus (match_dup 5) (pc)) (const_int -2040)) + (le (minus (match_dup 5) (pc)) (const_int 2048))) + (const_int 6) + (const_int 8))) + (if_then_else + (and (ge (minus (match_dup 5) (pc)) (const_int -248)) + (le (minus (match_dup 5) (pc)) (const_int 256))) + (const_int 6) + (if_then_else + (and (ge (minus (match_dup 5) (pc)) (const_int -2038)) + (le (minus (match_dup 5) (pc)) (const_int 2048))) + (const_int 8) + (const_int 10)))))] +) + +(define_insn "*subsi3_cbranch_scratch" + [(set (pc) + (if_then_else + (match_operator 0 "arm_comparison_operator" + [(minus:SI (match_operand:SI 1 "register_operand" "l") + (match_operand:SI 2 "nonmemory_operand" "l")) + (const_int 0)]) + (label_ref (match_operand 3 "" "")) + (pc)))] + "TARGET_THUMB + && (GET_CODE (operands[0]) == EQ + || GET_CODE (operands[0]) == NE + || GET_CODE (operands[0]) == GE + || GET_CODE (operands[0]) == LT)" + "* + output_asm_insn (\"cmp\\t%1, %2\", operands); + switch (get_attr_length (insn)) + { + case 4: return \"b%d0\\t%l3\"; + case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\"; + default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\"; + } + " + [(set (attr "far_jump") + (if_then_else + (eq_attr "length" "8") + (const_string "yes") + (const_string "no"))) + (set (attr "length") + (if_then_else + (and (ge (minus (match_dup 3) (pc)) (const_int -250)) + (le (minus (match_dup 3) (pc)) (const_int 256))) + (const_int 4) + (if_then_else + (and (ge (minus (match_dup 3) (pc)) (const_int -2040)) + (le (minus (match_dup 3) (pc)) (const_int 2048))) + (const_int 6) + (const_int 8))))] +) + ;; Comparison and test insns (define_expand "cmpsi" -- 2.11.4.GIT
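
Editor's note, not part of the patch: the two define_split patterns and the *tbit_cbranch
pattern above all lower a bitfield operation to a pair of shifts, and the single-bit test
is then branched on the sign (N) flag via the new CC_Nmode.  Below is a minimal C sketch
of that arithmetic, assuming 32-bit values and the usual two's-complement shift behaviour;
the helper names are invented for illustration and are not part of GCC.

#include <assert.h>
#include <stdint.h>

/* zero_extract (x, WIDTH, POS) as rewritten by the new split:
   lsl by (32 - POS - WIDTH), then lsr by (32 - WIDTH).  */
static uint32_t
extract_unsigned (uint32_t x, int width, int pos)
{
  return (x << (32 - pos - width)) >> (32 - width);
}

/* sign_extract uses an arithmetic right shift (asr) for the second step.  */
static int32_t
extract_signed (uint32_t x, int width, int pos)
{
  return (int32_t) (x << (32 - pos - width)) >> (32 - width);
}

/* Single-bit test as emitted by *tbit_cbranch: shift bit POS up into
   bit 31 and branch on the sign flag (bmi/bpl under CC_Nmode).  */
static int
bit_is_set (uint32_t x, int pos)
{
  return (int32_t) (x << (32 - 1 - pos)) < 0;
}

int
main (void)
{
  assert (extract_unsigned (0xabcd1234u, 8, 4) == 0x23);
  assert (extract_signed (0x00000080u, 8, 0) == -128);
  assert (bit_is_set (1u << 5, 5) && !bit_is_set (0u, 5));
  return 0;
}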
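Editor's note, not part of the patch: each new cbranch pattern picks one of three encodings
from the branch displacement, which is what its "length" attribute computes.  A hedged
sketch of that selection for the _scratch variants follows; the helper name is invented and
the byte ranges are copied from the attributes above.

/* Length in bytes of a Thumb ALU-op-plus-branch sequence: a 2-byte ALU
   instruction followed by a short conditional branch, by an inverted
   conditional branch over an unconditional branch, or by an inverted
   conditional branch over a BL "far jump".  OFFSET is target minus pc.  */
static int
thumb_cbranch_length (long offset)
{
  if (offset >= -250 && offset <= 256)
    return 4;    /* ALU op + b<cond>                          */
  if (offset >= -2040 && offset <= 2048)
    return 6;    /* ALU op + inverted b<cond> + b             */
  return 8;      /* ALU op + inverted b<cond> + bl (far jump) */
}

The non-scratch variants, which may also have to copy the ALU result through an extra mov
or str, report 6, 8 or 10 bytes for those alternatives, which is why their displacement
ranges are shifted by 2 bytes and why far_jump is tied to length 10 there.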