1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright (C) 1991-2013 Free Software Foundation, Inc.
3 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 ;; and Martin Simmons (@harleqn.co.uk).
5 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
7 ;; This file is part of GCC.
9 ;; GCC is free software; you can redistribute it and/or modify it
10 ;; under the terms of the GNU General Public License as published
11 ;; by the Free Software Foundation; either version 3, or (at your
12 ;; option) any later version.
14 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
15 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 ;; License for more details.
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GCC; see the file COPYING3. If not see
21 ;; <http://www.gnu.org/licenses/>.
23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
26 ;;---------------------------------------------------------------------------
29 ;; Register numbers -- All machine registers should be defined here
31 [(R0_REGNUM 0) ; First CORE register
32 (R1_REGNUM 1) ; Second CORE register
33 (IP_REGNUM 12) ; Scratch register
34 (SP_REGNUM 13) ; Stack pointer
35 (LR_REGNUM 14) ; Return address register
36 (PC_REGNUM 15) ; Program counter
37 (LAST_ARM_REGNUM 15) ;
38 (CC_REGNUM 100) ; Condition code pseudo register
39 (VFPCC_REGNUM 101) ; VFP Condition code pseudo register
42 ;; 3rd operand to select_dominance_cc_mode
49 ;; conditional compare combination
60 ;;---------------------------------------------------------------------------
63 ;; Processor type. This is created automatically from arm-cores.def.
64 (include "arm-tune.md")
66 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
67 ; generating ARM code. This is used to control the length of some insn
68 ; patterns that share the same RTL in both ARM and Thumb code.
69 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
71 ; IS_ARCH6 is set to 'yes' when we are generating code form ARMv6.
72 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
74 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
75 (define_attr "is_thumb1" "no,yes" (const (symbol_ref "thumb1_code")))
77 ; We use this attribute to disable alternatives that can produce 32-bit
78 ; instructions inside an IT-block in Thumb2 state. ARMv8 deprecates IT blocks
79 ; that contain 32-bit instructions.
80 (define_attr "enabled_for_depr_it" "no,yes" (const_string "yes"))
82 ; This attribute is used to disable a predicated alternative when we have
84 (define_attr "predicable_short_it" "no,yes" (const_string "yes"))
86 ;; Operand number of an input operand that is shifted. Zero if the
87 ;; given instruction does not shift one of its input operands.
88 (define_attr "shift" "" (const_int 0))
90 ; Floating Point Unit. If we only have floating point emulation, then there
91 ; is no point in scheduling the floating point insns. (Well, for best
92 ; performance we should try and group them together).
93 (define_attr "fpu" "none,vfp"
94 (const (symbol_ref "arm_fpu_attr")))
96 (define_attr "predicated" "yes,no" (const_string "no"))
98 ; LENGTH of an instruction (in bytes)
99 (define_attr "length" ""
102 ; The architecture which supports the instruction (or alternative).
103 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
104 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
105 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
106 ; arm_arch6. This attribute is used to compute attribute "enabled",
107 ; use type "any" to enable an alternative in all cases.
108 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,neon_for_64bits,avoid_neon_for_64bits,iwmmxt,iwmmxt2"
109 (const_string "any"))
111 (define_attr "arch_enabled" "no,yes"
112 (cond [(eq_attr "arch" "any")
115 (and (eq_attr "arch" "a")
116 (match_test "TARGET_ARM"))
119 (and (eq_attr "arch" "t")
120 (match_test "TARGET_THUMB"))
123 (and (eq_attr "arch" "t1")
124 (match_test "TARGET_THUMB1"))
127 (and (eq_attr "arch" "t2")
128 (match_test "TARGET_THUMB2"))
131 (and (eq_attr "arch" "32")
132 (match_test "TARGET_32BIT"))
135 (and (eq_attr "arch" "v6")
136 (match_test "TARGET_32BIT && arm_arch6"))
139 (and (eq_attr "arch" "nov6")
140 (match_test "TARGET_32BIT && !arm_arch6"))
143 (and (eq_attr "arch" "avoid_neon_for_64bits")
144 (match_test "TARGET_NEON")
145 (not (match_test "TARGET_PREFER_NEON_64BITS")))
148 (and (eq_attr "arch" "neon_for_64bits")
149 (match_test "TARGET_NEON")
150 (match_test "TARGET_PREFER_NEON_64BITS"))
153 (and (eq_attr "arch" "iwmmxt2")
154 (match_test "TARGET_REALLY_IWMMXT2"))
155 (const_string "yes")]
157 (const_string "no")))
159 (define_attr "opt" "any,speed,size"
160 (const_string "any"))
162 (define_attr "opt_enabled" "no,yes"
163 (cond [(eq_attr "opt" "any")
166 (and (eq_attr "opt" "speed")
167 (match_test "optimize_function_for_speed_p (cfun)"))
170 (and (eq_attr "opt" "size")
171 (match_test "optimize_function_for_size_p (cfun)"))
172 (const_string "yes")]
173 (const_string "no")))
175 ; Allows an insn to disable certain alternatives for reasons other than
177 (define_attr "insn_enabled" "no,yes"
178 (const_string "yes"))
180 ; Enable all alternatives that are both arch_enabled and insn_enabled.
181 (define_attr "enabled" "no,yes"
182 (cond [(eq_attr "insn_enabled" "no")
185 (and (eq_attr "predicable_short_it" "no")
186 (and (eq_attr "predicated" "yes")
187 (match_test "arm_restrict_it")))
190 (and (eq_attr "enabled_for_depr_it" "no")
191 (match_test "arm_restrict_it"))
194 (eq_attr "arch_enabled" "no")
197 (eq_attr "opt_enabled" "no")
199 (const_string "yes")))
201 ; POOL_RANGE is how far away from a constant pool entry that this insn
202 ; can be placed. If the distance is zero, then this insn will never
203 ; reference the pool.
204 ; Note that for Thumb constant pools the PC value is rounded down to the
205 ; nearest multiple of four. Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
206 ; Thumb insns) should be set to <max_range> - 2.
207 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
208 ; before its address. It is set to <max_range> - (8 + <data_size>).
209 (define_attr "arm_pool_range" "" (const_int 0))
210 (define_attr "thumb2_pool_range" "" (const_int 0))
211 (define_attr "arm_neg_pool_range" "" (const_int 0))
212 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
214 (define_attr "pool_range" ""
215 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
216 (attr "arm_pool_range")))
217 (define_attr "neg_pool_range" ""
218 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
219 (attr "arm_neg_pool_range")))
221 ; An assembler sequence may clobber the condition codes without us knowing.
222 ; If such an insn references the pool, then we have no way of knowing how,
223 ; so use the most conservative value for pool_range.
224 (define_asm_attributes
225 [(set_attr "conds" "clob")
226 (set_attr "length" "4")
227 (set_attr "pool_range" "250")])
229 ;; The instruction used to implement a particular pattern. This
230 ;; information is used by pipeline descriptions to provide accurate
231 ;; scheduling information.
234 "mov,mvn,clz,mrs,msr,xtab,sat,other"
235 (const_string "other"))
237 ; TYPE attribute is used to classify instructions for use in scheduling.
239 ; Instruction classification:
241 ; alu_reg any alu instruction that doesn't hit memory or fp
242 ; regs or have a shifted source operand and does not have
243 ; an immediate operand. This is also the default.
244 ; alu_shift any data instruction that doesn't hit memory or fp.
245 ; regs, but has a source operand shifted by a constant.
246 ; alu_shift_reg any data instruction that doesn't hit memory or fp.
247 ; block blockage insn, this blocks all functional units.
249 ; call subroutine call.
250 ; f_2_r transfer from float to core (no memory needed).
251 ; f_cvt conversion between float and integral.
252 ; f_flag transfer of co-processor flags to the CPSR.
253 ; f_load[d,s] double/single load from memory. Used for VFP unit.
254 ; f_minmax[d,s] double/single floating point minimum/maximum.
255 ; f_rint[d,s] double/single floating point rount to integral.
256 ; f_sel[d,s] double/single floating byte select.
257 ; f_store[d,s] double/single store to memory. Used for VFP unit.
258 ; fadd[d,s] double/single floating-point scalar addition.
259 ; fcmp[d,s] double/single floating-point compare.
260 ; fconst[d,s] double/single load immediate.
261 ; fcpys single precision floating point cpy.
262 ; fdiv[d,s] double/single precision floating point division.
263 ; ffarith[d,s] double/single floating point abs/neg/cpy.
264 ; ffma[d,s] double/single floating point fused multiply-accumulate.
265 ; float floating point arithmetic operation.
266 ; fmac[d,s] double/single floating point multiply-accumulate.
267 ; fmul[d,s] double/single floating point multiply.
268 ; load_byte load byte(s) from memory to arm registers.
269 ; load1 load 1 word from memory to arm registers.
270 ; load2 load 2 words from memory to arm registers.
271 ; load3 load 3 words from memory to arm registers.
272 ; load4 load 4 words from memory to arm registers.
273 ; mla integer multiply accumulate.
274 ; mlas integer multiply accumulate, flag setting.
276 ; mul integer multiply.
277 ; muls integer multiply, flag setting.
278 ; r_2_f transfer from core to float.
279 ; sdiv signed division.
280 ; simple_alu_imm simple alu instruction that doesn't hit memory or fp
281 ; regs or have a shifted source operand and has an
282 ; immediate operand. This currently only tracks very basic
283 ; immediate alu operations.
284 ; simple_alu_shift simple alu instruction with a shifted source operand.
285 ; smlad signed multiply accumulate dual.
286 ; smladx signed multiply accumulate dual reverse.
287 ; smlal signed multiply accumulate long.
288 ; smlald signed multiply accumulate long dual.
289 ; smlals signed multiply accumulate long, flag setting.
290 ; smlalxy signed multiply accumulate, 16x16-bit, 64-bit accumulate.
291 ; smlawx signed multiply accumulate, 32x16-bit, 32-bit accumulate.
292 ; smlawy signed multiply accumulate wide, 32x16-bit,
294 ; smlaxy signed multiply accumulate, 16x16-bit, 32-bit accumulate.
295 ; smlsd signed multiply subtract dual.
296 ; smlsdx signed multiply subtract dual reverse.
297 ; smlsld signed multiply subtract long dual.
298 ; smmla signed most significant word multiply accumulate.
299 ; smmul signed most significant word multiply.
300 ; smmulr signed most significant word multiply, rounded.
301 ; smuad signed dual multiply add.
302 ; smuadx signed dual multiply add reverse.
303 ; smull signed multiply long.
304 ; smulls signed multiply long, flag setting.
305 ; smulwy signed multiply wide, 32x16-bit, 32-bit accumulate.
306 ; smulxy signed multiply, 16x16-bit, 32-bit accumulate.
307 ; smusd signed dual multiply subtract.
308 ; smusdx signed dual multiply subtract reverse.
309 ; store1 store 1 word to memory from arm registers.
310 ; store2 store 2 words to memory from arm registers.
311 ; store3 store 3 words to memory from arm registers.
312 ; store4 store 4 (or more) words to memory from arm registers.
313 ; udiv unsigned division.
314 ; umaal unsigned multiply accumulate accumulate long.
315 ; umlal unsigned multiply accumulate long.
316 ; umlals unsigned multiply accumulate long, flag setting.
317 ; umull unsigned multiply long.
318 ; umulls unsigned multiply long, flag setting.
320 ; The classification below is for instructions used by the Wireless MMX
321 ; Technology. Each attribute value is used to classify an instruction of the
322 ; same name or family.
527 (const_string "alu_reg"))
529 ; Is this an (integer side) multiply with a 32-bit (or smaller) result?
530 (define_attr "mul32" "no,yes"
533 "smulxy,smlaxy,smulwy,smlawx,mul,muls,mla,mlas,smlawy,smuad,smuadx,\
534 smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,smlald,smlsld")
536 (const_string "no")))
538 ; Is this an (integer side) multiply with a 64-bit result?
539 (define_attr "mul64" "no,yes"
542 "smlalxy,umull,umulls,umaal,umlal,umlals,smull,smulls,smlal,smlals")
544 (const_string "no")))
546 ; Load scheduling, set from the arm_ld_sched variable
547 ; initialized by arm_option_override()
548 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
550 ;; Classification of NEON instructions for scheduling purposes.
551 (define_attr "neon_type"
562 neon_mul_ddd_8_16_qdd_16_8_long_32_16_long,\
563 neon_mul_qqq_8_16_32_ddd_32,\
564 neon_mul_qdd_64_32_long_qqd_16_ddd_32_scalar_64_32_long_scalar,\
565 neon_mla_ddd_8_16_qdd_16_8_long_32_16_long,\
567 neon_mla_ddd_32_qqd_16_ddd_32_scalar_qdd_64_32_long_scalar_qdd_64_32_long,\
568 neon_mla_qqq_32_qqd_32_scalar,\
569 neon_mul_ddd_16_scalar_32_16_long_scalar,\
570 neon_mul_qqd_32_scalar,\
571 neon_mla_ddd_16_scalar_qdd_32_16_long_scalar,\
576 neon_vqshl_vrshl_vqrshl_qqq,\
578 neon_fp_vadd_ddd_vabs_dd,\
579 neon_fp_vadd_qqq_vabs_qq,\
585 neon_fp_vmla_ddd_scalar,\
586 neon_fp_vmla_qqq_scalar,\
587 neon_fp_vrecps_vrsqrts_ddd,\
588 neon_fp_vrecps_vrsqrts_qqq,\
596 neon_vld2_2_regs_vld1_vld2_all_lanes,\
599 neon_vst1_1_2_regs_vst2_2_regs,\
601 neon_vst2_4_regs_vst3_vst4,\
603 neon_vld1_vld2_lane,\
604 neon_vld3_vld4_lane,\
605 neon_vst1_vst2_lane,\
606 neon_vst3_vst4_lane,\
607 neon_vld3_vld4_all_lanes,\
615 (const_string "none"))
617 ; condition codes: this one is used by final_prescan_insn to speed up
618 ; conditionalizing instructions. It saves having to scan the rtl to see if
619 ; it uses or alters the condition codes.
621 ; USE means that the condition codes are used by the insn in the process of
622 ; outputting code, this means (at present) that we can't use the insn in
625 ; SET means that the purpose of the insn is to set the condition codes in a
626 ; well defined manner.
628 ; CLOB means that the condition codes are altered in an undefined manner, if
629 ; they are altered at all
631 ; UNCONDITIONAL means the instruction can not be conditionally executed and
632 ; that the instruction does not use or alter the condition codes.
634 ; NOCOND means that the instruction does not use or alter the condition
635 ; codes but can be converted into a conditionally exectuted instruction.
637 (define_attr "conds" "use,set,clob,unconditional,nocond"
639 (ior (eq_attr "is_thumb1" "yes")
640 (eq_attr "type" "call"))
641 (const_string "clob")
642 (if_then_else (eq_attr "neon_type" "none")
643 (const_string "nocond")
644 (const_string "unconditional"))))
646 ; Predicable means that the insn can be conditionally executed based on
647 ; an automatically added predicate (additional patterns are generated by
648 ; gen...). We default to 'no' because no Thumb patterns match this rule
649 ; and not all ARM patterns do.
650 (define_attr "predicable" "no,yes" (const_string "no"))
652 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
653 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
654 ; suffer blockages enough to warrant modelling this (and it can adversely
655 ; affect the schedule).
656 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
658 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
659 ; to stall the processor. Used with model_wbuf above.
660 (define_attr "write_conflict" "no,yes"
661 (if_then_else (eq_attr "type"
664 (const_string "no")))
666 ; Classify the insns into those that take one cycle and those that take more
667 ; than one on the main cpu execution unit.
668 (define_attr "core_cycles" "single,multi"
669 (if_then_else (eq_attr "type"
670 "simple_alu_imm, alu_reg,\
671 simple_alu_shift, alu_shift, float, fdivd, fdivs,\
672 wmmx_wor, wmmx_wxor, wmmx_wand, wmmx_wandn, wmmx_wmov, wmmx_tmcrr,\
673 wmmx_tmrrc, wmmx_wldr, wmmx_wstr, wmmx_tmcr, wmmx_tmrc, wmmx_wadd,\
674 wmmx_wsub, wmmx_wmul, wmmx_wmac, wmmx_wavg2, wmmx_tinsr, wmmx_textrm,\
675 wmmx_wshufh, wmmx_wcmpeq, wmmx_wcmpgt, wmmx_wmax, wmmx_wmin, wmmx_wpack,\
676 wmmx_wunpckih, wmmx_wunpckil, wmmx_wunpckeh, wmmx_wunpckel, wmmx_wror,\
677 wmmx_wsra, wmmx_wsrl, wmmx_wsll, wmmx_wmadd, wmmx_tmia, wmmx_tmiaph,\
678 wmmx_tmiaxy, wmmx_tbcst, wmmx_tmovmsk, wmmx_wacc, wmmx_waligni,\
679 wmmx_walignr, wmmx_tandc, wmmx_textrc, wmmx_torc, wmmx_torvsc, wmmx_wsad,\
680 wmmx_wabs, wmmx_wabsdiff, wmmx_waddsubhx, wmmx_wsubaddhx, wmmx_wavg4,\
681 wmmx_wmulw, wmmx_wqmulm, wmmx_wqmulwm, wmmx_waddbhus, wmmx_wqmiaxy,\
682 wmmx_wmiaxy, wmmx_wmiawxy, wmmx_wmerge")
683 (const_string "single")
684 (const_string "multi")))
686 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
687 ;; distant label. Only applicable to Thumb code.
688 (define_attr "far_jump" "yes,no" (const_string "no"))
691 ;; The number of machine instructions this pattern expands to.
692 ;; Used for Thumb-2 conditional execution.
693 (define_attr "ce_count" "" (const_int 1))
695 ;;---------------------------------------------------------------------------
698 (include "unspecs.md")
700 ;;---------------------------------------------------------------------------
703 (include "iterators.md")
705 ;;---------------------------------------------------------------------------
708 (include "predicates.md")
709 (include "constraints.md")
711 ;;---------------------------------------------------------------------------
712 ;; Pipeline descriptions
714 (define_attr "tune_cortexr4" "yes,no"
716 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
718 (const_string "no"))))
720 ;; True if the generic scheduling description should be used.
722 (define_attr "generic_sched" "yes,no"
724 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa5,cortexa7,cortexa8,cortexa9,cortexa15,cortexa53,cortexm4,marvell_pj4")
725 (eq_attr "tune_cortexr4" "yes"))
727 (const_string "yes"))))
729 (define_attr "generic_vfp" "yes,no"
731 (and (eq_attr "fpu" "vfp")
732 (eq_attr "tune" "!arm1020e,arm1022e,cortexa5,cortexa7,cortexa8,cortexa9,cortexa53,cortexm4,marvell_pj4")
733 (eq_attr "tune_cortexr4" "no"))
735 (const_string "no"))))
737 (include "marvell-f-iwmmxt.md")
738 (include "arm-generic.md")
739 (include "arm926ejs.md")
740 (include "arm1020e.md")
741 (include "arm1026ejs.md")
742 (include "arm1136jfs.md")
744 (include "fa606te.md")
745 (include "fa626te.md")
746 (include "fmp626.md")
747 (include "fa726te.md")
748 (include "cortex-a5.md")
749 (include "cortex-a7.md")
750 (include "cortex-a8.md")
751 (include "cortex-a9.md")
752 (include "cortex-a15.md")
753 (include "cortex-a53.md")
754 (include "cortex-r4.md")
755 (include "cortex-r4f.md")
756 (include "cortex-m4.md")
757 (include "cortex-m4-fpu.md")
759 (include "marvell-pj4.md")
762 ;;---------------------------------------------------------------------------
767 ;; Note: For DImode insns, there is normally no reason why operands should
768 ;; not be in the same register, what we don't want is for something being
769 ;; written to partially overlap something that is an input.
771 (define_expand "adddi3"
773 [(set (match_operand:DI 0 "s_register_operand" "")
774 (plus:DI (match_operand:DI 1 "s_register_operand" "")
775 (match_operand:DI 2 "arm_adddi_operand" "")))
776 (clobber (reg:CC CC_REGNUM))])]
781 if (!REG_P (operands[1]))
782 operands[1] = force_reg (DImode, operands[1]);
783 if (!REG_P (operands[2]))
784 operands[2] = force_reg (DImode, operands[2]);
789 (define_insn "*thumb1_adddi3"
790 [(set (match_operand:DI 0 "register_operand" "=l")
791 (plus:DI (match_operand:DI 1 "register_operand" "%0")
792 (match_operand:DI 2 "register_operand" "l")))
793 (clobber (reg:CC CC_REGNUM))
796 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
797 [(set_attr "length" "4")]
800 (define_insn_and_split "*arm_adddi3"
801 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r,&r,&r")
802 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0, r, 0, r")
803 (match_operand:DI 2 "arm_adddi_operand" "r, 0, r, Dd, Dd")))
804 (clobber (reg:CC CC_REGNUM))]
805 "TARGET_32BIT && !TARGET_NEON"
807 "TARGET_32BIT && reload_completed
808 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))"
809 [(parallel [(set (reg:CC_C CC_REGNUM)
810 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
812 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
813 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (match_dup 5))
814 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
817 operands[3] = gen_highpart (SImode, operands[0]);
818 operands[0] = gen_lowpart (SImode, operands[0]);
819 operands[4] = gen_highpart (SImode, operands[1]);
820 operands[1] = gen_lowpart (SImode, operands[1]);
821 operands[5] = gen_highpart_mode (SImode, DImode, operands[2]);
822 operands[2] = gen_lowpart (SImode, operands[2]);
824 [(set_attr "conds" "clob")
825 (set_attr "length" "8")]
828 (define_insn_and_split "*adddi_sesidi_di"
829 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
830 (plus:DI (sign_extend:DI
831 (match_operand:SI 2 "s_register_operand" "r,r"))
832 (match_operand:DI 1 "s_register_operand" "0,r")))
833 (clobber (reg:CC CC_REGNUM))]
836 "TARGET_32BIT && reload_completed"
837 [(parallel [(set (reg:CC_C CC_REGNUM)
838 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
840 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
841 (set (match_dup 3) (plus:SI (plus:SI (ashiftrt:SI (match_dup 2)
844 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
847 operands[3] = gen_highpart (SImode, operands[0]);
848 operands[0] = gen_lowpart (SImode, operands[0]);
849 operands[4] = gen_highpart (SImode, operands[1]);
850 operands[1] = gen_lowpart (SImode, operands[1]);
851 operands[2] = gen_lowpart (SImode, operands[2]);
853 [(set_attr "conds" "clob")
854 (set_attr "length" "8")]
857 (define_insn_and_split "*adddi_zesidi_di"
858 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
859 (plus:DI (zero_extend:DI
860 (match_operand:SI 2 "s_register_operand" "r,r"))
861 (match_operand:DI 1 "s_register_operand" "0,r")))
862 (clobber (reg:CC CC_REGNUM))]
865 "TARGET_32BIT && reload_completed"
866 [(parallel [(set (reg:CC_C CC_REGNUM)
867 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
869 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
870 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (const_int 0))
871 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
874 operands[3] = gen_highpart (SImode, operands[0]);
875 operands[0] = gen_lowpart (SImode, operands[0]);
876 operands[4] = gen_highpart (SImode, operands[1]);
877 operands[1] = gen_lowpart (SImode, operands[1]);
878 operands[2] = gen_lowpart (SImode, operands[2]);
880 [(set_attr "conds" "clob")
881 (set_attr "length" "8")]
884 (define_expand "addsi3"
885 [(set (match_operand:SI 0 "s_register_operand" "")
886 (plus:SI (match_operand:SI 1 "s_register_operand" "")
887 (match_operand:SI 2 "reg_or_int_operand" "")))]
890 if (TARGET_32BIT && CONST_INT_P (operands[2]))
892 arm_split_constant (PLUS, SImode, NULL_RTX,
893 INTVAL (operands[2]), operands[0], operands[1],
894 optimize && can_create_pseudo_p ());
900 ; If there is a scratch available, this will be faster than synthesizing the
903 [(match_scratch:SI 3 "r")
904 (set (match_operand:SI 0 "arm_general_register_operand" "")
905 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
906 (match_operand:SI 2 "const_int_operand" "")))]
908 !(const_ok_for_arm (INTVAL (operands[2]))
909 || const_ok_for_arm (-INTVAL (operands[2])))
910 && const_ok_for_arm (~INTVAL (operands[2]))"
911 [(set (match_dup 3) (match_dup 2))
912 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
916 ;; The r/r/k alternative is required when reloading the address
917 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
918 ;; put the duplicated register first, and not try the commutative version.
919 (define_insn_and_split "*arm_addsi3"
920 [(set (match_operand:SI 0 "s_register_operand" "=rk, r,k, r,r, k, r, k,k,r, k, r")
921 (plus:SI (match_operand:SI 1 "s_register_operand" "%0, rk,k, r,rk,k, rk,k,r,rk,k, rk")
922 (match_operand:SI 2 "reg_or_int_operand" "rk, rI,rI,k,Pj,Pj,L, L,L,PJ,PJ,?n")))]
934 subw%?\\t%0, %1, #%n2
935 subw%?\\t%0, %1, #%n2
938 && CONST_INT_P (operands[2])
939 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
940 && (reload_completed || !arm_eliminable_register (operands[1]))"
941 [(clobber (const_int 0))]
943 arm_split_constant (PLUS, SImode, curr_insn,
944 INTVAL (operands[2]), operands[0],
948 [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,16")
949 (set_attr "predicable" "yes")
950 (set_attr "arch" "t2,*,*,*,t2,t2,*,*,a,t2,t2,*")
951 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
952 (const_string "simple_alu_imm")
953 (const_string "alu_reg")))
957 (define_insn_and_split "*thumb1_addsi3"
958 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,k,l,l,l")
959 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,k,k,0,l,k")
960 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,M,O,Pa,Pb,Pc")))]
963 static const char * const asms[] =
965 \"add\\t%0, %0, %2\",
966 \"sub\\t%0, %0, #%n2\",
967 \"add\\t%0, %1, %2\",
968 \"add\\t%0, %0, %2\",
969 \"add\\t%0, %0, %2\",
970 \"add\\t%0, %1, %2\",
971 \"add\\t%0, %1, %2\",
976 if ((which_alternative == 2 || which_alternative == 6)
977 && CONST_INT_P (operands[2])
978 && INTVAL (operands[2]) < 0)
979 return \"sub\\t%0, %1, #%n2\";
980 return asms[which_alternative];
982 "&& reload_completed && CONST_INT_P (operands[2])
983 && ((operands[1] != stack_pointer_rtx
984 && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255))
985 || (operands[1] == stack_pointer_rtx
986 && INTVAL (operands[2]) > 1020))"
987 [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
988 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
990 HOST_WIDE_INT offset = INTVAL (operands[2]);
991 if (operands[1] == stack_pointer_rtx)
997 else if (offset < -255)
1000 operands[3] = GEN_INT (offset);
1001 operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
1003 [(set_attr "length" "2,2,2,2,2,2,2,4,4,4")]
1006 ;; Reloading and elimination of the frame pointer can
1007 ;; sometimes cause this optimization to be missed.
1009 [(set (match_operand:SI 0 "arm_general_register_operand" "")
1010 (match_operand:SI 1 "const_int_operand" ""))
1012 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
1014 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
1015 && (INTVAL (operands[1]) & 3) == 0"
1016 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
1020 (define_insn "addsi3_compare0"
1021 [(set (reg:CC_NOOV CC_REGNUM)
1023 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
1024 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
1026 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1027 (plus:SI (match_dup 1) (match_dup 2)))]
1031 sub%.\\t%0, %1, #%n2
1033 [(set_attr "conds" "set")
1034 (set_attr "type" "simple_alu_imm, simple_alu_imm, *")]
1037 (define_insn "*addsi3_compare0_scratch"
1038 [(set (reg:CC_NOOV CC_REGNUM)
1040 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
1041 (match_operand:SI 1 "arm_add_operand" "I,L, r"))
1048 [(set_attr "conds" "set")
1049 (set_attr "predicable" "yes")
1050 (set_attr "type" "simple_alu_imm, simple_alu_imm, *")
1054 (define_insn "*compare_negsi_si"
1055 [(set (reg:CC_Z CC_REGNUM)
1057 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
1058 (match_operand:SI 1 "s_register_operand" "r")))]
1061 [(set_attr "conds" "set")
1062 (set_attr "predicable" "yes")]
1065 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
1066 ;; addend is a constant.
1067 (define_insn "cmpsi2_addneg"
1068 [(set (reg:CC CC_REGNUM)
1070 (match_operand:SI 1 "s_register_operand" "r,r")
1071 (match_operand:SI 2 "arm_addimm_operand" "L,I")))
1072 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1073 (plus:SI (match_dup 1)
1074 (match_operand:SI 3 "arm_addimm_operand" "I,L")))]
1075 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
1078 sub%.\\t%0, %1, #%n3"
1079 [(set_attr "conds" "set")]
1082 ;; Convert the sequence
1084 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
1088 ;; bcs dest ((unsigned)rn >= 1)
1089 ;; similarly for the beq variant using bcc.
1090 ;; This is a common looping idiom (while (n--))
1092 [(set (match_operand:SI 0 "arm_general_register_operand" "")
1093 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
1095 (set (match_operand 2 "cc_register" "")
1096 (compare (match_dup 0) (const_int -1)))
1098 (if_then_else (match_operator 3 "equality_operator"
1099 [(match_dup 2) (const_int 0)])
1100 (match_operand 4 "" "")
1101 (match_operand 5 "" "")))]
1102 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
1106 (match_dup 1) (const_int 1)))
1107 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
1109 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
1112 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
1113 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
1116 operands[2], const0_rtx);"
1119 ;; The next four insns work because they compare the result with one of
1120 ;; the operands, and we know that the use of the condition code is
1121 ;; either GEU or LTU, so we can use the carry flag from the addition
1122 ;; instead of doing the compare a second time.
1123 (define_insn "*addsi3_compare_op1"
1124 [(set (reg:CC_C CC_REGNUM)
1126 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
1127 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
1129 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1130 (plus:SI (match_dup 1) (match_dup 2)))]
1134 sub%.\\t%0, %1, #%n2
1136 [(set_attr "conds" "set")
1137 (set_attr "type" "simple_alu_imm,simple_alu_imm,*")]
1140 (define_insn "*addsi3_compare_op2"
1141 [(set (reg:CC_C CC_REGNUM)
1143 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
1144 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
1146 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1147 (plus:SI (match_dup 1) (match_dup 2)))]
1152 sub%.\\t%0, %1, #%n2"
1153 [(set_attr "conds" "set")
1154 (set_attr "type" "simple_alu_imm,simple_alu_imm,*")]
1157 (define_insn "*compare_addsi2_op0"
1158 [(set (reg:CC_C CC_REGNUM)
1160 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
1161 (match_operand:SI 1 "arm_add_operand" "I,L,r"))
1168 [(set_attr "conds" "set")
1169 (set_attr "predicable" "yes")
1170 (set_attr "type" "simple_alu_imm,simple_alu_imm,*")]
1173 (define_insn "*compare_addsi2_op1"
1174 [(set (reg:CC_C CC_REGNUM)
1176 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
1177 (match_operand:SI 1 "arm_add_operand" "I,L,r"))
1184 [(set_attr "conds" "set")
1185 (set_attr "predicable" "yes")
1186 (set_attr "type" "simple_alu_imm,simple_alu_imm,*")]
1189 (define_insn "*addsi3_carryin_<optab>"
1190 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1191 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r,r")
1192 (match_operand:SI 2 "arm_not_operand" "rI,K"))
1193 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
1197 sbc%?\\t%0, %1, #%B2"
1198 [(set_attr "conds" "use")
1199 (set_attr "predicable" "yes")]
1202 (define_insn "*addsi3_carryin_alt2_<optab>"
1203 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1204 (plus:SI (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
1205 (match_operand:SI 1 "s_register_operand" "%r,r"))
1206 (match_operand:SI 2 "arm_rhs_operand" "rI,K")))]
1210 sbc%?\\t%0, %1, #%B2"
1211 [(set_attr "conds" "use")
1212 (set_attr "predicable" "yes")]
1215 (define_insn "*addsi3_carryin_shift_<optab>"
1216 [(set (match_operand:SI 0 "s_register_operand" "=r")
1218 (match_operator:SI 2 "shift_operator"
1219 [(match_operand:SI 3 "s_register_operand" "r")
1220 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1221 (match_operand:SI 1 "s_register_operand" "r"))
1222 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
1224 "adc%?\\t%0, %1, %3%S2"
1225 [(set_attr "conds" "use")
1226 (set_attr "predicable" "yes")
1227 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1228 (const_string "alu_shift")
1229 (const_string "alu_shift_reg")))]
1232 (define_insn "*addsi3_carryin_clobercc_<optab>"
1233 [(set (match_operand:SI 0 "s_register_operand" "=r")
1234 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
1235 (match_operand:SI 2 "arm_rhs_operand" "rI"))
1236 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))
1237 (clobber (reg:CC CC_REGNUM))]
1239 "adc%.\\t%0, %1, %2"
1240 [(set_attr "conds" "set")]
1243 (define_insn "*subsi3_carryin"
1244 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1245 (minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I")
1246 (match_operand:SI 2 "s_register_operand" "r,r"))
1247 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1252 [(set_attr "conds" "use")
1253 (set_attr "arch" "*,a")
1254 (set_attr "predicable" "yes")]
1257 (define_insn "*subsi3_carryin_const"
1258 [(set (match_operand:SI 0 "s_register_operand" "=r")
1259 (minus:SI (plus:SI (match_operand:SI 1 "reg_or_int_operand" "r")
1260 (match_operand:SI 2 "arm_not_operand" "K"))
1261 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1263 "sbc\\t%0, %1, #%B2"
1264 [(set_attr "conds" "use")]
1267 (define_insn "*subsi3_carryin_compare"
1268 [(set (reg:CC CC_REGNUM)
1269 (compare:CC (match_operand:SI 1 "s_register_operand" "r")
1270 (match_operand:SI 2 "s_register_operand" "r")))
1271 (set (match_operand:SI 0 "s_register_operand" "=r")
1272 (minus:SI (minus:SI (match_dup 1)
1274 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1277 [(set_attr "conds" "set")]
1280 (define_insn "*subsi3_carryin_compare_const"
1281 [(set (reg:CC CC_REGNUM)
1282 (compare:CC (match_operand:SI 1 "reg_or_int_operand" "r")
1283 (match_operand:SI 2 "arm_not_operand" "K")))
1284 (set (match_operand:SI 0 "s_register_operand" "=r")
1285 (minus:SI (plus:SI (match_dup 1)
1287 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1289 "sbcs\\t%0, %1, #%B2"
1290 [(set_attr "conds" "set")]
1293 (define_insn "*subsi3_carryin_shift"
1294 [(set (match_operand:SI 0 "s_register_operand" "=r")
1296 (match_operand:SI 1 "s_register_operand" "r")
1297 (match_operator:SI 2 "shift_operator"
1298 [(match_operand:SI 3 "s_register_operand" "r")
1299 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
1300 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1302 "sbc%?\\t%0, %1, %3%S2"
1303 [(set_attr "conds" "use")
1304 (set_attr "predicable" "yes")
1305 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1306 (const_string "alu_shift")
1307 (const_string "alu_shift_reg")))]
1310 (define_insn "*rsbsi3_carryin_shift"
1311 [(set (match_operand:SI 0 "s_register_operand" "=r")
1313 (match_operator:SI 2 "shift_operator"
1314 [(match_operand:SI 3 "s_register_operand" "r")
1315 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1316 (match_operand:SI 1 "s_register_operand" "r"))
1317 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1319 "rsc%?\\t%0, %1, %3%S2"
1320 [(set_attr "conds" "use")
1321 (set_attr "predicable" "yes")
1322 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1323 (const_string "alu_shift")
1324 (const_string "alu_shift_reg")))]
1327 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
1329 [(set (match_operand:SI 0 "s_register_operand" "")
1330 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1331 (match_operand:SI 2 "s_register_operand" ""))
1333 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1335 [(set (match_dup 3) (match_dup 1))
1336 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1338 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1341 (define_expand "addsf3"
1342 [(set (match_operand:SF 0 "s_register_operand" "")
1343 (plus:SF (match_operand:SF 1 "s_register_operand" "")
1344 (match_operand:SF 2 "s_register_operand" "")))]
1345 "TARGET_32BIT && TARGET_HARD_FLOAT"
1349 (define_expand "adddf3"
1350 [(set (match_operand:DF 0 "s_register_operand" "")
1351 (plus:DF (match_operand:DF 1 "s_register_operand" "")
1352 (match_operand:DF 2 "s_register_operand" "")))]
1353 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1357 (define_expand "subdi3"
1359 [(set (match_operand:DI 0 "s_register_operand" "")
1360 (minus:DI (match_operand:DI 1 "s_register_operand" "")
1361 (match_operand:DI 2 "s_register_operand" "")))
1362 (clobber (reg:CC CC_REGNUM))])]
1367 if (!REG_P (operands[1]))
1368 operands[1] = force_reg (DImode, operands[1]);
1369 if (!REG_P (operands[2]))
1370 operands[2] = force_reg (DImode, operands[2]);
1375 (define_insn_and_split "*arm_subdi3"
1376 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
1377 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
1378 (match_operand:DI 2 "s_register_operand" "r,0,0")))
1379 (clobber (reg:CC CC_REGNUM))]
1380 "TARGET_32BIT && !TARGET_NEON"
1381 "#" ; "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
1382 "&& reload_completed"
1383 [(parallel [(set (reg:CC CC_REGNUM)
1384 (compare:CC (match_dup 1) (match_dup 2)))
1385 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
1386 (set (match_dup 3) (minus:SI (minus:SI (match_dup 4) (match_dup 5))
1387 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1389 operands[3] = gen_highpart (SImode, operands[0]);
1390 operands[0] = gen_lowpart (SImode, operands[0]);
1391 operands[4] = gen_highpart (SImode, operands[1]);
1392 operands[1] = gen_lowpart (SImode, operands[1]);
1393 operands[5] = gen_highpart (SImode, operands[2]);
1394 operands[2] = gen_lowpart (SImode, operands[2]);
1396 [(set_attr "conds" "clob")
1397 (set_attr "length" "8")]
1400 (define_insn "*thumb_subdi3"
1401 [(set (match_operand:DI 0 "register_operand" "=l")
1402 (minus:DI (match_operand:DI 1 "register_operand" "0")
1403 (match_operand:DI 2 "register_operand" "l")))
1404 (clobber (reg:CC CC_REGNUM))]
1406 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
1407 [(set_attr "length" "4")]
1410 (define_insn_and_split "*subdi_di_zesidi"
1411 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1412 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1414 (match_operand:SI 2 "s_register_operand" "r,r"))))
1415 (clobber (reg:CC CC_REGNUM))]
1417 "#" ; "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
1418 "&& reload_completed"
1419 [(parallel [(set (reg:CC CC_REGNUM)
1420 (compare:CC (match_dup 1) (match_dup 2)))
1421 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
1422 (set (match_dup 3) (minus:SI (plus:SI (match_dup 4) (match_dup 5))
1423 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1425 operands[3] = gen_highpart (SImode, operands[0]);
1426 operands[0] = gen_lowpart (SImode, operands[0]);
1427 operands[4] = gen_highpart (SImode, operands[1]);
1428 operands[1] = gen_lowpart (SImode, operands[1]);
1429 operands[5] = GEN_INT (~0);
1431 [(set_attr "conds" "clob")
1432 (set_attr "length" "8")]
1435 (define_insn_and_split "*subdi_di_sesidi"
1436 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1437 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1439 (match_operand:SI 2 "s_register_operand" "r,r"))))
1440 (clobber (reg:CC CC_REGNUM))]
1442 "#" ; "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
1443 "&& reload_completed"
1444 [(parallel [(set (reg:CC CC_REGNUM)
1445 (compare:CC (match_dup 1) (match_dup 2)))
1446 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
1447 (set (match_dup 3) (minus:SI (minus:SI (match_dup 4)
1448 (ashiftrt:SI (match_dup 2)
1450 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1452 operands[3] = gen_highpart (SImode, operands[0]);
1453 operands[0] = gen_lowpart (SImode, operands[0]);
1454 operands[4] = gen_highpart (SImode, operands[1]);
1455 operands[1] = gen_lowpart (SImode, operands[1]);
1457 [(set_attr "conds" "clob")
1458 (set_attr "length" "8")]
1461 (define_insn_and_split "*subdi_zesidi_di"
1462 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1463 (minus:DI (zero_extend:DI
1464 (match_operand:SI 2 "s_register_operand" "r,r"))
1465 (match_operand:DI 1 "s_register_operand" "0,r")))
1466 (clobber (reg:CC CC_REGNUM))]
1468 "#" ; "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
1470 ; "subs\\t%Q0, %2, %Q1\;rsc\\t%R0, %R1, #0"
1471 "&& reload_completed"
1472 [(parallel [(set (reg:CC CC_REGNUM)
1473 (compare:CC (match_dup 2) (match_dup 1)))
1474 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))])
1475 (set (match_dup 3) (minus:SI (minus:SI (const_int 0) (match_dup 4))
1476 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1478 operands[3] = gen_highpart (SImode, operands[0]);
1479 operands[0] = gen_lowpart (SImode, operands[0]);
1480 operands[4] = gen_highpart (SImode, operands[1]);
1481 operands[1] = gen_lowpart (SImode, operands[1]);
1483 [(set_attr "conds" "clob")
1484 (set_attr "length" "8")]
1487 (define_insn_and_split "*subdi_sesidi_di"
1488 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1489 (minus:DI (sign_extend:DI
1490 (match_operand:SI 2 "s_register_operand" "r,r"))
1491 (match_operand:DI 1 "s_register_operand" "0,r")))
1492 (clobber (reg:CC CC_REGNUM))]
1494 "#" ; "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
1496 ; "subs\\t%Q0, %2, %Q1\;rsc\\t%R0, %R1, %2, asr #31"
1497 "&& reload_completed"
1498 [(parallel [(set (reg:CC CC_REGNUM)
1499 (compare:CC (match_dup 2) (match_dup 1)))
1500 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))])
1501 (set (match_dup 3) (minus:SI (minus:SI
1502 (ashiftrt:SI (match_dup 2)
1505 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1507 operands[3] = gen_highpart (SImode, operands[0]);
1508 operands[0] = gen_lowpart (SImode, operands[0]);
1509 operands[4] = gen_highpart (SImode, operands[1]);
1510 operands[1] = gen_lowpart (SImode, operands[1]);
1512 [(set_attr "conds" "clob")
1513 (set_attr "length" "8")]
1516 (define_insn_and_split "*subdi_zesidi_zesidi"
1517 [(set (match_operand:DI 0 "s_register_operand" "=r")
1518 (minus:DI (zero_extend:DI
1519 (match_operand:SI 1 "s_register_operand" "r"))
1521 (match_operand:SI 2 "s_register_operand" "r"))))
1522 (clobber (reg:CC CC_REGNUM))]
1524 "#" ; "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
1525 "&& reload_completed"
1526 [(parallel [(set (reg:CC CC_REGNUM)
1527 (compare:CC (match_dup 1) (match_dup 2)))
1528 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
1529 (set (match_dup 3) (minus:SI (minus:SI (match_dup 1) (match_dup 1))
1530 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1532 operands[3] = gen_highpart (SImode, operands[0]);
1533 operands[0] = gen_lowpart (SImode, operands[0]);
1535 [(set_attr "conds" "clob")
1536 (set_attr "length" "8")]
1539 (define_expand "subsi3"
1540 [(set (match_operand:SI 0 "s_register_operand" "")
1541 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
1542 (match_operand:SI 2 "s_register_operand" "")))]
1545 if (CONST_INT_P (operands[1]))
1549 arm_split_constant (MINUS, SImode, NULL_RTX,
1550 INTVAL (operands[1]), operands[0],
1551 operands[2], optimize && can_create_pseudo_p ());
1554 else /* TARGET_THUMB1 */
1555 operands[1] = force_reg (SImode, operands[1]);
1560 (define_insn "thumb1_subsi3_insn"
1561 [(set (match_operand:SI 0 "register_operand" "=l")
1562 (minus:SI (match_operand:SI 1 "register_operand" "l")
1563 (match_operand:SI 2 "reg_or_int_operand" "lPd")))]
1566 [(set_attr "length" "2")
1567 (set_attr "conds" "set")])
1569 ; ??? Check Thumb-2 split length
1570 (define_insn_and_split "*arm_subsi3_insn"
1571 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,rk,r")
1572 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,r,r,k,?n")
1573 (match_operand:SI 2 "reg_or_int_operand" "r,I,r,r, r")))]
1581 "&& (CONST_INT_P (operands[1])
1582 && !const_ok_for_arm (INTVAL (operands[1])))"
1583 [(clobber (const_int 0))]
1585 arm_split_constant (MINUS, SImode, curr_insn,
1586 INTVAL (operands[1]), operands[0], operands[2], 0);
1589 [(set_attr "length" "4,4,4,4,16")
1590 (set_attr "predicable" "yes")
1591 (set_attr "type" "*,simple_alu_imm,*,*,*")]
1595 [(match_scratch:SI 3 "r")
1596 (set (match_operand:SI 0 "arm_general_register_operand" "")
1597 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1598 (match_operand:SI 2 "arm_general_register_operand" "")))]
1600 && !const_ok_for_arm (INTVAL (operands[1]))
1601 && const_ok_for_arm (~INTVAL (operands[1]))"
1602 [(set (match_dup 3) (match_dup 1))
1603 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1607 (define_insn "*subsi3_compare0"
1608 [(set (reg:CC_NOOV CC_REGNUM)
1610 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1611 (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
1613 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1614 (minus:SI (match_dup 1) (match_dup 2)))]
1620 [(set_attr "conds" "set")
1621 (set_attr "type" "simple_alu_imm,*,*")]
1624 (define_insn "subsi3_compare"
1625 [(set (reg:CC CC_REGNUM)
1626 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1627 (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
1628 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1629 (minus:SI (match_dup 1) (match_dup 2)))]
1635 [(set_attr "conds" "set")
1636 (set_attr "type" "simple_alu_imm,*,*")]
1639 (define_expand "subsf3"
1640 [(set (match_operand:SF 0 "s_register_operand" "")
1641 (minus:SF (match_operand:SF 1 "s_register_operand" "")
1642 (match_operand:SF 2 "s_register_operand" "")))]
1643 "TARGET_32BIT && TARGET_HARD_FLOAT"
1647 (define_expand "subdf3"
1648 [(set (match_operand:DF 0 "s_register_operand" "")
1649 (minus:DF (match_operand:DF 1 "s_register_operand" "")
1650 (match_operand:DF 2 "s_register_operand" "")))]
1651 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1656 ;; Multiplication insns
1658 (define_expand "mulhi3"
1659 [(set (match_operand:HI 0 "s_register_operand" "")
1660 (mult:HI (match_operand:HI 1 "s_register_operand" "")
1661 (match_operand:HI 2 "s_register_operand" "")))]
1662 "TARGET_DSP_MULTIPLY"
1665 rtx result = gen_reg_rtx (SImode);
1666 emit_insn (gen_mulhisi3 (result, operands[1], operands[2]));
1667 emit_move_insn (operands[0], gen_lowpart (HImode, result));
1672 (define_expand "mulsi3"
1673 [(set (match_operand:SI 0 "s_register_operand" "")
1674 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1675 (match_operand:SI 1 "s_register_operand" "")))]
1680 ;; Use `&' and then `0' to prevent the operands 0 and 1 being the same
1681 (define_insn "*arm_mulsi3"
1682 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1683 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1684 (match_operand:SI 1 "s_register_operand" "%0,r")))]
1685 "TARGET_32BIT && !arm_arch6"
1686 "mul%?\\t%0, %2, %1"
1687 [(set_attr "type" "mul")
1688 (set_attr "predicable" "yes")]
1691 (define_insn "*arm_mulsi3_v6"
1692 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
1693 (mult:SI (match_operand:SI 1 "s_register_operand" "0,l,r")
1694 (match_operand:SI 2 "s_register_operand" "l,0,r")))]
1695 "TARGET_32BIT && arm_arch6"
1696 "mul%?\\t%0, %1, %2"
1697 [(set_attr "type" "mul")
1698 (set_attr "predicable" "yes")
1699 (set_attr "arch" "t2,t2,*")
1700 (set_attr "length" "4")
1701 (set_attr "predicable_short_it" "yes,yes,no")]
1704 ; Unfortunately with the Thumb the '&'/'0' trick can fails when operands
1705 ; 1 and 2; are the same, because reload will make operand 0 match
1706 ; operand 1 without realizing that this conflicts with operand 2. We fix
1707 ; this by adding another alternative to match this case, and then `reload'
1708 ; it ourselves. This alternative must come first.
1709 (define_insn "*thumb_mulsi3"
1710 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1711 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1712 (match_operand:SI 2 "register_operand" "l,l,l")))]
1713 "TARGET_THUMB1 && !arm_arch6"
1715 if (which_alternative < 2)
1716 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1718 return \"mul\\t%0, %2\";
1720 [(set_attr "length" "4,4,2")
1721 (set_attr "type" "muls")]
1724 (define_insn "*thumb_mulsi3_v6"
1725 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1726 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1727 (match_operand:SI 2 "register_operand" "l,0,0")))]
1728 "TARGET_THUMB1 && arm_arch6"
1733 [(set_attr "length" "2")
1734 (set_attr "type" "muls")]
1737 (define_insn "*mulsi3_compare0"
1738 [(set (reg:CC_NOOV CC_REGNUM)
1739 (compare:CC_NOOV (mult:SI
1740 (match_operand:SI 2 "s_register_operand" "r,r")
1741 (match_operand:SI 1 "s_register_operand" "%0,r"))
1743 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1744 (mult:SI (match_dup 2) (match_dup 1)))]
1745 "TARGET_ARM && !arm_arch6"
1746 "mul%.\\t%0, %2, %1"
1747 [(set_attr "conds" "set")
1748 (set_attr "type" "muls")]
1751 (define_insn "*mulsi3_compare0_v6"
1752 [(set (reg:CC_NOOV CC_REGNUM)
1753 (compare:CC_NOOV (mult:SI
1754 (match_operand:SI 2 "s_register_operand" "r")
1755 (match_operand:SI 1 "s_register_operand" "r"))
1757 (set (match_operand:SI 0 "s_register_operand" "=r")
1758 (mult:SI (match_dup 2) (match_dup 1)))]
1759 "TARGET_ARM && arm_arch6 && optimize_size"
1760 "mul%.\\t%0, %2, %1"
1761 [(set_attr "conds" "set")
1762 (set_attr "type" "muls")]
1765 (define_insn "*mulsi_compare0_scratch"
1766 [(set (reg:CC_NOOV CC_REGNUM)
1767 (compare:CC_NOOV (mult:SI
1768 (match_operand:SI 2 "s_register_operand" "r,r")
1769 (match_operand:SI 1 "s_register_operand" "%0,r"))
1771 (clobber (match_scratch:SI 0 "=&r,&r"))]
1772 "TARGET_ARM && !arm_arch6"
1773 "mul%.\\t%0, %2, %1"
1774 [(set_attr "conds" "set")
1775 (set_attr "type" "muls")]
1778 (define_insn "*mulsi_compare0_scratch_v6"
1779 [(set (reg:CC_NOOV CC_REGNUM)
1780 (compare:CC_NOOV (mult:SI
1781 (match_operand:SI 2 "s_register_operand" "r")
1782 (match_operand:SI 1 "s_register_operand" "r"))
1784 (clobber (match_scratch:SI 0 "=r"))]
1785 "TARGET_ARM && arm_arch6 && optimize_size"
1786 "mul%.\\t%0, %2, %1"
1787 [(set_attr "conds" "set")
1788 (set_attr "type" "muls")]
1791 ;; Unnamed templates to match MLA instruction.
1793 (define_insn "*mulsi3addsi"
1794 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1796 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1797 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1798 (match_operand:SI 3 "s_register_operand" "r,r,0,0")))]
1799 "TARGET_32BIT && !arm_arch6"
1800 "mla%?\\t%0, %2, %1, %3"
1801 [(set_attr "type" "mla")
1802 (set_attr "predicable" "yes")]
1805 (define_insn "*mulsi3addsi_v6"
1806 [(set (match_operand:SI 0 "s_register_operand" "=r")
1808 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1809 (match_operand:SI 1 "s_register_operand" "r"))
1810 (match_operand:SI 3 "s_register_operand" "r")))]
1811 "TARGET_32BIT && arm_arch6"
1812 "mla%?\\t%0, %2, %1, %3"
1813 [(set_attr "type" "mla")
1814 (set_attr "predicable" "yes")
1815 (set_attr "predicable_short_it" "no")]
1818 (define_insn "*mulsi3addsi_compare0"
1819 [(set (reg:CC_NOOV CC_REGNUM)
1822 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1823 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1824 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1826 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1827 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1829 "TARGET_ARM && arm_arch6"
1830 "mla%.\\t%0, %2, %1, %3"
1831 [(set_attr "conds" "set")
1832 (set_attr "type" "mlas")]
1835 (define_insn "*mulsi3addsi_compare0_v6"
1836 [(set (reg:CC_NOOV CC_REGNUM)
1839 (match_operand:SI 2 "s_register_operand" "r")
1840 (match_operand:SI 1 "s_register_operand" "r"))
1841 (match_operand:SI 3 "s_register_operand" "r"))
1843 (set (match_operand:SI 0 "s_register_operand" "=r")
1844 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1846 "TARGET_ARM && arm_arch6 && optimize_size"
1847 "mla%.\\t%0, %2, %1, %3"
1848 [(set_attr "conds" "set")
1849 (set_attr "type" "mlas")]
1852 (define_insn "*mulsi3addsi_compare0_scratch"
1853 [(set (reg:CC_NOOV CC_REGNUM)
1856 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1857 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1858 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1860 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1861 "TARGET_ARM && !arm_arch6"
1862 "mla%.\\t%0, %2, %1, %3"
1863 [(set_attr "conds" "set")
1864 (set_attr "type" "mlas")]
1867 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1868 [(set (reg:CC_NOOV CC_REGNUM)
1871 (match_operand:SI 2 "s_register_operand" "r")
1872 (match_operand:SI 1 "s_register_operand" "r"))
1873 (match_operand:SI 3 "s_register_operand" "r"))
1875 (clobber (match_scratch:SI 0 "=r"))]
1876 "TARGET_ARM && arm_arch6 && optimize_size"
1877 "mla%.\\t%0, %2, %1, %3"
1878 [(set_attr "conds" "set")
1879 (set_attr "type" "mlas")]
1882 (define_insn "*mulsi3subsi"
1883 [(set (match_operand:SI 0 "s_register_operand" "=r")
1885 (match_operand:SI 3 "s_register_operand" "r")
1886 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1887 (match_operand:SI 1 "s_register_operand" "r"))))]
1888 "TARGET_32BIT && arm_arch_thumb2"
1889 "mls%?\\t%0, %2, %1, %3"
1890 [(set_attr "type" "mla")
1891 (set_attr "predicable" "yes")
1892 (set_attr "predicable_short_it" "no")]
1895 (define_expand "maddsidi4"
1896 [(set (match_operand:DI 0 "s_register_operand" "")
1899 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1900 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1901 (match_operand:DI 3 "s_register_operand" "")))]
1902 "TARGET_32BIT && arm_arch3m"
1905 (define_insn "*mulsidi3adddi"
1906 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1909 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1910 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1911 (match_operand:DI 1 "s_register_operand" "0")))]
1912 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1913 "smlal%?\\t%Q0, %R0, %3, %2"
1914 [(set_attr "type" "smlal")
1915 (set_attr "predicable" "yes")]
1918 (define_insn "*mulsidi3adddi_v6"
1919 [(set (match_operand:DI 0 "s_register_operand" "=r")
1922 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1923 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1924 (match_operand:DI 1 "s_register_operand" "0")))]
1925 "TARGET_32BIT && arm_arch6"
1926 "smlal%?\\t%Q0, %R0, %3, %2"
1927 [(set_attr "type" "smlal")
1928 (set_attr "predicable" "yes")
1929 (set_attr "predicable_short_it" "no")]
1932 ;; 32x32->64 widening multiply.
1933 ;; As with mulsi3, the only difference between the v3-5 and v6+
1934 ;; versions of these patterns is the requirement that the output not
1935 ;; overlap the inputs, but that still means we have to have a named
1936 ;; expander and two different starred insns.
1938 (define_expand "mulsidi3"
1939 [(set (match_operand:DI 0 "s_register_operand" "")
1941 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1942 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1943 "TARGET_32BIT && arm_arch3m"
1947 (define_insn "*mulsidi3_nov6"
1948 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1950 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1951 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1952 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1953 "smull%?\\t%Q0, %R0, %1, %2"
1954 [(set_attr "type" "smull")
1955 (set_attr "predicable" "yes")]
1958 (define_insn "*mulsidi3_v6"
1959 [(set (match_operand:DI 0 "s_register_operand" "=r")
1961 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1962 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1963 "TARGET_32BIT && arm_arch6"
1964 "smull%?\\t%Q0, %R0, %1, %2"
1965 [(set_attr "type" "smull")
1966 (set_attr "predicable" "yes")
1967 (set_attr "predicable_short_it" "no")]
1970 (define_expand "umulsidi3"
1971 [(set (match_operand:DI 0 "s_register_operand" "")
1973 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1974 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1975 "TARGET_32BIT && arm_arch3m"
1979 (define_insn "*umulsidi3_nov6"
1980 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1982 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1983 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1984 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1985 "umull%?\\t%Q0, %R0, %1, %2"
1986 [(set_attr "type" "umull")
1987 (set_attr "predicable" "yes")]
1990 (define_insn "*umulsidi3_v6"
1991 [(set (match_operand:DI 0 "s_register_operand" "=r")
1993 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1994 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1995 "TARGET_32BIT && arm_arch6"
1996 "umull%?\\t%Q0, %R0, %1, %2"
1997 [(set_attr "type" "umull")
1998 (set_attr "predicable" "yes")
1999 (set_attr "predicable_short_it" "no")]
2002 (define_expand "umaddsidi4"
2003 [(set (match_operand:DI 0 "s_register_operand" "")
2006 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
2007 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
2008 (match_operand:DI 3 "s_register_operand" "")))]
2009 "TARGET_32BIT && arm_arch3m"
2012 (define_insn "*umulsidi3adddi"
2013 [(set (match_operand:DI 0 "s_register_operand" "=&r")
2016 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
2017 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
2018 (match_operand:DI 1 "s_register_operand" "0")))]
2019 "TARGET_32BIT && arm_arch3m && !arm_arch6"
2020 "umlal%?\\t%Q0, %R0, %3, %2"
2021 [(set_attr "type" "umlal")
2022 (set_attr "predicable" "yes")]
2025 (define_insn "*umulsidi3adddi_v6"
2026 [(set (match_operand:DI 0 "s_register_operand" "=r")
2029 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
2030 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
2031 (match_operand:DI 1 "s_register_operand" "0")))]
2032 "TARGET_32BIT && arm_arch6"
2033 "umlal%?\\t%Q0, %R0, %3, %2"
2034 [(set_attr "type" "umlal")
2035 (set_attr "predicable" "yes")
2036 (set_attr "predicable_short_it" "no")]
2039 (define_expand "smulsi3_highpart"
2041 [(set (match_operand:SI 0 "s_register_operand" "")
2045 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
2046 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
2048 (clobber (match_scratch:SI 3 ""))])]
2049 "TARGET_32BIT && arm_arch3m"
2053 (define_insn "*smulsi3_highpart_nov6"
2054 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
2058 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
2059 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
2061 (clobber (match_scratch:SI 3 "=&r,&r"))]
2062 "TARGET_32BIT && arm_arch3m && !arm_arch6"
2063 "smull%?\\t%3, %0, %2, %1"
2064 [(set_attr "type" "smull")
2065 (set_attr "predicable" "yes")]
2068 (define_insn "*smulsi3_highpart_v6"
2069 [(set (match_operand:SI 0 "s_register_operand" "=r")
2073 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
2074 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
2076 (clobber (match_scratch:SI 3 "=r"))]
2077 "TARGET_32BIT && arm_arch6"
2078 "smull%?\\t%3, %0, %2, %1"
2079 [(set_attr "type" "smull")
2080 (set_attr "predicable" "yes")
2081 (set_attr "predicable_short_it" "no")]
2084 (define_expand "umulsi3_highpart"
2086 [(set (match_operand:SI 0 "s_register_operand" "")
2090 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
2091 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
2093 (clobber (match_scratch:SI 3 ""))])]
2094 "TARGET_32BIT && arm_arch3m"
2098 (define_insn "*umulsi3_highpart_nov6"
2099 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
2103 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
2104 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
2106 (clobber (match_scratch:SI 3 "=&r,&r"))]
2107 "TARGET_32BIT && arm_arch3m && !arm_arch6"
2108 "umull%?\\t%3, %0, %2, %1"
2109 [(set_attr "type" "umull")
2110 (set_attr "predicable" "yes")]
2113 (define_insn "*umulsi3_highpart_v6"
2114 [(set (match_operand:SI 0 "s_register_operand" "=r")
2118 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
2119 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
2121 (clobber (match_scratch:SI 3 "=r"))]
2122 "TARGET_32BIT && arm_arch6"
2123 "umull%?\\t%3, %0, %2, %1"
2124 [(set_attr "type" "umull")
2125 (set_attr "predicable" "yes")
2126 (set_attr "predicable_short_it" "no")]
2129 (define_insn "mulhisi3"
2130 [(set (match_operand:SI 0 "s_register_operand" "=r")
2131 (mult:SI (sign_extend:SI
2132 (match_operand:HI 1 "s_register_operand" "%r"))
2134 (match_operand:HI 2 "s_register_operand" "r"))))]
2135 "TARGET_DSP_MULTIPLY"
2136 "smulbb%?\\t%0, %1, %2"
2137 [(set_attr "type" "smulxy")
2138 (set_attr "predicable" "yes")]
2141 (define_insn "*mulhisi3tb"
2142 [(set (match_operand:SI 0 "s_register_operand" "=r")
2143 (mult:SI (ashiftrt:SI
2144 (match_operand:SI 1 "s_register_operand" "r")
2147 (match_operand:HI 2 "s_register_operand" "r"))))]
2148 "TARGET_DSP_MULTIPLY"
2149 "smultb%?\\t%0, %1, %2"
2150 [(set_attr "type" "smulxy")
2151 (set_attr "predicable" "yes")
2152 (set_attr "predicable_short_it" "no")]
2155 (define_insn "*mulhisi3bt"
2156 [(set (match_operand:SI 0 "s_register_operand" "=r")
2157 (mult:SI (sign_extend:SI
2158 (match_operand:HI 1 "s_register_operand" "r"))
2160 (match_operand:SI 2 "s_register_operand" "r")
2162 "TARGET_DSP_MULTIPLY"
2163 "smulbt%?\\t%0, %1, %2"
2164 [(set_attr "type" "smulxy")
2165 (set_attr "predicable" "yes")
2166 (set_attr "predicable_short_it" "no")]
2169 (define_insn "*mulhisi3tt"
2170 [(set (match_operand:SI 0 "s_register_operand" "=r")
2171 (mult:SI (ashiftrt:SI
2172 (match_operand:SI 1 "s_register_operand" "r")
2175 (match_operand:SI 2 "s_register_operand" "r")
2177 "TARGET_DSP_MULTIPLY"
2178 "smultt%?\\t%0, %1, %2"
2179 [(set_attr "type" "smulxy")
2180 (set_attr "predicable" "yes")
2181 (set_attr "predicable_short_it" "no")]
2184 (define_insn "maddhisi4"
2185 [(set (match_operand:SI 0 "s_register_operand" "=r")
2186 (plus:SI (mult:SI (sign_extend:SI
2187 (match_operand:HI 1 "s_register_operand" "r"))
2189 (match_operand:HI 2 "s_register_operand" "r")))
2190 (match_operand:SI 3 "s_register_operand" "r")))]
2191 "TARGET_DSP_MULTIPLY"
2192 "smlabb%?\\t%0, %1, %2, %3"
2193 [(set_attr "type" "smlaxy")
2194 (set_attr "predicable" "yes")
2195 (set_attr "predicable_short_it" "no")]
2198 ;; Note: there is no maddhisi4ibt because this one is canonical form
2199 (define_insn "*maddhisi4tb"
2200 [(set (match_operand:SI 0 "s_register_operand" "=r")
2201 (plus:SI (mult:SI (ashiftrt:SI
2202 (match_operand:SI 1 "s_register_operand" "r")
2205 (match_operand:HI 2 "s_register_operand" "r")))
2206 (match_operand:SI 3 "s_register_operand" "r")))]
2207 "TARGET_DSP_MULTIPLY"
2208 "smlatb%?\\t%0, %1, %2, %3"
2209 [(set_attr "type" "smlaxy")
2210 (set_attr "predicable" "yes")
2211 (set_attr "predicable_short_it" "no")]
2214 (define_insn "*maddhisi4tt"
2215 [(set (match_operand:SI 0 "s_register_operand" "=r")
2216 (plus:SI (mult:SI (ashiftrt:SI
2217 (match_operand:SI 1 "s_register_operand" "r")
2220 (match_operand:SI 2 "s_register_operand" "r")
2222 (match_operand:SI 3 "s_register_operand" "r")))]
2223 "TARGET_DSP_MULTIPLY"
2224 "smlatt%?\\t%0, %1, %2, %3"
2225 [(set_attr "type" "smlaxy")
2226 (set_attr "predicable" "yes")
2227 (set_attr "predicable_short_it" "no")]
2230 (define_insn "maddhidi4"
2231 [(set (match_operand:DI 0 "s_register_operand" "=r")
2233 (mult:DI (sign_extend:DI
2234 (match_operand:HI 1 "s_register_operand" "r"))
2236 (match_operand:HI 2 "s_register_operand" "r")))
2237 (match_operand:DI 3 "s_register_operand" "0")))]
2238 "TARGET_DSP_MULTIPLY"
2239 "smlalbb%?\\t%Q0, %R0, %1, %2"
2240 [(set_attr "type" "smlalxy")
2241 (set_attr "predicable" "yes")
2242 (set_attr "predicable_short_it" "no")])
2244 ;; Note: there is no maddhidi4ibt because this one is canonical form
2245 (define_insn "*maddhidi4tb"
2246 [(set (match_operand:DI 0 "s_register_operand" "=r")
2248 (mult:DI (sign_extend:DI
2250 (match_operand:SI 1 "s_register_operand" "r")
2253 (match_operand:HI 2 "s_register_operand" "r")))
2254 (match_operand:DI 3 "s_register_operand" "0")))]
2255 "TARGET_DSP_MULTIPLY"
2256 "smlaltb%?\\t%Q0, %R0, %1, %2"
2257 [(set_attr "type" "smlalxy")
2258 (set_attr "predicable" "yes")
2259 (set_attr "predicable_short_it" "no")])
2261 (define_insn "*maddhidi4tt"
2262 [(set (match_operand:DI 0 "s_register_operand" "=r")
2264 (mult:DI (sign_extend:DI
2266 (match_operand:SI 1 "s_register_operand" "r")
2270 (match_operand:SI 2 "s_register_operand" "r")
2272 (match_operand:DI 3 "s_register_operand" "0")))]
2273 "TARGET_DSP_MULTIPLY"
2274 "smlaltt%?\\t%Q0, %R0, %1, %2"
2275 [(set_attr "type" "smlalxy")
2276 (set_attr "predicable" "yes")
2277 (set_attr "predicable_short_it" "no")])
2279 (define_expand "mulsf3"
2280 [(set (match_operand:SF 0 "s_register_operand" "")
2281 (mult:SF (match_operand:SF 1 "s_register_operand" "")
2282 (match_operand:SF 2 "s_register_operand" "")))]
2283 "TARGET_32BIT && TARGET_HARD_FLOAT"
2287 (define_expand "muldf3"
2288 [(set (match_operand:DF 0 "s_register_operand" "")
2289 (mult:DF (match_operand:DF 1 "s_register_operand" "")
2290 (match_operand:DF 2 "s_register_operand" "")))]
2291 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
2297 (define_expand "divsf3"
2298 [(set (match_operand:SF 0 "s_register_operand" "")
2299 (div:SF (match_operand:SF 1 "s_register_operand" "")
2300 (match_operand:SF 2 "s_register_operand" "")))]
2301 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP"
2304 (define_expand "divdf3"
2305 [(set (match_operand:DF 0 "s_register_operand" "")
2306 (div:DF (match_operand:DF 1 "s_register_operand" "")
2307 (match_operand:DF 2 "s_register_operand" "")))]
2308 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
2311 ;; Boolean and,ior,xor insns
2313 ;; Split up double word logical operations
2315 ;; Split up simple DImode logical operations. Simply perform the logical
2316 ;; operation on the upper and lower halves of the registers.
2318 [(set (match_operand:DI 0 "s_register_operand" "")
2319 (match_operator:DI 6 "logical_binary_operator"
2320 [(match_operand:DI 1 "s_register_operand" "")
2321 (match_operand:DI 2 "s_register_operand" "")]))]
2322 "TARGET_32BIT && reload_completed
2323 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
2324 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2325 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
2326 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
2329 operands[3] = gen_highpart (SImode, operands[0]);
2330 operands[0] = gen_lowpart (SImode, operands[0]);
2331 operands[4] = gen_highpart (SImode, operands[1]);
2332 operands[1] = gen_lowpart (SImode, operands[1]);
2333 operands[5] = gen_highpart (SImode, operands[2]);
2334 operands[2] = gen_lowpart (SImode, operands[2]);
2339 [(set (match_operand:DI 0 "s_register_operand" "")
2340 (match_operator:DI 6 "logical_binary_operator"
2341 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2342 (match_operand:DI 1 "s_register_operand" "")]))]
2343 "TARGET_32BIT && reload_completed"
2344 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
2345 (set (match_dup 3) (match_op_dup:SI 6
2346 [(ashiftrt:SI (match_dup 2) (const_int 31))
2350 operands[3] = gen_highpart (SImode, operands[0]);
2351 operands[0] = gen_lowpart (SImode, operands[0]);
2352 operands[4] = gen_highpart (SImode, operands[1]);
2353 operands[1] = gen_lowpart (SImode, operands[1]);
2354 operands[5] = gen_highpart (SImode, operands[2]);
2355 operands[2] = gen_lowpart (SImode, operands[2]);
2359 ;; The zero extend of operand 2 means we can just copy the high part of
2360 ;; operand1 into operand0.
2362 [(set (match_operand:DI 0 "s_register_operand" "")
2364 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2365 (match_operand:DI 1 "s_register_operand" "")))]
2366 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
2367 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
2368 (set (match_dup 3) (match_dup 4))]
2371 operands[4] = gen_highpart (SImode, operands[1]);
2372 operands[3] = gen_highpart (SImode, operands[0]);
2373 operands[0] = gen_lowpart (SImode, operands[0]);
2374 operands[1] = gen_lowpart (SImode, operands[1]);
2378 ;; The zero extend of operand 2 means we can just copy the high part of
2379 ;; operand1 into operand0.
2381 [(set (match_operand:DI 0 "s_register_operand" "")
2383 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2384 (match_operand:DI 1 "s_register_operand" "")))]
2385 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
2386 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
2387 (set (match_dup 3) (match_dup 4))]
2390 operands[4] = gen_highpart (SImode, operands[1]);
2391 operands[3] = gen_highpart (SImode, operands[0]);
2392 operands[0] = gen_lowpart (SImode, operands[0]);
2393 operands[1] = gen_lowpart (SImode, operands[1]);
2397 (define_expand "anddi3"
2398 [(set (match_operand:DI 0 "s_register_operand" "")
2399 (and:DI (match_operand:DI 1 "s_register_operand" "")
2400 (match_operand:DI 2 "neon_inv_logic_op2" "")))]
2405 (define_insn_and_split "*anddi3_insn"
2406 [(set (match_operand:DI 0 "s_register_operand" "=w,w ,&r,&r,&r,&r,?w,?w")
2407 (and:DI (match_operand:DI 1 "s_register_operand" "%w,0 ,0 ,r ,0 ,r ,w ,0")
2408 (match_operand:DI 2 "arm_anddi_operand_neon" "w ,DL,r ,r ,De,De,w ,DL")))]
2409 "TARGET_32BIT && !TARGET_IWMMXT"
2411 switch (which_alternative)
2413 case 0: /* fall through */
2414 case 6: return "vand\t%P0, %P1, %P2";
2415 case 1: /* fall through */
2416 case 7: return neon_output_logic_immediate ("vand", &operands[2],
2417 DImode, 1, VALID_NEON_QREG_MODE (DImode));
2421 case 5: /* fall through */
2423 default: gcc_unreachable ();
2426 "TARGET_32BIT && !TARGET_IWMMXT && reload_completed
2427 && !(IS_VFP_REGNUM (REGNO (operands[0])))"
2428 [(set (match_dup 3) (match_dup 4))
2429 (set (match_dup 5) (match_dup 6))]
2432 operands[3] = gen_lowpart (SImode, operands[0]);
2433 operands[5] = gen_highpart (SImode, operands[0]);
2435 operands[4] = simplify_gen_binary (AND, SImode,
2436 gen_lowpart (SImode, operands[1]),
2437 gen_lowpart (SImode, operands[2]));
2438 operands[6] = simplify_gen_binary (AND, SImode,
2439 gen_highpart (SImode, operands[1]),
2440 gen_highpart_mode (SImode, DImode, operands[2]));
2443 [(set_attr "neon_type" "neon_int_1,neon_int_1,*,*,*,*,neon_int_1,neon_int_1")
2444 (set_attr "arch" "neon_for_64bits,neon_for_64bits,*,*,*,*,
2445 avoid_neon_for_64bits,avoid_neon_for_64bits")
2446 (set_attr "length" "*,*,8,8,8,8,*,*")
2450 (define_insn_and_split "*anddi_zesidi_di"
2451 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2452 (and:DI (zero_extend:DI
2453 (match_operand:SI 2 "s_register_operand" "r,r"))
2454 (match_operand:DI 1 "s_register_operand" "0,r")))]
2457 "TARGET_32BIT && reload_completed"
2458 ; The zero extend of operand 2 clears the high word of the output
2460 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
2461 (set (match_dup 3) (const_int 0))]
2464 operands[3] = gen_highpart (SImode, operands[0]);
2465 operands[0] = gen_lowpart (SImode, operands[0]);
2466 operands[1] = gen_lowpart (SImode, operands[1]);
2468 [(set_attr "length" "8")]
2471 (define_insn "*anddi_sesdi_di"
2472 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2473 (and:DI (sign_extend:DI
2474 (match_operand:SI 2 "s_register_operand" "r,r"))
2475 (match_operand:DI 1 "s_register_operand" "0,r")))]
2478 [(set_attr "length" "8")]
2481 (define_expand "andsi3"
2482 [(set (match_operand:SI 0 "s_register_operand" "")
2483 (and:SI (match_operand:SI 1 "s_register_operand" "")
2484 (match_operand:SI 2 "reg_or_int_operand" "")))]
2489 if (CONST_INT_P (operands[2]))
2491 if (INTVAL (operands[2]) == 255 && arm_arch6)
2493 operands[1] = convert_to_mode (QImode, operands[1], 1);
2494 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2498 arm_split_constant (AND, SImode, NULL_RTX,
2499 INTVAL (operands[2]), operands[0],
2501 optimize && can_create_pseudo_p ());
2506 else /* TARGET_THUMB1 */
2508 if (!CONST_INT_P (operands[2]))
2510 rtx tmp = force_reg (SImode, operands[2]);
2511 if (rtx_equal_p (operands[0], operands[1]))
2515 operands[2] = operands[1];
2523 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2525 operands[2] = force_reg (SImode,
2526 GEN_INT (~INTVAL (operands[2])));
2528 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2533 for (i = 9; i <= 31; i++)
2535 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
2537 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2541 else if ((((HOST_WIDE_INT) 1) << i) - 1
2542 == ~INTVAL (operands[2]))
2544 rtx shift = GEN_INT (i);
2545 rtx reg = gen_reg_rtx (SImode);
2547 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2548 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2554 operands[2] = force_reg (SImode, operands[2]);
2560 ; ??? Check split length for Thumb-2
2561 (define_insn_and_split "*arm_andsi3_insn"
2562 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
2563 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r,r")
2564 (match_operand:SI 2 "reg_or_int_operand" "I,K,r,?n")))]
2568 bic%?\\t%0, %1, #%B2
2572 && CONST_INT_P (operands[2])
2573 && !(const_ok_for_arm (INTVAL (operands[2]))
2574 || const_ok_for_arm (~INTVAL (operands[2])))"
2575 [(clobber (const_int 0))]
2577 arm_split_constant (AND, SImode, curr_insn,
2578 INTVAL (operands[2]), operands[0], operands[1], 0);
2581 [(set_attr "length" "4,4,4,16")
2582 (set_attr "predicable" "yes")
2583 (set_attr "type" "simple_alu_imm,simple_alu_imm,*,simple_alu_imm")]
2586 (define_insn "*thumb1_andsi3_insn"
2587 [(set (match_operand:SI 0 "register_operand" "=l")
2588 (and:SI (match_operand:SI 1 "register_operand" "%0")
2589 (match_operand:SI 2 "register_operand" "l")))]
2592 [(set_attr "length" "2")
2593 (set_attr "type" "simple_alu_imm")
2594 (set_attr "conds" "set")])
2596 (define_insn "*andsi3_compare0"
2597 [(set (reg:CC_NOOV CC_REGNUM)
2599 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
2600 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
2602 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2603 (and:SI (match_dup 1) (match_dup 2)))]
2607 bic%.\\t%0, %1, #%B2
2609 [(set_attr "conds" "set")
2610 (set_attr "type" "simple_alu_imm,simple_alu_imm,*")]
2613 (define_insn "*andsi3_compare0_scratch"
2614 [(set (reg:CC_NOOV CC_REGNUM)
2616 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
2617 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
2619 (clobber (match_scratch:SI 2 "=X,r,X"))]
2623 bic%.\\t%2, %0, #%B1
2625 [(set_attr "conds" "set")
2626 (set_attr "type" "simple_alu_imm,simple_alu_imm,*")]
2629 (define_insn "*zeroextractsi_compare0_scratch"
2630 [(set (reg:CC_NOOV CC_REGNUM)
2631 (compare:CC_NOOV (zero_extract:SI
2632 (match_operand:SI 0 "s_register_operand" "r")
2633 (match_operand 1 "const_int_operand" "n")
2634 (match_operand 2 "const_int_operand" "n"))
2637 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2638 && INTVAL (operands[1]) > 0
2639 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2640 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2642 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2643 << INTVAL (operands[2]));
2644 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2647 [(set_attr "conds" "set")
2648 (set_attr "predicable" "yes")
2649 (set_attr "predicable_short_it" "no")
2650 (set_attr "type" "simple_alu_imm")]
2653 (define_insn_and_split "*ne_zeroextractsi"
2654 [(set (match_operand:SI 0 "s_register_operand" "=r")
2655 (ne:SI (zero_extract:SI
2656 (match_operand:SI 1 "s_register_operand" "r")
2657 (match_operand:SI 2 "const_int_operand" "n")
2658 (match_operand:SI 3 "const_int_operand" "n"))
2660 (clobber (reg:CC CC_REGNUM))]
2662 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2663 && INTVAL (operands[2]) > 0
2664 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2665 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2668 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2669 && INTVAL (operands[2]) > 0
2670 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2671 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2672 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2673 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2675 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2677 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2678 (match_dup 0) (const_int 1)))]
2680 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2681 << INTVAL (operands[3]));
2683 [(set_attr "conds" "clob")
2684 (set (attr "length")
2685 (if_then_else (eq_attr "is_thumb" "yes")
2690 (define_insn_and_split "*ne_zeroextractsi_shifted"
2691 [(set (match_operand:SI 0 "s_register_operand" "=r")
2692 (ne:SI (zero_extract:SI
2693 (match_operand:SI 1 "s_register_operand" "r")
2694 (match_operand:SI 2 "const_int_operand" "n")
2697 (clobber (reg:CC CC_REGNUM))]
2701 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2702 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2704 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2706 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2707 (match_dup 0) (const_int 1)))]
2709 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2711 [(set_attr "conds" "clob")
2712 (set_attr "length" "8")]
2715 (define_insn_and_split "*ite_ne_zeroextractsi"
2716 [(set (match_operand:SI 0 "s_register_operand" "=r")
2717 (if_then_else:SI (ne (zero_extract:SI
2718 (match_operand:SI 1 "s_register_operand" "r")
2719 (match_operand:SI 2 "const_int_operand" "n")
2720 (match_operand:SI 3 "const_int_operand" "n"))
2722 (match_operand:SI 4 "arm_not_operand" "rIK")
2724 (clobber (reg:CC CC_REGNUM))]
2726 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2727 && INTVAL (operands[2]) > 0
2728 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2729 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2730 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2733 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2734 && INTVAL (operands[2]) > 0
2735 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2736 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2737 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2738 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2739 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2741 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2743 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2744 (match_dup 0) (match_dup 4)))]
2746 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2747 << INTVAL (operands[3]));
2749 [(set_attr "conds" "clob")
2750 (set_attr "length" "8")]
2753 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2754 [(set (match_operand:SI 0 "s_register_operand" "=r")
2755 (if_then_else:SI (ne (zero_extract:SI
2756 (match_operand:SI 1 "s_register_operand" "r")
2757 (match_operand:SI 2 "const_int_operand" "n")
2760 (match_operand:SI 3 "arm_not_operand" "rIK")
2762 (clobber (reg:CC CC_REGNUM))]
2763 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2765 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2766 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2767 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2769 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2771 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2772 (match_dup 0) (match_dup 3)))]
2774 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2776 [(set_attr "conds" "clob")
2777 (set_attr "length" "8")]
2781 [(set (match_operand:SI 0 "s_register_operand" "")
2782 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2783 (match_operand:SI 2 "const_int_operand" "")
2784 (match_operand:SI 3 "const_int_operand" "")))
2785 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2787 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2788 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2790 HOST_WIDE_INT temp = INTVAL (operands[2]);
2792 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2793 operands[3] = GEN_INT (32 - temp);
2797 ;; ??? Use Thumb-2 has bitfield insert/extract instructions.
2799 [(set (match_operand:SI 0 "s_register_operand" "")
2800 (match_operator:SI 1 "shiftable_operator"
2801 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2802 (match_operand:SI 3 "const_int_operand" "")
2803 (match_operand:SI 4 "const_int_operand" ""))
2804 (match_operand:SI 5 "s_register_operand" "")]))
2805 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2807 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2810 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2813 HOST_WIDE_INT temp = INTVAL (operands[3]);
2815 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2816 operands[4] = GEN_INT (32 - temp);
2821 [(set (match_operand:SI 0 "s_register_operand" "")
2822 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2823 (match_operand:SI 2 "const_int_operand" "")
2824 (match_operand:SI 3 "const_int_operand" "")))]
2826 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2827 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2829 HOST_WIDE_INT temp = INTVAL (operands[2]);
2831 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2832 operands[3] = GEN_INT (32 - temp);
2837 [(set (match_operand:SI 0 "s_register_operand" "")
2838 (match_operator:SI 1 "shiftable_operator"
2839 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2840 (match_operand:SI 3 "const_int_operand" "")
2841 (match_operand:SI 4 "const_int_operand" ""))
2842 (match_operand:SI 5 "s_register_operand" "")]))
2843 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2845 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2848 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2851 HOST_WIDE_INT temp = INTVAL (operands[3]);
2853 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2854 operands[4] = GEN_INT (32 - temp);
2858 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2859 ;;; represented by the bitfield, then this will produce incorrect results.
2860 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2861 ;;; which have a real bit-field insert instruction, the truncation happens
2862 ;;; in the bit-field insert instruction itself. Since arm does not have a
2863 ;;; bit-field insert instruction, we would have to emit code here to truncate
2864 ;;; the value before we insert. This loses some of the advantage of having
2865 ;;; this insv pattern, so this pattern needs to be reevalutated.
2867 (define_expand "insv"
2868 [(set (zero_extract (match_operand 0 "nonimmediate_operand" "")
2869 (match_operand 1 "general_operand" "")
2870 (match_operand 2 "general_operand" ""))
2871 (match_operand 3 "reg_or_int_operand" ""))]
2872 "TARGET_ARM || arm_arch_thumb2"
2875 int start_bit = INTVAL (operands[2]);
2876 int width = INTVAL (operands[1]);
2877 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2878 rtx target, subtarget;
2880 if (arm_arch_thumb2)
2882 if (unaligned_access && MEM_P (operands[0])
2883 && s_register_operand (operands[3], GET_MODE (operands[3]))
2884 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
2888 if (BYTES_BIG_ENDIAN)
2889 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
2894 base_addr = adjust_address (operands[0], SImode,
2895 start_bit / BITS_PER_UNIT);
2896 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
2900 rtx tmp = gen_reg_rtx (HImode);
2902 base_addr = adjust_address (operands[0], HImode,
2903 start_bit / BITS_PER_UNIT);
2904 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
2905 emit_insn (gen_unaligned_storehi (base_addr, tmp));
2909 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
2911 bool use_bfi = TRUE;
2913 if (CONST_INT_P (operands[3]))
2915 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2919 emit_insn (gen_insv_zero (operands[0], operands[1],
2924 /* See if the set can be done with a single orr instruction. */
2925 if (val == mask && const_ok_for_arm (val << start_bit))
2931 if (!REG_P (operands[3]))
2932 operands[3] = force_reg (SImode, operands[3]);
2934 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2943 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
2946 target = copy_rtx (operands[0]);
2947 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2948 subreg as the final target. */
2949 if (GET_CODE (target) == SUBREG)
2951 subtarget = gen_reg_rtx (SImode);
2952 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2953 < GET_MODE_SIZE (SImode))
2954 target = SUBREG_REG (target);
2959 if (CONST_INT_P (operands[3]))
2961 /* Since we are inserting a known constant, we may be able to
2962 reduce the number of bits that we have to clear so that
2963 the mask becomes simple. */
2964 /* ??? This code does not check to see if the new mask is actually
2965 simpler. It may not be. */
2966 rtx op1 = gen_reg_rtx (SImode);
2967 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2968 start of this pattern. */
2969 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2970 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2972 emit_insn (gen_andsi3 (op1, operands[0],
2973 gen_int_mode (~mask2, SImode)));
2974 emit_insn (gen_iorsi3 (subtarget, op1,
2975 gen_int_mode (op3_value << start_bit, SImode)));
2977 else if (start_bit == 0
2978 && !(const_ok_for_arm (mask)
2979 || const_ok_for_arm (~mask)))
2981 /* A Trick, since we are setting the bottom bits in the word,
2982 we can shift operand[3] up, operand[0] down, OR them together
2983 and rotate the result back again. This takes 3 insns, and
2984 the third might be mergeable into another op. */
2985 /* The shift up copes with the possibility that operand[3] is
2986 wider than the bitfield. */
2987 rtx op0 = gen_reg_rtx (SImode);
2988 rtx op1 = gen_reg_rtx (SImode);
2990 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2991 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2992 emit_insn (gen_iorsi3 (op1, op1, op0));
2993 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2995 else if ((width + start_bit == 32)
2996 && !(const_ok_for_arm (mask)
2997 || const_ok_for_arm (~mask)))
2999 /* Similar trick, but slightly less efficient. */
3001 rtx op0 = gen_reg_rtx (SImode);
3002 rtx op1 = gen_reg_rtx (SImode);
3004 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
3005 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
3006 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
3007 emit_insn (gen_iorsi3 (subtarget, op1, op0));
3011 rtx op0 = gen_int_mode (mask, SImode);
3012 rtx op1 = gen_reg_rtx (SImode);
3013 rtx op2 = gen_reg_rtx (SImode);
3015 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
3017 rtx tmp = gen_reg_rtx (SImode);
3019 emit_insn (gen_movsi (tmp, op0));
3023 /* Mask out any bits in operand[3] that are not needed. */
3024 emit_insn (gen_andsi3 (op1, operands[3], op0));
3026 if (CONST_INT_P (op0)
3027 && (const_ok_for_arm (mask << start_bit)
3028 || const_ok_for_arm (~(mask << start_bit))))
3030 op0 = gen_int_mode (~(mask << start_bit), SImode);
3031 emit_insn (gen_andsi3 (op2, operands[0], op0));
3035 if (CONST_INT_P (op0))
3037 rtx tmp = gen_reg_rtx (SImode);
3039 emit_insn (gen_movsi (tmp, op0));
3044 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
3046 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
3050 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
3052 emit_insn (gen_iorsi3 (subtarget, op1, op2));
3055 if (subtarget != target)
3057 /* If TARGET is still a SUBREG, then it must be wider than a word,
3058 so we must be careful only to set the subword we were asked to. */
3059 if (GET_CODE (target) == SUBREG)
3060 emit_move_insn (target, subtarget);
3062 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
3069 (define_insn "insv_zero"
3070 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3071 (match_operand:SI 1 "const_int_operand" "M")
3072 (match_operand:SI 2 "const_int_operand" "M"))
3076 [(set_attr "length" "4")
3077 (set_attr "predicable" "yes")
3078 (set_attr "predicable_short_it" "no")]
3081 (define_insn "insv_t2"
3082 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3083 (match_operand:SI 1 "const_int_operand" "M")
3084 (match_operand:SI 2 "const_int_operand" "M"))
3085 (match_operand:SI 3 "s_register_operand" "r"))]
3087 "bfi%?\t%0, %3, %2, %1"
3088 [(set_attr "length" "4")
3089 (set_attr "predicable" "yes")
3090 (set_attr "predicable_short_it" "no")]
3093 ; constants for op 2 will never be given to these patterns.
3094 (define_insn_and_split "*anddi_notdi_di"
3095 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3096 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "0,r"))
3097 (match_operand:DI 2 "s_register_operand" "r,0")))]
3100 "TARGET_32BIT && reload_completed
3101 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
3102 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
3103 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
3104 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
3107 operands[3] = gen_highpart (SImode, operands[0]);
3108 operands[0] = gen_lowpart (SImode, operands[0]);
3109 operands[4] = gen_highpart (SImode, operands[1]);
3110 operands[1] = gen_lowpart (SImode, operands[1]);
3111 operands[5] = gen_highpart (SImode, operands[2]);
3112 operands[2] = gen_lowpart (SImode, operands[2]);
3114 [(set_attr "length" "8")
3115 (set_attr "predicable" "yes")]
3118 (define_insn_and_split "*anddi_notzesidi_di"
3119 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3120 (and:DI (not:DI (zero_extend:DI
3121 (match_operand:SI 2 "s_register_operand" "r,r")))
3122 (match_operand:DI 1 "s_register_operand" "0,?r")))]
3125 bic%?\\t%Q0, %Q1, %2
3127 ; (not (zero_extend ...)) allows us to just copy the high word from
3128 ; operand1 to operand0.
3131 && operands[0] != operands[1]"
3132 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
3133 (set (match_dup 3) (match_dup 4))]
3136 operands[3] = gen_highpart (SImode, operands[0]);
3137 operands[0] = gen_lowpart (SImode, operands[0]);
3138 operands[4] = gen_highpart (SImode, operands[1]);
3139 operands[1] = gen_lowpart (SImode, operands[1]);
3141 [(set_attr "length" "4,8")
3142 (set_attr "predicable" "yes")
3143 (set_attr "predicable_short_it" "no")]
3146 (define_insn_and_split "*anddi_notsesidi_di"
3147 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3148 (and:DI (not:DI (sign_extend:DI
3149 (match_operand:SI 2 "s_register_operand" "r,r")))
3150 (match_operand:DI 1 "s_register_operand" "0,r")))]
3153 "TARGET_32BIT && reload_completed"
3154 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
3155 (set (match_dup 3) (and:SI (not:SI
3156 (ashiftrt:SI (match_dup 2) (const_int 31)))
3160 operands[3] = gen_highpart (SImode, operands[0]);
3161 operands[0] = gen_lowpart (SImode, operands[0]);
3162 operands[4] = gen_highpart (SImode, operands[1]);
3163 operands[1] = gen_lowpart (SImode, operands[1]);
3165 [(set_attr "length" "8")
3166 (set_attr "predicable" "yes")
3167 (set_attr "predicable_short_it" "no")]
3170 (define_insn "andsi_notsi_si"
3171 [(set (match_operand:SI 0 "s_register_operand" "=r")
3172 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3173 (match_operand:SI 1 "s_register_operand" "r")))]
3175 "bic%?\\t%0, %1, %2"
3176 [(set_attr "predicable" "yes")
3177 (set_attr "predicable_short_it" "no")]
3180 (define_insn "thumb1_bicsi3"
3181 [(set (match_operand:SI 0 "register_operand" "=l")
3182 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
3183 (match_operand:SI 2 "register_operand" "0")))]
3186 [(set_attr "length" "2")
3187 (set_attr "conds" "set")])
3189 (define_insn "andsi_not_shiftsi_si"
3190 [(set (match_operand:SI 0 "s_register_operand" "=r")
3191 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
3192 [(match_operand:SI 2 "s_register_operand" "r")
3193 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
3194 (match_operand:SI 1 "s_register_operand" "r")))]
3196 "bic%?\\t%0, %1, %2%S4"
3197 [(set_attr "predicable" "yes")
3198 (set_attr "shift" "2")
3199 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
3200 (const_string "alu_shift")
3201 (const_string "alu_shift_reg")))]
3204 (define_insn "*andsi_notsi_si_compare0"
3205 [(set (reg:CC_NOOV CC_REGNUM)
3207 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3208 (match_operand:SI 1 "s_register_operand" "r"))
3210 (set (match_operand:SI 0 "s_register_operand" "=r")
3211 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
3213 "bic%.\\t%0, %1, %2"
3214 [(set_attr "conds" "set")]
3217 (define_insn "*andsi_notsi_si_compare0_scratch"
3218 [(set (reg:CC_NOOV CC_REGNUM)
3220 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3221 (match_operand:SI 1 "s_register_operand" "r"))
3223 (clobber (match_scratch:SI 0 "=r"))]
3225 "bic%.\\t%0, %1, %2"
3226 [(set_attr "conds" "set")]
3229 (define_expand "iordi3"
3230 [(set (match_operand:DI 0 "s_register_operand" "")
3231 (ior:DI (match_operand:DI 1 "s_register_operand" "")
3232 (match_operand:DI 2 "neon_logic_op2" "")))]
3237 (define_insn_and_split "*iordi3_insn"
3238 [(set (match_operand:DI 0 "s_register_operand" "=w,w ,&r,&r,&r,&r,?w,?w")
3239 (ior:DI (match_operand:DI 1 "s_register_operand" "%w,0 ,0 ,r ,0 ,r ,w ,0")
3240 (match_operand:DI 2 "arm_iordi_operand_neon" "w ,Dl,r ,r ,Df,Df,w ,Dl")))]
3241 "TARGET_32BIT && !TARGET_IWMMXT"
3243 switch (which_alternative)
3245 case 0: /* fall through */
3246 case 6: return "vorr\t%P0, %P1, %P2";
3247 case 1: /* fall through */
3248 case 7: return neon_output_logic_immediate ("vorr", &operands[2],
3249 DImode, 0, VALID_NEON_QREG_MODE (DImode));
3255 default: gcc_unreachable ();
3258 "TARGET_32BIT && !TARGET_IWMMXT && reload_completed
3259 && !(IS_VFP_REGNUM (REGNO (operands[0])))"
3260 [(set (match_dup 3) (match_dup 4))
3261 (set (match_dup 5) (match_dup 6))]
3264 operands[3] = gen_lowpart (SImode, operands[0]);
3265 operands[5] = gen_highpart (SImode, operands[0]);
3267 operands[4] = simplify_gen_binary (IOR, SImode,
3268 gen_lowpart (SImode, operands[1]),
3269 gen_lowpart (SImode, operands[2]));
3270 operands[6] = simplify_gen_binary (IOR, SImode,
3271 gen_highpart (SImode, operands[1]),
3272 gen_highpart_mode (SImode, DImode, operands[2]));
3275 [(set_attr "neon_type" "neon_int_1,neon_int_1,*,*,*,*,neon_int_1,neon_int_1")
3276 (set_attr "length" "*,*,8,8,8,8,*,*")
3277 (set_attr "arch" "neon_for_64bits,neon_for_64bits,*,*,*,*,avoid_neon_for_64bits,avoid_neon_for_64bits")]
3280 (define_insn "*iordi_zesidi_di"
3281 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3282 (ior:DI (zero_extend:DI
3283 (match_operand:SI 2 "s_register_operand" "r,r"))
3284 (match_operand:DI 1 "s_register_operand" "0,?r")))]
3287 orr%?\\t%Q0, %Q1, %2
3289 [(set_attr "length" "4,8")
3290 (set_attr "predicable" "yes")
3291 (set_attr "predicable_short_it" "no")]
3294 (define_insn "*iordi_sesidi_di"
3295 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3296 (ior:DI (sign_extend:DI
3297 (match_operand:SI 2 "s_register_operand" "r,r"))
3298 (match_operand:DI 1 "s_register_operand" "0,r")))]
3301 [(set_attr "length" "8")
3302 (set_attr "predicable" "yes")]
3305 (define_expand "iorsi3"
3306 [(set (match_operand:SI 0 "s_register_operand" "")
3307 (ior:SI (match_operand:SI 1 "s_register_operand" "")
3308 (match_operand:SI 2 "reg_or_int_operand" "")))]
3311 if (CONST_INT_P (operands[2]))
3315 arm_split_constant (IOR, SImode, NULL_RTX,
3316 INTVAL (operands[2]), operands[0], operands[1],
3317 optimize && can_create_pseudo_p ());
3320 else /* TARGET_THUMB1 */
3322 rtx tmp = force_reg (SImode, operands[2]);
3323 if (rtx_equal_p (operands[0], operands[1]))
3327 operands[2] = operands[1];
3335 (define_insn_and_split "*iorsi3_insn"
3336 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
3337 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r,r,r")
3338 (match_operand:SI 2 "reg_or_int_operand" "I,K,r,?n")))]
3342 orn%?\\t%0, %1, #%B2
3346 && CONST_INT_P (operands[2])
3347 && !(const_ok_for_arm (INTVAL (operands[2]))
3348 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
3349 [(clobber (const_int 0))]
3351 arm_split_constant (IOR, SImode, curr_insn,
3352 INTVAL (operands[2]), operands[0], operands[1], 0);
3355 [(set_attr "length" "4,4,4,16")
3356 (set_attr "arch" "32,t2,32,32")
3357 (set_attr "predicable" "yes")
3358 (set_attr "type" "simple_alu_imm,simple_alu_imm,*,*")]
3361 (define_insn "*thumb1_iorsi3_insn"
3362 [(set (match_operand:SI 0 "register_operand" "=l")
3363 (ior:SI (match_operand:SI 1 "register_operand" "%0")
3364 (match_operand:SI 2 "register_operand" "l")))]
3367 [(set_attr "length" "2")
3368 (set_attr "conds" "set")])
3371 [(match_scratch:SI 3 "r")
3372 (set (match_operand:SI 0 "arm_general_register_operand" "")
3373 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
3374 (match_operand:SI 2 "const_int_operand" "")))]
3376 && !const_ok_for_arm (INTVAL (operands[2]))
3377 && const_ok_for_arm (~INTVAL (operands[2]))"
3378 [(set (match_dup 3) (match_dup 2))
3379 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
3383 (define_insn "*iorsi3_compare0"
3384 [(set (reg:CC_NOOV CC_REGNUM)
3385 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r")
3386 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3388 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3389 (ior:SI (match_dup 1) (match_dup 2)))]
3391 "orr%.\\t%0, %1, %2"
3392 [(set_attr "conds" "set")
3393 (set_attr "type" "simple_alu_imm,*")]
3396 (define_insn "*iorsi3_compare0_scratch"
3397 [(set (reg:CC_NOOV CC_REGNUM)
3398 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r")
3399 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3401 (clobber (match_scratch:SI 0 "=r,r"))]
3403 "orr%.\\t%0, %1, %2"
3404 [(set_attr "conds" "set")
3405 (set_attr "type" "simple_alu_imm, *")]
3408 (define_expand "xordi3"
3409 [(set (match_operand:DI 0 "s_register_operand" "")
3410 (xor:DI (match_operand:DI 1 "s_register_operand" "")
3411 (match_operand:DI 2 "arm_xordi_operand" "")))]
3416 (define_insn_and_split "*xordi3_insn"
3417 [(set (match_operand:DI 0 "s_register_operand" "=w,&r,&r,&r,&r,?w")
3418 (xor:DI (match_operand:DI 1 "s_register_operand" "w ,%0,r ,0 ,r ,w")
3419 (match_operand:DI 2 "arm_xordi_operand" "w ,r ,r ,Dg,Dg,w")))]
3420 "TARGET_32BIT && !TARGET_IWMMXT"
3422 switch (which_alternative)
3427 case 4: /* fall through */
3429 case 0: /* fall through */
3430 case 5: return "veor\t%P0, %P1, %P2";
3431 default: gcc_unreachable ();
3434 "TARGET_32BIT && !TARGET_IWMMXT && reload_completed
3435 && !(IS_VFP_REGNUM (REGNO (operands[0])))"
3436 [(set (match_dup 3) (match_dup 4))
3437 (set (match_dup 5) (match_dup 6))]
3440 operands[3] = gen_lowpart (SImode, operands[0]);
3441 operands[5] = gen_highpart (SImode, operands[0]);
3443 operands[4] = simplify_gen_binary (XOR, SImode,
3444 gen_lowpart (SImode, operands[1]),
3445 gen_lowpart (SImode, operands[2]));
3446 operands[6] = simplify_gen_binary (XOR, SImode,
3447 gen_highpart (SImode, operands[1]),
3448 gen_highpart_mode (SImode, DImode, operands[2]));
3451 [(set_attr "length" "*,8,8,8,8,*")
3452 (set_attr "neon_type" "neon_int_1,*,*,*,*,neon_int_1")
3453 (set_attr "arch" "neon_for_64bits,*,*,*,*,avoid_neon_for_64bits")]
3456 (define_insn "*xordi_zesidi_di"
3457 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3458 (xor:DI (zero_extend:DI
3459 (match_operand:SI 2 "s_register_operand" "r,r"))
3460 (match_operand:DI 1 "s_register_operand" "0,?r")))]
3463 eor%?\\t%Q0, %Q1, %2
3465 [(set_attr "length" "4,8")
3466 (set_attr "predicable" "yes")
3467 (set_attr "predicable_short_it" "no")]
3470 (define_insn "*xordi_sesidi_di"
3471 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3472 (xor:DI (sign_extend:DI
3473 (match_operand:SI 2 "s_register_operand" "r,r"))
3474 (match_operand:DI 1 "s_register_operand" "0,r")))]
3477 [(set_attr "length" "8")
3478 (set_attr "predicable" "yes")]
3481 (define_expand "xorsi3"
3482 [(set (match_operand:SI 0 "s_register_operand" "")
3483 (xor:SI (match_operand:SI 1 "s_register_operand" "")
3484 (match_operand:SI 2 "reg_or_int_operand" "")))]
3486 "if (CONST_INT_P (operands[2]))
3490 arm_split_constant (XOR, SImode, NULL_RTX,
3491 INTVAL (operands[2]), operands[0], operands[1],
3492 optimize && can_create_pseudo_p ());
3495 else /* TARGET_THUMB1 */
3497 rtx tmp = force_reg (SImode, operands[2]);
3498 if (rtx_equal_p (operands[0], operands[1]))
3502 operands[2] = operands[1];
3509 (define_insn_and_split "*arm_xorsi3"
3510 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3511 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,r,r")
3512 (match_operand:SI 2 "reg_or_int_operand" "I,r,?n")))]
3519 && CONST_INT_P (operands[2])
3520 && !const_ok_for_arm (INTVAL (operands[2]))"
3521 [(clobber (const_int 0))]
3523 arm_split_constant (XOR, SImode, curr_insn,
3524 INTVAL (operands[2]), operands[0], operands[1], 0);
3527 [(set_attr "length" "4,4,16")
3528 (set_attr "predicable" "yes")
3529 (set_attr "type" "simple_alu_imm,*,*")]
3532 (define_insn "*thumb1_xorsi3_insn"
3533 [(set (match_operand:SI 0 "register_operand" "=l")
3534 (xor:SI (match_operand:SI 1 "register_operand" "%0")
3535 (match_operand:SI 2 "register_operand" "l")))]
3538 [(set_attr "length" "2")
3539 (set_attr "conds" "set")
3540 (set_attr "type" "simple_alu_imm")]
3543 (define_insn "*xorsi3_compare0"
3544 [(set (reg:CC_NOOV CC_REGNUM)
3545 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
3546 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3548 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3549 (xor:SI (match_dup 1) (match_dup 2)))]
3551 "eor%.\\t%0, %1, %2"
3552 [(set_attr "conds" "set")
3553 (set_attr "type" "simple_alu_imm,*")]
3556 (define_insn "*xorsi3_compare0_scratch"
3557 [(set (reg:CC_NOOV CC_REGNUM)
3558 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
3559 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
3563 [(set_attr "conds" "set")
3564 (set_attr "type" "simple_alu_imm, *")]
3567 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
3568 ; (NOT D) we can sometimes merge the final NOT into one of the following
3572 [(set (match_operand:SI 0 "s_register_operand" "")
3573 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
3574 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
3575 (match_operand:SI 3 "arm_rhs_operand" "")))
3576 (clobber (match_operand:SI 4 "s_register_operand" ""))]
3578 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
3579 (not:SI (match_dup 3))))
3580 (set (match_dup 0) (not:SI (match_dup 4)))]
3584 (define_insn_and_split "*andsi_iorsi3_notsi"
3585 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
3586 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
3587 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
3588 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
3590 "#" ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
3591 "&& reload_completed"
3592 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
3593 (set (match_dup 0) (and:SI (not:SI (match_dup 3)) (match_dup 0)))]
3595 [(set_attr "length" "8")
3596 (set_attr "ce_count" "2")
3597 (set_attr "predicable" "yes")
3598 (set_attr "predicable_short_it" "no")]
3601 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
3602 ; insns are available?
3604 [(set (match_operand:SI 0 "s_register_operand" "")
3605 (match_operator:SI 1 "logical_binary_operator"
3606 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3607 (match_operand:SI 3 "const_int_operand" "")
3608 (match_operand:SI 4 "const_int_operand" ""))
3609 (match_operator:SI 9 "logical_binary_operator"
3610 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3611 (match_operand:SI 6 "const_int_operand" ""))
3612 (match_operand:SI 7 "s_register_operand" "")])]))
3613 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3615 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3616 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3619 [(ashift:SI (match_dup 2) (match_dup 4))
3623 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3626 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3630 [(set (match_operand:SI 0 "s_register_operand" "")
3631 (match_operator:SI 1 "logical_binary_operator"
3632 [(match_operator:SI 9 "logical_binary_operator"
3633 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3634 (match_operand:SI 6 "const_int_operand" ""))
3635 (match_operand:SI 7 "s_register_operand" "")])
3636 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3637 (match_operand:SI 3 "const_int_operand" "")
3638 (match_operand:SI 4 "const_int_operand" ""))]))
3639 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3641 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3642 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3645 [(ashift:SI (match_dup 2) (match_dup 4))
3649 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3652 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3656 [(set (match_operand:SI 0 "s_register_operand" "")
3657 (match_operator:SI 1 "logical_binary_operator"
3658 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3659 (match_operand:SI 3 "const_int_operand" "")
3660 (match_operand:SI 4 "const_int_operand" ""))
3661 (match_operator:SI 9 "logical_binary_operator"
3662 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3663 (match_operand:SI 6 "const_int_operand" ""))
3664 (match_operand:SI 7 "s_register_operand" "")])]))
3665 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3667 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3668 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3671 [(ashift:SI (match_dup 2) (match_dup 4))
3675 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3678 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3682 [(set (match_operand:SI 0 "s_register_operand" "")
3683 (match_operator:SI 1 "logical_binary_operator"
3684 [(match_operator:SI 9 "logical_binary_operator"
3685 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3686 (match_operand:SI 6 "const_int_operand" ""))
3687 (match_operand:SI 7 "s_register_operand" "")])
3688 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3689 (match_operand:SI 3 "const_int_operand" "")
3690 (match_operand:SI 4 "const_int_operand" ""))]))
3691 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3693 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3694 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3697 [(ashift:SI (match_dup 2) (match_dup 4))
3701 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3704 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3708 ;; Minimum and maximum insns
3710 (define_expand "smaxsi3"
3712 (set (match_operand:SI 0 "s_register_operand" "")
3713 (smax:SI (match_operand:SI 1 "s_register_operand" "")
3714 (match_operand:SI 2 "arm_rhs_operand" "")))
3715 (clobber (reg:CC CC_REGNUM))])]
3718 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3720 /* No need for a clobber of the condition code register here. */
3721 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3722 gen_rtx_SMAX (SImode, operands[1],
3728 (define_insn "*smax_0"
3729 [(set (match_operand:SI 0 "s_register_operand" "=r")
3730 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3733 "bic%?\\t%0, %1, %1, asr #31"
3734 [(set_attr "predicable" "yes")
3735 (set_attr "predicable_short_it" "no")]
3738 (define_insn "*smax_m1"
3739 [(set (match_operand:SI 0 "s_register_operand" "=r")
3740 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3743 "orr%?\\t%0, %1, %1, asr #31"
3744 [(set_attr "predicable" "yes")
3745 (set_attr "predicable_short_it" "no")]
3748 (define_insn_and_split "*arm_smax_insn"
3749 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3750 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3751 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3752 (clobber (reg:CC CC_REGNUM))]
3755 ; cmp\\t%1, %2\;movlt\\t%0, %2
3756 ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3758 [(set (reg:CC CC_REGNUM)
3759 (compare:CC (match_dup 1) (match_dup 2)))
3761 (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
3765 [(set_attr "conds" "clob")
3766 (set_attr "length" "8,12")]
3769 (define_expand "sminsi3"
3771 (set (match_operand:SI 0 "s_register_operand" "")
3772 (smin:SI (match_operand:SI 1 "s_register_operand" "")
3773 (match_operand:SI 2 "arm_rhs_operand" "")))
3774 (clobber (reg:CC CC_REGNUM))])]
3777 if (operands[2] == const0_rtx)
3779 /* No need for a clobber of the condition code register here. */
3780 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3781 gen_rtx_SMIN (SImode, operands[1],
3787 (define_insn "*smin_0"
3788 [(set (match_operand:SI 0 "s_register_operand" "=r")
3789 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3792 "and%?\\t%0, %1, %1, asr #31"
3793 [(set_attr "predicable" "yes")
3794 (set_attr "predicable_short_it" "no")]
3797 (define_insn_and_split "*arm_smin_insn"
3798 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3799 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3800 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3801 (clobber (reg:CC CC_REGNUM))]
3804 ; cmp\\t%1, %2\;movge\\t%0, %2
3805 ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3807 [(set (reg:CC CC_REGNUM)
3808 (compare:CC (match_dup 1) (match_dup 2)))
3810 (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
3814 [(set_attr "conds" "clob")
3815 (set_attr "length" "8,12")]
3818 (define_expand "umaxsi3"
3820 (set (match_operand:SI 0 "s_register_operand" "")
3821 (umax:SI (match_operand:SI 1 "s_register_operand" "")
3822 (match_operand:SI 2 "arm_rhs_operand" "")))
3823 (clobber (reg:CC CC_REGNUM))])]
3828 (define_insn_and_split "*arm_umaxsi3"
3829 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3830 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3831 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3832 (clobber (reg:CC CC_REGNUM))]
3835 ; cmp\\t%1, %2\;movcc\\t%0, %2
3836 ; cmp\\t%1, %2\;movcs\\t%0, %1
3837 ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3839 [(set (reg:CC CC_REGNUM)
3840 (compare:CC (match_dup 1) (match_dup 2)))
3842 (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
3846 [(set_attr "conds" "clob")
3847 (set_attr "length" "8,8,12")]
3850 (define_expand "uminsi3"
3852 (set (match_operand:SI 0 "s_register_operand" "")
3853 (umin:SI (match_operand:SI 1 "s_register_operand" "")
3854 (match_operand:SI 2 "arm_rhs_operand" "")))
3855 (clobber (reg:CC CC_REGNUM))])]
3860 (define_insn_and_split "*arm_uminsi3"
3861 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3862 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3863 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3864 (clobber (reg:CC CC_REGNUM))]
3867 ; cmp\\t%1, %2\;movcs\\t%0, %2
3868 ; cmp\\t%1, %2\;movcc\\t%0, %1
3869 ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3871 [(set (reg:CC CC_REGNUM)
3872 (compare:CC (match_dup 1) (match_dup 2)))
3874 (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
3878 [(set_attr "conds" "clob")
3879 (set_attr "length" "8,8,12")]
3882 (define_insn "*store_minmaxsi"
3883 [(set (match_operand:SI 0 "memory_operand" "=m")
3884 (match_operator:SI 3 "minmax_operator"
3885 [(match_operand:SI 1 "s_register_operand" "r")
3886 (match_operand:SI 2 "s_register_operand" "r")]))
3887 (clobber (reg:CC CC_REGNUM))]
3888 "TARGET_32BIT && optimize_insn_for_size_p()"
3890 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3891 operands[1], operands[2]);
3892 output_asm_insn (\"cmp\\t%1, %2\", operands);
3894 output_asm_insn (\"ite\t%d3\", operands);
3895 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3896 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3899 [(set_attr "conds" "clob")
3900 (set (attr "length")
3901 (if_then_else (eq_attr "is_thumb" "yes")
3904 (set_attr "type" "store1")]
3907 ; Reject the frame pointer in operand[1], since reloading this after
3908 ; it has been eliminated can cause carnage.
3909 (define_insn "*minmax_arithsi"
3910 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3911 (match_operator:SI 4 "shiftable_operator"
3912 [(match_operator:SI 5 "minmax_operator"
3913 [(match_operand:SI 2 "s_register_operand" "r,r")
3914 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3915 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3916 (clobber (reg:CC CC_REGNUM))]
3917 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
3920 enum rtx_code code = GET_CODE (operands[4]);
3923 if (which_alternative != 0 || operands[3] != const0_rtx
3924 || (code != PLUS && code != IOR && code != XOR))
3929 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3930 operands[2], operands[3]);
3931 output_asm_insn (\"cmp\\t%2, %3\", operands);
3935 output_asm_insn (\"ite\\t%d5\", operands);
3937 output_asm_insn (\"it\\t%d5\", operands);
3939 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3941 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3944 [(set_attr "conds" "clob")
3945 (set (attr "length")
3946 (if_then_else (eq_attr "is_thumb" "yes")
3951 ; Reject the frame pointer in operand[1], since reloading this after
3952 ; it has been eliminated can cause carnage.
3953 (define_insn_and_split "*minmax_arithsi_non_canon"
3954 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3956 (match_operand:SI 1 "s_register_operand" "0,?r")
3957 (match_operator:SI 4 "minmax_operator"
3958 [(match_operand:SI 2 "s_register_operand" "r,r")
3959 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
3960 (clobber (reg:CC CC_REGNUM))]
3961 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
3963 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
3964 [(set (reg:CC CC_REGNUM)
3965 (compare:CC (match_dup 2) (match_dup 3)))
3967 (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
3969 (minus:SI (match_dup 1)
3971 (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
3973 (minus:SI (match_dup 1)
3976 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
3977 operands[2], operands[3]);
3978 enum rtx_code rc = minmax_code (operands[4]);
3979 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
3980 operands[2], operands[3]);
3982 if (mode == CCFPmode || mode == CCFPEmode)
3983 rc = reverse_condition_maybe_unordered (rc);
3985 rc = reverse_condition (rc);
3986 operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
3988 [(set_attr "conds" "clob")
3989 (set (attr "length")
3990 (if_then_else (eq_attr "is_thumb" "yes")
3995 (define_code_iterator SAT [smin smax])
3996 (define_code_iterator SATrev [smin smax])
3997 (define_code_attr SATlo [(smin "1") (smax "2")])
3998 (define_code_attr SAThi [(smin "2") (smax "1")])
4000 (define_insn "*satsi_<SAT:code>"
4001 [(set (match_operand:SI 0 "s_register_operand" "=r")
4002 (SAT:SI (SATrev:SI (match_operand:SI 3 "s_register_operand" "r")
4003 (match_operand:SI 1 "const_int_operand" "i"))
4004 (match_operand:SI 2 "const_int_operand" "i")))]
4005 "TARGET_32BIT && arm_arch6 && <SAT:CODE> != <SATrev:CODE>
4006 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
4010 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
4011 &mask, &signed_sat))
4014 operands[1] = GEN_INT (mask);
4016 return "ssat%?\t%0, %1, %3";
4018 return "usat%?\t%0, %1, %3";
4020 [(set_attr "predicable" "yes")
4021 (set_attr "insn" "sat")])
4023 (define_insn "*satsi_<SAT:code>_shift"
4024 [(set (match_operand:SI 0 "s_register_operand" "=r")
4025 (SAT:SI (SATrev:SI (match_operator:SI 3 "sat_shift_operator"
4026 [(match_operand:SI 4 "s_register_operand" "r")
4027 (match_operand:SI 5 "const_int_operand" "i")])
4028 (match_operand:SI 1 "const_int_operand" "i"))
4029 (match_operand:SI 2 "const_int_operand" "i")))]
4030 "TARGET_32BIT && arm_arch6 && <SAT:CODE> != <SATrev:CODE>
4031 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
4035 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
4036 &mask, &signed_sat))
4039 operands[1] = GEN_INT (mask);
4041 return "ssat%?\t%0, %1, %4%S3";
4043 return "usat%?\t%0, %1, %4%S3";
4045 [(set_attr "predicable" "yes")
4046 (set_attr "insn" "sat")
4047 (set_attr "shift" "3")
4048 (set_attr "type" "alu_shift")])
4050 ;; Shift and rotation insns
4052 (define_expand "ashldi3"
4053 [(set (match_operand:DI 0 "s_register_operand" "")
4054 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
4055 (match_operand:SI 2 "general_operand" "")))]
4060 /* Delay the decision whether to use NEON or core-regs until
4061 register allocation. */
4062 emit_insn (gen_ashldi3_neon (operands[0], operands[1], operands[2]));
4067 /* Only the NEON case can handle in-memory shift counts. */
4068 if (!reg_or_int_operand (operands[2], SImode))
4069 operands[2] = force_reg (SImode, operands[2]);
4072 if (!CONST_INT_P (operands[2]) && TARGET_REALLY_IWMMXT)
4073 ; /* No special preparation statements; expand pattern as above. */
4076 rtx scratch1, scratch2;
4078 if (CONST_INT_P (operands[2])
4079 && (HOST_WIDE_INT) INTVAL (operands[2]) == 1)
4081 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
4085 /* Ideally we should use iwmmxt here if we could know that operands[1]
4086 ends up already living in an iwmmxt register. Otherwise it's
4087 cheaper to have the alternate code being generated than moving
4088 values to iwmmxt regs and back. */
4090 /* If we're optimizing for size, we prefer the libgcc calls. */
4091 if (optimize_function_for_size_p (cfun))
4094 /* Expand operation using core-registers.
4095 'FAIL' would achieve the same thing, but this is a bit smarter. */
4096 scratch1 = gen_reg_rtx (SImode);
4097 scratch2 = gen_reg_rtx (SImode);
4098 arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
4099 operands[2], scratch1, scratch2);
4105 (define_insn_and_split "arm_ashldi3_1bit"
4106 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
4107 (ashift:DI (match_operand:DI 1 "s_register_operand" "0,r")
4109 (clobber (reg:CC CC_REGNUM))]
4111 "#" ; "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
4112 "&& reload_completed"
4113 [(parallel [(set (reg:CC CC_REGNUM)
4114 (compare:CC (ashift:SI (match_dup 1) (const_int 1))
4116 (set (match_dup 0) (ashift:SI (match_dup 1) (const_int 1)))])
4117 (set (match_dup 2) (plus:SI (plus:SI (match_dup 3) (match_dup 3))
4118 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
4120 operands[2] = gen_highpart (SImode, operands[0]);
4121 operands[0] = gen_lowpart (SImode, operands[0]);
4122 operands[3] = gen_highpart (SImode, operands[1]);
4123 operands[1] = gen_lowpart (SImode, operands[1]);
4125 [(set_attr "conds" "clob")
4126 (set_attr "length" "8")]
4129 (define_expand "ashlsi3"
4130 [(set (match_operand:SI 0 "s_register_operand" "")
4131 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
4132 (match_operand:SI 2 "arm_rhs_operand" "")))]
4135 if (CONST_INT_P (operands[2])
4136 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
4138 emit_insn (gen_movsi (operands[0], const0_rtx));
4144 (define_insn "*thumb1_ashlsi3"
4145 [(set (match_operand:SI 0 "register_operand" "=l,l")
4146 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
4147 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
4150 [(set_attr "length" "2")
4151 (set_attr "conds" "set")])
4153 (define_expand "ashrdi3"
4154 [(set (match_operand:DI 0 "s_register_operand" "")
4155 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
4156 (match_operand:SI 2 "reg_or_int_operand" "")))]
4161 /* Delay the decision whether to use NEON or core-regs until
4162 register allocation. */
4163 emit_insn (gen_ashrdi3_neon (operands[0], operands[1], operands[2]));
4167 if (!CONST_INT_P (operands[2]) && TARGET_REALLY_IWMMXT)
4168 ; /* No special preparation statements; expand pattern as above. */
4171 rtx scratch1, scratch2;
4173 if (CONST_INT_P (operands[2])
4174 && (HOST_WIDE_INT) INTVAL (operands[2]) == 1)
4176 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
4180 /* Ideally we should use iwmmxt here if we could know that operands[1]
4181 ends up already living in an iwmmxt register. Otherwise it's
4182 cheaper to have the alternate code being generated than moving
4183 values to iwmmxt regs and back. */
4185 /* If we're optimizing for size, we prefer the libgcc calls. */
4186 if (optimize_function_for_size_p (cfun))
4189 /* Expand operation using core-registers.
4190 'FAIL' would achieve the same thing, but this is a bit smarter. */
4191 scratch1 = gen_reg_rtx (SImode);
4192 scratch2 = gen_reg_rtx (SImode);
4193 arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
4194 operands[2], scratch1, scratch2);
4200 (define_insn_and_split "arm_ashrdi3_1bit"
4201 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
4202 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
4204 (clobber (reg:CC CC_REGNUM))]
4206 "#" ; "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
4207 "&& reload_completed"
4208 [(parallel [(set (reg:CC CC_REGNUM)
4209 (compare:CC (ashiftrt:SI (match_dup 3) (const_int 1))
4211 (set (match_dup 2) (ashiftrt:SI (match_dup 3) (const_int 1)))])
4212 (set (match_dup 0) (unspec:SI [(match_dup 1)
4213 (reg:CC_C CC_REGNUM)]
4216 operands[2] = gen_highpart (SImode, operands[0]);
4217 operands[0] = gen_lowpart (SImode, operands[0]);
4218 operands[3] = gen_highpart (SImode, operands[1]);
4219 operands[1] = gen_lowpart (SImode, operands[1]);
4221 [(set_attr "conds" "clob")
4222 (set_attr "length" "8")]
4226 [(set (match_operand:SI 0 "s_register_operand" "=r")
4227 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
4228 (reg:CC_C CC_REGNUM)]
4232 [(set_attr "conds" "use")
4233 (set_attr "insn" "mov")
4234 (set_attr "type" "alu_shift")]
4237 (define_expand "ashrsi3"
4238 [(set (match_operand:SI 0 "s_register_operand" "")
4239 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
4240 (match_operand:SI 2 "arm_rhs_operand" "")))]
4243 if (CONST_INT_P (operands[2])
4244 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
4245 operands[2] = GEN_INT (31);
4249 (define_insn "*thumb1_ashrsi3"
4250 [(set (match_operand:SI 0 "register_operand" "=l,l")
4251 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
4252 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
4255 [(set_attr "length" "2")
4256 (set_attr "conds" "set")])
4258 (define_expand "lshrdi3"
4259 [(set (match_operand:DI 0 "s_register_operand" "")
4260 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
4261 (match_operand:SI 2 "reg_or_int_operand" "")))]
4266 /* Delay the decision whether to use NEON or core-regs until
4267 register allocation. */
4268 emit_insn (gen_lshrdi3_neon (operands[0], operands[1], operands[2]));
4272 if (!CONST_INT_P (operands[2]) && TARGET_REALLY_IWMMXT)
4273 ; /* No special preparation statements; expand pattern as above. */
4276 rtx scratch1, scratch2;
4278 if (CONST_INT_P (operands[2])
4279 && (HOST_WIDE_INT) INTVAL (operands[2]) == 1)
4281 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
4285 /* Ideally we should use iwmmxt here if we could know that operands[1]
4286 ends up already living in an iwmmxt register. Otherwise it's
4287 cheaper to have the alternate code being generated than moving
4288 values to iwmmxt regs and back. */
4290 /* If we're optimizing for size, we prefer the libgcc calls. */
4291 if (optimize_function_for_size_p (cfun))
4294 /* Expand operation using core-registers.
4295 'FAIL' would achieve the same thing, but this is a bit smarter. */
4296 scratch1 = gen_reg_rtx (SImode);
4297 scratch2 = gen_reg_rtx (SImode);
4298 arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
4299 operands[2], scratch1, scratch2);
4305 (define_insn_and_split "arm_lshrdi3_1bit"
4306 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
4307 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
4309 (clobber (reg:CC CC_REGNUM))]
4311 "#" ; "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
4312 "&& reload_completed"
4313 [(parallel [(set (reg:CC CC_REGNUM)
4314 (compare:CC (lshiftrt:SI (match_dup 3) (const_int 1))
4316 (set (match_dup 2) (lshiftrt:SI (match_dup 3) (const_int 1)))])
4317 (set (match_dup 0) (unspec:SI [(match_dup 1)
4318 (reg:CC_C CC_REGNUM)]
4321 operands[2] = gen_highpart (SImode, operands[0]);
4322 operands[0] = gen_lowpart (SImode, operands[0]);
4323 operands[3] = gen_highpart (SImode, operands[1]);
4324 operands[1] = gen_lowpart (SImode, operands[1]);
4326 [(set_attr "conds" "clob")
4327 (set_attr "length" "8")]
4330 (define_expand "lshrsi3"
4331 [(set (match_operand:SI 0 "s_register_operand" "")
4332 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
4333 (match_operand:SI 2 "arm_rhs_operand" "")))]
4336 if (CONST_INT_P (operands[2])
4337 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
4339 emit_insn (gen_movsi (operands[0], const0_rtx));
4345 (define_insn "*thumb1_lshrsi3"
4346 [(set (match_operand:SI 0 "register_operand" "=l,l")
4347 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
4348 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
4351 [(set_attr "length" "2")
4352 (set_attr "conds" "set")])
4354 (define_expand "rotlsi3"
4355 [(set (match_operand:SI 0 "s_register_operand" "")
4356 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
4357 (match_operand:SI 2 "reg_or_int_operand" "")))]
4360 if (CONST_INT_P (operands[2]))
4361 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
4364 rtx reg = gen_reg_rtx (SImode);
4365 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
4371 (define_expand "rotrsi3"
4372 [(set (match_operand:SI 0 "s_register_operand" "")
4373 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
4374 (match_operand:SI 2 "arm_rhs_operand" "")))]
4379 if (CONST_INT_P (operands[2])
4380 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
4381 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
4383 else /* TARGET_THUMB1 */
4385 if (CONST_INT_P (operands [2]))
4386 operands [2] = force_reg (SImode, operands[2]);
4391 (define_insn "*thumb1_rotrsi3"
4392 [(set (match_operand:SI 0 "register_operand" "=l")
4393 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
4394 (match_operand:SI 2 "register_operand" "l")))]
4397 [(set_attr "length" "2")]
4400 (define_insn "*arm_shiftsi3"
4401 [(set (match_operand:SI 0 "s_register_operand" "=r")
4402 (match_operator:SI 3 "shift_operator"
4403 [(match_operand:SI 1 "s_register_operand" "r")
4404 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
4406 "* return arm_output_shift(operands, 0);"
4407 [(set_attr "predicable" "yes")
4408 (set_attr "shift" "1")
4409 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
4410 (const_string "alu_shift")
4411 (const_string "alu_shift_reg")))]
4414 (define_insn "*shiftsi3_compare"
4415 [(set (reg:CC CC_REGNUM)
4416 (compare:CC (match_operator:SI 3 "shift_operator"
4417 [(match_operand:SI 1 "s_register_operand" "r")
4418 (match_operand:SI 2 "arm_rhs_operand" "rM")])
4420 (set (match_operand:SI 0 "s_register_operand" "=r")
4421 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
4423 "* return arm_output_shift(operands, 1);"
4424 [(set_attr "conds" "set")
4425 (set_attr "shift" "1")
4426 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
4427 (const_string "alu_shift")
4428 (const_string "alu_shift_reg")))]
4431 (define_insn "*shiftsi3_compare0"
4432 [(set (reg:CC_NOOV CC_REGNUM)
4433 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
4434 [(match_operand:SI 1 "s_register_operand" "r")
4435 (match_operand:SI 2 "arm_rhs_operand" "rM")])
4437 (set (match_operand:SI 0 "s_register_operand" "=r")
4438 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
4440 "* return arm_output_shift(operands, 1);"
4441 [(set_attr "conds" "set")
4442 (set_attr "shift" "1")
4443 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
4444 (const_string "alu_shift")
4445 (const_string "alu_shift_reg")))]
4448 (define_insn "*shiftsi3_compare0_scratch"
4449 [(set (reg:CC_NOOV CC_REGNUM)
4450 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
4451 [(match_operand:SI 1 "s_register_operand" "r")
4452 (match_operand:SI 2 "arm_rhs_operand" "rM")])
4454 (clobber (match_scratch:SI 0 "=r"))]
4456 "* return arm_output_shift(operands, 1);"
4457 [(set_attr "conds" "set")
4458 (set_attr "shift" "1")]
4461 (define_insn "*not_shiftsi"
4462 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4463 (not:SI (match_operator:SI 3 "shift_operator"
4464 [(match_operand:SI 1 "s_register_operand" "r,r")
4465 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
4468 [(set_attr "predicable" "yes")
4469 (set_attr "predicable_short_it" "no")
4470 (set_attr "shift" "1")
4471 (set_attr "insn" "mvn")
4472 (set_attr "arch" "32,a")
4473 (set_attr "type" "alu_shift,alu_shift_reg")])
4475 (define_insn "*not_shiftsi_compare0"
4476 [(set (reg:CC_NOOV CC_REGNUM)
4478 (not:SI (match_operator:SI 3 "shift_operator"
4479 [(match_operand:SI 1 "s_register_operand" "r,r")
4480 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
4482 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4483 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
4486 [(set_attr "conds" "set")
4487 (set_attr "shift" "1")
4488 (set_attr "insn" "mvn")
4489 (set_attr "arch" "32,a")
4490 (set_attr "type" "alu_shift,alu_shift_reg")])
4492 (define_insn "*not_shiftsi_compare0_scratch"
4493 [(set (reg:CC_NOOV CC_REGNUM)
4495 (not:SI (match_operator:SI 3 "shift_operator"
4496 [(match_operand:SI 1 "s_register_operand" "r,r")
4497 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
4499 (clobber (match_scratch:SI 0 "=r,r"))]
4502 [(set_attr "conds" "set")
4503 (set_attr "shift" "1")
4504 (set_attr "insn" "mvn")
4505 (set_attr "arch" "32,a")
4506 (set_attr "type" "alu_shift,alu_shift_reg")])
4508 ;; We don't really have extzv, but defining this using shifts helps
4509 ;; to reduce register pressure later on.
4511 (define_expand "extzv"
4512 [(set (match_operand 0 "s_register_operand" "")
4513 (zero_extract (match_operand 1 "nonimmediate_operand" "")
4514 (match_operand 2 "const_int_operand" "")
4515 (match_operand 3 "const_int_operand" "")))]
4516 "TARGET_THUMB1 || arm_arch_thumb2"
4519 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
4520 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
4522 if (arm_arch_thumb2)
4524 HOST_WIDE_INT width = INTVAL (operands[2]);
4525 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4527 if (unaligned_access && MEM_P (operands[1])
4528 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
4532 if (BYTES_BIG_ENDIAN)
4533 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
4538 base_addr = adjust_address (operands[1], SImode,
4539 bitpos / BITS_PER_UNIT);
4540 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4544 rtx dest = operands[0];
4545 rtx tmp = gen_reg_rtx (SImode);
4547 /* We may get a paradoxical subreg here. Strip it off. */
4548 if (GET_CODE (dest) == SUBREG
4549 && GET_MODE (dest) == SImode
4550 && GET_MODE (SUBREG_REG (dest)) == HImode)
4551 dest = SUBREG_REG (dest);
4553 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4556 base_addr = adjust_address (operands[1], HImode,
4557 bitpos / BITS_PER_UNIT);
4558 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
4559 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4563 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
4565 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
4573 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4576 operands[3] = GEN_INT (rshift);
4580 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
4584 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
4585 operands[3], gen_reg_rtx (SImode)));
4590 ;; Helper for extzv, for the Thumb-1 register-shifts case.
4592 (define_expand "extzv_t1"
4593 [(set (match_operand:SI 4 "s_register_operand" "")
4594 (ashift:SI (match_operand:SI 1 "nonimmediate_operand" "")
4595 (match_operand:SI 2 "const_int_operand" "")))
4596 (set (match_operand:SI 0 "s_register_operand" "")
4597 (lshiftrt:SI (match_dup 4)
4598 (match_operand:SI 3 "const_int_operand" "")))]
4602 (define_expand "extv"
4603 [(set (match_operand 0 "s_register_operand" "")
4604 (sign_extract (match_operand 1 "nonimmediate_operand" "")
4605 (match_operand 2 "const_int_operand" "")
4606 (match_operand 3 "const_int_operand" "")))]
4609 HOST_WIDE_INT width = INTVAL (operands[2]);
4610 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4612 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
4613 && (bitpos % BITS_PER_UNIT) == 0)
4617 if (BYTES_BIG_ENDIAN)
4618 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
4622 base_addr = adjust_address (operands[1], SImode,
4623 bitpos / BITS_PER_UNIT);
4624 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4628 rtx dest = operands[0];
4629 rtx tmp = gen_reg_rtx (SImode);
4631 /* We may get a paradoxical subreg here. Strip it off. */
4632 if (GET_CODE (dest) == SUBREG
4633 && GET_MODE (dest) == SImode
4634 && GET_MODE (SUBREG_REG (dest)) == HImode)
4635 dest = SUBREG_REG (dest);
4637 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4640 base_addr = adjust_address (operands[1], HImode,
4641 bitpos / BITS_PER_UNIT);
4642 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
4643 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4648 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4650 else if (GET_MODE (operands[0]) == SImode
4651 && GET_MODE (operands[1]) == SImode)
4653 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
4661 ; Helper to expand register forms of extv with the proper modes.
4663 (define_expand "extv_regsi"
4664 [(set (match_operand:SI 0 "s_register_operand" "")
4665 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
4666 (match_operand 2 "const_int_operand" "")
4667 (match_operand 3 "const_int_operand" "")))]
4672 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
4674 (define_insn "unaligned_loadsi"
4675 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4676 (unspec:SI [(match_operand:SI 1 "memory_operand" "Uw,m")]
4677 UNSPEC_UNALIGNED_LOAD))]
4678 "unaligned_access && TARGET_32BIT"
4679 "ldr%?\t%0, %1\t@ unaligned"
4680 [(set_attr "arch" "t2,any")
4681 (set_attr "length" "2,4")
4682 (set_attr "predicable" "yes")
4683 (set_attr "predicable_short_it" "yes,no")
4684 (set_attr "type" "load1")])
4686 (define_insn "unaligned_loadhis"
4687 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4689 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uw,m")]
4690 UNSPEC_UNALIGNED_LOAD)))]
4691 "unaligned_access && TARGET_32BIT"
4692 "ldr%(sh%)\t%0, %1\t@ unaligned"
4693 [(set_attr "arch" "t2,any")
4694 (set_attr "length" "2,4")
4695 (set_attr "predicable" "yes")
4696 (set_attr "predicable_short_it" "yes,no")
4697 (set_attr "type" "load_byte")])
4699 (define_insn "unaligned_loadhiu"
4700 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4702 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uw,m")]
4703 UNSPEC_UNALIGNED_LOAD)))]
4704 "unaligned_access && TARGET_32BIT"
4705 "ldr%(h%)\t%0, %1\t@ unaligned"
4706 [(set_attr "arch" "t2,any")
4707 (set_attr "length" "2,4")
4708 (set_attr "predicable" "yes")
4709 (set_attr "predicable_short_it" "yes,no")
4710 (set_attr "type" "load_byte")])
4712 (define_insn "unaligned_storesi"
4713 [(set (match_operand:SI 0 "memory_operand" "=Uw,m")
4714 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,r")]
4715 UNSPEC_UNALIGNED_STORE))]
4716 "unaligned_access && TARGET_32BIT"
4717 "str%?\t%1, %0\t@ unaligned"
4718 [(set_attr "arch" "t2,any")
4719 (set_attr "length" "2,4")
4720 (set_attr "predicable" "yes")
4721 (set_attr "predicable_short_it" "yes,no")
4722 (set_attr "type" "store1")])
4724 (define_insn "unaligned_storehi"
4725 [(set (match_operand:HI 0 "memory_operand" "=Uw,m")
4726 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,r")]
4727 UNSPEC_UNALIGNED_STORE))]
4728 "unaligned_access && TARGET_32BIT"
4729 "str%(h%)\t%1, %0\t@ unaligned"
4730 [(set_attr "arch" "t2,any")
4731 (set_attr "length" "2,4")
4732 (set_attr "predicable" "yes")
4733 (set_attr "predicable_short_it" "yes,no")
4734 (set_attr "type" "store1")])
4736 ;; Unaligned double-word load and store.
4737 ;; Split after reload into two unaligned single-word accesses.
4738 ;; It prevents lower_subreg from splitting some other aligned
4739 ;; double-word accesses too early. Used for internal memcpy.
4741 (define_insn_and_split "unaligned_loaddi"
4742 [(set (match_operand:DI 0 "s_register_operand" "=l,r")
4743 (unspec:DI [(match_operand:DI 1 "memory_operand" "o,o")]
4744 UNSPEC_UNALIGNED_LOAD))]
4745 "unaligned_access && TARGET_32BIT"
4747 "&& reload_completed"
4748 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_UNALIGNED_LOAD))
4749 (set (match_dup 2) (unspec:SI [(match_dup 3)] UNSPEC_UNALIGNED_LOAD))]
4751 operands[2] = gen_highpart (SImode, operands[0]);
4752 operands[0] = gen_lowpart (SImode, operands[0]);
4753 operands[3] = gen_highpart (SImode, operands[1]);
4754 operands[1] = gen_lowpart (SImode, operands[1]);
4756 /* If the first destination register overlaps with the base address,
4757 swap the order in which the loads are emitted. */
4758 if (reg_overlap_mentioned_p (operands[0], operands[1]))
4760 rtx tmp = operands[1];
4761 operands[1] = operands[3];
4764 operands[0] = operands[2];
4768 [(set_attr "arch" "t2,any")
4769 (set_attr "length" "4,8")
4770 (set_attr "predicable" "yes")
4771 (set_attr "type" "load2")])
4773 (define_insn_and_split "unaligned_storedi"
4774 [(set (match_operand:DI 0 "memory_operand" "=o,o")
4775 (unspec:DI [(match_operand:DI 1 "s_register_operand" "l,r")]
4776 UNSPEC_UNALIGNED_STORE))]
4777 "unaligned_access && TARGET_32BIT"
4779 "&& reload_completed"
4780 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_UNALIGNED_STORE))
4781 (set (match_dup 2) (unspec:SI [(match_dup 3)] UNSPEC_UNALIGNED_STORE))]
4783 operands[2] = gen_highpart (SImode, operands[0]);
4784 operands[0] = gen_lowpart (SImode, operands[0]);
4785 operands[3] = gen_highpart (SImode, operands[1]);
4786 operands[1] = gen_lowpart (SImode, operands[1]);
4788 [(set_attr "arch" "t2,any")
4789 (set_attr "length" "4,8")
4790 (set_attr "predicable" "yes")
4791 (set_attr "type" "store2")])
4794 (define_insn "*extv_reg"
4795 [(set (match_operand:SI 0 "s_register_operand" "=r")
4796 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4797 (match_operand:SI 2 "const_int_operand" "M")
4798 (match_operand:SI 3 "const_int_operand" "M")))]
4800 "sbfx%?\t%0, %1, %3, %2"
4801 [(set_attr "length" "4")
4802 (set_attr "predicable" "yes")
4803 (set_attr "predicable_short_it" "no")]
4806 (define_insn "extzv_t2"
4807 [(set (match_operand:SI 0 "s_register_operand" "=r")
4808 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4809 (match_operand:SI 2 "const_int_operand" "M")
4810 (match_operand:SI 3 "const_int_operand" "M")))]
4812 "ubfx%?\t%0, %1, %3, %2"
4813 [(set_attr "length" "4")
4814 (set_attr "predicable" "yes")
4815 (set_attr "predicable_short_it" "no")]
4819 ;; Division instructions
4820 (define_insn "divsi3"
4821 [(set (match_operand:SI 0 "s_register_operand" "=r")
4822 (div:SI (match_operand:SI 1 "s_register_operand" "r")
4823 (match_operand:SI 2 "s_register_operand" "r")))]
4825 "sdiv%?\t%0, %1, %2"
4826 [(set_attr "predicable" "yes")
4827 (set_attr "predicable_short_it" "no")
4828 (set_attr "type" "sdiv")]
4831 (define_insn "udivsi3"
4832 [(set (match_operand:SI 0 "s_register_operand" "=r")
4833 (udiv:SI (match_operand:SI 1 "s_register_operand" "r")
4834 (match_operand:SI 2 "s_register_operand" "r")))]
4836 "udiv%?\t%0, %1, %2"
4837 [(set_attr "predicable" "yes")
4838 (set_attr "predicable_short_it" "no")
4839 (set_attr "type" "udiv")]
4843 ;; Unary arithmetic insns
4845 (define_expand "negdi2"
4847 [(set (match_operand:DI 0 "s_register_operand" "")
4848 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
4849 (clobber (reg:CC CC_REGNUM))])]
4854 emit_insn (gen_negdi2_neon (operands[0], operands[1]));
4860 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
4861 ;; The first alternative allows the common case of a *full* overlap.
4862 (define_insn_and_split "*arm_negdi2"
4863 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
4864 (neg:DI (match_operand:DI 1 "s_register_operand" "0,r")))
4865 (clobber (reg:CC CC_REGNUM))]
4867 "#" ; "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
4868 "&& reload_completed"
4869 [(parallel [(set (reg:CC CC_REGNUM)
4870 (compare:CC (const_int 0) (match_dup 1)))
4871 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1)))])
4872 (set (match_dup 2) (minus:SI (minus:SI (const_int 0) (match_dup 3))
4873 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
4875 operands[2] = gen_highpart (SImode, operands[0]);
4876 operands[0] = gen_lowpart (SImode, operands[0]);
4877 operands[3] = gen_highpart (SImode, operands[1]);
4878 operands[1] = gen_lowpart (SImode, operands[1]);
4880 [(set_attr "conds" "clob")
4881 (set_attr "length" "8")]
4884 (define_insn "*thumb1_negdi2"
4885 [(set (match_operand:DI 0 "register_operand" "=&l")
4886 (neg:DI (match_operand:DI 1 "register_operand" "l")))
4887 (clobber (reg:CC CC_REGNUM))]
4889 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
4890 [(set_attr "length" "6")]
4893 (define_expand "negsi2"
4894 [(set (match_operand:SI 0 "s_register_operand" "")
4895 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
4900 (define_insn "*arm_negsi2"
4901 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4902 (neg:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
4904 "rsb%?\\t%0, %1, #0"
4905 [(set_attr "predicable" "yes")
4906 (set_attr "predicable_short_it" "yes,no")
4907 (set_attr "arch" "t2,*")
4908 (set_attr "length" "4")]
4911 (define_insn "*thumb1_negsi2"
4912 [(set (match_operand:SI 0 "register_operand" "=l")
4913 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
4916 [(set_attr "length" "2")]
4919 (define_expand "negsf2"
4920 [(set (match_operand:SF 0 "s_register_operand" "")
4921 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
4922 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP"
4926 (define_expand "negdf2"
4927 [(set (match_operand:DF 0 "s_register_operand" "")
4928 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
4929 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
4932 ;; Negate an extended 32-bit value.
4933 (define_insn_and_split "*negdi_extendsidi"
4934 [(set (match_operand:DI 0 "s_register_operand" "=r,&r,l,&l")
4935 (neg:DI (sign_extend:DI (match_operand:SI 1 "s_register_operand" "0,r,0,l"))))
4936 (clobber (reg:CC CC_REGNUM))]
4938 "#" ; rsb\\t%Q0, %1, #0\;asr\\t%R0, %Q0, #31
4939 "&& reload_completed"
4942 operands[2] = gen_highpart (SImode, operands[0]);
4943 operands[0] = gen_lowpart (SImode, operands[0]);
4944 rtx tmp = gen_rtx_SET (VOIDmode,
4946 gen_rtx_MINUS (SImode,
4955 /* Set the flags, to emit the short encoding in Thumb2. */
4956 rtx flags = gen_rtx_SET (VOIDmode,
4957 gen_rtx_REG (CCmode, CC_REGNUM),
4958 gen_rtx_COMPARE (CCmode,
4961 emit_insn (gen_rtx_PARALLEL (VOIDmode,
4966 emit_insn (gen_rtx_SET (VOIDmode,
4968 gen_rtx_ASHIFTRT (SImode,
4973 [(set_attr "length" "8,8,4,4")
4974 (set_attr "arch" "a,a,t2,t2")]
4977 (define_insn_and_split "*negdi_zero_extendsidi"
4978 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
4979 (neg:DI (zero_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))))
4980 (clobber (reg:CC CC_REGNUM))]
4982 "#" ; "rsbs\\t%Q0, %1, #0\;sbc\\t%R0,%R0,%R0"
4983 ;; Don't care what register is input to sbc,
4984 ;; since we just just need to propagate the carry.
4985 "&& reload_completed"
4986 [(parallel [(set (reg:CC CC_REGNUM)
4987 (compare:CC (const_int 0) (match_dup 1)))
4988 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1)))])
4989 (set (match_dup 2) (minus:SI (minus:SI (match_dup 2) (match_dup 2))
4990 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
4992 operands[2] = gen_highpart (SImode, operands[0]);
4993 operands[0] = gen_lowpart (SImode, operands[0]);
4995 [(set_attr "conds" "clob")
4996 (set_attr "length" "8")] ;; length in thumb is 4
4999 ;; abssi2 doesn't really clobber the condition codes if a different register
5000 ;; is being set. To keep things simple, assume during rtl manipulations that
5001 ;; it does, but tell the final scan operator the truth. Similarly for
5004 (define_expand "abssi2"
5006 [(set (match_operand:SI 0 "s_register_operand" "")
5007 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
5008 (clobber (match_dup 2))])]
5012 operands[2] = gen_rtx_SCRATCH (SImode);
5014 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
5017 (define_insn_and_split "*arm_abssi2"
5018 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
5019 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
5020 (clobber (reg:CC CC_REGNUM))]
5023 "&& reload_completed"
5026 /* if (which_alternative == 0) */
5027 if (REGNO(operands[0]) == REGNO(operands[1]))
5029 /* Emit the pattern:
5030 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
5031 [(set (reg:CC CC_REGNUM)
5032 (compare:CC (match_dup 0) (const_int 0)))
5033 (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
5034 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
5036 emit_insn (gen_rtx_SET (VOIDmode,
5037 gen_rtx_REG (CCmode, CC_REGNUM),
5038 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
5039 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5040 (gen_rtx_LT (SImode,
5041 gen_rtx_REG (CCmode, CC_REGNUM),
5043 (gen_rtx_SET (VOIDmode,
5045 (gen_rtx_MINUS (SImode,
5052 /* Emit the pattern:
5053 alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
5055 (xor:SI (match_dup 1)
5056 (ashiftrt:SI (match_dup 1) (const_int 31))))
5058 (minus:SI (match_dup 0)
5059 (ashiftrt:SI (match_dup 1) (const_int 31))))]
5061 emit_insn (gen_rtx_SET (VOIDmode,
5063 gen_rtx_XOR (SImode,
5064 gen_rtx_ASHIFTRT (SImode,
5068 emit_insn (gen_rtx_SET (VOIDmode,
5070 gen_rtx_MINUS (SImode,
5072 gen_rtx_ASHIFTRT (SImode,
5078 [(set_attr "conds" "clob,*")
5079 (set_attr "shift" "1")
5080 (set_attr "predicable" "no, yes")
5081 (set_attr "length" "8")]
5084 (define_insn_and_split "*thumb1_abssi2"
5085 [(set (match_operand:SI 0 "s_register_operand" "=l")
5086 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
5087 (clobber (match_scratch:SI 2 "=&l"))]
5090 "TARGET_THUMB1 && reload_completed"
5091 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
5092 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
5093 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
5095 [(set_attr "length" "6")]
5098 (define_insn_and_split "*arm_neg_abssi2"
5099 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
5100 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
5101 (clobber (reg:CC CC_REGNUM))]
5104 "&& reload_completed"
5107 /* if (which_alternative == 0) */
5108 if (REGNO (operands[0]) == REGNO (operands[1]))
5110 /* Emit the pattern:
5111 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
5113 emit_insn (gen_rtx_SET (VOIDmode,
5114 gen_rtx_REG (CCmode, CC_REGNUM),
5115 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
5116 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5118 gen_rtx_REG (CCmode, CC_REGNUM),
5120 gen_rtx_SET (VOIDmode,
5122 (gen_rtx_MINUS (SImode,
5128 /* Emit the pattern:
5129 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
5131 emit_insn (gen_rtx_SET (VOIDmode,
5133 gen_rtx_XOR (SImode,
5134 gen_rtx_ASHIFTRT (SImode,
5138 emit_insn (gen_rtx_SET (VOIDmode,
5140 gen_rtx_MINUS (SImode,
5141 gen_rtx_ASHIFTRT (SImode,
5148 [(set_attr "conds" "clob,*")
5149 (set_attr "shift" "1")
5150 (set_attr "predicable" "no, yes")
5151 (set_attr "length" "8")]
5154 (define_insn_and_split "*thumb1_neg_abssi2"
5155 [(set (match_operand:SI 0 "s_register_operand" "=l")
5156 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
5157 (clobber (match_scratch:SI 2 "=&l"))]
5160 "TARGET_THUMB1 && reload_completed"
5161 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
5162 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
5163 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
5165 [(set_attr "length" "6")]
5168 (define_expand "abssf2"
5169 [(set (match_operand:SF 0 "s_register_operand" "")
5170 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
5171 "TARGET_32BIT && TARGET_HARD_FLOAT"
5174 (define_expand "absdf2"
5175 [(set (match_operand:DF 0 "s_register_operand" "")
5176 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
5177 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5180 (define_expand "sqrtsf2"
5181 [(set (match_operand:SF 0 "s_register_operand" "")
5182 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
5183 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP"
5186 (define_expand "sqrtdf2"
5187 [(set (match_operand:DF 0 "s_register_operand" "")
5188 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
5189 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
5192 (define_insn_and_split "one_cmpldi2"
5193 [(set (match_operand:DI 0 "s_register_operand" "=w,&r,&r,?w")
5194 (not:DI (match_operand:DI 1 "s_register_operand" " w, 0, r, w")))]
5201 "TARGET_32BIT && reload_completed
5202 && arm_general_register_operand (operands[0], DImode)"
5203 [(set (match_dup 0) (not:SI (match_dup 1)))
5204 (set (match_dup 2) (not:SI (match_dup 3)))]
5207 operands[2] = gen_highpart (SImode, operands[0]);
5208 operands[0] = gen_lowpart (SImode, operands[0]);
5209 operands[3] = gen_highpart (SImode, operands[1]);
5210 operands[1] = gen_lowpart (SImode, operands[1]);
5212 [(set_attr "length" "*,8,8,*")
5213 (set_attr "predicable" "no,yes,yes,no")
5214 (set_attr "neon_type" "neon_int_1,*,*,neon_int_1")
5215 (set_attr "arch" "neon_for_64bits,*,*,avoid_neon_for_64bits")]
5218 (define_expand "one_cmplsi2"
5219 [(set (match_operand:SI 0 "s_register_operand" "")
5220 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
5225 (define_insn "*arm_one_cmplsi2"
5226 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
5227 (not:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
5230 [(set_attr "predicable" "yes")
5231 (set_attr "predicable_short_it" "yes,no")
5232 (set_attr "arch" "t2,*")
5233 (set_attr "length" "4")
5234 (set_attr "insn" "mvn")]
5237 (define_insn "*thumb1_one_cmplsi2"
5238 [(set (match_operand:SI 0 "register_operand" "=l")
5239 (not:SI (match_operand:SI 1 "register_operand" "l")))]
5242 [(set_attr "length" "2")
5243 (set_attr "insn" "mvn")]
5246 (define_insn "*notsi_compare0"
5247 [(set (reg:CC_NOOV CC_REGNUM)
5248 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
5250 (set (match_operand:SI 0 "s_register_operand" "=r")
5251 (not:SI (match_dup 1)))]
5254 [(set_attr "conds" "set")
5255 (set_attr "insn" "mvn")]
5258 (define_insn "*notsi_compare0_scratch"
5259 [(set (reg:CC_NOOV CC_REGNUM)
5260 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
5262 (clobber (match_scratch:SI 0 "=r"))]
5265 [(set_attr "conds" "set")
5266 (set_attr "insn" "mvn")]
5269 ;; Fixed <--> Floating conversion insns
5271 (define_expand "floatsihf2"
5272 [(set (match_operand:HF 0 "general_operand" "")
5273 (float:HF (match_operand:SI 1 "general_operand" "")))]
5277 rtx op1 = gen_reg_rtx (SFmode);
5278 expand_float (op1, operands[1], 0);
5279 op1 = convert_to_mode (HFmode, op1, 0);
5280 emit_move_insn (operands[0], op1);
5285 (define_expand "floatdihf2"
5286 [(set (match_operand:HF 0 "general_operand" "")
5287 (float:HF (match_operand:DI 1 "general_operand" "")))]
5291 rtx op1 = gen_reg_rtx (SFmode);
5292 expand_float (op1, operands[1], 0);
5293 op1 = convert_to_mode (HFmode, op1, 0);
5294 emit_move_insn (operands[0], op1);
5299 (define_expand "floatsisf2"
5300 [(set (match_operand:SF 0 "s_register_operand" "")
5301 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
5302 "TARGET_32BIT && TARGET_HARD_FLOAT"
5306 (define_expand "floatsidf2"
5307 [(set (match_operand:DF 0 "s_register_operand" "")
5308 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
5309 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5313 (define_expand "fix_trunchfsi2"
5314 [(set (match_operand:SI 0 "general_operand" "")
5315 (fix:SI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
5319 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
5320 expand_fix (operands[0], op1, 0);
5325 (define_expand "fix_trunchfdi2"
5326 [(set (match_operand:DI 0 "general_operand" "")
5327 (fix:DI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
5331 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
5332 expand_fix (operands[0], op1, 0);
5337 (define_expand "fix_truncsfsi2"
5338 [(set (match_operand:SI 0 "s_register_operand" "")
5339 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
5340 "TARGET_32BIT && TARGET_HARD_FLOAT"
5344 (define_expand "fix_truncdfsi2"
5345 [(set (match_operand:SI 0 "s_register_operand" "")
5346 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
5347 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5353 (define_expand "truncdfsf2"
5354 [(set (match_operand:SF 0 "s_register_operand" "")
5356 (match_operand:DF 1 "s_register_operand" "")))]
5357 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5361 /* DFmode -> HFmode conversions have to go through SFmode. */
5362 (define_expand "truncdfhf2"
5363 [(set (match_operand:HF 0 "general_operand" "")
5365 (match_operand:DF 1 "general_operand" "")))]
5370 op1 = convert_to_mode (SFmode, operands[1], 0);
5371 op1 = convert_to_mode (HFmode, op1, 0);
5372 emit_move_insn (operands[0], op1);
5377 ;; Zero and sign extension instructions.
5379 (define_insn "zero_extend<mode>di2"
5380 [(set (match_operand:DI 0 "s_register_operand" "=w,r,?r,w")
5381 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>"
5382 "<qhs_zextenddi_cstr>")))]
5383 "TARGET_32BIT <qhs_zextenddi_cond>"
5385 [(set_attr "length" "8,4,8,8")
5386 (set_attr "arch" "neon_for_64bits,*,*,avoid_neon_for_64bits")
5387 (set_attr "ce_count" "2")
5388 (set_attr "predicable" "yes")]
5391 (define_insn "extend<mode>di2"
5392 [(set (match_operand:DI 0 "s_register_operand" "=w,r,?r,?r,w")
5393 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>"
5394 "<qhs_extenddi_cstr>")))]
5395 "TARGET_32BIT <qhs_sextenddi_cond>"
5397 [(set_attr "length" "8,4,8,8,8")
5398 (set_attr "ce_count" "2")
5399 (set_attr "shift" "1")
5400 (set_attr "predicable" "yes")
5401 (set_attr "arch" "neon_for_64bits,*,a,t,avoid_neon_for_64bits")]
5404 ;; Splits for all extensions to DImode
5406 [(set (match_operand:DI 0 "s_register_operand" "")
5407 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5408 "TARGET_32BIT && reload_completed && !IS_VFP_REGNUM (REGNO (operands[0]))"
5409 [(set (match_dup 0) (match_dup 1))]
5411 rtx lo_part = gen_lowpart (SImode, operands[0]);
5412 enum machine_mode src_mode = GET_MODE (operands[1]);
5414 if (REG_P (operands[0])
5415 && !reg_overlap_mentioned_p (operands[0], operands[1]))
5416 emit_clobber (operands[0]);
5417 if (!REG_P (lo_part) || src_mode != SImode
5418 || !rtx_equal_p (lo_part, operands[1]))
5420 if (src_mode == SImode)
5421 emit_move_insn (lo_part, operands[1]);
5423 emit_insn (gen_rtx_SET (VOIDmode, lo_part,
5424 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
5425 operands[1] = lo_part;
5427 operands[0] = gen_highpart (SImode, operands[0]);
5428 operands[1] = const0_rtx;
5432 [(set (match_operand:DI 0 "s_register_operand" "")
5433 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5434 "TARGET_32BIT && reload_completed && !IS_VFP_REGNUM (REGNO (operands[0]))"
5435 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
5437 rtx lo_part = gen_lowpart (SImode, operands[0]);
5438 enum machine_mode src_mode = GET_MODE (operands[1]);
5440 if (REG_P (operands[0])
5441 && !reg_overlap_mentioned_p (operands[0], operands[1]))
5442 emit_clobber (operands[0]);
5444 if (!REG_P (lo_part) || src_mode != SImode
5445 || !rtx_equal_p (lo_part, operands[1]))
5447 if (src_mode == SImode)
5448 emit_move_insn (lo_part, operands[1]);
5450 emit_insn (gen_rtx_SET (VOIDmode, lo_part,
5451 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
5452 operands[1] = lo_part;
5454 operands[0] = gen_highpart (SImode, operands[0]);
5457 (define_expand "zero_extendhisi2"
5458 [(set (match_operand:SI 0 "s_register_operand" "")
5459 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
5462 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
5464 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
5467 if (!arm_arch6 && !MEM_P (operands[1]))
5469 rtx t = gen_lowpart (SImode, operands[1]);
5470 rtx tmp = gen_reg_rtx (SImode);
5471 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5472 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
5478 [(set (match_operand:SI 0 "s_register_operand" "")
5479 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
5480 "!TARGET_THUMB2 && !arm_arch6"
5481 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5482 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
5484 operands[2] = gen_lowpart (SImode, operands[1]);
5487 (define_insn "*thumb1_zero_extendhisi2"
5488 [(set (match_operand:SI 0 "register_operand" "=l,l")
5489 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
5494 if (which_alternative == 0 && arm_arch6)
5495 return "uxth\t%0, %1";
5496 if (which_alternative == 0)
5499 mem = XEXP (operands[1], 0);
5501 if (GET_CODE (mem) == CONST)
5502 mem = XEXP (mem, 0);
5504 if (GET_CODE (mem) == PLUS)
5506 rtx a = XEXP (mem, 0);
5508 /* This can happen due to bugs in reload. */
5509 if (REG_P (a) && REGNO (a) == SP_REGNUM)
5512 ops[0] = operands[0];
5515 output_asm_insn ("mov\t%0, %1", ops);
5517 XEXP (mem, 0) = operands[0];
5521 return "ldrh\t%0, %1";
5523 [(set_attr_alternative "length"
5524 [(if_then_else (eq_attr "is_arch6" "yes")
5525 (const_int 2) (const_int 4))
5527 (set_attr "type" "simple_alu_shift, load_byte")]
5530 (define_insn "*arm_zero_extendhisi2"
5531 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5532 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5533 "TARGET_ARM && arm_arch4 && !arm_arch6"
5537 [(set_attr "type" "alu_shift,load_byte")
5538 (set_attr "predicable" "yes")]
5541 (define_insn "*arm_zero_extendhisi2_v6"
5542 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5543 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5544 "TARGET_ARM && arm_arch6"
5548 [(set_attr "predicable" "yes")
5549 (set_attr "type" "simple_alu_shift,load_byte")]
5552 (define_insn "*arm_zero_extendhisi2addsi"
5553 [(set (match_operand:SI 0 "s_register_operand" "=r")
5554 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5555 (match_operand:SI 2 "s_register_operand" "r")))]
5557 "uxtah%?\\t%0, %2, %1"
5558 [(set_attr "type" "alu_shift")
5559 (set_attr "predicable" "yes")
5560 (set_attr "predicable_short_it" "no")]
5563 (define_expand "zero_extendqisi2"
5564 [(set (match_operand:SI 0 "s_register_operand" "")
5565 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
5568 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
5570 emit_insn (gen_andsi3 (operands[0],
5571 gen_lowpart (SImode, operands[1]),
5575 if (!arm_arch6 && !MEM_P (operands[1]))
5577 rtx t = gen_lowpart (SImode, operands[1]);
5578 rtx tmp = gen_reg_rtx (SImode);
5579 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5580 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
5586 [(set (match_operand:SI 0 "s_register_operand" "")
5587 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
5589 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5590 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
5592 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5595 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
5600 (define_insn "*thumb1_zero_extendqisi2"
5601 [(set (match_operand:SI 0 "register_operand" "=l,l")
5602 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
5603 "TARGET_THUMB1 && !arm_arch6"
5607 [(set_attr "length" "4,2")
5608 (set_attr "type" "alu_shift,load_byte")
5609 (set_attr "pool_range" "*,32")]
5612 (define_insn "*thumb1_zero_extendqisi2_v6"
5613 [(set (match_operand:SI 0 "register_operand" "=l,l")
5614 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
5615 "TARGET_THUMB1 && arm_arch6"
5619 [(set_attr "length" "2")
5620 (set_attr "type" "simple_alu_shift,load_byte")]
5623 (define_insn "*arm_zero_extendqisi2"
5624 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5625 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
5626 "TARGET_ARM && !arm_arch6"
5629 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
5630 [(set_attr "length" "8,4")
5631 (set_attr "type" "alu_shift,load_byte")
5632 (set_attr "predicable" "yes")]
5635 (define_insn "*arm_zero_extendqisi2_v6"
5636 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5637 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
5638 "TARGET_ARM && arm_arch6"
5641 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
5642 [(set_attr "type" "simple_alu_shift,load_byte")
5643 (set_attr "predicable" "yes")]
5646 (define_insn "*arm_zero_extendqisi2addsi"
5647 [(set (match_operand:SI 0 "s_register_operand" "=r")
5648 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5649 (match_operand:SI 2 "s_register_operand" "r")))]
5651 "uxtab%?\\t%0, %2, %1"
5652 [(set_attr "predicable" "yes")
5653 (set_attr "predicable_short_it" "no")
5654 (set_attr "insn" "xtab")
5655 (set_attr "type" "alu_shift")]
5659 [(set (match_operand:SI 0 "s_register_operand" "")
5660 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
5661 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5662 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
5663 [(set (match_dup 2) (match_dup 1))
5664 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5669 [(set (match_operand:SI 0 "s_register_operand" "")
5670 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
5671 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5672 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
5673 [(set (match_dup 2) (match_dup 1))
5674 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5680 [(set (match_operand:SI 0 "s_register_operand" "")
5681 (ior_xor:SI (and:SI (ashift:SI
5682 (match_operand:SI 1 "s_register_operand" "")
5683 (match_operand:SI 2 "const_int_operand" ""))
5684 (match_operand:SI 3 "const_int_operand" ""))
5686 (match_operator 5 "subreg_lowpart_operator"
5687 [(match_operand:SI 4 "s_register_operand" "")]))))]
5689 && ((unsigned HOST_WIDE_INT) INTVAL (operands[3])
5690 == (GET_MODE_MASK (GET_MODE (operands[5]))
5691 & (GET_MODE_MASK (GET_MODE (operands[5]))
5692 << (INTVAL (operands[2])))))"
5693 [(set (match_dup 0) (ior_xor:SI (ashift:SI (match_dup 1) (match_dup 2))
5695 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
5696 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
5699 (define_insn "*compareqi_eq0"
5700 [(set (reg:CC_Z CC_REGNUM)
5701 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
5705 [(set_attr "conds" "set")
5706 (set_attr "predicable" "yes")
5707 (set_attr "predicable_short_it" "no")]
5710 (define_expand "extendhisi2"
5711 [(set (match_operand:SI 0 "s_register_operand" "")
5712 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
5717 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
5720 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
5722 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
5726 if (!arm_arch6 && !MEM_P (operands[1]))
5728 rtx t = gen_lowpart (SImode, operands[1]);
5729 rtx tmp = gen_reg_rtx (SImode);
5730 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5731 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
5738 [(set (match_operand:SI 0 "register_operand" "")
5739 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
5740 (clobber (match_scratch:SI 2 ""))])]
5742 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5743 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5745 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5748 ;; We used to have an early-clobber on the scratch register here.
5749 ;; However, there's a bug somewhere in reload which means that this
5750 ;; can be partially ignored during spill allocation if the memory
5751 ;; address also needs reloading; this causes us to die later on when
5752 ;; we try to verify the operands. Fortunately, we don't really need
5753 ;; the early-clobber: we can always use operand 0 if operand 2
5754 ;; overlaps the address.
5755 (define_insn "thumb1_extendhisi2"
5756 [(set (match_operand:SI 0 "register_operand" "=l,l")
5757 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
5758 (clobber (match_scratch:SI 2 "=X,l"))]
5765 if (which_alternative == 0 && !arm_arch6)
5767 if (which_alternative == 0)
5768 return \"sxth\\t%0, %1\";
5770 mem = XEXP (operands[1], 0);
5772 /* This code used to try to use 'V', and fix the address only if it was
5773 offsettable, but this fails for e.g. REG+48 because 48 is outside the
5774 range of QImode offsets, and offsettable_address_p does a QImode
5777 if (GET_CODE (mem) == CONST)
5778 mem = XEXP (mem, 0);
5780 if (GET_CODE (mem) == LABEL_REF)
5781 return \"ldr\\t%0, %1\";
5783 if (GET_CODE (mem) == PLUS)
5785 rtx a = XEXP (mem, 0);
5786 rtx b = XEXP (mem, 1);
5788 if (GET_CODE (a) == LABEL_REF
5790 return \"ldr\\t%0, %1\";
5793 return \"ldrsh\\t%0, %1\";
5801 ops[2] = const0_rtx;
5804 gcc_assert (REG_P (ops[1]));
5806 ops[0] = operands[0];
5807 if (reg_mentioned_p (operands[2], ops[1]))
5810 ops[3] = operands[2];
5811 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
5814 [(set_attr_alternative "length"
5815 [(if_then_else (eq_attr "is_arch6" "yes")
5816 (const_int 2) (const_int 4))
5818 (set_attr "type" "simple_alu_shift,load_byte")
5819 (set_attr "pool_range" "*,1018")]
5822 ;; This pattern will only be used when ldsh is not available
5823 (define_expand "extendhisi2_mem"
5824 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5826 (zero_extend:SI (match_dup 7)))
5827 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
5828 (set (match_operand:SI 0 "" "")
5829 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
5834 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5836 mem1 = change_address (operands[1], QImode, addr);
5837 mem2 = change_address (operands[1], QImode,
5838 plus_constant (Pmode, addr, 1));
5839 operands[0] = gen_lowpart (SImode, operands[0]);
5841 operands[2] = gen_reg_rtx (SImode);
5842 operands[3] = gen_reg_rtx (SImode);
5843 operands[6] = gen_reg_rtx (SImode);
5846 if (BYTES_BIG_ENDIAN)
5848 operands[4] = operands[2];
5849 operands[5] = operands[3];
5853 operands[4] = operands[3];
5854 operands[5] = operands[2];
5860 [(set (match_operand:SI 0 "register_operand" "")
5861 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
5863 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5864 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5866 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5869 (define_insn "*arm_extendhisi2"
5870 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5871 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5872 "TARGET_ARM && arm_arch4 && !arm_arch6"
5876 [(set_attr "length" "8,4")
5877 (set_attr "type" "alu_shift,load_byte")
5878 (set_attr "predicable" "yes")
5879 (set_attr "pool_range" "*,256")
5880 (set_attr "neg_pool_range" "*,244")]
5883 ;; ??? Check Thumb-2 pool range
5884 (define_insn "*arm_extendhisi2_v6"
5885 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5886 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5887 "TARGET_32BIT && arm_arch6"
5891 [(set_attr "type" "simple_alu_shift,load_byte")
5892 (set_attr "predicable" "yes")
5893 (set_attr "predicable_short_it" "no")
5894 (set_attr "pool_range" "*,256")
5895 (set_attr "neg_pool_range" "*,244")]
5898 (define_insn "*arm_extendhisi2addsi"
5899 [(set (match_operand:SI 0 "s_register_operand" "=r")
5900 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5901 (match_operand:SI 2 "s_register_operand" "r")))]
5903 "sxtah%?\\t%0, %2, %1"
5906 (define_expand "extendqihi2"
5908 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
5910 (set (match_operand:HI 0 "s_register_operand" "")
5911 (ashiftrt:SI (match_dup 2)
5916 if (arm_arch4 && MEM_P (operands[1]))
5918 emit_insn (gen_rtx_SET (VOIDmode,
5920 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
5923 if (!s_register_operand (operands[1], QImode))
5924 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5925 operands[0] = gen_lowpart (SImode, operands[0]);
5926 operands[1] = gen_lowpart (SImode, operands[1]);
5927 operands[2] = gen_reg_rtx (SImode);
5931 (define_insn "*arm_extendqihi_insn"
5932 [(set (match_operand:HI 0 "s_register_operand" "=r")
5933 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
5934 "TARGET_ARM && arm_arch4"
5935 "ldr%(sb%)\\t%0, %1"
5936 [(set_attr "type" "load_byte")
5937 (set_attr "predicable" "yes")
5938 (set_attr "pool_range" "256")
5939 (set_attr "neg_pool_range" "244")]
5942 (define_expand "extendqisi2"
5943 [(set (match_operand:SI 0 "s_register_operand" "")
5944 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")))]
5947 if (!arm_arch4 && MEM_P (operands[1]))
5948 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5950 if (!arm_arch6 && !MEM_P (operands[1]))
5952 rtx t = gen_lowpart (SImode, operands[1]);
5953 rtx tmp = gen_reg_rtx (SImode);
5954 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5955 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
5961 [(set (match_operand:SI 0 "register_operand" "")
5962 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
5964 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5965 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
5967 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5970 (define_insn "*arm_extendqisi"
5971 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5972 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5973 "TARGET_ARM && arm_arch4 && !arm_arch6"
5977 [(set_attr "length" "8,4")
5978 (set_attr "type" "alu_shift,load_byte")
5979 (set_attr "predicable" "yes")
5980 (set_attr "pool_range" "*,256")
5981 (set_attr "neg_pool_range" "*,244")]
5984 (define_insn "*arm_extendqisi_v6"
5985 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5987 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5988 "TARGET_ARM && arm_arch6"
5992 [(set_attr "type" "simple_alu_shift,load_byte")
5993 (set_attr "predicable" "yes")
5994 (set_attr "pool_range" "*,256")
5995 (set_attr "neg_pool_range" "*,244")]
5998 (define_insn "*arm_extendqisi2addsi"
5999 [(set (match_operand:SI 0 "s_register_operand" "=r")
6000 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
6001 (match_operand:SI 2 "s_register_operand" "r")))]
6003 "sxtab%?\\t%0, %2, %1"
6004 [(set_attr "type" "alu_shift")
6005 (set_attr "insn" "xtab")
6006 (set_attr "predicable" "yes")
6007 (set_attr "predicable_short_it" "no")]
6011 [(set (match_operand:SI 0 "register_operand" "")
6012 (sign_extend:SI (match_operand:QI 1 "memory_operand" "")))]
6013 "TARGET_THUMB1 && reload_completed"
6014 [(set (match_dup 0) (match_dup 2))
6015 (set (match_dup 0) (sign_extend:SI (match_dup 3)))]
6017 rtx addr = XEXP (operands[1], 0);
6019 if (GET_CODE (addr) == CONST)
6020 addr = XEXP (addr, 0);
6022 if (GET_CODE (addr) == PLUS
6023 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
6024 /* No split necessary. */
6027 if (GET_CODE (addr) == PLUS
6028 && !REG_P (XEXP (addr, 0)) && !REG_P (XEXP (addr, 1)))
6031 if (reg_overlap_mentioned_p (operands[0], addr))
6033 rtx t = gen_lowpart (QImode, operands[0]);
6034 emit_move_insn (t, operands[1]);
6035 emit_insn (gen_thumb1_extendqisi2 (operands[0], t));
6041 addr = gen_rtx_PLUS (Pmode, addr, operands[0]);
6042 operands[2] = const0_rtx;
6044 else if (GET_CODE (addr) != PLUS)
6046 else if (REG_P (XEXP (addr, 0)))
6048 operands[2] = XEXP (addr, 1);
6049 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 0), operands[0]);
6053 operands[2] = XEXP (addr, 0);
6054 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 1), operands[0]);
6057 operands[3] = change_address (operands[1], QImode, addr);
6061 [(set (match_operand:SI 0 "register_operand" "")
6062 (plus:SI (match_dup 0) (match_operand 1 "const_int_operand")))
6063 (set (match_operand:SI 2 "register_operand" "") (const_int 0))
6064 (set (match_operand:SI 3 "register_operand" "")
6065 (sign_extend:SI (match_operand:QI 4 "memory_operand" "")))]
6067 && GET_CODE (XEXP (operands[4], 0)) == PLUS
6068 && rtx_equal_p (operands[0], XEXP (XEXP (operands[4], 0), 0))
6069 && rtx_equal_p (operands[2], XEXP (XEXP (operands[4], 0), 1))
6070 && (peep2_reg_dead_p (3, operands[0])
6071 || rtx_equal_p (operands[0], operands[3]))
6072 && (peep2_reg_dead_p (3, operands[2])
6073 || rtx_equal_p (operands[2], operands[3]))"
6074 [(set (match_dup 2) (match_dup 1))
6075 (set (match_dup 3) (sign_extend:SI (match_dup 4)))]
6077 rtx addr = gen_rtx_PLUS (Pmode, operands[0], operands[2]);
6078 operands[4] = change_address (operands[4], QImode, addr);
6081 (define_insn "thumb1_extendqisi2"
6082 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
6083 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
6088 if (which_alternative == 0 && arm_arch6)
6089 return "sxtb\\t%0, %1";
6090 if (which_alternative == 0)
6093 addr = XEXP (operands[1], 0);
6094 if (GET_CODE (addr) == PLUS
6095 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
6096 return "ldrsb\\t%0, %1";
6100 [(set_attr_alternative "length"
6101 [(if_then_else (eq_attr "is_arch6" "yes")
6102 (const_int 2) (const_int 4))
6104 (if_then_else (eq_attr "is_arch6" "yes")
6105 (const_int 4) (const_int 6))])
6106 (set_attr "type" "simple_alu_shift,load_byte,load_byte")]
6109 (define_expand "extendsfdf2"
6110 [(set (match_operand:DF 0 "s_register_operand" "")
6111 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
6112 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6116 /* HFmode -> DFmode conversions have to go through SFmode. */
6117 (define_expand "extendhfdf2"
6118 [(set (match_operand:DF 0 "general_operand" "")
6119 (float_extend:DF (match_operand:HF 1 "general_operand" "")))]
6124 op1 = convert_to_mode (SFmode, operands[1], 0);
6125 op1 = convert_to_mode (DFmode, op1, 0);
6126 emit_insn (gen_movdf (operands[0], op1));
6131 ;; Move insns (including loads and stores)
6133 ;; XXX Just some ideas about movti.
6134 ;; I don't think these are a good idea on the arm, there just aren't enough
6136 ;;(define_expand "loadti"
6137 ;; [(set (match_operand:TI 0 "s_register_operand" "")
6138 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
6141 ;;(define_expand "storeti"
6142 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
6143 ;; (match_operand:TI 1 "s_register_operand" ""))]
6146 ;;(define_expand "movti"
6147 ;; [(set (match_operand:TI 0 "general_operand" "")
6148 ;; (match_operand:TI 1 "general_operand" ""))]
6154 ;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
6155 ;; operands[1] = copy_to_reg (operands[1]);
6156 ;; if (MEM_P (operands[0]))
6157 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
6158 ;; else if (MEM_P (operands[1]))
6159 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
6163 ;; emit_insn (insn);
6167 ;; Recognize garbage generated above.
6170 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
6171 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
6175 ;; register mem = (which_alternative < 3);
6176 ;; register const char *template;
6178 ;; operands[mem] = XEXP (operands[mem], 0);
6179 ;; switch (which_alternative)
6181 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
6182 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
6183 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
6184 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
6185 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
6186 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
6188 ;; output_asm_insn (template, operands);
6192 (define_expand "movdi"
6193 [(set (match_operand:DI 0 "general_operand" "")
6194 (match_operand:DI 1 "general_operand" ""))]
6197 if (can_create_pseudo_p ())
6199 if (!REG_P (operands[0]))
6200 operands[1] = force_reg (DImode, operands[1]);
6205 (define_insn "*arm_movdi"
6206 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, q, m")
6207 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,q"))]
6209 && !(TARGET_HARD_FLOAT && TARGET_VFP)
6211 && ( register_operand (operands[0], DImode)
6212 || register_operand (operands[1], DImode))"
6214 switch (which_alternative)
6221 return output_move_double (operands, true, NULL);
6224 [(set_attr "length" "8,12,16,8,8")
6225 (set_attr "type" "*,*,*,load2,store2")
6226 (set_attr "arm_pool_range" "*,*,*,1020,*")
6227 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6228 (set_attr "thumb2_pool_range" "*,*,*,4094,*")
6229 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6233 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
6234 (match_operand:ANY64 1 "const_double_operand" ""))]
6237 && (arm_const_double_inline_cost (operands[1])
6238 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
6241 arm_split_constant (SET, SImode, curr_insn,
6242 INTVAL (gen_lowpart (SImode, operands[1])),
6243 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
6244 arm_split_constant (SET, SImode, curr_insn,
6245 INTVAL (gen_highpart_mode (SImode,
6246 GET_MODE (operands[0]),
6248 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
6253 ; If optimizing for size, or if we have load delay slots, then
6254 ; we want to split the constant into two separate operations.
6255 ; In both cases this may split a trivial part into a single data op
6256 ; leaving a single complex constant to load. We can also get longer
6257 ; offsets in a LDR which means we get better chances of sharing the pool
6258 ; entries. Finally, we can normally do a better job of scheduling
6259 ; LDR instructions than we can with LDM.
6260 ; This pattern will only match if the one above did not.
6262 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
6263 (match_operand:ANY64 1 "const_double_operand" ""))]
6264 "TARGET_ARM && reload_completed
6265 && arm_const_double_by_parts (operands[1])"
6266 [(set (match_dup 0) (match_dup 1))
6267 (set (match_dup 2) (match_dup 3))]
6269 operands[2] = gen_highpart (SImode, operands[0]);
6270 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
6272 operands[0] = gen_lowpart (SImode, operands[0]);
6273 operands[1] = gen_lowpart (SImode, operands[1]);
6278 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
6279 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
6280 "TARGET_EITHER && reload_completed"
6281 [(set (match_dup 0) (match_dup 1))
6282 (set (match_dup 2) (match_dup 3))]
6284 operands[2] = gen_highpart (SImode, operands[0]);
6285 operands[3] = gen_highpart (SImode, operands[1]);
6286 operands[0] = gen_lowpart (SImode, operands[0]);
6287 operands[1] = gen_lowpart (SImode, operands[1]);
6289 /* Handle a partial overlap. */
6290 if (rtx_equal_p (operands[0], operands[3]))
6292 rtx tmp0 = operands[0];
6293 rtx tmp1 = operands[1];
6295 operands[0] = operands[2];
6296 operands[1] = operands[3];
6303 ;; We can't actually do base+index doubleword loads if the index and
6304 ;; destination overlap. Split here so that we at least have chance to
6307 [(set (match_operand:DI 0 "s_register_operand" "")
6308 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
6309 (match_operand:SI 2 "s_register_operand" ""))))]
6311 && reg_overlap_mentioned_p (operands[0], operands[1])
6312 && reg_overlap_mentioned_p (operands[0], operands[2])"
6314 (plus:SI (match_dup 1)
6317 (mem:DI (match_dup 4)))]
6319 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
6323 ;;; ??? This should have alternatives for constants.
6324 ;;; ??? This was originally identical to the movdf_insn pattern.
6325 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
6326 ;;; thumb_reorg with a memory reference.
6327 (define_insn "*thumb1_movdi_insn"
6328 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
6329 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
6331 && ( register_operand (operands[0], DImode)
6332 || register_operand (operands[1], DImode))"
6335 switch (which_alternative)
6339 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6340 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
6341 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
6343 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
6345 operands[1] = GEN_INT (- INTVAL (operands[1]));
6346 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
6348 return \"ldmia\\t%1, {%0, %H0}\";
6350 return \"stmia\\t%0, {%1, %H1}\";
6352 return thumb_load_double_from_address (operands);
6354 operands[2] = gen_rtx_MEM (SImode,
6355 plus_constant (Pmode, XEXP (operands[0], 0), 4));
6356 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
6359 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6360 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
6361 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
6364 [(set_attr "length" "4,4,6,2,2,6,4,4")
6365 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
6366 (set_attr "insn" "*,mov,*,*,*,*,*,mov")
6367 (set_attr "pool_range" "*,*,*,*,*,1018,*,*")]
6370 (define_expand "movsi"
6371 [(set (match_operand:SI 0 "general_operand" "")
6372 (match_operand:SI 1 "general_operand" ""))]
6376 rtx base, offset, tmp;
6380 /* Everything except mem = const or mem = mem can be done easily. */
6381 if (MEM_P (operands[0]))
6382 operands[1] = force_reg (SImode, operands[1]);
6383 if (arm_general_register_operand (operands[0], SImode)
6384 && CONST_INT_P (operands[1])
6385 && !(const_ok_for_arm (INTVAL (operands[1]))
6386 || const_ok_for_arm (~INTVAL (operands[1]))))
6388 arm_split_constant (SET, SImode, NULL_RTX,
6389 INTVAL (operands[1]), operands[0], NULL_RTX,
6390 optimize && can_create_pseudo_p ());
6394 else /* TARGET_THUMB1... */
6396 if (can_create_pseudo_p ())
6398 if (!REG_P (operands[0]))
6399 operands[1] = force_reg (SImode, operands[1]);
6403 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
6405 split_const (operands[1], &base, &offset);
6406 if (GET_CODE (base) == SYMBOL_REF
6407 && !offset_within_block_p (base, INTVAL (offset)))
6409 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
6410 emit_move_insn (tmp, base);
6411 emit_insn (gen_addsi3 (operands[0], tmp, offset));
6416 /* Recognize the case where operand[1] is a reference to thread-local
6417 data and load its address to a register. */
6418 if (arm_tls_referenced_p (operands[1]))
6420 rtx tmp = operands[1];
6423 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
6425 addend = XEXP (XEXP (tmp, 0), 1);
6426 tmp = XEXP (XEXP (tmp, 0), 0);
6429 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
6430 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
6432 tmp = legitimize_tls_address (tmp,
6433 !can_create_pseudo_p () ? operands[0] : 0);
6436 tmp = gen_rtx_PLUS (SImode, tmp, addend);
6437 tmp = force_operand (tmp, operands[0]);
6442 && (CONSTANT_P (operands[1])
6443 || symbol_mentioned_p (operands[1])
6444 || label_mentioned_p (operands[1])))
6445 operands[1] = legitimize_pic_address (operands[1], SImode,
6446 (!can_create_pseudo_p ()
6453 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
6454 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
6455 ;; so this does not matter.
6456 (define_insn "*arm_movt"
6457 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
6458 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
6459 (match_operand:SI 2 "general_operand" "i")))]
6461 "movt%?\t%0, #:upper16:%c2"
6462 [(set_attr "predicable" "yes")
6463 (set_attr "predicable_short_it" "no")
6464 (set_attr "length" "4")]
6467 (define_insn "*arm_movsi_insn"
6468 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
6469 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
6470 "TARGET_ARM && ! TARGET_IWMMXT
6471 && !(TARGET_HARD_FLOAT && TARGET_VFP)
6472 && ( register_operand (operands[0], SImode)
6473 || register_operand (operands[1], SImode))"
6481 [(set_attr "type" "*,simple_alu_imm,simple_alu_imm,simple_alu_imm,load1,store1")
6482 (set_attr "insn" "mov,mov,mvn,mov,*,*")
6483 (set_attr "predicable" "yes")
6484 (set_attr "pool_range" "*,*,*,*,4096,*")
6485 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
6489 [(set (match_operand:SI 0 "arm_general_register_operand" "")
6490 (match_operand:SI 1 "const_int_operand" ""))]
6492 && (!(const_ok_for_arm (INTVAL (operands[1]))
6493 || const_ok_for_arm (~INTVAL (operands[1]))))"
6494 [(clobber (const_int 0))]
6496 arm_split_constant (SET, SImode, NULL_RTX,
6497 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
6502 ;; Split symbol_refs at the later stage (after cprop), instead of generating
6503 ;; movt/movw pair directly at expand. Otherwise corresponding high_sum
6504 ;; and lo_sum would be merged back into memory load at cprop. However,
6505 ;; if the default is to prefer movt/movw rather than a load from the constant
6506 ;; pool, the performance is better.
6508 [(set (match_operand:SI 0 "arm_general_register_operand" "")
6509 (match_operand:SI 1 "general_operand" ""))]
6511 && TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
6512 && !flag_pic && !target_word_relocations
6513 && !arm_tls_referenced_p (operands[1])"
6514 [(clobber (const_int 0))]
6516 arm_emit_movpair (operands[0], operands[1]);
6520 (define_insn "*thumb1_movsi_insn"
6521 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*l*h*k")
6522 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*l*h*k"))]
6524 && ( register_operand (operands[0], SImode)
6525 || register_operand (operands[1], SImode))"
6536 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
6537 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
6538 (set_attr "pool_range" "*,*,*,*,*,*,1018,*,*")
6539 (set_attr "conds" "set,clob,*,*,nocond,nocond,nocond,nocond,nocond")])
6542 [(set (match_operand:SI 0 "register_operand" "")
6543 (match_operand:SI 1 "const_int_operand" ""))]
6544 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
6545 [(set (match_dup 2) (match_dup 1))
6546 (set (match_dup 0) (neg:SI (match_dup 2)))]
6549 operands[1] = GEN_INT (- INTVAL (operands[1]));
6550 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
6555 [(set (match_operand:SI 0 "register_operand" "")
6556 (match_operand:SI 1 "const_int_operand" ""))]
6557 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
6558 [(set (match_dup 2) (match_dup 1))
6559 (set (match_dup 0) (ashift:SI (match_dup 2) (match_dup 3)))]
6562 unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
6563 unsigned HOST_WIDE_INT mask = 0xff;
6566 for (i = 0; i < 25; i++)
6567 if ((val & (mask << i)) == val)
6570 /* Don't split if the shift is zero. */
6574 operands[1] = GEN_INT (val >> i);
6575 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
6576 operands[3] = GEN_INT (i);
6580 ;; For thumb1 split imm move [256-510] into mov [1-255] and add #255
6582 [(set (match_operand:SI 0 "register_operand" "")
6583 (match_operand:SI 1 "const_int_operand" ""))]
6584 "TARGET_THUMB1 && satisfies_constraint_Pe (operands[1])"
6585 [(set (match_dup 2) (match_dup 1))
6586 (set (match_dup 0) (plus:SI (match_dup 2) (match_dup 3)))]
6589 operands[1] = GEN_INT (INTVAL (operands[1]) - 255);
6590 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
6591 operands[3] = GEN_INT (255);
6595 ;; When generating pic, we need to load the symbol offset into a register.
6596 ;; So that the optimizer does not confuse this with a normal symbol load
6597 ;; we use an unspec. The offset will be loaded from a constant pool entry,
6598 ;; since that is the only type of relocation we can use.
6600 ;; Wrap calculation of the whole PIC address in a single pattern for the
6601 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
6602 ;; a PIC address involves two loads from memory, so we want to CSE it
6603 ;; as often as possible.
6604 ;; This pattern will be split into one of the pic_load_addr_* patterns
6605 ;; and a move after GCSE optimizations.
6607 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
6608 (define_expand "calculate_pic_address"
6609 [(set (match_operand:SI 0 "register_operand" "")
6610 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
6611 (unspec:SI [(match_operand:SI 2 "" "")]
6616 ;; Split calculate_pic_address into pic_load_addr_* and a move.
6618 [(set (match_operand:SI 0 "register_operand" "")
6619 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
6620 (unspec:SI [(match_operand:SI 2 "" "")]
6623 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
6624 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
6625 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
6628 ;; operand1 is the memory address to go into
6629 ;; pic_load_addr_32bit.
6630 ;; operand2 is the PIC label to be emitted
6631 ;; from pic_add_dot_plus_eight.
6632 ;; We do this to allow hoisting of the entire insn.
6633 (define_insn_and_split "pic_load_addr_unified"
6634 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
6635 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
6636 (match_operand:SI 2 "" "")]
6637 UNSPEC_PIC_UNIFIED))]
6640 "&& reload_completed"
6641 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
6642 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
6643 (match_dup 2)] UNSPEC_PIC_BASE))]
6644 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
6645 [(set_attr "type" "load1,load1,load1")
6646 (set_attr "pool_range" "4096,4094,1022")
6647 (set_attr "neg_pool_range" "4084,0,0")
6648 (set_attr "arch" "a,t2,t1")
6649 (set_attr "length" "8,6,4")]
6652 ;; The rather odd constraints on the following are to force reload to leave
6653 ;; the insn alone, and to force the minipool generation pass to then move
6654 ;; the GOT symbol to memory.
6656 (define_insn "pic_load_addr_32bit"
6657 [(set (match_operand:SI 0 "s_register_operand" "=r")
6658 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6659 "TARGET_32BIT && flag_pic"
6661 [(set_attr "type" "load1")
6662 (set (attr "pool_range")
6663 (if_then_else (eq_attr "is_thumb" "no")
6666 (set (attr "neg_pool_range")
6667 (if_then_else (eq_attr "is_thumb" "no")
6672 (define_insn "pic_load_addr_thumb1"
6673 [(set (match_operand:SI 0 "s_register_operand" "=l")
6674 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6675 "TARGET_THUMB1 && flag_pic"
6677 [(set_attr "type" "load1")
6678 (set (attr "pool_range") (const_int 1018))]
6681 (define_insn "pic_add_dot_plus_four"
6682 [(set (match_operand:SI 0 "register_operand" "=r")
6683 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
6685 (match_operand 2 "" "")]
6689 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6690 INTVAL (operands[2]));
6691 return \"add\\t%0, %|pc\";
6693 [(set_attr "length" "2")]
6696 (define_insn "pic_add_dot_plus_eight"
6697 [(set (match_operand:SI 0 "register_operand" "=r")
6698 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6700 (match_operand 2 "" "")]
6704 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6705 INTVAL (operands[2]));
6706 return \"add%?\\t%0, %|pc, %1\";
6708 [(set_attr "predicable" "yes")]
6711 (define_insn "tls_load_dot_plus_eight"
6712 [(set (match_operand:SI 0 "register_operand" "=r")
6713 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6715 (match_operand 2 "" "")]
6719 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6720 INTVAL (operands[2]));
6721 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
6723 [(set_attr "predicable" "yes")]
6726 ;; PIC references to local variables can generate pic_add_dot_plus_eight
6727 ;; followed by a load. These sequences can be crunched down to
6728 ;; tls_load_dot_plus_eight by a peephole.
6731 [(set (match_operand:SI 0 "register_operand" "")
6732 (unspec:SI [(match_operand:SI 3 "register_operand" "")
6734 (match_operand 1 "" "")]
6736 (set (match_operand:SI 2 "arm_general_register_operand" "")
6737 (mem:SI (match_dup 0)))]
6738 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
6740 (mem:SI (unspec:SI [(match_dup 3)
6747 (define_insn "pic_offset_arm"
6748 [(set (match_operand:SI 0 "register_operand" "=r")
6749 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
6750 (unspec:SI [(match_operand:SI 2 "" "X")]
6751 UNSPEC_PIC_OFFSET))))]
6752 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
6753 "ldr%?\\t%0, [%1,%2]"
6754 [(set_attr "type" "load1")]
6757 (define_expand "builtin_setjmp_receiver"
6758 [(label_ref (match_operand 0 "" ""))]
6762 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
6764 if (arm_pic_register != INVALID_REGNUM)
6765 arm_load_pic_register (1UL << 3);
6769 ;; If copying one reg to another we can set the condition codes according to
6770 ;; its value. Such a move is common after a return from subroutine and the
6771 ;; result is being tested against zero.
6773 (define_insn "*movsi_compare0"
6774 [(set (reg:CC CC_REGNUM)
6775 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
6777 (set (match_operand:SI 0 "s_register_operand" "=r,r")
6783 [(set_attr "conds" "set")
6784 (set_attr "type" "simple_alu_imm,simple_alu_imm")]
6787 ;; Subroutine to store a half word from a register into memory.
6788 ;; Operand 0 is the source register (HImode)
6789 ;; Operand 1 is the destination address in a register (SImode)
6791 ;; In both this routine and the next, we must be careful not to spill
6792 ;; a memory address of reg+large_const into a separate PLUS insn, since this
6793 ;; can generate unrecognizable rtl.
6795 (define_expand "storehi"
6796 [;; store the low byte
6797 (set (match_operand 1 "" "") (match_dup 3))
6798 ;; extract the high byte
6800 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6801 ;; store the high byte
6802 (set (match_dup 4) (match_dup 5))]
6806 rtx op1 = operands[1];
6807 rtx addr = XEXP (op1, 0);
6808 enum rtx_code code = GET_CODE (addr);
6810 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6812 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
6814 operands[4] = adjust_address (op1, QImode, 1);
6815 operands[1] = adjust_address (operands[1], QImode, 0);
6816 operands[3] = gen_lowpart (QImode, operands[0]);
6817 operands[0] = gen_lowpart (SImode, operands[0]);
6818 operands[2] = gen_reg_rtx (SImode);
6819 operands[5] = gen_lowpart (QImode, operands[2]);
6823 (define_expand "storehi_bigend"
6824 [(set (match_dup 4) (match_dup 3))
6826 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6827 (set (match_operand 1 "" "") (match_dup 5))]
6831 rtx op1 = operands[1];
6832 rtx addr = XEXP (op1, 0);
6833 enum rtx_code code = GET_CODE (addr);
6835 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6837 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
6839 operands[4] = adjust_address (op1, QImode, 1);
6840 operands[1] = adjust_address (operands[1], QImode, 0);
6841 operands[3] = gen_lowpart (QImode, operands[0]);
6842 operands[0] = gen_lowpart (SImode, operands[0]);
6843 operands[2] = gen_reg_rtx (SImode);
6844 operands[5] = gen_lowpart (QImode, operands[2]);
6848 ;; Subroutine to store a half word integer constant into memory.
6849 (define_expand "storeinthi"
6850 [(set (match_operand 0 "" "")
6851 (match_operand 1 "" ""))
6852 (set (match_dup 3) (match_dup 2))]
6856 HOST_WIDE_INT value = INTVAL (operands[1]);
6857 rtx addr = XEXP (operands[0], 0);
6858 rtx op0 = operands[0];
6859 enum rtx_code code = GET_CODE (addr);
6861 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6863 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
6865 operands[1] = gen_reg_rtx (SImode);
6866 if (BYTES_BIG_ENDIAN)
6868 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
6869 if ((value & 255) == ((value >> 8) & 255))
6870 operands[2] = operands[1];
6873 operands[2] = gen_reg_rtx (SImode);
6874 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
6879 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
6880 if ((value & 255) == ((value >> 8) & 255))
6881 operands[2] = operands[1];
6884 operands[2] = gen_reg_rtx (SImode);
6885 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
6889 operands[3] = adjust_address (op0, QImode, 1);
6890 operands[0] = adjust_address (operands[0], QImode, 0);
6891 operands[2] = gen_lowpart (QImode, operands[2]);
6892 operands[1] = gen_lowpart (QImode, operands[1]);
6896 (define_expand "storehi_single_op"
6897 [(set (match_operand:HI 0 "memory_operand" "")
6898 (match_operand:HI 1 "general_operand" ""))]
6899 "TARGET_32BIT && arm_arch4"
6901 if (!s_register_operand (operands[1], HImode))
6902 operands[1] = copy_to_mode_reg (HImode, operands[1]);
6906 (define_expand "movhi"
6907 [(set (match_operand:HI 0 "general_operand" "")
6908 (match_operand:HI 1 "general_operand" ""))]
6913 if (can_create_pseudo_p ())
6915 if (MEM_P (operands[0]))
6919 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
6922 if (CONST_INT_P (operands[1]))
6923 emit_insn (gen_storeinthi (operands[0], operands[1]));
6926 if (MEM_P (operands[1]))
6927 operands[1] = force_reg (HImode, operands[1]);
6928 if (BYTES_BIG_ENDIAN)
6929 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
6931 emit_insn (gen_storehi (operands[1], operands[0]));
6935 /* Sign extend a constant, and keep it in an SImode reg. */
6936 else if (CONST_INT_P (operands[1]))
6938 rtx reg = gen_reg_rtx (SImode);
6939 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6941 /* If the constant is already valid, leave it alone. */
6942 if (!const_ok_for_arm (val))
6944 /* If setting all the top bits will make the constant
6945 loadable in a single instruction, then set them.
6946 Otherwise, sign extend the number. */
6948 if (const_ok_for_arm (~(val | ~0xffff)))
6950 else if (val & 0x8000)
6954 emit_insn (gen_movsi (reg, GEN_INT (val)));
6955 operands[1] = gen_lowpart (HImode, reg);
6957 else if (arm_arch4 && optimize && can_create_pseudo_p ()
6958 && MEM_P (operands[1]))
6960 rtx reg = gen_reg_rtx (SImode);
6962 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6963 operands[1] = gen_lowpart (HImode, reg);
6965 else if (!arm_arch4)
6967 if (MEM_P (operands[1]))
6970 rtx offset = const0_rtx;
6971 rtx reg = gen_reg_rtx (SImode);
6973 if ((REG_P (base = XEXP (operands[1], 0))
6974 || (GET_CODE (base) == PLUS
6975 && (CONST_INT_P (offset = XEXP (base, 1)))
6976 && ((INTVAL(offset) & 1) != 1)
6977 && REG_P (base = XEXP (base, 0))))
6978 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
6982 new_rtx = widen_memory_access (operands[1], SImode,
6983 ((INTVAL (offset) & ~3)
6984 - INTVAL (offset)));
6985 emit_insn (gen_movsi (reg, new_rtx));
6986 if (((INTVAL (offset) & 2) != 0)
6987 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
6989 rtx reg2 = gen_reg_rtx (SImode);
6991 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
6996 emit_insn (gen_movhi_bytes (reg, operands[1]));
6998 operands[1] = gen_lowpart (HImode, reg);
7002 /* Handle loading a large integer during reload. */
7003 else if (CONST_INT_P (operands[1])
7004 && !const_ok_for_arm (INTVAL (operands[1]))
7005 && !const_ok_for_arm (~INTVAL (operands[1])))
7007 /* Writing a constant to memory needs a scratch, which should
7008 be handled with SECONDARY_RELOADs. */
7009 gcc_assert (REG_P (operands[0]));
7011 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
7012 emit_insn (gen_movsi (operands[0], operands[1]));
7016 else if (TARGET_THUMB2)
7018 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
7019 if (can_create_pseudo_p ())
7021 if (!REG_P (operands[0]))
7022 operands[1] = force_reg (HImode, operands[1]);
7023 /* Zero extend a constant, and keep it in an SImode reg. */
7024 else if (CONST_INT_P (operands[1]))
7026 rtx reg = gen_reg_rtx (SImode);
7027 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
7029 emit_insn (gen_movsi (reg, GEN_INT (val)));
7030 operands[1] = gen_lowpart (HImode, reg);
7034 else /* TARGET_THUMB1 */
7036 if (can_create_pseudo_p ())
7038 if (CONST_INT_P (operands[1]))
7040 rtx reg = gen_reg_rtx (SImode);
7042 emit_insn (gen_movsi (reg, operands[1]));
7043 operands[1] = gen_lowpart (HImode, reg);
7046 /* ??? We shouldn't really get invalid addresses here, but this can
7047 happen if we are passed a SP (never OK for HImode/QImode) or
7048 virtual register (also rejected as illegitimate for HImode/QImode)
7049 relative address. */
7050 /* ??? This should perhaps be fixed elsewhere, for instance, in
7051 fixup_stack_1, by checking for other kinds of invalid addresses,
7052 e.g. a bare reference to a virtual register. This may confuse the
7053 alpha though, which must handle this case differently. */
7054 if (MEM_P (operands[0])
7055 && !memory_address_p (GET_MODE (operands[0]),
7056 XEXP (operands[0], 0)))
7058 = replace_equiv_address (operands[0],
7059 copy_to_reg (XEXP (operands[0], 0)));
7061 if (MEM_P (operands[1])
7062 && !memory_address_p (GET_MODE (operands[1]),
7063 XEXP (operands[1], 0)))
7065 = replace_equiv_address (operands[1],
7066 copy_to_reg (XEXP (operands[1], 0)));
7068 if (MEM_P (operands[1]) && optimize > 0)
7070 rtx reg = gen_reg_rtx (SImode);
7072 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
7073 operands[1] = gen_lowpart (HImode, reg);
7076 if (MEM_P (operands[0]))
7077 operands[1] = force_reg (HImode, operands[1]);
7079 else if (CONST_INT_P (operands[1])
7080 && !satisfies_constraint_I (operands[1]))
7082 /* Handle loading a large integer during reload. */
7084 /* Writing a constant to memory needs a scratch, which should
7085 be handled with SECONDARY_RELOADs. */
7086 gcc_assert (REG_P (operands[0]));
7088 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
7089 emit_insn (gen_movsi (operands[0], operands[1]));
7096 (define_insn "*thumb1_movhi_insn"
7097 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
7098 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
7100 && ( register_operand (operands[0], HImode)
7101 || register_operand (operands[1], HImode))"
7103 switch (which_alternative)
7105 case 0: return \"add %0, %1, #0\";
7106 case 2: return \"strh %1, %0\";
7107 case 3: return \"mov %0, %1\";
7108 case 4: return \"mov %0, %1\";
7109 case 5: return \"mov %0, %1\";
7110 default: gcc_unreachable ();
7112 /* The stack pointer can end up being taken as an index register.
7113 Catch this case here and deal with it. */
7114 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
7115 && REG_P (XEXP (XEXP (operands[1], 0), 0))
7116 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
7119 ops[0] = operands[0];
7120 ops[1] = XEXP (XEXP (operands[1], 0), 0);
7122 output_asm_insn (\"mov %0, %1\", ops);
7124 XEXP (XEXP (operands[1], 0), 0) = operands[0];
7127 return \"ldrh %0, %1\";
7129 [(set_attr "length" "2,4,2,2,2,2")
7130 (set_attr "type" "*,load1,store1,*,*,*")
7131 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
7134 (define_expand "movhi_bytes"
7135 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
7137 (zero_extend:SI (match_dup 6)))
7138 (set (match_operand:SI 0 "" "")
7139 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
7144 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
7146 mem1 = change_address (operands[1], QImode, addr);
7147 mem2 = change_address (operands[1], QImode,
7148 plus_constant (Pmode, addr, 1));
7149 operands[0] = gen_lowpart (SImode, operands[0]);
7151 operands[2] = gen_reg_rtx (SImode);
7152 operands[3] = gen_reg_rtx (SImode);
7155 if (BYTES_BIG_ENDIAN)
7157 operands[4] = operands[2];
7158 operands[5] = operands[3];
7162 operands[4] = operands[3];
7163 operands[5] = operands[2];
7168 (define_expand "movhi_bigend"
7170 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
7173 (ashiftrt:SI (match_dup 2) (const_int 16)))
7174 (set (match_operand:HI 0 "s_register_operand" "")
7178 operands[2] = gen_reg_rtx (SImode);
7179 operands[3] = gen_reg_rtx (SImode);
7180 operands[4] = gen_lowpart (HImode, operands[3]);
7184 ;; Pattern to recognize insn generated default case above
7185 (define_insn "*movhi_insn_arch4"
7186 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
7187 (match_operand:HI 1 "general_operand" "rI,K,r,mi"))]
7190 && (register_operand (operands[0], HImode)
7191 || register_operand (operands[1], HImode))"
7193 mov%?\\t%0, %1\\t%@ movhi
7194 mvn%?\\t%0, #%B1\\t%@ movhi
7195 str%(h%)\\t%1, %0\\t%@ movhi
7196 ldr%(h%)\\t%0, %1\\t%@ movhi"
7197 [(set_attr "predicable" "yes")
7198 (set_attr "insn" "mov,mvn,*,*")
7199 (set_attr "pool_range" "*,*,*,256")
7200 (set_attr "neg_pool_range" "*,*,*,244")
7201 (set_attr_alternative "type"
7202 [(if_then_else (match_operand 1 "const_int_operand" "")
7203 (const_string "simple_alu_imm" )
7205 (const_string "simple_alu_imm")
7206 (const_string "store1")
7207 (const_string "load1")])]
7210 (define_insn "*movhi_bytes"
7211 [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
7212 (match_operand:HI 1 "arm_rhs_operand" "I,r,K"))]
7215 mov%?\\t%0, %1\\t%@ movhi
7216 mov%?\\t%0, %1\\t%@ movhi
7217 mvn%?\\t%0, #%B1\\t%@ movhi"
7218 [(set_attr "predicable" "yes")
7219 (set_attr "insn" "mov, mov,mvn")
7220 (set_attr "type" "simple_alu_imm,*,simple_alu_imm")]
7223 (define_expand "thumb_movhi_clobber"
7224 [(set (match_operand:HI 0 "memory_operand" "")
7225 (match_operand:HI 1 "register_operand" ""))
7226 (clobber (match_operand:DI 2 "register_operand" ""))]
7229 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
7230 && REGNO (operands[1]) <= LAST_LO_REGNUM)
7232 emit_insn (gen_movhi (operands[0], operands[1]));
7235 /* XXX Fixme, need to handle other cases here as well. */
7240 ;; We use a DImode scratch because we may occasionally need an additional
7241 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
7242 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
7243 (define_expand "reload_outhi"
7244 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
7245 (match_operand:HI 1 "s_register_operand" "r")
7246 (match_operand:DI 2 "s_register_operand" "=&l")])]
7249 arm_reload_out_hi (operands);
7251 thumb_reload_out_hi (operands);
7256 (define_expand "reload_inhi"
7257 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
7258 (match_operand:HI 1 "arm_reload_memory_operand" "o")
7259 (match_operand:DI 2 "s_register_operand" "=&r")])]
7263 arm_reload_in_hi (operands);
7265 thumb_reload_out_hi (operands);
7269 (define_expand "movqi"
7270 [(set (match_operand:QI 0 "general_operand" "")
7271 (match_operand:QI 1 "general_operand" ""))]
7274 /* Everything except mem = const or mem = mem can be done easily */
7276 if (can_create_pseudo_p ())
7278 if (CONST_INT_P (operands[1]))
7280 rtx reg = gen_reg_rtx (SImode);
7282 /* For thumb we want an unsigned immediate, then we are more likely
7283 to be able to use a movs insn. */
7285 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
7287 emit_insn (gen_movsi (reg, operands[1]));
7288 operands[1] = gen_lowpart (QImode, reg);
7293 /* ??? We shouldn't really get invalid addresses here, but this can
7294 happen if we are passed a SP (never OK for HImode/QImode) or
7295 virtual register (also rejected as illegitimate for HImode/QImode)
7296 relative address. */
7297 /* ??? This should perhaps be fixed elsewhere, for instance, in
7298 fixup_stack_1, by checking for other kinds of invalid addresses,
7299 e.g. a bare reference to a virtual register. This may confuse the
7300 alpha though, which must handle this case differently. */
7301 if (MEM_P (operands[0])
7302 && !memory_address_p (GET_MODE (operands[0]),
7303 XEXP (operands[0], 0)))
7305 = replace_equiv_address (operands[0],
7306 copy_to_reg (XEXP (operands[0], 0)));
7307 if (MEM_P (operands[1])
7308 && !memory_address_p (GET_MODE (operands[1]),
7309 XEXP (operands[1], 0)))
7311 = replace_equiv_address (operands[1],
7312 copy_to_reg (XEXP (operands[1], 0)));
7315 if (MEM_P (operands[1]) && optimize > 0)
7317 rtx reg = gen_reg_rtx (SImode);
7319 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
7320 operands[1] = gen_lowpart (QImode, reg);
7323 if (MEM_P (operands[0]))
7324 operands[1] = force_reg (QImode, operands[1]);
7326 else if (TARGET_THUMB
7327 && CONST_INT_P (operands[1])
7328 && !satisfies_constraint_I (operands[1]))
7330 /* Handle loading a large integer during reload. */
7332 /* Writing a constant to memory needs a scratch, which should
7333 be handled with SECONDARY_RELOADs. */
7334 gcc_assert (REG_P (operands[0]));
7336 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
7337 emit_insn (gen_movsi (operands[0], operands[1]));
7343 (define_insn "*arm_movqi_insn"
7344 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,r,l,Uu,r,m")
7345 (match_operand:QI 1 "general_operand" "r,r,I,Py,K,Uu,l,m,r"))]
7347 && ( register_operand (operands[0], QImode)
7348 || register_operand (operands[1], QImode))"
7359 [(set_attr "type" "*,*,simple_alu_imm,simple_alu_imm,simple_alu_imm,load1, store1, load1, store1")
7360 (set_attr "insn" "mov,mov,mov,mov,mvn,*,*,*,*")
7361 (set_attr "predicable" "yes")
7362 (set_attr "predicable_short_it" "yes,yes,yes,no,no,no,no,no,no")
7363 (set_attr "arch" "t2,any,any,t2,any,t2,t2,any,any")
7364 (set_attr "length" "2,4,4,2,4,2,2,4,4")]
7367 (define_insn "*thumb1_movqi_insn"
7368 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
7369 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
7371 && ( register_operand (operands[0], QImode)
7372 || register_operand (operands[1], QImode))"
7380 [(set_attr "length" "2")
7381 (set_attr "type" "simple_alu_imm,load1,store1,*,*,simple_alu_imm")
7382 (set_attr "insn" "*,*,*,mov,mov,mov")
7383 (set_attr "pool_range" "*,32,*,*,*,*")
7384 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
7387 (define_expand "movhf"
7388 [(set (match_operand:HF 0 "general_operand" "")
7389 (match_operand:HF 1 "general_operand" ""))]
7394 if (MEM_P (operands[0]))
7395 operands[1] = force_reg (HFmode, operands[1]);
7397 else /* TARGET_THUMB1 */
7399 if (can_create_pseudo_p ())
7401 if (!REG_P (operands[0]))
7402 operands[1] = force_reg (HFmode, operands[1]);
7408 (define_insn "*arm32_movhf"
7409 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
7410 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
7411 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_FP16)
7412 && ( s_register_operand (operands[0], HFmode)
7413 || s_register_operand (operands[1], HFmode))"
7415 switch (which_alternative)
7417 case 0: /* ARM register from memory */
7418 return \"ldr%(h%)\\t%0, %1\\t%@ __fp16\";
7419 case 1: /* memory from ARM register */
7420 return \"str%(h%)\\t%1, %0\\t%@ __fp16\";
7421 case 2: /* ARM register from ARM register */
7422 return \"mov%?\\t%0, %1\\t%@ __fp16\";
7423 case 3: /* ARM register from constant */
7429 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
7430 bits = real_to_target (NULL, &r, HFmode);
7431 ops[0] = operands[0];
7432 ops[1] = GEN_INT (bits);
7433 ops[2] = GEN_INT (bits & 0xff00);
7434 ops[3] = GEN_INT (bits & 0x00ff);
7436 if (arm_arch_thumb2)
7437 output_asm_insn (\"movw%?\\t%0, %1\", ops);
7439 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
7446 [(set_attr "conds" "unconditional")
7447 (set_attr "type" "load1,store1,*,*")
7448 (set_attr "insn" "*,*,mov,mov")
7449 (set_attr "length" "4,4,4,8")
7450 (set_attr "predicable" "yes")]
7453 (define_insn "*thumb1_movhf"
7454 [(set (match_operand:HF 0 "nonimmediate_operand" "=l,l,m,*r,*h")
7455 (match_operand:HF 1 "general_operand" "l,mF,l,*h,*r"))]
7457 && ( s_register_operand (operands[0], HFmode)
7458 || s_register_operand (operands[1], HFmode))"
7460 switch (which_alternative)
7465 gcc_assert (MEM_P (operands[1]));
7466 addr = XEXP (operands[1], 0);
7467 if (GET_CODE (addr) == LABEL_REF
7468 || (GET_CODE (addr) == CONST
7469 && GET_CODE (XEXP (addr, 0)) == PLUS
7470 && GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
7471 && CONST_INT_P (XEXP (XEXP (addr, 0), 1))))
7473 /* Constant pool entry. */
7474 return \"ldr\\t%0, %1\";
7476 return \"ldrh\\t%0, %1\";
7478 case 2: return \"strh\\t%1, %0\";
7479 default: return \"mov\\t%0, %1\";
7482 [(set_attr "length" "2")
7483 (set_attr "type" "*,load1,store1,*,*")
7484 (set_attr "insn" "mov,*,*,mov,mov")
7485 (set_attr "pool_range" "*,1018,*,*,*")
7486 (set_attr "conds" "clob,nocond,nocond,nocond,nocond")])
7488 (define_expand "movsf"
7489 [(set (match_operand:SF 0 "general_operand" "")
7490 (match_operand:SF 1 "general_operand" ""))]
7495 if (MEM_P (operands[0]))
7496 operands[1] = force_reg (SFmode, operands[1]);
7498 else /* TARGET_THUMB1 */
7500 if (can_create_pseudo_p ())
7502 if (!REG_P (operands[0]))
7503 operands[1] = force_reg (SFmode, operands[1]);
7509 ;; Transform a floating-point move of a constant into a core register into
7510 ;; an SImode operation.
7512 [(set (match_operand:SF 0 "arm_general_register_operand" "")
7513 (match_operand:SF 1 "immediate_operand" ""))]
7516 && CONST_DOUBLE_P (operands[1])"
7517 [(set (match_dup 2) (match_dup 3))]
7519 operands[2] = gen_lowpart (SImode, operands[0]);
7520 operands[3] = gen_lowpart (SImode, operands[1]);
7521 if (operands[2] == 0 || operands[3] == 0)
7526 (define_insn "*arm_movsf_soft_insn"
7527 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
7528 (match_operand:SF 1 "general_operand" "r,mE,r"))]
7530 && TARGET_SOFT_FLOAT
7531 && (!MEM_P (operands[0])
7532 || register_operand (operands[1], SFmode))"
7535 ldr%?\\t%0, %1\\t%@ float
7536 str%?\\t%1, %0\\t%@ float"
7537 [(set_attr "predicable" "yes")
7538 (set_attr "type" "*,load1,store1")
7539 (set_attr "insn" "mov,*,*")
7540 (set_attr "arm_pool_range" "*,4096,*")
7541 (set_attr "thumb2_pool_range" "*,4094,*")
7542 (set_attr "arm_neg_pool_range" "*,4084,*")
7543 (set_attr "thumb2_neg_pool_range" "*,0,*")]
7546 ;;; ??? This should have alternatives for constants.
7547 (define_insn "*thumb1_movsf_insn"
7548 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
7549 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
7551 && ( register_operand (operands[0], SFmode)
7552 || register_operand (operands[1], SFmode))"
7561 [(set_attr "length" "2")
7562 (set_attr "type" "*,load1,store1,load1,store1,*,*")
7563 (set_attr "pool_range" "*,*,*,1018,*,*,*")
7564 (set_attr "insn" "*,*,*,*,*,mov,mov")
7565 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond,nocond")]
7568 (define_expand "movdf"
7569 [(set (match_operand:DF 0 "general_operand" "")
7570 (match_operand:DF 1 "general_operand" ""))]
7575 if (MEM_P (operands[0]))
7576 operands[1] = force_reg (DFmode, operands[1]);
7578 else /* TARGET_THUMB */
7580 if (can_create_pseudo_p ())
7582 if (!REG_P (operands[0]))
7583 operands[1] = force_reg (DFmode, operands[1]);
7589 ;; Reloading a df mode value stored in integer regs to memory can require a
7591 (define_expand "reload_outdf"
7592 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
7593 (match_operand:DF 1 "s_register_operand" "r")
7594 (match_operand:SI 2 "s_register_operand" "=&r")]
7598 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
7601 operands[2] = XEXP (operands[0], 0);
7602 else if (code == POST_INC || code == PRE_DEC)
7604 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
7605 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
7606 emit_insn (gen_movdi (operands[0], operands[1]));
7609 else if (code == PRE_INC)
7611 rtx reg = XEXP (XEXP (operands[0], 0), 0);
7613 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
7616 else if (code == POST_DEC)
7617 operands[2] = XEXP (XEXP (operands[0], 0), 0);
7619 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
7620 XEXP (XEXP (operands[0], 0), 1)));
7622 emit_insn (gen_rtx_SET (VOIDmode,
7623 replace_equiv_address (operands[0], operands[2]),
7626 if (code == POST_DEC)
7627 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
7633 (define_insn "*movdf_soft_insn"
7634 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,q,m")
7635 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,q"))]
7636 "TARGET_32BIT && TARGET_SOFT_FLOAT
7637 && ( register_operand (operands[0], DFmode)
7638 || register_operand (operands[1], DFmode))"
7640 switch (which_alternative)
7647 return output_move_double (operands, true, NULL);
7650 [(set_attr "length" "8,12,16,8,8")
7651 (set_attr "type" "*,*,*,load2,store2")
7652 (set_attr "arm_pool_range" "*,*,*,1020,*")
7653 (set_attr "thumb2_pool_range" "*,*,*,1018,*")
7654 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
7655 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
7658 ;;; ??? This should have alternatives for constants.
7659 ;;; ??? This was originally identical to the movdi_insn pattern.
7660 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
7661 ;;; thumb_reorg with a memory reference.
7662 (define_insn "*thumb_movdf_insn"
7663 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
7664 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
7666 && ( register_operand (operands[0], DFmode)
7667 || register_operand (operands[1], DFmode))"
7669 switch (which_alternative)
7673 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
7674 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
7675 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
7677 return \"ldmia\\t%1, {%0, %H0}\";
7679 return \"stmia\\t%0, {%1, %H1}\";
7681 return thumb_load_double_from_address (operands);
7683 operands[2] = gen_rtx_MEM (SImode,
7684 plus_constant (Pmode,
7685 XEXP (operands[0], 0), 4));
7686 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
7689 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
7690 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
7691 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
7694 [(set_attr "length" "4,2,2,6,4,4")
7695 (set_attr "type" "*,load2,store2,load2,store2,*")
7696 (set_attr "insn" "*,*,*,*,*,mov")
7697 (set_attr "pool_range" "*,*,*,1018,*,*")]
7701 ;; load- and store-multiple insns
7702 ;; The arm can load/store any set of registers, provided that they are in
7703 ;; ascending order, but these expanders assume a contiguous set.
7705 (define_expand "load_multiple"
7706 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7707 (match_operand:SI 1 "" ""))
7708 (use (match_operand:SI 2 "" ""))])]
7711 HOST_WIDE_INT offset = 0;
7713 /* Support only fixed point registers. */
7714 if (!CONST_INT_P (operands[2])
7715 || INTVAL (operands[2]) > 14
7716 || INTVAL (operands[2]) < 2
7717 || !MEM_P (operands[1])
7718 || !REG_P (operands[0])
7719 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
7720 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
7724 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
7725 INTVAL (operands[2]),
7726 force_reg (SImode, XEXP (operands[1], 0)),
7727 FALSE, operands[1], &offset);
7730 (define_expand "store_multiple"
7731 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7732 (match_operand:SI 1 "" ""))
7733 (use (match_operand:SI 2 "" ""))])]
7736 HOST_WIDE_INT offset = 0;
7738 /* Support only fixed point registers. */
7739 if (!CONST_INT_P (operands[2])
7740 || INTVAL (operands[2]) > 14
7741 || INTVAL (operands[2]) < 2
7742 || !REG_P (operands[1])
7743 || !MEM_P (operands[0])
7744 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
7745 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
7749 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
7750 INTVAL (operands[2]),
7751 force_reg (SImode, XEXP (operands[0], 0)),
7752 FALSE, operands[0], &offset);
7756 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
7757 ;; We could let this apply for blocks of less than this, but it clobbers so
7758 ;; many registers that there is then probably a better way.
7760 (define_expand "movmemqi"
7761 [(match_operand:BLK 0 "general_operand" "")
7762 (match_operand:BLK 1 "general_operand" "")
7763 (match_operand:SI 2 "const_int_operand" "")
7764 (match_operand:SI 3 "const_int_operand" "")]
7769 if (TARGET_LDRD && current_tune->prefer_ldrd_strd
7770 && !optimize_function_for_size_p (cfun))
7772 if (gen_movmem_ldrd_strd (operands))
7777 if (arm_gen_movmemqi (operands))
7781 else /* TARGET_THUMB1 */
7783 if ( INTVAL (operands[3]) != 4
7784 || INTVAL (operands[2]) > 48)
7787 thumb_expand_movmemqi (operands);
7793 ;; Thumb block-move insns
7795 (define_insn "movmem12b"
7796 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
7797 (mem:SI (match_operand:SI 3 "register_operand" "1")))
7798 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
7799 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
7800 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
7801 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
7802 (set (match_operand:SI 0 "register_operand" "=l")
7803 (plus:SI (match_dup 2) (const_int 12)))
7804 (set (match_operand:SI 1 "register_operand" "=l")
7805 (plus:SI (match_dup 3) (const_int 12)))
7806 (clobber (match_scratch:SI 4 "=&l"))
7807 (clobber (match_scratch:SI 5 "=&l"))
7808 (clobber (match_scratch:SI 6 "=&l"))]
7810 "* return thumb_output_move_mem_multiple (3, operands);"
7811 [(set_attr "length" "4")
7812 ; This isn't entirely accurate... It loads as well, but in terms of
7813 ; scheduling the following insn it is better to consider it as a store
7814 (set_attr "type" "store3")]
7817 (define_insn "movmem8b"
7818 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
7819 (mem:SI (match_operand:SI 3 "register_operand" "1")))
7820 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
7821 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
7822 (set (match_operand:SI 0 "register_operand" "=l")
7823 (plus:SI (match_dup 2) (const_int 8)))
7824 (set (match_operand:SI 1 "register_operand" "=l")
7825 (plus:SI (match_dup 3) (const_int 8)))
7826 (clobber (match_scratch:SI 4 "=&l"))
7827 (clobber (match_scratch:SI 5 "=&l"))]
7829 "* return thumb_output_move_mem_multiple (2, operands);"
7830 [(set_attr "length" "4")
7831 ; This isn't entirely accurate... It loads as well, but in terms of
7832 ; scheduling the following insn it is better to consider it as a store
7833 (set_attr "type" "store2")]
7838 ;; Compare & branch insns
7839 ;; The range calculations are based as follows:
7840 ;; For forward branches, the address calculation returns the address of
7841 ;; the next instruction. This is 2 beyond the branch instruction.
7842 ;; For backward branches, the address calculation returns the address of
7843 ;; the first instruction in this pattern (cmp). This is 2 before the branch
7844 ;; instruction for the shortest sequence, and 4 before the branch instruction
7845 ;; if we have to jump around an unconditional branch.
7846 ;; To the basic branch range the PC offset must be added (this is +4).
7847 ;; So for forward branches we have
7848 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
7849 ;; And for backward branches we have
7850 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
7852 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
7853 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
7855 (define_expand "cbranchsi4"
7856 [(set (pc) (if_then_else
7857 (match_operator 0 "expandable_comparison_operator"
7858 [(match_operand:SI 1 "s_register_operand" "")
7859 (match_operand:SI 2 "nonmemory_operand" "")])
7860 (label_ref (match_operand 3 "" ""))
7866 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7868 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7872 if (thumb1_cmpneg_operand (operands[2], SImode))
7874 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
7875 operands[3], operands[0]));
7878 if (!thumb1_cmp_operand (operands[2], SImode))
7879 operands[2] = force_reg (SImode, operands[2]);
7882 ;; A pattern to recognize a special situation and optimize for it.
7883 ;; On the thumb, zero-extension from memory is preferrable to sign-extension
7884 ;; due to the available addressing modes. Hence, convert a signed comparison
7885 ;; with zero into an unsigned comparison with 127 if possible.
7886 (define_expand "cbranchqi4"
7887 [(set (pc) (if_then_else
7888 (match_operator 0 "lt_ge_comparison_operator"
7889 [(match_operand:QI 1 "memory_operand" "")
7890 (match_operand:QI 2 "const0_operand" "")])
7891 (label_ref (match_operand 3 "" ""))
7896 xops[1] = gen_reg_rtx (SImode);
7897 emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
7898 xops[2] = GEN_INT (127);
7899 xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
7900 VOIDmode, xops[1], xops[2]);
7901 xops[3] = operands[3];
7902 emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
7906 (define_expand "cbranchsf4"
7907 [(set (pc) (if_then_else
7908 (match_operator 0 "expandable_comparison_operator"
7909 [(match_operand:SF 1 "s_register_operand" "")
7910 (match_operand:SF 2 "arm_float_compare_operand" "")])
7911 (label_ref (match_operand 3 "" ""))
7913 "TARGET_32BIT && TARGET_HARD_FLOAT"
7914 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7915 operands[3])); DONE;"
7918 (define_expand "cbranchdf4"
7919 [(set (pc) (if_then_else
7920 (match_operator 0 "expandable_comparison_operator"
7921 [(match_operand:DF 1 "s_register_operand" "")
7922 (match_operand:DF 2 "arm_float_compare_operand" "")])
7923 (label_ref (match_operand 3 "" ""))
7925 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7926 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7927 operands[3])); DONE;"
7930 (define_expand "cbranchdi4"
7931 [(set (pc) (if_then_else
7932 (match_operator 0 "expandable_comparison_operator"
7933 [(match_operand:DI 1 "s_register_operand" "")
7934 (match_operand:DI 2 "cmpdi_operand" "")])
7935 (label_ref (match_operand 3 "" ""))
7939 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7941 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7947 (define_insn "cbranchsi4_insn"
7948 [(set (pc) (if_then_else
7949 (match_operator 0 "arm_comparison_operator"
7950 [(match_operand:SI 1 "s_register_operand" "l,l*h")
7951 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
7952 (label_ref (match_operand 3 "" ""))
7956 rtx t = cfun->machine->thumb1_cc_insn;
7959 if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
7960 || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
7962 if (cfun->machine->thumb1_cc_mode == CC_NOOVmode)
7964 if (!noov_comparison_operator (operands[0], VOIDmode))
7967 else if (cfun->machine->thumb1_cc_mode != CCmode)
7972 output_asm_insn ("cmp\t%1, %2", operands);
7973 cfun->machine->thumb1_cc_insn = insn;
7974 cfun->machine->thumb1_cc_op0 = operands[1];
7975 cfun->machine->thumb1_cc_op1 = operands[2];
7976 cfun->machine->thumb1_cc_mode = CCmode;
7979 /* Ensure we emit the right type of condition code on the jump. */
7980 XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
7983 switch (get_attr_length (insn))
7985 case 4: return \"b%d0\\t%l3\";
7986 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7987 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7990 [(set (attr "far_jump")
7992 (eq_attr "length" "8")
7993 (const_string "yes")
7994 (const_string "no")))
7995 (set (attr "length")
7997 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7998 (le (minus (match_dup 3) (pc)) (const_int 256)))
8001 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
8002 (le (minus (match_dup 3) (pc)) (const_int 2048)))
8007 (define_insn "cbranchsi4_scratch"
8008 [(set (pc) (if_then_else
8009 (match_operator 4 "arm_comparison_operator"
8010 [(match_operand:SI 1 "s_register_operand" "l,0")
8011 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
8012 (label_ref (match_operand 3 "" ""))
8014 (clobber (match_scratch:SI 0 "=l,l"))]
8017 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
8019 switch (get_attr_length (insn))
8021 case 4: return \"b%d4\\t%l3\";
8022 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
8023 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
8026 [(set (attr "far_jump")
8028 (eq_attr "length" "8")
8029 (const_string "yes")
8030 (const_string "no")))
8031 (set (attr "length")
8033 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
8034 (le (minus (match_dup 3) (pc)) (const_int 256)))
8037 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
8038 (le (minus (match_dup 3) (pc)) (const_int 2048)))
8043 (define_insn "*negated_cbranchsi4"
8046 (match_operator 0 "equality_operator"
8047 [(match_operand:SI 1 "s_register_operand" "l")
8048 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
8049 (label_ref (match_operand 3 "" ""))
8053 output_asm_insn (\"cmn\\t%1, %2\", operands);
8054 switch (get_attr_length (insn))
8056 case 4: return \"b%d0\\t%l3\";
8057 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
8058 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
8061 [(set (attr "far_jump")
8063 (eq_attr "length" "8")
8064 (const_string "yes")
8065 (const_string "no")))
8066 (set (attr "length")
8068 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
8069 (le (minus (match_dup 3) (pc)) (const_int 256)))
8072 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
8073 (le (minus (match_dup 3) (pc)) (const_int 2048)))
8078 (define_insn "*tbit_cbranch"
8081 (match_operator 0 "equality_operator"
8082 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
8084 (match_operand:SI 2 "const_int_operand" "i"))
8086 (label_ref (match_operand 3 "" ""))
8088 (clobber (match_scratch:SI 4 "=l"))]
8093 op[0] = operands[4];
8094 op[1] = operands[1];
8095 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
8097 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
8098 switch (get_attr_length (insn))
8100 case 4: return \"b%d0\\t%l3\";
8101 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
8102 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
8105 [(set (attr "far_jump")
8107 (eq_attr "length" "8")
8108 (const_string "yes")
8109 (const_string "no")))
8110 (set (attr "length")
8112 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
8113 (le (minus (match_dup 3) (pc)) (const_int 256)))
8116 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
8117 (le (minus (match_dup 3) (pc)) (const_int 2048)))
8122 (define_insn "*tlobits_cbranch"
8125 (match_operator 0 "equality_operator"
8126 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
8127 (match_operand:SI 2 "const_int_operand" "i")
8130 (label_ref (match_operand 3 "" ""))
8132 (clobber (match_scratch:SI 4 "=l"))]
8137 op[0] = operands[4];
8138 op[1] = operands[1];
8139 op[2] = GEN_INT (32 - INTVAL (operands[2]));
8141 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
8142 switch (get_attr_length (insn))
8144 case 4: return \"b%d0\\t%l3\";
8145 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
8146 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
8149 [(set (attr "far_jump")
8151 (eq_attr "length" "8")
8152 (const_string "yes")
8153 (const_string "no")))
8154 (set (attr "length")
8156 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
8157 (le (minus (match_dup 3) (pc)) (const_int 256)))
8160 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
8161 (le (minus (match_dup 3) (pc)) (const_int 2048)))
8166 (define_insn "*tstsi3_cbranch"
8169 (match_operator 3 "equality_operator"
8170 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
8171 (match_operand:SI 1 "s_register_operand" "l"))
8173 (label_ref (match_operand 2 "" ""))
8178 output_asm_insn (\"tst\\t%0, %1\", operands);
8179 switch (get_attr_length (insn))
8181 case 4: return \"b%d3\\t%l2\";
8182 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
8183 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
8186 [(set (attr "far_jump")
8188 (eq_attr "length" "8")
8189 (const_string "yes")
8190 (const_string "no")))
8191 (set (attr "length")
8193 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
8194 (le (minus (match_dup 2) (pc)) (const_int 256)))
8197 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
8198 (le (minus (match_dup 2) (pc)) (const_int 2048)))
8203 (define_insn "*cbranchne_decr1"
8205 (if_then_else (match_operator 3 "equality_operator"
8206 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
8208 (label_ref (match_operand 4 "" ""))
8210 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
8211 (plus:SI (match_dup 2) (const_int -1)))
8212 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
8217 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
8219 VOIDmode, operands[2], const1_rtx);
8220 cond[1] = operands[4];
8222 if (which_alternative == 0)
8223 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
8224 else if (which_alternative == 1)
8226 /* We must provide an alternative for a hi reg because reload
8227 cannot handle output reloads on a jump instruction, but we
8228 can't subtract into that. Fortunately a mov from lo to hi
8229 does not clobber the condition codes. */
8230 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
8231 output_asm_insn (\"mov\\t%0, %1\", operands);
8235 /* Similarly, but the target is memory. */
8236 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
8237 output_asm_insn (\"str\\t%1, %0\", operands);
8240 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
8243 output_asm_insn (\"b%d0\\t%l1\", cond);
8246 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
8247 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
8249 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
8250 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
8254 [(set (attr "far_jump")
8256 (ior (and (eq (symbol_ref ("which_alternative"))
8258 (eq_attr "length" "8"))
8259 (eq_attr "length" "10"))
8260 (const_string "yes")
8261 (const_string "no")))
8262 (set_attr_alternative "length"
8266 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
8267 (le (minus (match_dup 4) (pc)) (const_int 256)))
8270 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
8271 (le (minus (match_dup 4) (pc)) (const_int 2048)))
8276 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
8277 (le (minus (match_dup 4) (pc)) (const_int 256)))
8280 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
8281 (le (minus (match_dup 4) (pc)) (const_int 2048)))
8286 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
8287 (le (minus (match_dup 4) (pc)) (const_int 256)))
8290 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
8291 (le (minus (match_dup 4) (pc)) (const_int 2048)))
8296 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
8297 (le (minus (match_dup 4) (pc)) (const_int 256)))
8300 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
8301 (le (minus (match_dup 4) (pc)) (const_int 2048)))
8306 (define_insn "*addsi3_cbranch"
8309 (match_operator 4 "arm_comparison_operator"
8311 (match_operand:SI 2 "s_register_operand" "%0,l,*l,1,1,1")
8312 (match_operand:SI 3 "reg_or_int_operand" "IJ,lL,*l,lIJ,lIJ,lIJ"))
8314 (label_ref (match_operand 5 "" ""))
8317 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
8318 (plus:SI (match_dup 2) (match_dup 3)))
8319 (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
8321 && (GET_CODE (operands[4]) == EQ
8322 || GET_CODE (operands[4]) == NE
8323 || GET_CODE (operands[4]) == GE
8324 || GET_CODE (operands[4]) == LT)"
8329 cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
8330 cond[1] = operands[2];
8331 cond[2] = operands[3];
8333 if (CONST_INT_P (cond[2]) && INTVAL (cond[2]) < 0)
8334 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
8336 output_asm_insn (\"add\\t%0, %1, %2\", cond);
8338 if (which_alternative >= 2
8339 && which_alternative < 4)
8340 output_asm_insn (\"mov\\t%0, %1\", operands);
8341 else if (which_alternative >= 4)
8342 output_asm_insn (\"str\\t%1, %0\", operands);
8344 switch (get_attr_length (insn) - ((which_alternative >= 2) ? 2 : 0))
8347 return \"b%d4\\t%l5\";
8349 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
8351 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
8355 [(set (attr "far_jump")
8357 (ior (and (lt (symbol_ref ("which_alternative"))
8359 (eq_attr "length" "8"))
8360 (eq_attr "length" "10"))
8361 (const_string "yes")
8362 (const_string "no")))
8363 (set (attr "length")
8365 (lt (symbol_ref ("which_alternative"))
8368 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
8369 (le (minus (match_dup 5) (pc)) (const_int 256)))
8372 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
8373 (le (minus (match_dup 5) (pc)) (const_int 2048)))
8377 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
8378 (le (minus (match_dup 5) (pc)) (const_int 256)))
8381 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
8382 (le (minus (match_dup 5) (pc)) (const_int 2048)))
8387 (define_insn "*addsi3_cbranch_scratch"
8390 (match_operator 3 "arm_comparison_operator"
8392 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
8393 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
8395 (label_ref (match_operand 4 "" ""))
8397 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
8399 && (GET_CODE (operands[3]) == EQ
8400 || GET_CODE (operands[3]) == NE
8401 || GET_CODE (operands[3]) == GE
8402 || GET_CODE (operands[3]) == LT)"
8405 switch (which_alternative)
8408 output_asm_insn (\"cmp\t%1, #%n2\", operands);
8411 output_asm_insn (\"cmn\t%1, %2\", operands);
8414 if (INTVAL (operands[2]) < 0)
8415 output_asm_insn (\"sub\t%0, %1, %2\", operands);
8417 output_asm_insn (\"add\t%0, %1, %2\", operands);
8420 if (INTVAL (operands[2]) < 0)
8421 output_asm_insn (\"sub\t%0, %0, %2\", operands);
8423 output_asm_insn (\"add\t%0, %0, %2\", operands);
8427 switch (get_attr_length (insn))
8430 return \"b%d3\\t%l4\";
8432 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
8434 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
8438 [(set (attr "far_jump")
8440 (eq_attr "length" "8")
8441 (const_string "yes")
8442 (const_string "no")))
8443 (set (attr "length")
8445 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
8446 (le (minus (match_dup 4) (pc)) (const_int 256)))
8449 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
8450 (le (minus (match_dup 4) (pc)) (const_int 2048)))
8456 ;; Comparison and test insns
8458 (define_insn "*arm_cmpsi_insn"
8459 [(set (reg:CC CC_REGNUM)
8460 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r")
8461 (match_operand:SI 1 "arm_add_operand" "Py,r,rI,L")))]
8468 [(set_attr "conds" "set")
8469 (set_attr "arch" "t2,t2,any,any")
8470 (set_attr "length" "2,2,4,4")
8471 (set_attr "predicable" "yes")
8472 (set_attr "type" "*,*,*,simple_alu_imm")]
8475 (define_insn "*cmpsi_shiftsi"
8476 [(set (reg:CC CC_REGNUM)
8477 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
8478 (match_operator:SI 3 "shift_operator"
8479 [(match_operand:SI 1 "s_register_operand" "r,r")
8480 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
8483 [(set_attr "conds" "set")
8484 (set_attr "shift" "1")
8485 (set_attr "arch" "32,a")
8486 (set_attr "type" "alu_shift,alu_shift_reg")])
8488 (define_insn "*cmpsi_shiftsi_swp"
8489 [(set (reg:CC_SWP CC_REGNUM)
8490 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
8491 [(match_operand:SI 1 "s_register_operand" "r,r")
8492 (match_operand:SI 2 "shift_amount_operand" "M,rM")])
8493 (match_operand:SI 0 "s_register_operand" "r,r")))]
8496 [(set_attr "conds" "set")
8497 (set_attr "shift" "1")
8498 (set_attr "arch" "32,a")
8499 (set_attr "type" "alu_shift,alu_shift_reg")])
8501 (define_insn "*arm_cmpsi_negshiftsi_si"
8502 [(set (reg:CC_Z CC_REGNUM)
8504 (neg:SI (match_operator:SI 1 "shift_operator"
8505 [(match_operand:SI 2 "s_register_operand" "r")
8506 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
8507 (match_operand:SI 0 "s_register_operand" "r")))]
8510 [(set_attr "conds" "set")
8511 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
8512 (const_string "alu_shift")
8513 (const_string "alu_shift_reg")))
8514 (set_attr "predicable" "yes")]
8517 ;; DImode comparisons. The generic code generates branches that
8518 ;; if-conversion can not reduce to a conditional compare, so we do
8521 (define_insn_and_split "*arm_cmpdi_insn"
8522 [(set (reg:CC_NCV CC_REGNUM)
8523 (compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
8524 (match_operand:DI 1 "arm_di_operand" "rDi")))
8525 (clobber (match_scratch:SI 2 "=r"))]
8527 "#" ; "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
8528 "&& reload_completed"
8529 [(set (reg:CC CC_REGNUM)
8530 (compare:CC (match_dup 0) (match_dup 1)))
8531 (parallel [(set (reg:CC CC_REGNUM)
8532 (compare:CC (match_dup 3) (match_dup 4)))
8534 (minus:SI (match_dup 5)
8535 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))])]
8537 operands[3] = gen_highpart (SImode, operands[0]);
8538 operands[0] = gen_lowpart (SImode, operands[0]);
8539 if (CONST_INT_P (operands[1]))
8541 operands[4] = GEN_INT (~INTVAL (gen_highpart_mode (SImode,
8544 operands[5] = gen_rtx_PLUS (SImode, operands[3], operands[4]);
8548 operands[4] = gen_highpart (SImode, operands[1]);
8549 operands[5] = gen_rtx_MINUS (SImode, operands[3], operands[4]);
8551 operands[1] = gen_lowpart (SImode, operands[1]);
8552 operands[2] = gen_lowpart (SImode, operands[2]);
8554 [(set_attr "conds" "set")
8555 (set_attr "length" "8")]
8558 (define_insn_and_split "*arm_cmpdi_unsigned"
8559 [(set (reg:CC_CZ CC_REGNUM)
8560 (compare:CC_CZ (match_operand:DI 0 "s_register_operand" "r")
8561 (match_operand:DI 1 "arm_di_operand" "rDi")))]
8563 "#" ; "cmp\\t%R0, %R1\;it eq\;cmpeq\\t%Q0, %Q1"
8564 "&& reload_completed"
8565 [(set (reg:CC CC_REGNUM)
8566 (compare:CC (match_dup 2) (match_dup 3)))
8567 (cond_exec (eq:SI (reg:CC CC_REGNUM) (const_int 0))
8568 (set (reg:CC CC_REGNUM)
8569 (compare:CC (match_dup 0) (match_dup 1))))]
8571 operands[2] = gen_highpart (SImode, operands[0]);
8572 operands[0] = gen_lowpart (SImode, operands[0]);
8573 if (CONST_INT_P (operands[1]))
8574 operands[3] = gen_highpart_mode (SImode, DImode, operands[1]);
8576 operands[3] = gen_highpart (SImode, operands[1]);
8577 operands[1] = gen_lowpart (SImode, operands[1]);
8579 [(set_attr "conds" "set")
8580 (set_attr "length" "8")]
8583 (define_insn "*arm_cmpdi_zero"
8584 [(set (reg:CC_Z CC_REGNUM)
8585 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "r")
8587 (clobber (match_scratch:SI 1 "=r"))]
8589 "orr%.\\t%1, %Q0, %R0"
8590 [(set_attr "conds" "set")]
8593 (define_insn "*thumb_cmpdi_zero"
8594 [(set (reg:CC_Z CC_REGNUM)
8595 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "l")
8597 (clobber (match_scratch:SI 1 "=l"))]
8599 "orr\\t%1, %Q0, %R0"
8600 [(set_attr "conds" "set")
8601 (set_attr "length" "2")]
8604 ; This insn allows redundant compares to be removed by cse, nothing should
8605 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
8606 ; is deleted later on. The match_dup will match the mode here, so that
8607 ; mode changes of the condition codes aren't lost by this even though we don't
8608 ; specify what they are.
8610 (define_insn "*deleted_compare"
8611 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
8613 "\\t%@ deleted compare"
8614 [(set_attr "conds" "set")
8615 (set_attr "length" "0")]
8619 ;; Conditional branch insns
8621 (define_expand "cbranch_cc"
8623 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
8624 (match_operand 2 "" "")])
8625 (label_ref (match_operand 3 "" ""))
8628 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
8629 operands[1], operands[2], NULL_RTX);
8630 operands[2] = const0_rtx;"
8634 ;; Patterns to match conditional branch insns.
8637 (define_insn "arm_cond_branch"
8639 (if_then_else (match_operator 1 "arm_comparison_operator"
8640 [(match_operand 2 "cc_register" "") (const_int 0)])
8641 (label_ref (match_operand 0 "" ""))
8645 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8647 arm_ccfsm_state += 2;
8650 return \"b%d1\\t%l0\";
8652 [(set_attr "conds" "use")
8653 (set_attr "type" "branch")
8654 (set (attr "length")
8656 (and (match_test "TARGET_THUMB2")
8657 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
8658 (le (minus (match_dup 0) (pc)) (const_int 256))))
8663 (define_insn "*arm_cond_branch_reversed"
8665 (if_then_else (match_operator 1 "arm_comparison_operator"
8666 [(match_operand 2 "cc_register" "") (const_int 0)])
8668 (label_ref (match_operand 0 "" ""))))]
8671 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8673 arm_ccfsm_state += 2;
8676 return \"b%D1\\t%l0\";
8678 [(set_attr "conds" "use")
8679 (set_attr "type" "branch")
8680 (set (attr "length")
8682 (and (match_test "TARGET_THUMB2")
8683 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
8684 (le (minus (match_dup 0) (pc)) (const_int 256))))
8693 (define_expand "cstore_cc"
8694 [(set (match_operand:SI 0 "s_register_operand" "")
8695 (match_operator:SI 1 "" [(match_operand 2 "" "")
8696 (match_operand 3 "" "")]))]
8698 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
8699 operands[2], operands[3], NULL_RTX);
8700 operands[3] = const0_rtx;"
8703 (define_insn_and_split "*mov_scc"
8704 [(set (match_operand:SI 0 "s_register_operand" "=r")
8705 (match_operator:SI 1 "arm_comparison_operator"
8706 [(match_operand 2 "cc_register" "") (const_int 0)]))]
8708 "#" ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
8711 (if_then_else:SI (match_dup 1)
8715 [(set_attr "conds" "use")
8716 (set_attr "length" "8")]
8719 (define_insn_and_split "*mov_negscc"
8720 [(set (match_operand:SI 0 "s_register_operand" "=r")
8721 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
8722 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8724 "#" ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
8727 (if_then_else:SI (match_dup 1)
8731 operands[3] = GEN_INT (~0);
8733 [(set_attr "conds" "use")
8734 (set_attr "length" "8")]
8737 (define_insn_and_split "*mov_notscc"
8738 [(set (match_operand:SI 0 "s_register_operand" "=r")
8739 (not:SI (match_operator:SI 1 "arm_comparison_operator"
8740 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8742 "#" ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
8745 (if_then_else:SI (match_dup 1)
8749 operands[3] = GEN_INT (~1);
8750 operands[4] = GEN_INT (~0);
8752 [(set_attr "conds" "use")
8753 (set_attr "length" "8")]
8756 (define_expand "cstoresi4"
8757 [(set (match_operand:SI 0 "s_register_operand" "")
8758 (match_operator:SI 1 "expandable_comparison_operator"
8759 [(match_operand:SI 2 "s_register_operand" "")
8760 (match_operand:SI 3 "reg_or_int_operand" "")]))]
8761 "TARGET_32BIT || TARGET_THUMB1"
8763 rtx op3, scratch, scratch2;
8767 if (!arm_add_operand (operands[3], SImode))
8768 operands[3] = force_reg (SImode, operands[3]);
8769 emit_insn (gen_cstore_cc (operands[0], operands[1],
8770 operands[2], operands[3]));
8774 if (operands[3] == const0_rtx)
8776 switch (GET_CODE (operands[1]))
8779 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
8783 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
8787 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
8788 NULL_RTX, 0, OPTAB_WIDEN);
8789 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
8790 NULL_RTX, 0, OPTAB_WIDEN);
8791 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8792 operands[0], 1, OPTAB_WIDEN);
8796 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
8798 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8799 NULL_RTX, 1, OPTAB_WIDEN);
8803 scratch = expand_binop (SImode, ashr_optab, operands[2],
8804 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
8805 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
8806 NULL_RTX, 0, OPTAB_WIDEN);
8807 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
8811 /* LT is handled by generic code. No need for unsigned with 0. */
8818 switch (GET_CODE (operands[1]))
8821 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8822 NULL_RTX, 0, OPTAB_WIDEN);
8823 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
8827 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8828 NULL_RTX, 0, OPTAB_WIDEN);
8829 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
8833 op3 = force_reg (SImode, operands[3]);
8835 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
8836 NULL_RTX, 1, OPTAB_WIDEN);
8837 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
8838 NULL_RTX, 0, OPTAB_WIDEN);
8839 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8845 if (!thumb1_cmp_operand (op3, SImode))
8846 op3 = force_reg (SImode, op3);
8847 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
8848 NULL_RTX, 0, OPTAB_WIDEN);
8849 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
8850 NULL_RTX, 1, OPTAB_WIDEN);
8851 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8856 op3 = force_reg (SImode, operands[3]);
8857 scratch = force_reg (SImode, const0_rtx);
8858 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8864 if (!thumb1_cmp_operand (op3, SImode))
8865 op3 = force_reg (SImode, op3);
8866 scratch = force_reg (SImode, const0_rtx);
8867 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8873 if (!thumb1_cmp_operand (op3, SImode))
8874 op3 = force_reg (SImode, op3);
8875 scratch = gen_reg_rtx (SImode);
8876 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
8880 op3 = force_reg (SImode, operands[3]);
8881 scratch = gen_reg_rtx (SImode);
8882 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
8885 /* No good sequences for GT, LT. */
8892 (define_expand "cstoresf4"
8893 [(set (match_operand:SI 0 "s_register_operand" "")
8894 (match_operator:SI 1 "expandable_comparison_operator"
8895 [(match_operand:SF 2 "s_register_operand" "")
8896 (match_operand:SF 3 "arm_float_compare_operand" "")]))]
8897 "TARGET_32BIT && TARGET_HARD_FLOAT"
8898 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8899 operands[2], operands[3])); DONE;"
8902 (define_expand "cstoredf4"
8903 [(set (match_operand:SI 0 "s_register_operand" "")
8904 (match_operator:SI 1 "expandable_comparison_operator"
8905 [(match_operand:DF 2 "s_register_operand" "")
8906 (match_operand:DF 3 "arm_float_compare_operand" "")]))]
8907 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
8908 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8909 operands[2], operands[3])); DONE;"
8912 (define_expand "cstoredi4"
8913 [(set (match_operand:SI 0 "s_register_operand" "")
8914 (match_operator:SI 1 "expandable_comparison_operator"
8915 [(match_operand:DI 2 "s_register_operand" "")
8916 (match_operand:DI 3 "cmpdi_operand" "")]))]
8919 if (!arm_validize_comparison (&operands[1],
8923 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
8929 (define_expand "cstoresi_eq0_thumb1"
8931 [(set (match_operand:SI 0 "s_register_operand" "")
8932 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8934 (clobber (match_dup:SI 2))])]
8936 "operands[2] = gen_reg_rtx (SImode);"
8939 (define_expand "cstoresi_ne0_thumb1"
8941 [(set (match_operand:SI 0 "s_register_operand" "")
8942 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8944 (clobber (match_dup:SI 2))])]
8946 "operands[2] = gen_reg_rtx (SImode);"
8949 (define_insn "*cstoresi_eq0_thumb1_insn"
8950 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
8951 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
8953 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
8956 neg\\t%0, %1\;adc\\t%0, %0, %1
8957 neg\\t%2, %1\;adc\\t%0, %1, %2"
8958 [(set_attr "length" "4")]
8961 (define_insn "*cstoresi_ne0_thumb1_insn"
8962 [(set (match_operand:SI 0 "s_register_operand" "=l")
8963 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
8965 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
8967 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
8968 [(set_attr "length" "4")]
8971 ;; Used as part of the expansion of thumb ltu and gtu sequences
8972 (define_insn "cstoresi_nltu_thumb1"
8973 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8974 (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8975 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
8977 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
8978 [(set_attr "length" "4")]
8981 (define_insn_and_split "cstoresi_ltu_thumb1"
8982 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8983 (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8984 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
8989 (neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
8990 (set (match_dup 0) (neg:SI (match_dup 3)))]
8991 "operands[3] = gen_reg_rtx (SImode);"
8992 [(set_attr "length" "4")]
8995 ;; Used as part of the expansion of thumb les sequence.
8996 (define_insn "thumb1_addsi3_addgeu"
8997 [(set (match_operand:SI 0 "s_register_operand" "=l")
8998 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
8999 (match_operand:SI 2 "s_register_operand" "l"))
9000 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
9001 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
9003 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
9004 [(set_attr "length" "4")]
9008 ;; Conditional move insns
9010 (define_expand "movsicc"
9011 [(set (match_operand:SI 0 "s_register_operand" "")
9012 (if_then_else:SI (match_operand 1 "expandable_comparison_operator" "")
9013 (match_operand:SI 2 "arm_not_operand" "")
9014 (match_operand:SI 3 "arm_not_operand" "")))]
9021 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
9022 &XEXP (operands[1], 1)))
9025 code = GET_CODE (operands[1]);
9026 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
9027 XEXP (operands[1], 1), NULL_RTX);
9028 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
9032 (define_expand "movsfcc"
9033 [(set (match_operand:SF 0 "s_register_operand" "")
9034 (if_then_else:SF (match_operand 1 "arm_cond_move_operator" "")
9035 (match_operand:SF 2 "s_register_operand" "")
9036 (match_operand:SF 3 "s_register_operand" "")))]
9037 "TARGET_32BIT && TARGET_HARD_FLOAT"
9040 enum rtx_code code = GET_CODE (operands[1]);
9043 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
9044 &XEXP (operands[1], 1)))
9047 code = GET_CODE (operands[1]);
9048 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
9049 XEXP (operands[1], 1), NULL_RTX);
9050 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
9054 (define_expand "movdfcc"
9055 [(set (match_operand:DF 0 "s_register_operand" "")
9056 (if_then_else:DF (match_operand 1 "arm_cond_move_operator" "")
9057 (match_operand:DF 2 "s_register_operand" "")
9058 (match_operand:DF 3 "s_register_operand" "")))]
9059 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
9062 enum rtx_code code = GET_CODE (operands[1]);
9065 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
9066 &XEXP (operands[1], 1)))
9068 code = GET_CODE (operands[1]);
9069 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
9070 XEXP (operands[1], 1), NULL_RTX);
9071 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
9075 (define_insn "*cmov<mode>"
9076 [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
9077 (if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
9078 [(match_operand 2 "cc_register" "") (const_int 0)])
9079 (match_operand:SDF 3 "s_register_operand"
9081 (match_operand:SDF 4 "s_register_operand"
9082 "<F_constraint>")))]
9083 "TARGET_HARD_FLOAT && TARGET_FPU_ARMV8 <vfp_double_cond>"
9086 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
9093 return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
9098 return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
9104 [(set_attr "conds" "use")
9105 (set_attr "type" "f_sel<vfp_type>")]
9108 (define_insn_and_split "*movsicc_insn"
9109 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
9111 (match_operator 3 "arm_comparison_operator"
9112 [(match_operand 4 "cc_register" "") (const_int 0)])
9113 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
9114 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
9125 ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
9126 ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
9127 ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
9128 ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
9129 "&& reload_completed"
9132 enum rtx_code rev_code;
9133 enum machine_mode mode;
9136 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9138 gen_rtx_SET (VOIDmode,
9142 rev_code = GET_CODE (operands[3]);
9143 mode = GET_MODE (operands[4]);
9144 if (mode == CCFPmode || mode == CCFPEmode)
9145 rev_code = reverse_condition_maybe_unordered (rev_code);
9147 rev_code = reverse_condition (rev_code);
9149 rev_cond = gen_rtx_fmt_ee (rev_code,
9153 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9155 gen_rtx_SET (VOIDmode,
9160 [(set_attr "length" "4,4,4,4,8,8,8,8")
9161 (set_attr "conds" "use")
9162 (set_attr "insn" "mov,mvn,mov,mvn,mov,mov,mvn,mvn")
9163 (set_attr_alternative "type"
9164 [(if_then_else (match_operand 2 "const_int_operand" "")
9165 (const_string "simple_alu_imm")
9167 (const_string "simple_alu_imm")
9168 (if_then_else (match_operand 1 "const_int_operand" "")
9169 (const_string "simple_alu_imm")
9171 (const_string "simple_alu_imm")
9175 (const_string "*")])]
9178 (define_insn "*movsfcc_soft_insn"
9179 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
9180 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
9181 [(match_operand 4 "cc_register" "") (const_int 0)])
9182 (match_operand:SF 1 "s_register_operand" "0,r")
9183 (match_operand:SF 2 "s_register_operand" "r,0")))]
9184 "TARGET_ARM && TARGET_SOFT_FLOAT"
9188 [(set_attr "conds" "use")
9189 (set_attr "insn" "mov")]
9193 ;; Jump and linkage insns
9195 (define_expand "jump"
9197 (label_ref (match_operand 0 "" "")))]
9202 (define_insn "*arm_jump"
9204 (label_ref (match_operand 0 "" "")))]
9208 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
9210 arm_ccfsm_state += 2;
9213 return \"b%?\\t%l0\";
9216 [(set_attr "predicable" "yes")
9217 (set (attr "length")
9219 (and (match_test "TARGET_THUMB2")
9220 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
9221 (le (minus (match_dup 0) (pc)) (const_int 2048))))
9226 (define_insn "*thumb_jump"
9228 (label_ref (match_operand 0 "" "")))]
9231 if (get_attr_length (insn) == 2)
9233 return \"bl\\t%l0\\t%@ far jump\";
9235 [(set (attr "far_jump")
9237 (eq_attr "length" "4")
9238 (const_string "yes")
9239 (const_string "no")))
9240 (set (attr "length")
9242 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
9243 (le (minus (match_dup 0) (pc)) (const_int 2048)))
9248 (define_expand "call"
9249 [(parallel [(call (match_operand 0 "memory_operand" "")
9250 (match_operand 1 "general_operand" ""))
9251 (use (match_operand 2 "" ""))
9252 (clobber (reg:SI LR_REGNUM))])]
9258 /* In an untyped call, we can get NULL for operand 2. */
9259 if (operands[2] == NULL_RTX)
9260 operands[2] = const0_rtx;
9262 /* Decide if we should generate indirect calls by loading the
9263 32-bit address of the callee into a register before performing the
9265 callee = XEXP (operands[0], 0);
9266 if (GET_CODE (callee) == SYMBOL_REF
9267 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
9269 XEXP (operands[0], 0) = force_reg (Pmode, callee);
9271 pat = gen_call_internal (operands[0], operands[1], operands[2]);
9272 arm_emit_call_insn (pat, XEXP (operands[0], 0));
9277 (define_expand "call_internal"
9278 [(parallel [(call (match_operand 0 "memory_operand" "")
9279 (match_operand 1 "general_operand" ""))
9280 (use (match_operand 2 "" ""))
9281 (clobber (reg:SI LR_REGNUM))])])
9283 (define_insn "*call_reg_armv5"
9284 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
9285 (match_operand 1 "" ""))
9286 (use (match_operand 2 "" ""))
9287 (clobber (reg:SI LR_REGNUM))]
9288 "TARGET_ARM && arm_arch5 && !SIBLING_CALL_P (insn)"
9290 [(set_attr "type" "call")]
9293 (define_insn "*call_reg_arm"
9294 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
9295 (match_operand 1 "" ""))
9296 (use (match_operand 2 "" ""))
9297 (clobber (reg:SI LR_REGNUM))]
9298 "TARGET_ARM && !arm_arch5 && !SIBLING_CALL_P (insn)"
9300 return output_call (operands);
9302 ;; length is worst case, normally it is only two
9303 [(set_attr "length" "12")
9304 (set_attr "type" "call")]
9308 ;; Note: not used for armv5+ because the sequence used (ldr pc, ...) is not
9309 ;; considered a function call by the branch predictor of some cores (PR40887).
9310 ;; Falls back to blx rN (*call_reg_armv5).
9312 (define_insn "*call_mem"
9313 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
9314 (match_operand 1 "" ""))
9315 (use (match_operand 2 "" ""))
9316 (clobber (reg:SI LR_REGNUM))]
9317 "TARGET_ARM && !arm_arch5 && !SIBLING_CALL_P (insn)"
9319 return output_call_mem (operands);
9321 [(set_attr "length" "12")
9322 (set_attr "type" "call")]
9325 (define_insn "*call_reg_thumb1_v5"
9326 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
9327 (match_operand 1 "" ""))
9328 (use (match_operand 2 "" ""))
9329 (clobber (reg:SI LR_REGNUM))]
9330 "TARGET_THUMB1 && arm_arch5 && !SIBLING_CALL_P (insn)"
9332 [(set_attr "length" "2")
9333 (set_attr "type" "call")]
9336 (define_insn "*call_reg_thumb1"
9337 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
9338 (match_operand 1 "" ""))
9339 (use (match_operand 2 "" ""))
9340 (clobber (reg:SI LR_REGNUM))]
9341 "TARGET_THUMB1 && !arm_arch5 && !SIBLING_CALL_P (insn)"
9344 if (!TARGET_CALLER_INTERWORKING)
9345 return thumb_call_via_reg (operands[0]);
9346 else if (operands[1] == const0_rtx)
9347 return \"bl\\t%__interwork_call_via_%0\";
9348 else if (frame_pointer_needed)
9349 return \"bl\\t%__interwork_r7_call_via_%0\";
9351 return \"bl\\t%__interwork_r11_call_via_%0\";
9353 [(set_attr "type" "call")]
9356 (define_expand "call_value"
9357 [(parallel [(set (match_operand 0 "" "")
9358 (call (match_operand 1 "memory_operand" "")
9359 (match_operand 2 "general_operand" "")))
9360 (use (match_operand 3 "" ""))
9361 (clobber (reg:SI LR_REGNUM))])]
9367 /* In an untyped call, we can get NULL for operand 2. */
9368 if (operands[3] == 0)
9369 operands[3] = const0_rtx;
9371 /* Decide if we should generate indirect calls by loading the
9372 32-bit address of the callee into a register before performing the
9374 callee = XEXP (operands[1], 0);
9375 if (GET_CODE (callee) == SYMBOL_REF
9376 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
9378 XEXP (operands[1], 0) = force_reg (Pmode, callee);
9380 pat = gen_call_value_internal (operands[0], operands[1],
9381 operands[2], operands[3]);
9382 arm_emit_call_insn (pat, XEXP (operands[1], 0));
9387 (define_expand "call_value_internal"
9388 [(parallel [(set (match_operand 0 "" "")
9389 (call (match_operand 1 "memory_operand" "")
9390 (match_operand 2 "general_operand" "")))
9391 (use (match_operand 3 "" ""))
9392 (clobber (reg:SI LR_REGNUM))])])
9394 (define_insn "*call_value_reg_armv5"
9395 [(set (match_operand 0 "" "")
9396 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
9397 (match_operand 2 "" "")))
9398 (use (match_operand 3 "" ""))
9399 (clobber (reg:SI LR_REGNUM))]
9400 "TARGET_ARM && arm_arch5 && !SIBLING_CALL_P (insn)"
9402 [(set_attr "type" "call")]
9405 (define_insn "*call_value_reg_arm"
9406 [(set (match_operand 0 "" "")
9407 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
9408 (match_operand 2 "" "")))
9409 (use (match_operand 3 "" ""))
9410 (clobber (reg:SI LR_REGNUM))]
9411 "TARGET_ARM && !arm_arch5 && !SIBLING_CALL_P (insn)"
9413 return output_call (&operands[1]);
9415 [(set_attr "length" "12")
9416 (set_attr "type" "call")]
9419 ;; Note: see *call_mem
9421 (define_insn "*call_value_mem"
9422 [(set (match_operand 0 "" "")
9423 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
9424 (match_operand 2 "" "")))
9425 (use (match_operand 3 "" ""))
9426 (clobber (reg:SI LR_REGNUM))]
9427 "TARGET_ARM && !arm_arch5 && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))
9428 && !SIBLING_CALL_P (insn)"
9430 return output_call_mem (&operands[1]);
9432 [(set_attr "length" "12")
9433 (set_attr "type" "call")]
9436 (define_insn "*call_value_reg_thumb1_v5"
9437 [(set (match_operand 0 "" "")
9438 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
9439 (match_operand 2 "" "")))
9440 (use (match_operand 3 "" ""))
9441 (clobber (reg:SI LR_REGNUM))]
9442 "TARGET_THUMB1 && arm_arch5"
9444 [(set_attr "length" "2")
9445 (set_attr "type" "call")]
9448 (define_insn "*call_value_reg_thumb1"
9449 [(set (match_operand 0 "" "")
9450 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
9451 (match_operand 2 "" "")))
9452 (use (match_operand 3 "" ""))
9453 (clobber (reg:SI LR_REGNUM))]
9454 "TARGET_THUMB1 && !arm_arch5"
9457 if (!TARGET_CALLER_INTERWORKING)
9458 return thumb_call_via_reg (operands[1]);
9459 else if (operands[2] == const0_rtx)
9460 return \"bl\\t%__interwork_call_via_%1\";
9461 else if (frame_pointer_needed)
9462 return \"bl\\t%__interwork_r7_call_via_%1\";
9464 return \"bl\\t%__interwork_r11_call_via_%1\";
9466 [(set_attr "type" "call")]
9469 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
9470 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
9472 (define_insn "*call_symbol"
9473 [(call (mem:SI (match_operand:SI 0 "" ""))
9474 (match_operand 1 "" ""))
9475 (use (match_operand 2 "" ""))
9476 (clobber (reg:SI LR_REGNUM))]
9478 && !SIBLING_CALL_P (insn)
9479 && (GET_CODE (operands[0]) == SYMBOL_REF)
9480 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
9483 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
9485 [(set_attr "type" "call")]
9488 (define_insn "*call_value_symbol"
9489 [(set (match_operand 0 "" "")
9490 (call (mem:SI (match_operand:SI 1 "" ""))
9491 (match_operand:SI 2 "" "")))
9492 (use (match_operand 3 "" ""))
9493 (clobber (reg:SI LR_REGNUM))]
9495 && !SIBLING_CALL_P (insn)
9496 && (GET_CODE (operands[1]) == SYMBOL_REF)
9497 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
9500 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
9502 [(set_attr "type" "call")]
9505 (define_insn "*call_insn"
9506 [(call (mem:SI (match_operand:SI 0 "" ""))
9507 (match_operand:SI 1 "" ""))
9508 (use (match_operand 2 "" ""))
9509 (clobber (reg:SI LR_REGNUM))]
9511 && GET_CODE (operands[0]) == SYMBOL_REF
9512 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
9514 [(set_attr "length" "4")
9515 (set_attr "type" "call")]
9518 (define_insn "*call_value_insn"
9519 [(set (match_operand 0 "" "")
9520 (call (mem:SI (match_operand 1 "" ""))
9521 (match_operand 2 "" "")))
9522 (use (match_operand 3 "" ""))
9523 (clobber (reg:SI LR_REGNUM))]
9525 && GET_CODE (operands[1]) == SYMBOL_REF
9526 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
9528 [(set_attr "length" "4")
9529 (set_attr "type" "call")]
9532 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
9533 (define_expand "sibcall"
9534 [(parallel [(call (match_operand 0 "memory_operand" "")
9535 (match_operand 1 "general_operand" ""))
9537 (use (match_operand 2 "" ""))])]
9541 if (!REG_P (XEXP (operands[0], 0))
9542 && (GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF))
9543 XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
9545 if (operands[2] == NULL_RTX)
9546 operands[2] = const0_rtx;
9550 (define_expand "sibcall_value"
9551 [(parallel [(set (match_operand 0 "" "")
9552 (call (match_operand 1 "memory_operand" "")
9553 (match_operand 2 "general_operand" "")))
9555 (use (match_operand 3 "" ""))])]
9559 if (!REG_P (XEXP (operands[1], 0)) &&
9560 (GET_CODE (XEXP (operands[1],0)) != SYMBOL_REF))
9561 XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
9563 if (operands[3] == NULL_RTX)
9564 operands[3] = const0_rtx;
9568 (define_insn "*sibcall_insn"
9569 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs, US"))
9570 (match_operand 1 "" ""))
9572 (use (match_operand 2 "" ""))]
9573 "TARGET_32BIT && SIBLING_CALL_P (insn)"
9575 if (which_alternative == 1)
9576 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
9579 if (arm_arch5 || arm_arch4t)
9580 return \"bx%?\\t%0\\t%@ indirect register sibling call\";
9582 return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
9585 [(set_attr "type" "call")]
9588 (define_insn "*sibcall_value_insn"
9589 [(set (match_operand 0 "" "")
9590 (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,US"))
9591 (match_operand 2 "" "")))
9593 (use (match_operand 3 "" ""))]
9594 "TARGET_32BIT && SIBLING_CALL_P (insn)"
9596 if (which_alternative == 1)
9597 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
9600 if (arm_arch5 || arm_arch4t)
9601 return \"bx%?\\t%1\";
9603 return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
9606 [(set_attr "type" "call")]
9609 (define_expand "<return_str>return"
9611 "(TARGET_ARM || (TARGET_THUMB2
9612 && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
9613 && !IS_STACKALIGN (arm_current_func_type ())))
9614 <return_cond_false>"
9619 thumb2_expand_return (<return_simple_p>);
9626 ;; Often the return insn will be the same as loading from memory, so set attr
9627 (define_insn "*arm_return"
9629 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
9632 if (arm_ccfsm_state == 2)
9634 arm_ccfsm_state += 2;
9637 return output_return_instruction (const_true_rtx, true, false, false);
9639 [(set_attr "type" "load1")
9640 (set_attr "length" "12")
9641 (set_attr "predicable" "yes")]
9644 (define_insn "*cond_<return_str>return"
9646 (if_then_else (match_operator 0 "arm_comparison_operator"
9647 [(match_operand 1 "cc_register" "") (const_int 0)])
9650 "TARGET_ARM <return_cond_true>"
9653 if (arm_ccfsm_state == 2)
9655 arm_ccfsm_state += 2;
9658 return output_return_instruction (operands[0], true, false,
9661 [(set_attr "conds" "use")
9662 (set_attr "length" "12")
9663 (set_attr "type" "load1")]
9666 (define_insn "*cond_<return_str>return_inverted"
9668 (if_then_else (match_operator 0 "arm_comparison_operator"
9669 [(match_operand 1 "cc_register" "") (const_int 0)])
9672 "TARGET_ARM <return_cond_true>"
9675 if (arm_ccfsm_state == 2)
9677 arm_ccfsm_state += 2;
9680 return output_return_instruction (operands[0], true, true,
9683 [(set_attr "conds" "use")
9684 (set_attr "length" "12")
9685 (set_attr "type" "load1")]
9688 (define_insn "*arm_simple_return"
9693 if (arm_ccfsm_state == 2)
9695 arm_ccfsm_state += 2;
9698 return output_return_instruction (const_true_rtx, true, false, true);
9700 [(set_attr "type" "branch")
9701 (set_attr "length" "4")
9702 (set_attr "predicable" "yes")]
9705 ;; Generate a sequence of instructions to determine if the processor is
9706 ;; in 26-bit or 32-bit mode, and return the appropriate return address
9709 (define_expand "return_addr_mask"
9711 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
9713 (set (match_operand:SI 0 "s_register_operand" "")
9714 (if_then_else:SI (eq (match_dup 1) (const_int 0))
9716 (const_int 67108860)))] ; 0x03fffffc
9719 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
9722 (define_insn "*check_arch2"
9723 [(set (match_operand:CC_NOOV 0 "cc_register" "")
9724 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
9727 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
9728 [(set_attr "length" "8")
9729 (set_attr "conds" "set")]
9732 ;; Call subroutine returning any type.
9734 (define_expand "untyped_call"
9735 [(parallel [(call (match_operand 0 "" "")
9737 (match_operand 1 "" "")
9738 (match_operand 2 "" "")])]
9743 rtx par = gen_rtx_PARALLEL (VOIDmode,
9744 rtvec_alloc (XVECLEN (operands[2], 0)));
9745 rtx addr = gen_reg_rtx (Pmode);
9749 emit_move_insn (addr, XEXP (operands[1], 0));
9750 mem = change_address (operands[1], BLKmode, addr);
9752 for (i = 0; i < XVECLEN (operands[2], 0); i++)
9754 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
9756 /* Default code only uses r0 as a return value, but we could
9757 be using anything up to 4 registers. */
9758 if (REGNO (src) == R0_REGNUM)
9759 src = gen_rtx_REG (TImode, R0_REGNUM);
9761 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
9763 size += GET_MODE_SIZE (GET_MODE (src));
9766 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
9771 for (i = 0; i < XVECLEN (par, 0); i++)
9773 HOST_WIDE_INT offset = 0;
9774 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
9777 emit_move_insn (addr, plus_constant (Pmode, addr, size));
9779 mem = change_address (mem, GET_MODE (reg), NULL);
9780 if (REGNO (reg) == R0_REGNUM)
9782 /* On thumb we have to use a write-back instruction. */
9783 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
9784 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
9785 size = TARGET_ARM ? 16 : 0;
9789 emit_move_insn (mem, reg);
9790 size = GET_MODE_SIZE (GET_MODE (reg));
9794 /* The optimizer does not know that the call sets the function value
9795 registers we stored in the result block. We avoid problems by
9796 claiming that all hard registers are used and clobbered at this
9798 emit_insn (gen_blockage ());
9804 (define_expand "untyped_return"
9805 [(match_operand:BLK 0 "memory_operand" "")
9806 (match_operand 1 "" "")]
9811 rtx addr = gen_reg_rtx (Pmode);
9815 emit_move_insn (addr, XEXP (operands[0], 0));
9816 mem = change_address (operands[0], BLKmode, addr);
9818 for (i = 0; i < XVECLEN (operands[1], 0); i++)
9820 HOST_WIDE_INT offset = 0;
9821 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
9824 emit_move_insn (addr, plus_constant (Pmode, addr, size));
9826 mem = change_address (mem, GET_MODE (reg), NULL);
9827 if (REGNO (reg) == R0_REGNUM)
9829 /* On thumb we have to use a write-back instruction. */
9830 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
9831 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
9832 size = TARGET_ARM ? 16 : 0;
9836 emit_move_insn (reg, mem);
9837 size = GET_MODE_SIZE (GET_MODE (reg));
9841 /* Emit USE insns before the return. */
9842 for (i = 0; i < XVECLEN (operands[1], 0); i++)
9843 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
9845 /* Construct the return. */
9846 expand_naked_return ();
9852 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
9853 ;; all of memory. This blocks insns from being moved across this point.
9855 (define_insn "blockage"
9856 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
9859 [(set_attr "length" "0")
9860 (set_attr "type" "block")]
9863 (define_expand "casesi"
9864 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
9865 (match_operand:SI 1 "const_int_operand" "") ; lower bound
9866 (match_operand:SI 2 "const_int_operand" "") ; total range
9867 (match_operand:SI 3 "" "") ; table label
9868 (match_operand:SI 4 "" "")] ; Out of range label
9869 "TARGET_32BIT || optimize_size || flag_pic"
9872 enum insn_code code;
9873 if (operands[1] != const0_rtx)
9875 rtx reg = gen_reg_rtx (SImode);
9877 emit_insn (gen_addsi3 (reg, operands[0],
9878 gen_int_mode (-INTVAL (operands[1]),
9884 code = CODE_FOR_arm_casesi_internal;
9885 else if (TARGET_THUMB1)
9886 code = CODE_FOR_thumb1_casesi_internal_pic;
9888 code = CODE_FOR_thumb2_casesi_internal_pic;
9890 code = CODE_FOR_thumb2_casesi_internal;
9892 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
9893 operands[2] = force_reg (SImode, operands[2]);
9895 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
9896 operands[3], operands[4]));
9901 ;; The USE in this pattern is needed to tell flow analysis that this is
9902 ;; a CASESI insn. It has no other purpose.
9903 (define_insn "arm_casesi_internal"
9904 [(parallel [(set (pc)
9906 (leu (match_operand:SI 0 "s_register_operand" "r")
9907 (match_operand:SI 1 "arm_rhs_operand" "rI"))
9908 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
9909 (label_ref (match_operand 2 "" ""))))
9910 (label_ref (match_operand 3 "" ""))))
9911 (clobber (reg:CC CC_REGNUM))
9912 (use (label_ref (match_dup 2)))])]
9916 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
9917 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
9919 [(set_attr "conds" "clob")
9920 (set_attr "length" "12")]
9923 (define_expand "thumb1_casesi_internal_pic"
9924 [(match_operand:SI 0 "s_register_operand" "")
9925 (match_operand:SI 1 "thumb1_cmp_operand" "")
9926 (match_operand 2 "" "")
9927 (match_operand 3 "" "")]
9931 rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
9932 emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
9934 reg0 = gen_rtx_REG (SImode, 0);
9935 emit_move_insn (reg0, operands[0]);
9936 emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
9941 (define_insn "thumb1_casesi_dispatch"
9942 [(parallel [(set (pc) (unspec [(reg:SI 0)
9943 (label_ref (match_operand 0 "" ""))
9944 ;; (label_ref (match_operand 1 "" ""))
9946 UNSPEC_THUMB1_CASESI))
9947 (clobber (reg:SI IP_REGNUM))
9948 (clobber (reg:SI LR_REGNUM))])]
9950 "* return thumb1_output_casesi(operands);"
9951 [(set_attr "length" "4")]
9954 (define_expand "indirect_jump"
9956 (match_operand:SI 0 "s_register_operand" ""))]
9959 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
9960 address and use bx. */
9964 tmp = gen_reg_rtx (SImode);
9965 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
9971 ;; NB Never uses BX.
9972 (define_insn "*arm_indirect_jump"
9974 (match_operand:SI 0 "s_register_operand" "r"))]
9976 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
9977 [(set_attr "predicable" "yes")]
9980 (define_insn "*load_indirect_jump"
9982 (match_operand:SI 0 "memory_operand" "m"))]
9984 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
9985 [(set_attr "type" "load1")
9986 (set_attr "pool_range" "4096")
9987 (set_attr "neg_pool_range" "4084")
9988 (set_attr "predicable" "yes")]
9991 ;; NB Never uses BX.
9992 (define_insn "*thumb1_indirect_jump"
9994 (match_operand:SI 0 "register_operand" "l*r"))]
9997 [(set_attr "conds" "clob")
9998 (set_attr "length" "2")]
10008 if (TARGET_UNIFIED_ASM)
10011 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
10012 return \"mov\\tr8, r8\";
10014 [(set (attr "length")
10015 (if_then_else (eq_attr "is_thumb" "yes")
10021 ;; Patterns to allow combination of arithmetic, cond code and shifts
10023 (define_insn "*arith_shiftsi"
10024 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10025 (match_operator:SI 1 "shiftable_operator"
10026 [(match_operator:SI 3 "shift_operator"
10027 [(match_operand:SI 4 "s_register_operand" "r,r,r,r")
10028 (match_operand:SI 5 "shift_amount_operand" "M,M,M,r")])
10029 (match_operand:SI 2 "s_register_operand" "rk,rk,r,rk")]))]
10031 "%i1%?\\t%0, %2, %4%S3"
10032 [(set_attr "predicable" "yes")
10033 (set_attr "shift" "4")
10034 (set_attr "arch" "a,t2,t2,a")
10035 ;; Thumb2 doesn't allow the stack pointer to be used for
10036 ;; operand1 for all operations other than add and sub. In this case
10037 ;; the minus operation is a candidate for an rsub and hence needs
10039 ;; We have to make sure to disable the fourth alternative if
10040 ;; the shift_operator is MULT, since otherwise the insn will
10041 ;; also match a multiply_accumulate pattern and validate_change
10042 ;; will allow a replacement of the constant with a register
10043 ;; despite the checks done in shift_operator.
10044 (set_attr_alternative "insn_enabled"
10045 [(const_string "yes")
10047 (match_operand:SI 1 "add_operator" "")
10048 (const_string "yes") (const_string "no"))
10049 (const_string "yes")
10051 (match_operand:SI 3 "mult_operator" "")
10052 (const_string "no") (const_string "yes"))])
10053 (set_attr "type" "alu_shift,alu_shift,alu_shift,alu_shift_reg")])
10056 [(set (match_operand:SI 0 "s_register_operand" "")
10057 (match_operator:SI 1 "shiftable_operator"
10058 [(match_operator:SI 2 "shiftable_operator"
10059 [(match_operator:SI 3 "shift_operator"
10060 [(match_operand:SI 4 "s_register_operand" "")
10061 (match_operand:SI 5 "reg_or_int_operand" "")])
10062 (match_operand:SI 6 "s_register_operand" "")])
10063 (match_operand:SI 7 "arm_rhs_operand" "")]))
10064 (clobber (match_operand:SI 8 "s_register_operand" ""))]
10066 [(set (match_dup 8)
10067 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
10070 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
10073 (define_insn "*arith_shiftsi_compare0"
10074 [(set (reg:CC_NOOV CC_REGNUM)
10076 (match_operator:SI 1 "shiftable_operator"
10077 [(match_operator:SI 3 "shift_operator"
10078 [(match_operand:SI 4 "s_register_operand" "r,r")
10079 (match_operand:SI 5 "shift_amount_operand" "M,r")])
10080 (match_operand:SI 2 "s_register_operand" "r,r")])
10082 (set (match_operand:SI 0 "s_register_operand" "=r,r")
10083 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
10086 "%i1%.\\t%0, %2, %4%S3"
10087 [(set_attr "conds" "set")
10088 (set_attr "shift" "4")
10089 (set_attr "arch" "32,a")
10090 (set_attr "type" "alu_shift,alu_shift_reg")])
10092 (define_insn "*arith_shiftsi_compare0_scratch"
10093 [(set (reg:CC_NOOV CC_REGNUM)
10095 (match_operator:SI 1 "shiftable_operator"
10096 [(match_operator:SI 3 "shift_operator"
10097 [(match_operand:SI 4 "s_register_operand" "r,r")
10098 (match_operand:SI 5 "shift_amount_operand" "M,r")])
10099 (match_operand:SI 2 "s_register_operand" "r,r")])
10101 (clobber (match_scratch:SI 0 "=r,r"))]
10103 "%i1%.\\t%0, %2, %4%S3"
10104 [(set_attr "conds" "set")
10105 (set_attr "shift" "4")
10106 (set_attr "arch" "32,a")
10107 (set_attr "type" "alu_shift,alu_shift_reg")])
10109 (define_insn "*sub_shiftsi"
10110 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10111 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
10112 (match_operator:SI 2 "shift_operator"
10113 [(match_operand:SI 3 "s_register_operand" "r,r")
10114 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
10116 "sub%?\\t%0, %1, %3%S2"
10117 [(set_attr "predicable" "yes")
10118 (set_attr "shift" "3")
10119 (set_attr "arch" "32,a")
10120 (set_attr "type" "alu_shift,alu_shift_reg")])
10122 (define_insn "*sub_shiftsi_compare0"
10123 [(set (reg:CC_NOOV CC_REGNUM)
10125 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
10126 (match_operator:SI 2 "shift_operator"
10127 [(match_operand:SI 3 "s_register_operand" "r,r")
10128 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
10130 (set (match_operand:SI 0 "s_register_operand" "=r,r")
10131 (minus:SI (match_dup 1)
10132 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
10134 "sub%.\\t%0, %1, %3%S2"
10135 [(set_attr "conds" "set")
10136 (set_attr "shift" "3")
10137 (set_attr "arch" "32,a")
10138 (set_attr "type" "alu_shift,alu_shift_reg")])
10140 (define_insn "*sub_shiftsi_compare0_scratch"
10141 [(set (reg:CC_NOOV CC_REGNUM)
10143 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
10144 (match_operator:SI 2 "shift_operator"
10145 [(match_operand:SI 3 "s_register_operand" "r,r")
10146 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
10148 (clobber (match_scratch:SI 0 "=r,r"))]
10150 "sub%.\\t%0, %1, %3%S2"
10151 [(set_attr "conds" "set")
10152 (set_attr "shift" "3")
10153 (set_attr "arch" "32,a")
10154 (set_attr "type" "alu_shift,alu_shift_reg")])
10157 (define_insn_and_split "*and_scc"
10158 [(set (match_operand:SI 0 "s_register_operand" "=r")
10159 (and:SI (match_operator:SI 1 "arm_comparison_operator"
10160 [(match_operand 2 "cc_register" "") (const_int 0)])
10161 (match_operand:SI 3 "s_register_operand" "r")))]
10163 "#" ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
10164 "&& reload_completed"
10165 [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
10166 (cond_exec (match_dup 4) (set (match_dup 0)
10167 (and:SI (match_dup 3) (const_int 1))))]
10169 enum machine_mode mode = GET_MODE (operands[2]);
10170 enum rtx_code rc = GET_CODE (operands[1]);
10172 /* Note that operands[4] is the same as operands[1],
10173 but with VOIDmode as the result. */
10174 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
10175 if (mode == CCFPmode || mode == CCFPEmode)
10176 rc = reverse_condition_maybe_unordered (rc);
10178 rc = reverse_condition (rc);
10179 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
10181 [(set_attr "conds" "use")
10182 (set_attr "insn" "mov")
10183 (set_attr "length" "8")]
10186 (define_insn_and_split "*ior_scc"
10187 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10188 (ior:SI (match_operator:SI 1 "arm_comparison_operator"
10189 [(match_operand 2 "cc_register" "") (const_int 0)])
10190 (match_operand:SI 3 "s_register_operand" "0,?r")))]
10193 orr%d1\\t%0, %3, #1
10195 "&& reload_completed
10196 && REGNO (operands [0]) != REGNO (operands[3])"
10197 ;; && which_alternative == 1
10198 ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
10199 [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
10200 (cond_exec (match_dup 4) (set (match_dup 0)
10201 (ior:SI (match_dup 3) (const_int 1))))]
10203 enum machine_mode mode = GET_MODE (operands[2]);
10204 enum rtx_code rc = GET_CODE (operands[1]);
10206 /* Note that operands[4] is the same as operands[1],
10207 but with VOIDmode as the result. */
10208 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
10209 if (mode == CCFPmode || mode == CCFPEmode)
10210 rc = reverse_condition_maybe_unordered (rc);
10212 rc = reverse_condition (rc);
10213 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
10215 [(set_attr "conds" "use")
10216 (set_attr "length" "4,8")]
10219 ; A series of splitters for the compare_scc pattern below. Note that
10220 ; order is important.
10222 [(set (match_operand:SI 0 "s_register_operand" "")
10223 (lt:SI (match_operand:SI 1 "s_register_operand" "")
10225 (clobber (reg:CC CC_REGNUM))]
10226 "TARGET_32BIT && reload_completed"
10227 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
10230 [(set (match_operand:SI 0 "s_register_operand" "")
10231 (ge:SI (match_operand:SI 1 "s_register_operand" "")
10233 (clobber (reg:CC CC_REGNUM))]
10234 "TARGET_32BIT && reload_completed"
10235 [(set (match_dup 0) (not:SI (match_dup 1)))
10236 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
10239 [(set (match_operand:SI 0 "s_register_operand" "")
10240 (eq:SI (match_operand:SI 1 "s_register_operand" "")
10242 (clobber (reg:CC CC_REGNUM))]
10243 "TARGET_32BIT && reload_completed"
10245 [(set (reg:CC CC_REGNUM)
10246 (compare:CC (const_int 1) (match_dup 1)))
10248 (minus:SI (const_int 1) (match_dup 1)))])
10249 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
10250 (set (match_dup 0) (const_int 0)))])
10253 [(set (match_operand:SI 0 "s_register_operand" "")
10254 (ne:SI (match_operand:SI 1 "s_register_operand" "")
10255 (match_operand:SI 2 "const_int_operand" "")))
10256 (clobber (reg:CC CC_REGNUM))]
10257 "TARGET_32BIT && reload_completed"
10259 [(set (reg:CC CC_REGNUM)
10260 (compare:CC (match_dup 1) (match_dup 2)))
10261 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
10262 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
10263 (set (match_dup 0) (const_int 1)))]
10265 operands[3] = GEN_INT (-INTVAL (operands[2]));
10269 [(set (match_operand:SI 0 "s_register_operand" "")
10270 (ne:SI (match_operand:SI 1 "s_register_operand" "")
10271 (match_operand:SI 2 "arm_add_operand" "")))
10272 (clobber (reg:CC CC_REGNUM))]
10273 "TARGET_32BIT && reload_completed"
10275 [(set (reg:CC_NOOV CC_REGNUM)
10276 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
10278 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
10279 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
10280 (set (match_dup 0) (const_int 1)))])
10282 (define_insn_and_split "*compare_scc"
10283 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
10284 (match_operator:SI 1 "arm_comparison_operator"
10285 [(match_operand:SI 2 "s_register_operand" "r,r")
10286 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
10287 (clobber (reg:CC CC_REGNUM))]
10290 "&& reload_completed"
10291 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
10292 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
10293 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
10296 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10297 operands[2], operands[3]);
10298 enum rtx_code rc = GET_CODE (operands[1]);
10300 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
10302 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
10303 if (mode == CCFPmode || mode == CCFPEmode)
10304 rc = reverse_condition_maybe_unordered (rc);
10306 rc = reverse_condition (rc);
10307 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
10310 ;; Attempt to improve the sequence generated by the compare_scc splitters
10311 ;; not to use conditional execution.
10313 [(set (reg:CC CC_REGNUM)
10314 (compare:CC (match_operand:SI 1 "register_operand" "")
10315 (match_operand:SI 2 "arm_rhs_operand" "")))
10316 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
10317 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
10318 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
10319 (set (match_dup 0) (const_int 1)))
10320 (match_scratch:SI 3 "r")]
10323 [(set (reg:CC CC_REGNUM)
10324 (compare:CC (match_dup 1) (match_dup 2)))
10325 (set (match_dup 3) (minus:SI (match_dup 1) (match_dup 2)))])
10327 [(set (reg:CC CC_REGNUM)
10328 (compare:CC (const_int 0) (match_dup 3)))
10329 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
10331 [(set (match_dup 0)
10332 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
10333 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))
10334 (clobber (reg:CC CC_REGNUM))])])
10336 (define_insn "*cond_move"
10337 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10338 (if_then_else:SI (match_operator 3 "equality_operator"
10339 [(match_operator 4 "arm_comparison_operator"
10340 [(match_operand 5 "cc_register" "") (const_int 0)])
10342 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
10343 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
10346 if (GET_CODE (operands[3]) == NE)
10348 if (which_alternative != 1)
10349 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
10350 if (which_alternative != 0)
10351 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
10354 if (which_alternative != 0)
10355 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
10356 if (which_alternative != 1)
10357 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
10360 [(set_attr "conds" "use")
10361 (set_attr "insn" "mov")
10362 (set_attr "length" "4,4,8")]
10365 (define_insn "*cond_arith"
10366 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10367 (match_operator:SI 5 "shiftable_operator"
10368 [(match_operator:SI 4 "arm_comparison_operator"
10369 [(match_operand:SI 2 "s_register_operand" "r,r")
10370 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
10371 (match_operand:SI 1 "s_register_operand" "0,?r")]))
10372 (clobber (reg:CC CC_REGNUM))]
10375 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
10376 return \"%i5\\t%0, %1, %2, lsr #31\";
10378 output_asm_insn (\"cmp\\t%2, %3\", operands);
10379 if (GET_CODE (operands[5]) == AND)
10380 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
10381 else if (GET_CODE (operands[5]) == MINUS)
10382 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
10383 else if (which_alternative != 0)
10384 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
10385 return \"%i5%d4\\t%0, %1, #1\";
10387 [(set_attr "conds" "clob")
10388 (set_attr "length" "12")]
10391 (define_insn "*cond_sub"
10392 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10393 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
10394 (match_operator:SI 4 "arm_comparison_operator"
10395 [(match_operand:SI 2 "s_register_operand" "r,r")
10396 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
10397 (clobber (reg:CC CC_REGNUM))]
10400 output_asm_insn (\"cmp\\t%2, %3\", operands);
10401 if (which_alternative != 0)
10402 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
10403 return \"sub%d4\\t%0, %1, #1\";
10405 [(set_attr "conds" "clob")
10406 (set_attr "length" "8,12")]
10409 (define_insn "*cmp_ite0"
10410 [(set (match_operand 6 "dominant_cc_register" "")
10413 (match_operator 4 "arm_comparison_operator"
10414 [(match_operand:SI 0 "s_register_operand"
10415 "l,l,l,r,r,r,r,r,r")
10416 (match_operand:SI 1 "arm_add_operand"
10417 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
10418 (match_operator:SI 5 "arm_comparison_operator"
10419 [(match_operand:SI 2 "s_register_operand"
10420 "l,r,r,l,l,r,r,r,r")
10421 (match_operand:SI 3 "arm_add_operand"
10422 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
10428 static const char * const cmp1[NUM_OF_COND_CMP][2] =
10430 {\"cmp%d5\\t%0, %1\",
10431 \"cmp%d4\\t%2, %3\"},
10432 {\"cmn%d5\\t%0, #%n1\",
10433 \"cmp%d4\\t%2, %3\"},
10434 {\"cmp%d5\\t%0, %1\",
10435 \"cmn%d4\\t%2, #%n3\"},
10436 {\"cmn%d5\\t%0, #%n1\",
10437 \"cmn%d4\\t%2, #%n3\"}
10439 static const char * const cmp2[NUM_OF_COND_CMP][2] =
10444 \"cmn\\t%0, #%n1\"},
10445 {\"cmn\\t%2, #%n3\",
10447 {\"cmn\\t%2, #%n3\",
10448 \"cmn\\t%0, #%n1\"}
10450 static const char * const ite[2] =
10455 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10456 CMP_CMP, CMN_CMP, CMP_CMP,
10457 CMN_CMP, CMP_CMN, CMN_CMN};
10459 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10461 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10462 if (TARGET_THUMB2) {
10463 output_asm_insn (ite[swap], operands);
10465 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10468 [(set_attr "conds" "set")
10469 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10470 (set_attr_alternative "length"
10476 (if_then_else (eq_attr "is_thumb" "no")
10479 (if_then_else (eq_attr "is_thumb" "no")
10482 (if_then_else (eq_attr "is_thumb" "no")
10485 (if_then_else (eq_attr "is_thumb" "no")
10490 (define_insn "*cmp_ite1"
10491 [(set (match_operand 6 "dominant_cc_register" "")
10494 (match_operator 4 "arm_comparison_operator"
10495 [(match_operand:SI 0 "s_register_operand"
10496 "l,l,l,r,r,r,r,r,r")
10497 (match_operand:SI 1 "arm_add_operand"
10498 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
10499 (match_operator:SI 5 "arm_comparison_operator"
10500 [(match_operand:SI 2 "s_register_operand"
10501 "l,r,r,l,l,r,r,r,r")
10502 (match_operand:SI 3 "arm_add_operand"
10503 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
10509 static const char * const cmp1[NUM_OF_COND_CMP][2] =
10513 {\"cmn\\t%0, #%n1\",
10516 \"cmn\\t%2, #%n3\"},
10517 {\"cmn\\t%0, #%n1\",
10518 \"cmn\\t%2, #%n3\"}
10520 static const char * const cmp2[NUM_OF_COND_CMP][2] =
10522 {\"cmp%d4\\t%2, %3\",
10523 \"cmp%D5\\t%0, %1\"},
10524 {\"cmp%d4\\t%2, %3\",
10525 \"cmn%D5\\t%0, #%n1\"},
10526 {\"cmn%d4\\t%2, #%n3\",
10527 \"cmp%D5\\t%0, %1\"},
10528 {\"cmn%d4\\t%2, #%n3\",
10529 \"cmn%D5\\t%0, #%n1\"}
10531 static const char * const ite[2] =
10536 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10537 CMP_CMP, CMN_CMP, CMP_CMP,
10538 CMN_CMP, CMP_CMN, CMN_CMN};
10540 comparison_dominates_p (GET_CODE (operands[5]),
10541 reverse_condition (GET_CODE (operands[4])));
10543 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10544 if (TARGET_THUMB2) {
10545 output_asm_insn (ite[swap], operands);
10547 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10550 [(set_attr "conds" "set")
10551 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10552 (set_attr_alternative "length"
10558 (if_then_else (eq_attr "is_thumb" "no")
10561 (if_then_else (eq_attr "is_thumb" "no")
10564 (if_then_else (eq_attr "is_thumb" "no")
10567 (if_then_else (eq_attr "is_thumb" "no")
10572 (define_insn "*cmp_and"
10573 [(set (match_operand 6 "dominant_cc_register" "")
10576 (match_operator 4 "arm_comparison_operator"
10577 [(match_operand:SI 0 "s_register_operand"
10578 "l,l,l,r,r,r,r,r,r")
10579 (match_operand:SI 1 "arm_add_operand"
10580 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
10581 (match_operator:SI 5 "arm_comparison_operator"
10582 [(match_operand:SI 2 "s_register_operand"
10583 "l,r,r,l,l,r,r,r,r")
10584 (match_operand:SI 3 "arm_add_operand"
10585 "lPy,rI,L,lPy,lPy,rI,rI,L,L")]))
10590 static const char *const cmp1[NUM_OF_COND_CMP][2] =
10592 {\"cmp%d5\\t%0, %1\",
10593 \"cmp%d4\\t%2, %3\"},
10594 {\"cmn%d5\\t%0, #%n1\",
10595 \"cmp%d4\\t%2, %3\"},
10596 {\"cmp%d5\\t%0, %1\",
10597 \"cmn%d4\\t%2, #%n3\"},
10598 {\"cmn%d5\\t%0, #%n1\",
10599 \"cmn%d4\\t%2, #%n3\"}
10601 static const char *const cmp2[NUM_OF_COND_CMP][2] =
10606 \"cmn\\t%0, #%n1\"},
10607 {\"cmn\\t%2, #%n3\",
10609 {\"cmn\\t%2, #%n3\",
10610 \"cmn\\t%0, #%n1\"}
10612 static const char *const ite[2] =
10617 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10618 CMP_CMP, CMN_CMP, CMP_CMP,
10619 CMN_CMP, CMP_CMN, CMN_CMN};
10621 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10623 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10624 if (TARGET_THUMB2) {
10625 output_asm_insn (ite[swap], operands);
10627 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10630 [(set_attr "conds" "set")
10631 (set_attr "predicable" "no")
10632 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10633 (set_attr_alternative "length"
10639 (if_then_else (eq_attr "is_thumb" "no")
10642 (if_then_else (eq_attr "is_thumb" "no")
10645 (if_then_else (eq_attr "is_thumb" "no")
10648 (if_then_else (eq_attr "is_thumb" "no")
10653 (define_insn "*cmp_ior"
10654 [(set (match_operand 6 "dominant_cc_register" "")
10657 (match_operator 4 "arm_comparison_operator"
10658 [(match_operand:SI 0 "s_register_operand"
10659 "l,l,l,r,r,r,r,r,r")
10660 (match_operand:SI 1 "arm_add_operand"
10661 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
10662 (match_operator:SI 5 "arm_comparison_operator"
10663 [(match_operand:SI 2 "s_register_operand"
10664 "l,r,r,l,l,r,r,r,r")
10665 (match_operand:SI 3 "arm_add_operand"
10666 "lPy,rI,L,lPy,lPy,rI,rI,L,L")]))
10671 static const char *const cmp1[NUM_OF_COND_CMP][2] =
10675 {\"cmn\\t%0, #%n1\",
10678 \"cmn\\t%2, #%n3\"},
10679 {\"cmn\\t%0, #%n1\",
10680 \"cmn\\t%2, #%n3\"}
10682 static const char *const cmp2[NUM_OF_COND_CMP][2] =
10684 {\"cmp%D4\\t%2, %3\",
10685 \"cmp%D5\\t%0, %1\"},
10686 {\"cmp%D4\\t%2, %3\",
10687 \"cmn%D5\\t%0, #%n1\"},
10688 {\"cmn%D4\\t%2, #%n3\",
10689 \"cmp%D5\\t%0, %1\"},
10690 {\"cmn%D4\\t%2, #%n3\",
10691 \"cmn%D5\\t%0, #%n1\"}
10693 static const char *const ite[2] =
10698 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10699 CMP_CMP, CMN_CMP, CMP_CMP,
10700 CMN_CMP, CMP_CMN, CMN_CMN};
10702 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10704 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10705 if (TARGET_THUMB2) {
10706 output_asm_insn (ite[swap], operands);
10708 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10712 [(set_attr "conds" "set")
10713 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10714 (set_attr_alternative "length"
10720 (if_then_else (eq_attr "is_thumb" "no")
10723 (if_then_else (eq_attr "is_thumb" "no")
10726 (if_then_else (eq_attr "is_thumb" "no")
10729 (if_then_else (eq_attr "is_thumb" "no")
10734 (define_insn_and_split "*ior_scc_scc"
10735 [(set (match_operand:SI 0 "s_register_operand" "=Ts")
10736 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
10737 [(match_operand:SI 1 "s_register_operand" "r")
10738 (match_operand:SI 2 "arm_add_operand" "rIL")])
10739 (match_operator:SI 6 "arm_comparison_operator"
10740 [(match_operand:SI 4 "s_register_operand" "r")
10741 (match_operand:SI 5 "arm_add_operand" "rIL")])))
10742 (clobber (reg:CC CC_REGNUM))]
10744 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
10747 "TARGET_32BIT && reload_completed"
10748 [(set (match_dup 7)
10751 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10752 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10754 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
10756 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
10759 [(set_attr "conds" "clob")
10760 (set_attr "length" "16")])
10762 ; If the above pattern is followed by a CMP insn, then the compare is
10763 ; redundant, since we can rework the conditional instruction that follows.
10764 (define_insn_and_split "*ior_scc_scc_cmp"
10765 [(set (match_operand 0 "dominant_cc_register" "")
10766 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
10767 [(match_operand:SI 1 "s_register_operand" "r")
10768 (match_operand:SI 2 "arm_add_operand" "rIL")])
10769 (match_operator:SI 6 "arm_comparison_operator"
10770 [(match_operand:SI 4 "s_register_operand" "r")
10771 (match_operand:SI 5 "arm_add_operand" "rIL")]))
10773 (set (match_operand:SI 7 "s_register_operand" "=Ts")
10774 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10775 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
10778 "TARGET_32BIT && reload_completed"
10779 [(set (match_dup 0)
10782 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10783 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10785 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
10787 [(set_attr "conds" "set")
10788 (set_attr "length" "16")])
10790 (define_insn_and_split "*and_scc_scc"
10791 [(set (match_operand:SI 0 "s_register_operand" "=Ts")
10792 (and:SI (match_operator:SI 3 "arm_comparison_operator"
10793 [(match_operand:SI 1 "s_register_operand" "r")
10794 (match_operand:SI 2 "arm_add_operand" "rIL")])
10795 (match_operator:SI 6 "arm_comparison_operator"
10796 [(match_operand:SI 4 "s_register_operand" "r")
10797 (match_operand:SI 5 "arm_add_operand" "rIL")])))
10798 (clobber (reg:CC CC_REGNUM))]
10800 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10803 "TARGET_32BIT && reload_completed
10804 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10806 [(set (match_dup 7)
10809 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10810 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10812 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
10814 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
10817 [(set_attr "conds" "clob")
10818 (set_attr "length" "16")])
10820 ; If the above pattern is followed by a CMP insn, then the compare is
10821 ; redundant, since we can rework the conditional instruction that follows.
10822 (define_insn_and_split "*and_scc_scc_cmp"
10823 [(set (match_operand 0 "dominant_cc_register" "")
10824 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
10825 [(match_operand:SI 1 "s_register_operand" "r")
10826 (match_operand:SI 2 "arm_add_operand" "rIL")])
10827 (match_operator:SI 6 "arm_comparison_operator"
10828 [(match_operand:SI 4 "s_register_operand" "r")
10829 (match_operand:SI 5 "arm_add_operand" "rIL")]))
10831 (set (match_operand:SI 7 "s_register_operand" "=Ts")
10832 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10833 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
10836 "TARGET_32BIT && reload_completed"
10837 [(set (match_dup 0)
10840 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10841 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10843 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
10845 [(set_attr "conds" "set")
10846 (set_attr "length" "16")])
10848 ;; If there is no dominance in the comparison, then we can still save an
10849 ;; instruction in the AND case, since we can know that the second compare
10850 ;; need only zero the value if false (if true, then the value is already
10852 (define_insn_and_split "*and_scc_scc_nodom"
10853 [(set (match_operand:SI 0 "s_register_operand" "=&Ts,&Ts,&Ts")
10854 (and:SI (match_operator:SI 3 "arm_comparison_operator"
10855 [(match_operand:SI 1 "s_register_operand" "r,r,0")
10856 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
10857 (match_operator:SI 6 "arm_comparison_operator"
10858 [(match_operand:SI 4 "s_register_operand" "r,r,r")
10859 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
10860 (clobber (reg:CC CC_REGNUM))]
10862 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10865 "TARGET_32BIT && reload_completed"
10866 [(parallel [(set (match_dup 0)
10867 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
10868 (clobber (reg:CC CC_REGNUM))])
10869 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
10871 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
10874 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
10875 operands[4], operands[5]),
10877 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
10879 [(set_attr "conds" "clob")
10880 (set_attr "length" "20")])
10883 [(set (reg:CC_NOOV CC_REGNUM)
10884 (compare:CC_NOOV (ior:SI
10885 (and:SI (match_operand:SI 0 "s_register_operand" "")
10887 (match_operator:SI 1 "arm_comparison_operator"
10888 [(match_operand:SI 2 "s_register_operand" "")
10889 (match_operand:SI 3 "arm_add_operand" "")]))
10891 (clobber (match_operand:SI 4 "s_register_operand" ""))]
10893 [(set (match_dup 4)
10894 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
10896 (set (reg:CC_NOOV CC_REGNUM)
10897 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
10902 [(set (reg:CC_NOOV CC_REGNUM)
10903 (compare:CC_NOOV (ior:SI
10904 (match_operator:SI 1 "arm_comparison_operator"
10905 [(match_operand:SI 2 "s_register_operand" "")
10906 (match_operand:SI 3 "arm_add_operand" "")])
10907 (and:SI (match_operand:SI 0 "s_register_operand" "")
10910 (clobber (match_operand:SI 4 "s_register_operand" ""))]
10912 [(set (match_dup 4)
10913 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
10915 (set (reg:CC_NOOV CC_REGNUM)
10916 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
10919 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
10921 (define_insn_and_split "*negscc"
10922 [(set (match_operand:SI 0 "s_register_operand" "=r")
10923 (neg:SI (match_operator 3 "arm_comparison_operator"
10924 [(match_operand:SI 1 "s_register_operand" "r")
10925 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
10926 (clobber (reg:CC CC_REGNUM))]
10929 "&& reload_completed"
10932 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
10934 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
10936 /* Emit mov\\t%0, %1, asr #31 */
10937 emit_insn (gen_rtx_SET (VOIDmode,
10939 gen_rtx_ASHIFTRT (SImode,
10944 else if (GET_CODE (operands[3]) == NE)
10946 /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
10947 if (CONST_INT_P (operands[2]))
10948 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
10949 GEN_INT (- INTVAL (operands[2]))));
10951 emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
10953 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10954 gen_rtx_NE (SImode,
10957 gen_rtx_SET (SImode,
10964 /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
10965 emit_insn (gen_rtx_SET (VOIDmode,
10967 gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
10968 enum rtx_code rc = GET_CODE (operands[3]);
10970 rc = reverse_condition (rc);
10971 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10972 gen_rtx_fmt_ee (rc,
10976 gen_rtx_SET (VOIDmode, operands[0], const0_rtx)));
10977 rc = GET_CODE (operands[3]);
10978 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10979 gen_rtx_fmt_ee (rc,
10983 gen_rtx_SET (VOIDmode,
10990 [(set_attr "conds" "clob")
10991 (set_attr "length" "12")]
10994 (define_insn "movcond"
10995 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10997 (match_operator 5 "arm_comparison_operator"
10998 [(match_operand:SI 3 "s_register_operand" "r,r,r")
10999 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
11000 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
11001 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
11002 (clobber (reg:CC CC_REGNUM))]
11005 if (GET_CODE (operands[5]) == LT
11006 && (operands[4] == const0_rtx))
11008 if (which_alternative != 1 && REG_P (operands[1]))
11010 if (operands[2] == const0_rtx)
11011 return \"and\\t%0, %1, %3, asr #31\";
11012 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
11014 else if (which_alternative != 0 && REG_P (operands[2]))
11016 if (operands[1] == const0_rtx)
11017 return \"bic\\t%0, %2, %3, asr #31\";
11018 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
11020 /* The only case that falls through to here is when both ops 1 & 2
11024 if (GET_CODE (operands[5]) == GE
11025 && (operands[4] == const0_rtx))
11027 if (which_alternative != 1 && REG_P (operands[1]))
11029 if (operands[2] == const0_rtx)
11030 return \"bic\\t%0, %1, %3, asr #31\";
11031 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
11033 else if (which_alternative != 0 && REG_P (operands[2]))
11035 if (operands[1] == const0_rtx)
11036 return \"and\\t%0, %2, %3, asr #31\";
11037 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
11039 /* The only case that falls through to here is when both ops 1 & 2
11042 if (CONST_INT_P (operands[4])
11043 && !const_ok_for_arm (INTVAL (operands[4])))
11044 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
11046 output_asm_insn (\"cmp\\t%3, %4\", operands);
11047 if (which_alternative != 0)
11048 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
11049 if (which_alternative != 1)
11050 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
11053 [(set_attr "conds" "clob")
11054 (set_attr "length" "8,8,12")]
11057 ;; ??? The patterns below need checking for Thumb-2 usefulness.
11059 (define_insn "*ifcompare_plus_move"
11060 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11061 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
11062 [(match_operand:SI 4 "s_register_operand" "r,r")
11063 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11065 (match_operand:SI 2 "s_register_operand" "r,r")
11066 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
11067 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
11068 (clobber (reg:CC CC_REGNUM))]
11071 [(set_attr "conds" "clob")
11072 (set_attr "length" "8,12")]
11075 (define_insn "*if_plus_move"
11076 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
11078 (match_operator 4 "arm_comparison_operator"
11079 [(match_operand 5 "cc_register" "") (const_int 0)])
11081 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
11082 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
11083 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
11086 add%d4\\t%0, %2, %3
11087 sub%d4\\t%0, %2, #%n3
11088 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
11089 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
11090 [(set_attr "conds" "use")
11091 (set_attr "length" "4,4,8,8")
11092 (set_attr_alternative "type"
11093 [(if_then_else (match_operand 3 "const_int_operand" "")
11094 (const_string "simple_alu_imm" )
11095 (const_string "*"))
11096 (const_string "simple_alu_imm")
11098 (const_string "*")])]
11101 (define_insn "*ifcompare_move_plus"
11102 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11103 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
11104 [(match_operand:SI 4 "s_register_operand" "r,r")
11105 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11106 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
11108 (match_operand:SI 2 "s_register_operand" "r,r")
11109 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
11110 (clobber (reg:CC CC_REGNUM))]
11113 [(set_attr "conds" "clob")
11114 (set_attr "length" "8,12")]
11117 (define_insn "*if_move_plus"
11118 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
11120 (match_operator 4 "arm_comparison_operator"
11121 [(match_operand 5 "cc_register" "") (const_int 0)])
11122 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
11124 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
11125 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
11128 add%D4\\t%0, %2, %3
11129 sub%D4\\t%0, %2, #%n3
11130 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
11131 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
11132 [(set_attr "conds" "use")
11133 (set_attr "length" "4,4,8,8")
11134 (set_attr_alternative "type"
11135 [(if_then_else (match_operand 3 "const_int_operand" "")
11136 (const_string "simple_alu_imm" )
11137 (const_string "*"))
11138 (const_string "simple_alu_imm")
11140 (const_string "*")])]
11143 (define_insn "*ifcompare_arith_arith"
11144 [(set (match_operand:SI 0 "s_register_operand" "=r")
11145 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
11146 [(match_operand:SI 5 "s_register_operand" "r")
11147 (match_operand:SI 6 "arm_add_operand" "rIL")])
11148 (match_operator:SI 8 "shiftable_operator"
11149 [(match_operand:SI 1 "s_register_operand" "r")
11150 (match_operand:SI 2 "arm_rhs_operand" "rI")])
11151 (match_operator:SI 7 "shiftable_operator"
11152 [(match_operand:SI 3 "s_register_operand" "r")
11153 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
11154 (clobber (reg:CC CC_REGNUM))]
11157 [(set_attr "conds" "clob")
11158 (set_attr "length" "12")]
11161 (define_insn "*if_arith_arith"
11162 [(set (match_operand:SI 0 "s_register_operand" "=r")
11163 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
11164 [(match_operand 8 "cc_register" "") (const_int 0)])
11165 (match_operator:SI 6 "shiftable_operator"
11166 [(match_operand:SI 1 "s_register_operand" "r")
11167 (match_operand:SI 2 "arm_rhs_operand" "rI")])
11168 (match_operator:SI 7 "shiftable_operator"
11169 [(match_operand:SI 3 "s_register_operand" "r")
11170 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
11172 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
11173 [(set_attr "conds" "use")
11174 (set_attr "length" "8")]
11177 (define_insn "*ifcompare_arith_move"
11178 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11179 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
11180 [(match_operand:SI 2 "s_register_operand" "r,r")
11181 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
11182 (match_operator:SI 7 "shiftable_operator"
11183 [(match_operand:SI 4 "s_register_operand" "r,r")
11184 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
11185 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
11186 (clobber (reg:CC CC_REGNUM))]
11189 /* If we have an operation where (op x 0) is the identity operation and
11190 the conditional operator is LT or GE and we are comparing against zero and
11191 everything is in registers then we can do this in two instructions. */
11192 if (operands[3] == const0_rtx
11193 && GET_CODE (operands[7]) != AND
11194 && REG_P (operands[5])
11195 && REG_P (operands[1])
11196 && REGNO (operands[1]) == REGNO (operands[4])
11197 && REGNO (operands[4]) != REGNO (operands[0]))
11199 if (GET_CODE (operands[6]) == LT)
11200 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
11201 else if (GET_CODE (operands[6]) == GE)
11202 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
11204 if (CONST_INT_P (operands[3])
11205 && !const_ok_for_arm (INTVAL (operands[3])))
11206 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
11208 output_asm_insn (\"cmp\\t%2, %3\", operands);
11209 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
11210 if (which_alternative != 0)
11211 return \"mov%D6\\t%0, %1\";
11214 [(set_attr "conds" "clob")
11215 (set_attr "length" "8,12")]
11218 (define_insn "*if_arith_move"
11219 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11220 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
11221 [(match_operand 6 "cc_register" "") (const_int 0)])
11222 (match_operator:SI 5 "shiftable_operator"
11223 [(match_operand:SI 2 "s_register_operand" "r,r")
11224 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
11225 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
11228 %I5%d4\\t%0, %2, %3
11229 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
11230 [(set_attr "conds" "use")
11231 (set_attr "length" "4,8")
11232 (set_attr "type" "*,*")]
11235 (define_insn "*ifcompare_move_arith"
11236 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11237 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
11238 [(match_operand:SI 4 "s_register_operand" "r,r")
11239 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11240 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
11241 (match_operator:SI 7 "shiftable_operator"
11242 [(match_operand:SI 2 "s_register_operand" "r,r")
11243 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
11244 (clobber (reg:CC CC_REGNUM))]
11247 /* If we have an operation where (op x 0) is the identity operation and
11248 the conditional operator is LT or GE and we are comparing against zero and
11249 everything is in registers then we can do this in two instructions */
11250 if (operands[5] == const0_rtx
11251 && GET_CODE (operands[7]) != AND
11252 && REG_P (operands[3])
11253 && REG_P (operands[1])
11254 && REGNO (operands[1]) == REGNO (operands[2])
11255 && REGNO (operands[2]) != REGNO (operands[0]))
11257 if (GET_CODE (operands[6]) == GE)
11258 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
11259 else if (GET_CODE (operands[6]) == LT)
11260 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
11263 if (CONST_INT_P (operands[5])
11264 && !const_ok_for_arm (INTVAL (operands[5])))
11265 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
11267 output_asm_insn (\"cmp\\t%4, %5\", operands);
11269 if (which_alternative != 0)
11270 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
11271 return \"%I7%D6\\t%0, %2, %3\";
11273 [(set_attr "conds" "clob")
11274 (set_attr "length" "8,12")]
11277 (define_insn "*if_move_arith"
11278 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11280 (match_operator 4 "arm_comparison_operator"
11281 [(match_operand 6 "cc_register" "") (const_int 0)])
11282 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
11283 (match_operator:SI 5 "shiftable_operator"
11284 [(match_operand:SI 2 "s_register_operand" "r,r")
11285 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
11288 %I5%D4\\t%0, %2, %3
11289 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
11290 [(set_attr "conds" "use")
11291 (set_attr "length" "4,8")
11292 (set_attr "type" "*,*")]
11295 (define_insn "*ifcompare_move_not"
11296 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11298 (match_operator 5 "arm_comparison_operator"
11299 [(match_operand:SI 3 "s_register_operand" "r,r")
11300 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11301 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11303 (match_operand:SI 2 "s_register_operand" "r,r"))))
11304 (clobber (reg:CC CC_REGNUM))]
11307 [(set_attr "conds" "clob")
11308 (set_attr "length" "8,12")]
11311 (define_insn "*if_move_not"
11312 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11314 (match_operator 4 "arm_comparison_operator"
11315 [(match_operand 3 "cc_register" "") (const_int 0)])
11316 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
11317 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
11321 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
11322 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
11323 [(set_attr "conds" "use")
11324 (set_attr "insn" "mvn")
11325 (set_attr "length" "4,8,8")]
11328 (define_insn "*ifcompare_not_move"
11329 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11331 (match_operator 5 "arm_comparison_operator"
11332 [(match_operand:SI 3 "s_register_operand" "r,r")
11333 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11335 (match_operand:SI 2 "s_register_operand" "r,r"))
11336 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
11337 (clobber (reg:CC CC_REGNUM))]
11340 [(set_attr "conds" "clob")
11341 (set_attr "length" "8,12")]
11344 (define_insn "*if_not_move"
11345 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11347 (match_operator 4 "arm_comparison_operator"
11348 [(match_operand 3 "cc_register" "") (const_int 0)])
11349 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
11350 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
11354 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
11355 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
11356 [(set_attr "conds" "use")
11357 (set_attr "insn" "mvn")
11358 (set_attr "length" "4,8,8")]
11361 (define_insn "*ifcompare_shift_move"
11362 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11364 (match_operator 6 "arm_comparison_operator"
11365 [(match_operand:SI 4 "s_register_operand" "r,r")
11366 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11367 (match_operator:SI 7 "shift_operator"
11368 [(match_operand:SI 2 "s_register_operand" "r,r")
11369 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
11370 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
11371 (clobber (reg:CC CC_REGNUM))]
11374 [(set_attr "conds" "clob")
11375 (set_attr "length" "8,12")]
11378 (define_insn "*if_shift_move"
11379 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11381 (match_operator 5 "arm_comparison_operator"
11382 [(match_operand 6 "cc_register" "") (const_int 0)])
11383 (match_operator:SI 4 "shift_operator"
11384 [(match_operand:SI 2 "s_register_operand" "r,r,r")
11385 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
11386 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
11390 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
11391 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
11392 [(set_attr "conds" "use")
11393 (set_attr "shift" "2")
11394 (set_attr "length" "4,8,8")
11395 (set_attr "insn" "mov")
11396 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
11397 (const_string "alu_shift")
11398 (const_string "alu_shift_reg")))]
11401 (define_insn "*ifcompare_move_shift"
11402 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11404 (match_operator 6 "arm_comparison_operator"
11405 [(match_operand:SI 4 "s_register_operand" "r,r")
11406 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11407 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11408 (match_operator:SI 7 "shift_operator"
11409 [(match_operand:SI 2 "s_register_operand" "r,r")
11410 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
11411 (clobber (reg:CC CC_REGNUM))]
11414 [(set_attr "conds" "clob")
11415 (set_attr "length" "8,12")]
11418 (define_insn "*if_move_shift"
11419 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11421 (match_operator 5 "arm_comparison_operator"
11422 [(match_operand 6 "cc_register" "") (const_int 0)])
11423 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
11424 (match_operator:SI 4 "shift_operator"
11425 [(match_operand:SI 2 "s_register_operand" "r,r,r")
11426 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
11430 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
11431 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
11432 [(set_attr "conds" "use")
11433 (set_attr "shift" "2")
11434 (set_attr "length" "4,8,8")
11435 (set_attr "insn" "mov")
11436 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
11437 (const_string "alu_shift")
11438 (const_string "alu_shift_reg")))]
11441 (define_insn "*ifcompare_shift_shift"
11442 [(set (match_operand:SI 0 "s_register_operand" "=r")
11444 (match_operator 7 "arm_comparison_operator"
11445 [(match_operand:SI 5 "s_register_operand" "r")
11446 (match_operand:SI 6 "arm_add_operand" "rIL")])
11447 (match_operator:SI 8 "shift_operator"
11448 [(match_operand:SI 1 "s_register_operand" "r")
11449 (match_operand:SI 2 "arm_rhs_operand" "rM")])
11450 (match_operator:SI 9 "shift_operator"
11451 [(match_operand:SI 3 "s_register_operand" "r")
11452 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
11453 (clobber (reg:CC CC_REGNUM))]
11456 [(set_attr "conds" "clob")
11457 (set_attr "length" "12")]
11460 (define_insn "*if_shift_shift"
11461 [(set (match_operand:SI 0 "s_register_operand" "=r")
11463 (match_operator 5 "arm_comparison_operator"
11464 [(match_operand 8 "cc_register" "") (const_int 0)])
11465 (match_operator:SI 6 "shift_operator"
11466 [(match_operand:SI 1 "s_register_operand" "r")
11467 (match_operand:SI 2 "arm_rhs_operand" "rM")])
11468 (match_operator:SI 7 "shift_operator"
11469 [(match_operand:SI 3 "s_register_operand" "r")
11470 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
11472 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
11473 [(set_attr "conds" "use")
11474 (set_attr "shift" "1")
11475 (set_attr "length" "8")
11476 (set_attr "insn" "mov")
11477 (set (attr "type") (if_then_else
11478 (and (match_operand 2 "const_int_operand" "")
11479 (match_operand 4 "const_int_operand" ""))
11480 (const_string "alu_shift")
11481 (const_string "alu_shift_reg")))]
11484 (define_insn "*ifcompare_not_arith"
11485 [(set (match_operand:SI 0 "s_register_operand" "=r")
11487 (match_operator 6 "arm_comparison_operator"
11488 [(match_operand:SI 4 "s_register_operand" "r")
11489 (match_operand:SI 5 "arm_add_operand" "rIL")])
11490 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
11491 (match_operator:SI 7 "shiftable_operator"
11492 [(match_operand:SI 2 "s_register_operand" "r")
11493 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
11494 (clobber (reg:CC CC_REGNUM))]
11497 [(set_attr "conds" "clob")
11498 (set_attr "length" "12")]
11501 (define_insn "*if_not_arith"
11502 [(set (match_operand:SI 0 "s_register_operand" "=r")
11504 (match_operator 5 "arm_comparison_operator"
11505 [(match_operand 4 "cc_register" "") (const_int 0)])
11506 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
11507 (match_operator:SI 6 "shiftable_operator"
11508 [(match_operand:SI 2 "s_register_operand" "r")
11509 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
11511 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
11512 [(set_attr "conds" "use")
11513 (set_attr "insn" "mvn")
11514 (set_attr "length" "8")]
11517 (define_insn "*ifcompare_arith_not"
11518 [(set (match_operand:SI 0 "s_register_operand" "=r")
11520 (match_operator 6 "arm_comparison_operator"
11521 [(match_operand:SI 4 "s_register_operand" "r")
11522 (match_operand:SI 5 "arm_add_operand" "rIL")])
11523 (match_operator:SI 7 "shiftable_operator"
11524 [(match_operand:SI 2 "s_register_operand" "r")
11525 (match_operand:SI 3 "arm_rhs_operand" "rI")])
11526 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
11527 (clobber (reg:CC CC_REGNUM))]
11530 [(set_attr "conds" "clob")
11531 (set_attr "length" "12")]
11534 (define_insn "*if_arith_not"
11535 [(set (match_operand:SI 0 "s_register_operand" "=r")
11537 (match_operator 5 "arm_comparison_operator"
11538 [(match_operand 4 "cc_register" "") (const_int 0)])
11539 (match_operator:SI 6 "shiftable_operator"
11540 [(match_operand:SI 2 "s_register_operand" "r")
11541 (match_operand:SI 3 "arm_rhs_operand" "rI")])
11542 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
11544 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
11545 [(set_attr "conds" "use")
11546 (set_attr "insn" "mvn")
11547 (set_attr "length" "8")]
11550 (define_insn "*ifcompare_neg_move"
11551 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11553 (match_operator 5 "arm_comparison_operator"
11554 [(match_operand:SI 3 "s_register_operand" "r,r")
11555 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11556 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
11557 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
11558 (clobber (reg:CC CC_REGNUM))]
11561 [(set_attr "conds" "clob")
11562 (set_attr "length" "8,12")]
11565 (define_insn "*if_neg_move"
11566 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11568 (match_operator 4 "arm_comparison_operator"
11569 [(match_operand 3 "cc_register" "") (const_int 0)])
11570 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
11571 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
11574 rsb%d4\\t%0, %2, #0
11575 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
11576 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
11577 [(set_attr "conds" "use")
11578 (set_attr "length" "4,8,8")]
11581 (define_insn "*ifcompare_move_neg"
11582 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11584 (match_operator 5 "arm_comparison_operator"
11585 [(match_operand:SI 3 "s_register_operand" "r,r")
11586 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11587 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11588 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
11589 (clobber (reg:CC CC_REGNUM))]
11592 [(set_attr "conds" "clob")
11593 (set_attr "length" "8,12")]
11596 (define_insn "*if_move_neg"
11597 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11599 (match_operator 4 "arm_comparison_operator"
11600 [(match_operand 3 "cc_register" "") (const_int 0)])
11601 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
11602 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
11605 rsb%D4\\t%0, %2, #0
11606 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
11607 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
11608 [(set_attr "conds" "use")
11609 (set_attr "length" "4,8,8")]
11612 (define_insn "*arith_adjacentmem"
11613 [(set (match_operand:SI 0 "s_register_operand" "=r")
11614 (match_operator:SI 1 "shiftable_operator"
11615 [(match_operand:SI 2 "memory_operand" "m")
11616 (match_operand:SI 3 "memory_operand" "m")]))
11617 (clobber (match_scratch:SI 4 "=r"))]
11618 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
11624 HOST_WIDE_INT val1 = 0, val2 = 0;
11626 if (REGNO (operands[0]) > REGNO (operands[4]))
11628 ldm[1] = operands[4];
11629 ldm[2] = operands[0];
11633 ldm[1] = operands[0];
11634 ldm[2] = operands[4];
11637 base_reg = XEXP (operands[2], 0);
11639 if (!REG_P (base_reg))
11641 val1 = INTVAL (XEXP (base_reg, 1));
11642 base_reg = XEXP (base_reg, 0);
11645 if (!REG_P (XEXP (operands[3], 0)))
11646 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
11648 arith[0] = operands[0];
11649 arith[3] = operands[1];
11663 if (val1 !=0 && val2 != 0)
11667 if (val1 == 4 || val2 == 4)
11668 /* Other val must be 8, since we know they are adjacent and neither
11670 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
11671 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
11673 ldm[0] = ops[0] = operands[4];
11675 ops[2] = GEN_INT (val1);
11676 output_add_immediate (ops);
11678 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
11680 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
11684 /* Offset is out of range for a single add, so use two ldr. */
11687 ops[2] = GEN_INT (val1);
11688 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
11690 ops[2] = GEN_INT (val2);
11691 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
11694 else if (val1 != 0)
11697 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
11699 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
11704 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
11706 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
11708 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
11711 [(set_attr "length" "12")
11712 (set_attr "predicable" "yes")
11713 (set_attr "type" "load1")]
11716 ; This pattern is never tried by combine, so do it as a peephole
11719 [(set (match_operand:SI 0 "arm_general_register_operand" "")
11720 (match_operand:SI 1 "arm_general_register_operand" ""))
11721 (set (reg:CC CC_REGNUM)
11722 (compare:CC (match_dup 1) (const_int 0)))]
11724 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
11725 (set (match_dup 0) (match_dup 1))])]
11730 [(set (match_operand:SI 0 "s_register_operand" "")
11731 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
11733 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
11734 [(match_operand:SI 3 "s_register_operand" "")
11735 (match_operand:SI 4 "arm_rhs_operand" "")]))))
11736 (clobber (match_operand:SI 5 "s_register_operand" ""))]
11738 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
11739 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
11744 ;; This split can be used because CC_Z mode implies that the following
11745 ;; branch will be an equality, or an unsigned inequality, so the sign
11746 ;; extension is not needed.
11749 [(set (reg:CC_Z CC_REGNUM)
11751 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
11753 (match_operand 1 "const_int_operand" "")))
11754 (clobber (match_scratch:SI 2 ""))]
11756 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
11757 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
11758 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
11759 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
11761 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
11764 ;; ??? Check the patterns above for Thumb-2 usefulness
11766 (define_expand "prologue"
11767 [(clobber (const_int 0))]
11770 arm_expand_prologue ();
11772 thumb1_expand_prologue ();
11777 (define_expand "epilogue"
11778 [(clobber (const_int 0))]
11781 if (crtl->calls_eh_return)
11782 emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
11785 thumb1_expand_epilogue ();
11786 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
11787 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
11789 else if (HAVE_return)
11791 /* HAVE_return is testing for USE_RETURN_INSN (FALSE). Hence,
11792 no need for explicit testing again. */
11793 emit_jump_insn (gen_return ());
11795 else if (TARGET_32BIT)
11797 arm_expand_epilogue (true);
11803 (define_insn "prologue_thumb1_interwork"
11804 [(unspec_volatile [(const_int 0)] VUNSPEC_THUMB1_INTERWORK)]
11806 "* return thumb1_output_interwork ();"
11807 [(set_attr "length" "8")]
11810 ;; Note - although unspec_volatile's USE all hard registers,
11811 ;; USEs are ignored after relaod has completed. Thus we need
11812 ;; to add an unspec of the link register to ensure that flow
11813 ;; does not think that it is unused by the sibcall branch that
11814 ;; will replace the standard function epilogue.
11815 (define_expand "sibcall_epilogue"
11816 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
11817 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
11820 arm_expand_epilogue (false);
11825 (define_insn "*epilogue_insns"
11826 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
11829 return thumb1_unexpanded_epilogue ();
11831 ; Length is absolute worst case
11832 [(set_attr "length" "44")
11833 (set_attr "type" "block")
11834 ;; We don't clobber the conditions, but the potential length of this
11835 ;; operation is sufficient to make conditionalizing the sequence
11836 ;; unlikely to be profitable.
11837 (set_attr "conds" "clob")]
11840 (define_expand "eh_epilogue"
11841 [(use (match_operand:SI 0 "register_operand" ""))
11842 (use (match_operand:SI 1 "register_operand" ""))
11843 (use (match_operand:SI 2 "register_operand" ""))]
11847 cfun->machine->eh_epilogue_sp_ofs = operands[1];
11848 if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
11850 rtx ra = gen_rtx_REG (Pmode, 2);
11852 emit_move_insn (ra, operands[2]);
11855 /* This is a hack -- we may have crystalized the function type too
11857 cfun->machine->func_type = 0;
11861 ;; This split is only used during output to reduce the number of patterns
11862 ;; that need assembler instructions adding to them. We allowed the setting
11863 ;; of the conditions to be implicit during rtl generation so that
11864 ;; the conditional compare patterns would work. However this conflicts to
11865 ;; some extent with the conditional data operations, so we have to split them
11868 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
11869 ;; conditional execution sufficient?
11872 [(set (match_operand:SI 0 "s_register_operand" "")
11873 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11874 [(match_operand 2 "" "") (match_operand 3 "" "")])
11876 (match_operand 4 "" "")))
11877 (clobber (reg:CC CC_REGNUM))]
11878 "TARGET_ARM && reload_completed"
11879 [(set (match_dup 5) (match_dup 6))
11880 (cond_exec (match_dup 7)
11881 (set (match_dup 0) (match_dup 4)))]
11884 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11885 operands[2], operands[3]);
11886 enum rtx_code rc = GET_CODE (operands[1]);
11888 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
11889 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11890 if (mode == CCFPmode || mode == CCFPEmode)
11891 rc = reverse_condition_maybe_unordered (rc);
11893 rc = reverse_condition (rc);
11895 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
11900 [(set (match_operand:SI 0 "s_register_operand" "")
11901 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11902 [(match_operand 2 "" "") (match_operand 3 "" "")])
11903 (match_operand 4 "" "")
11905 (clobber (reg:CC CC_REGNUM))]
11906 "TARGET_ARM && reload_completed"
11907 [(set (match_dup 5) (match_dup 6))
11908 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
11909 (set (match_dup 0) (match_dup 4)))]
11912 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11913 operands[2], operands[3]);
11915 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
11916 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11921 [(set (match_operand:SI 0 "s_register_operand" "")
11922 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11923 [(match_operand 2 "" "") (match_operand 3 "" "")])
11924 (match_operand 4 "" "")
11925 (match_operand 5 "" "")))
11926 (clobber (reg:CC CC_REGNUM))]
11927 "TARGET_ARM && reload_completed"
11928 [(set (match_dup 6) (match_dup 7))
11929 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
11930 (set (match_dup 0) (match_dup 4)))
11931 (cond_exec (match_dup 8)
11932 (set (match_dup 0) (match_dup 5)))]
11935 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11936 operands[2], operands[3]);
11937 enum rtx_code rc = GET_CODE (operands[1]);
11939 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
11940 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11941 if (mode == CCFPmode || mode == CCFPEmode)
11942 rc = reverse_condition_maybe_unordered (rc);
11944 rc = reverse_condition (rc);
11946 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
11951 [(set (match_operand:SI 0 "s_register_operand" "")
11952 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11953 [(match_operand:SI 2 "s_register_operand" "")
11954 (match_operand:SI 3 "arm_add_operand" "")])
11955 (match_operand:SI 4 "arm_rhs_operand" "")
11957 (match_operand:SI 5 "s_register_operand" ""))))
11958 (clobber (reg:CC CC_REGNUM))]
11959 "TARGET_ARM && reload_completed"
11960 [(set (match_dup 6) (match_dup 7))
11961 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
11962 (set (match_dup 0) (match_dup 4)))
11963 (cond_exec (match_dup 8)
11964 (set (match_dup 0) (not:SI (match_dup 5))))]
11967 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11968 operands[2], operands[3]);
11969 enum rtx_code rc = GET_CODE (operands[1]);
11971 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
11972 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11973 if (mode == CCFPmode || mode == CCFPEmode)
11974 rc = reverse_condition_maybe_unordered (rc);
11976 rc = reverse_condition (rc);
11978 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
11982 (define_insn "*cond_move_not"
11983 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11984 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
11985 [(match_operand 3 "cc_register" "") (const_int 0)])
11986 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
11988 (match_operand:SI 2 "s_register_operand" "r,r"))))]
11992 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
11993 [(set_attr "conds" "use")
11994 (set_attr "insn" "mvn")
11995 (set_attr "length" "4,8")]
11998 ;; The next two patterns occur when an AND operation is followed by a
11999 ;; scc insn sequence
12001 (define_insn "*sign_extract_onebit"
12002 [(set (match_operand:SI 0 "s_register_operand" "=r")
12003 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
12005 (match_operand:SI 2 "const_int_operand" "n")))
12006 (clobber (reg:CC CC_REGNUM))]
12009 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
12010 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
12011 return \"mvnne\\t%0, #0\";
12013 [(set_attr "conds" "clob")
12014 (set_attr "length" "8")]
12017 (define_insn "*not_signextract_onebit"
12018 [(set (match_operand:SI 0 "s_register_operand" "=r")
12020 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
12022 (match_operand:SI 2 "const_int_operand" "n"))))
12023 (clobber (reg:CC CC_REGNUM))]
12026 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
12027 output_asm_insn (\"tst\\t%1, %2\", operands);
12028 output_asm_insn (\"mvneq\\t%0, #0\", operands);
12029 return \"movne\\t%0, #0\";
12031 [(set_attr "conds" "clob")
12032 (set_attr "length" "12")]
12034 ;; ??? The above patterns need auditing for Thumb-2
12036 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
12037 ;; expressions. For simplicity, the first register is also in the unspec
12039 ;; To avoid the usage of GNU extension, the length attribute is computed
12040 ;; in a C function arm_attr_length_push_multi.
12041 (define_insn "*push_multi"
12042 [(match_parallel 2 "multi_register_push"
12043 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
12044 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
12045 UNSPEC_PUSH_MULT))])]
12049 int num_saves = XVECLEN (operands[2], 0);
12051 /* For the StrongARM at least it is faster to
12052 use STR to store only a single register.
12053 In Thumb mode always use push, and the assembler will pick
12054 something appropriate. */
12055 if (num_saves == 1 && TARGET_ARM)
12056 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
12063 strcpy (pattern, \"stm%(fd%)\\t%m0!, {%1\");
12064 else if (TARGET_THUMB2)
12065 strcpy (pattern, \"push%?\\t{%1\");
12067 strcpy (pattern, \"push\\t{%1\");
12069 for (i = 1; i < num_saves; i++)
12071 strcat (pattern, \", %|\");
12073 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
12076 strcat (pattern, \"}\");
12077 output_asm_insn (pattern, operands);
12082 [(set_attr "type" "store4")
12083 (set (attr "length")
12084 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
12087 (define_insn "stack_tie"
12088 [(set (mem:BLK (scratch))
12089 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
12090 (match_operand:SI 1 "s_register_operand" "rk")]
12094 [(set_attr "length" "0")]
12097 ;; Pop (as used in epilogue RTL)
12099 (define_insn "*load_multiple_with_writeback"
12100 [(match_parallel 0 "load_multiple_operation"
12101 [(set (match_operand:SI 1 "s_register_operand" "+rk")
12102 (plus:SI (match_dup 1)
12103 (match_operand:SI 2 "const_int_operand" "I")))
12104 (set (match_operand:SI 3 "s_register_operand" "=rk")
12105 (mem:SI (match_dup 1)))
12107 "TARGET_32BIT && (reload_in_progress || reload_completed)"
12110 arm_output_multireg_pop (operands, /*return_pc=*/false,
12111 /*cond=*/const_true_rtx,
12117 [(set_attr "type" "load4")
12118 (set_attr "predicable" "yes")]
12121 ;; Pop with return (as used in epilogue RTL)
12123 ;; This instruction is generated when the registers are popped at the end of
12124 ;; epilogue. Here, instead of popping the value into LR and then generating
12125 ;; jump to LR, value is popped into PC directly. Hence, the pattern is combined
12127 (define_insn "*pop_multiple_with_writeback_and_return"
12128 [(match_parallel 0 "pop_multiple_return"
12130 (set (match_operand:SI 1 "s_register_operand" "+rk")
12131 (plus:SI (match_dup 1)
12132 (match_operand:SI 2 "const_int_operand" "I")))
12133 (set (match_operand:SI 3 "s_register_operand" "=rk")
12134 (mem:SI (match_dup 1)))
12136 "TARGET_32BIT && (reload_in_progress || reload_completed)"
12139 arm_output_multireg_pop (operands, /*return_pc=*/true,
12140 /*cond=*/const_true_rtx,
12146 [(set_attr "type" "load4")
12147 (set_attr "predicable" "yes")]
12150 (define_insn "*pop_multiple_with_return"
12151 [(match_parallel 0 "pop_multiple_return"
12153 (set (match_operand:SI 2 "s_register_operand" "=rk")
12154 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
12156 "TARGET_32BIT && (reload_in_progress || reload_completed)"
12159 arm_output_multireg_pop (operands, /*return_pc=*/true,
12160 /*cond=*/const_true_rtx,
12166 [(set_attr "type" "load4")
12167 (set_attr "predicable" "yes")]
12170 ;; Load into PC and return
12171 (define_insn "*ldr_with_return"
12173 (set (reg:SI PC_REGNUM)
12174 (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
12175 "TARGET_32BIT && (reload_in_progress || reload_completed)"
12176 "ldr%?\t%|pc, [%0], #4"
12177 [(set_attr "type" "load1")
12178 (set_attr "predicable" "yes")]
12180 ;; Pop for floating point registers (as used in epilogue RTL)
12181 (define_insn "*vfp_pop_multiple_with_writeback"
12182 [(match_parallel 0 "pop_multiple_fp"
12183 [(set (match_operand:SI 1 "s_register_operand" "+rk")
12184 (plus:SI (match_dup 1)
12185 (match_operand:SI 2 "const_int_operand" "I")))
12186 (set (match_operand:DF 3 "arm_hard_register_operand" "")
12187 (mem:DF (match_dup 1)))])]
12188 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP"
12191 int num_regs = XVECLEN (operands[0], 0);
12194 strcpy (pattern, \"fldmfdd\\t\");
12195 strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
12196 strcat (pattern, \"!, {\");
12197 op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
12198 strcat (pattern, \"%P0\");
12199 if ((num_regs - 1) > 1)
12201 strcat (pattern, \"-%P1\");
12202 op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
12205 strcat (pattern, \"}\");
12206 output_asm_insn (pattern, op_list);
12210 [(set_attr "type" "load4")
12211 (set_attr "conds" "unconditional")
12212 (set_attr "predicable" "no")]
12215 ;; Special patterns for dealing with the constant pool
12217 (define_insn "align_4"
12218 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
12221 assemble_align (32);
12226 (define_insn "align_8"
12227 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
12230 assemble_align (64);
12235 (define_insn "consttable_end"
12236 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
12239 making_const_table = FALSE;
12244 (define_insn "consttable_1"
12245 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
12248 making_const_table = TRUE;
12249 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
12250 assemble_zeros (3);
12253 [(set_attr "length" "4")]
12256 (define_insn "consttable_2"
12257 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
12260 making_const_table = TRUE;
12261 gcc_assert (GET_MODE_CLASS (GET_MODE (operands[0])) != MODE_FLOAT);
12262 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
12263 assemble_zeros (2);
12266 [(set_attr "length" "4")]
12269 (define_insn "consttable_4"
12270 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
12274 rtx x = operands[0];
12275 making_const_table = TRUE;
12276 switch (GET_MODE_CLASS (GET_MODE (x)))
12279 if (GET_MODE (x) == HFmode)
12280 arm_emit_fp16_const (x);
12284 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
12285 assemble_real (r, GET_MODE (x), BITS_PER_WORD);
12289 /* XXX: Sometimes gcc does something really dumb and ends up with
12290 a HIGH in a constant pool entry, usually because it's trying to
12291 load into a VFP register. We know this will always be used in
12292 combination with a LO_SUM which ignores the high bits, so just
12293 strip off the HIGH. */
12294 if (GET_CODE (x) == HIGH)
12296 assemble_integer (x, 4, BITS_PER_WORD, 1);
12297 mark_symbol_refs_as_used (x);
12302 [(set_attr "length" "4")]
12305 (define_insn "consttable_8"
12306 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
12310 making_const_table = TRUE;
12311 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
12316 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
12317 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
12321 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
12326 [(set_attr "length" "8")]
12329 (define_insn "consttable_16"
12330 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
12334 making_const_table = TRUE;
12335 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
12340 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
12341 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
12345 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
12350 [(set_attr "length" "16")]
12353 ;; Miscellaneous Thumb patterns
12355 (define_expand "tablejump"
12356 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
12357 (use (label_ref (match_operand 1 "" "")))])]
12362 /* Hopefully, CSE will eliminate this copy. */
12363 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
12364 rtx reg2 = gen_reg_rtx (SImode);
12366 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
12367 operands[0] = reg2;
12372 ;; NB never uses BX.
12373 (define_insn "*thumb1_tablejump"
12374 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
12375 (use (label_ref (match_operand 1 "" "")))]
12378 [(set_attr "length" "2")]
12381 ;; V5 Instructions,
12383 (define_insn "clzsi2"
12384 [(set (match_operand:SI 0 "s_register_operand" "=r")
12385 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
12386 "TARGET_32BIT && arm_arch5"
12388 [(set_attr "predicable" "yes")
12389 (set_attr "insn" "clz")])
12391 (define_insn "rbitsi2"
12392 [(set (match_operand:SI 0 "s_register_operand" "=r")
12393 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
12394 "TARGET_32BIT && arm_arch_thumb2"
12396 [(set_attr "predicable" "yes")
12397 (set_attr "insn" "clz")])
12399 (define_expand "ctzsi2"
12400 [(set (match_operand:SI 0 "s_register_operand" "")
12401 (ctz:SI (match_operand:SI 1 "s_register_operand" "")))]
12402 "TARGET_32BIT && arm_arch_thumb2"
12405 rtx tmp = gen_reg_rtx (SImode);
12406 emit_insn (gen_rbitsi2 (tmp, operands[1]));
12407 emit_insn (gen_clzsi2 (operands[0], tmp));
12413 ;; V5E instructions.
12415 (define_insn "prefetch"
12416 [(prefetch (match_operand:SI 0 "address_operand" "p")
12417 (match_operand:SI 1 "" "")
12418 (match_operand:SI 2 "" ""))]
12419 "TARGET_32BIT && arm_arch5e"
12422 ;; General predication pattern
12425 [(match_operator 0 "arm_comparison_operator"
12426 [(match_operand 1 "cc_register" "")
12430 [(set_attr "predicated" "yes")]
12433 (define_insn "force_register_use"
12434 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
12437 [(set_attr "length" "0")]
12441 ;; Patterns for exception handling
12443 (define_expand "eh_return"
12444 [(use (match_operand 0 "general_operand" ""))]
12449 emit_insn (gen_arm_eh_return (operands[0]));
12451 emit_insn (gen_thumb_eh_return (operands[0]));
12456 ;; We can't expand this before we know where the link register is stored.
12457 (define_insn_and_split "arm_eh_return"
12458 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
12460 (clobber (match_scratch:SI 1 "=&r"))]
12463 "&& reload_completed"
12467 arm_set_return_address (operands[0], operands[1]);
12472 (define_insn_and_split "thumb_eh_return"
12473 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
12475 (clobber (match_scratch:SI 1 "=&l"))]
12478 "&& reload_completed"
12482 thumb_set_return_address (operands[0], operands[1]);
12490 (define_insn "load_tp_hard"
12491 [(set (match_operand:SI 0 "register_operand" "=r")
12492 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
12494 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
12495 [(set_attr "predicable" "yes")]
12498 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
12499 (define_insn "load_tp_soft"
12500 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
12501 (clobber (reg:SI LR_REGNUM))
12502 (clobber (reg:SI IP_REGNUM))
12503 (clobber (reg:CC CC_REGNUM))]
12505 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
12506 [(set_attr "conds" "clob")]
12509 ;; tls descriptor call
12510 (define_insn "tlscall"
12511 [(set (reg:SI R0_REGNUM)
12512 (unspec:SI [(reg:SI R0_REGNUM)
12513 (match_operand:SI 0 "" "X")
12514 (match_operand 1 "" "")] UNSPEC_TLS))
12515 (clobber (reg:SI R1_REGNUM))
12516 (clobber (reg:SI LR_REGNUM))
12517 (clobber (reg:SI CC_REGNUM))]
12520 targetm.asm_out.internal_label (asm_out_file, "LPIC",
12521 INTVAL (operands[1]));
12522 return "bl\\t%c0(tlscall)";
12524 [(set_attr "conds" "clob")
12525 (set_attr "length" "4")]
12528 ;; For thread pointer builtin
12529 (define_expand "get_thread_pointersi"
12530 [(match_operand:SI 0 "s_register_operand" "=r")]
12534 arm_load_tp (operands[0]);
12540 ;; We only care about the lower 16 bits of the constant
12541 ;; being inserted into the upper 16 bits of the register.
12542 (define_insn "*arm_movtas_ze"
12543 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
12546 (match_operand:SI 1 "const_int_operand" ""))]
12549 [(set_attr "predicable" "yes")
12550 (set_attr "length" "4")]
12553 (define_insn "*arm_rev"
12554 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
12555 (bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
12561 [(set_attr "arch" "t1,t2,32")
12562 (set_attr "length" "2,2,4")]
12565 (define_expand "arm_legacy_rev"
12566 [(set (match_operand:SI 2 "s_register_operand" "")
12567 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
12571 (lshiftrt:SI (match_dup 2)
12573 (set (match_operand:SI 3 "s_register_operand" "")
12574 (rotatert:SI (match_dup 1)
12577 (and:SI (match_dup 2)
12578 (const_int -65281)))
12579 (set (match_operand:SI 0 "s_register_operand" "")
12580 (xor:SI (match_dup 3)
12586 ;; Reuse temporaries to keep register pressure down.
12587 (define_expand "thumb_legacy_rev"
12588 [(set (match_operand:SI 2 "s_register_operand" "")
12589 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
12591 (set (match_operand:SI 3 "s_register_operand" "")
12592 (lshiftrt:SI (match_dup 1)
12595 (ior:SI (match_dup 3)
12597 (set (match_operand:SI 4 "s_register_operand" "")
12599 (set (match_operand:SI 5 "s_register_operand" "")
12600 (rotatert:SI (match_dup 1)
12603 (ashift:SI (match_dup 5)
12606 (lshiftrt:SI (match_dup 5)
12609 (ior:SI (match_dup 5)
12612 (rotatert:SI (match_dup 5)
12614 (set (match_operand:SI 0 "s_register_operand" "")
12615 (ior:SI (match_dup 5)
12621 (define_expand "bswapsi2"
12622 [(set (match_operand:SI 0 "s_register_operand" "=r")
12623 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
12624 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
12628 rtx op2 = gen_reg_rtx (SImode);
12629 rtx op3 = gen_reg_rtx (SImode);
12633 rtx op4 = gen_reg_rtx (SImode);
12634 rtx op5 = gen_reg_rtx (SImode);
12636 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
12637 op2, op3, op4, op5));
12641 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
12650 ;; bswap16 patterns: use revsh and rev16 instructions for the signed
12651 ;; and unsigned variants, respectively. For rev16, expose
12652 ;; byte-swapping in the lower 16 bits only.
12653 (define_insn "*arm_revsh"
12654 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
12655 (sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
12661 [(set_attr "arch" "t1,t2,32")
12662 (set_attr "length" "2,2,4")]
12665 (define_insn "*arm_rev16"
12666 [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
12667 (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
12673 [(set_attr "arch" "t1,t2,32")
12674 (set_attr "length" "2,2,4")]
12677 (define_expand "bswaphi2"
12678 [(set (match_operand:HI 0 "s_register_operand" "=r")
12679 (bswap:HI (match_operand:HI 1 "s_register_operand" "r")))]
12684 ;; Patterns for LDRD/STRD in Thumb2 mode
12686 (define_insn "*thumb2_ldrd"
12687 [(set (match_operand:SI 0 "s_register_operand" "=r")
12688 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
12689 (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
12690 (set (match_operand:SI 3 "s_register_operand" "=r")
12691 (mem:SI (plus:SI (match_dup 1)
12692 (match_operand:SI 4 "const_int_operand" ""))))]
12693 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12694 && current_tune->prefer_ldrd_strd
12695 && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
12696 && (operands_ok_ldrd_strd (operands[0], operands[3],
12697 operands[1], INTVAL (operands[2]),
12699 "ldrd%?\t%0, %3, [%1, %2]"
12700 [(set_attr "type" "load2")
12701 (set_attr "predicable" "yes")
12702 (set_attr "predicable_short_it" "no")])
12704 (define_insn "*thumb2_ldrd_base"
12705 [(set (match_operand:SI 0 "s_register_operand" "=r")
12706 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
12707 (set (match_operand:SI 2 "s_register_operand" "=r")
12708 (mem:SI (plus:SI (match_dup 1)
12710 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12711 && current_tune->prefer_ldrd_strd
12712 && (operands_ok_ldrd_strd (operands[0], operands[2],
12713 operands[1], 0, false, true))"
12714 "ldrd%?\t%0, %2, [%1]"
12715 [(set_attr "type" "load2")
12716 (set_attr "predicable" "yes")
12717 (set_attr "predicable_short_it" "no")])
12719 (define_insn "*thumb2_ldrd_base_neg"
12720 [(set (match_operand:SI 0 "s_register_operand" "=r")
12721 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
12723 (set (match_operand:SI 2 "s_register_operand" "=r")
12724 (mem:SI (match_dup 1)))]
12725 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12726 && current_tune->prefer_ldrd_strd
12727 && (operands_ok_ldrd_strd (operands[0], operands[2],
12728 operands[1], -4, false, true))"
12729 "ldrd%?\t%0, %2, [%1, #-4]"
12730 [(set_attr "type" "load2")
12731 (set_attr "predicable" "yes")
12732 (set_attr "predicable_short_it" "no")])
12734 (define_insn "*thumb2_strd"
12735 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
12736 (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
12737 (match_operand:SI 2 "s_register_operand" "r"))
12738 (set (mem:SI (plus:SI (match_dup 0)
12739 (match_operand:SI 3 "const_int_operand" "")))
12740 (match_operand:SI 4 "s_register_operand" "r"))]
12741 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12742 && current_tune->prefer_ldrd_strd
12743 && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
12744 && (operands_ok_ldrd_strd (operands[2], operands[4],
12745 operands[0], INTVAL (operands[1]),
12747 "strd%?\t%2, %4, [%0, %1]"
12748 [(set_attr "type" "store2")
12749 (set_attr "predicable" "yes")
12750 (set_attr "predicable_short_it" "no")])
12752 (define_insn "*thumb2_strd_base"
12753 [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
12754 (match_operand:SI 1 "s_register_operand" "r"))
12755 (set (mem:SI (plus:SI (match_dup 0)
12757 (match_operand:SI 2 "s_register_operand" "r"))]
12758 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12759 && current_tune->prefer_ldrd_strd
12760 && (operands_ok_ldrd_strd (operands[1], operands[2],
12761 operands[0], 0, false, false))"
12762 "strd%?\t%1, %2, [%0]"
12763 [(set_attr "type" "store2")
12764 (set_attr "predicable" "yes")
12765 (set_attr "predicable_short_it" "no")])
12767 (define_insn "*thumb2_strd_base_neg"
12768 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
12770 (match_operand:SI 1 "s_register_operand" "r"))
12771 (set (mem:SI (match_dup 0))
12772 (match_operand:SI 2 "s_register_operand" "r"))]
12773 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12774 && current_tune->prefer_ldrd_strd
12775 && (operands_ok_ldrd_strd (operands[1], operands[2],
12776 operands[0], -4, false, false))"
12777 "strd%?\t%1, %2, [%0, #-4]"
12778 [(set_attr "type" "store2")
12779 (set_attr "predicable" "yes")
12780 (set_attr "predicable_short_it" "no")])
12783 ;; Load the load/store double peephole optimizations.
12784 (include "ldrdstrd.md")
12786 ;; Load the load/store multiple patterns
12787 (include "ldmstm.md")
12789 ;; Patterns in ldmstm.md don't cover more than 4 registers. This pattern covers
12790 ;; large lists without explicit writeback generated for APCS_FRAME epilogue.
12791 (define_insn "*load_multiple"
12792 [(match_parallel 0 "load_multiple_operation"
12793 [(set (match_operand:SI 2 "s_register_operand" "=rk")
12794 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
12799 arm_output_multireg_pop (operands, /*return_pc=*/false,
12800 /*cond=*/const_true_rtx,
12806 [(set_attr "predicable" "yes")]
12809 ;; Vector bits common to IWMMXT and Neon
12810 (include "vec-common.md")
12811 ;; Load the Intel Wireless Multimedia Extension patterns
12812 (include "iwmmxt.md")
12813 ;; Load the VFP co-processor patterns
12815 ;; Thumb-2 patterns
12816 (include "thumb2.md")
12818 (include "neon.md")
12819 ;; Synchronization Primitives
12820 (include "sync.md")
12821 ;; Fixed-point patterns
12822 (include "arm-fixed.md")