1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright (C) 1991-2013 Free Software Foundation, Inc.
3 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 ;; and Martin Simmons (@harleqn.co.uk).
5 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
7 ;; This file is part of GCC.
9 ;; GCC is free software; you can redistribute it and/or modify it
10 ;; under the terms of the GNU General Public License as published
11 ;; by the Free Software Foundation; either version 3, or (at your
12 ;; option) any later version.
14 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
15 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 ;; License for more details.
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GCC; see the file COPYING3. If not see
21 ;; <http://www.gnu.org/licenses/>.
23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
26 ;;---------------------------------------------------------------------------
29 ;; Register numbers -- All machine registers should be defined here
31 [(R0_REGNUM 0) ; First CORE register
32 (R1_REGNUM 1) ; Second CORE register
33 (IP_REGNUM 12) ; Scratch register
34 (SP_REGNUM 13) ; Stack pointer
35 (LR_REGNUM 14) ; Return address register
36 (PC_REGNUM 15) ; Program counter
37 (LAST_ARM_REGNUM 15) ;
38 (CC_REGNUM 100) ; Condition code pseudo register
39 (VFPCC_REGNUM 101) ; VFP Condition code pseudo register
42 ;; 3rd operand to select_dominance_cc_mode
49 ;; conditional compare combination
60 ;;---------------------------------------------------------------------------
63 ;; Processor type. This is created automatically from arm-cores.def.
64 (include "arm-tune.md")
66 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
67 ; generating ARM code. This is used to control the length of some insn
68 ; patterns that share the same RTL in both ARM and Thumb code.
69 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
71 ; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
72 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
74 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
75 (define_attr "is_thumb1" "no,yes" (const (symbol_ref "thumb1_code")))
77 ; We use this attribute to disable alternatives that can produce 32-bit
78 ; instructions inside an IT-block in Thumb2 state. ARMv8 deprecates IT blocks
79 ; that contain 32-bit instructions.
; Defaults to "yes".  An alternative sets this to "no" when it may emit a
; 32-bit instruction inside an IT block; the "enabled" attribute then
; disables such alternatives whenever arm_restrict_it is in effect.
80 (define_attr "enabled_for_depr_it" "no,yes" (const_string "yes"))
82 ; This attribute is used to disable a predicated alternative when we have
; Defaults to "yes".  When an alternative sets this to "no" and is emitted
; predicated ("predicated" == "yes"), the "enabled" attribute disables it
; under arm_restrict_it (ARMv8 deprecates most predicated 32-bit insns in
; IT blocks).
84 (define_attr "predicable_short_it" "no,yes" (const_string "yes"))
86 ;; Operand number of an input operand that is shifted. Zero if the
87 ;; given instruction does not shift one of its input operands.
88 (define_attr "shift" "" (const_int 0))
90 ; Floating Point Unit. If we only have floating point emulation, then there
91 ; is no point in scheduling the floating point insns. (Well, for best
92 ; performance we should try and group them together).
93 (define_attr "fpu" "none,vfp"
94 (const (symbol_ref "arm_fpu_attr")))
; Set to "yes" when the insn is being output under a condition (predicated
; execution); combined with "predicable_short_it" and arm_restrict_it in
; the "enabled" attribute computation to disable unsuitable alternatives.
96 (define_attr "predicated" "yes,no" (const_string "no"))
98 ; LENGTH of an instruction (in bytes)
99 (define_attr "length" ""
102 ; The architecture which supports the instruction (or alternative).
103 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
104 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
105 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
106 ; arm_arch6. This attribute is used to compute attribute "enabled",
107 ; use type "any" to enable an alternative in all cases.
; The values not covered by the comment above: "neon_for_64bits" and
; "avoid_neon_for_64bits" gate alternatives on TARGET_NEON combined with
; TARGET_PREFER_NEON_64BITS, and "iwmmxt"/"iwmmxt2" gate on the iWMMXt
; extensions (see the "arch_enabled" computation).
108 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,neon_for_64bits,avoid_neon_for_64bits,iwmmxt,iwmmxt2"
109 (const_string "any"))
111 (define_attr "arch_enabled" "no,yes"
112 (cond [(eq_attr "arch" "any")
115 (and (eq_attr "arch" "a")
116 (match_test "TARGET_ARM"))
119 (and (eq_attr "arch" "t")
120 (match_test "TARGET_THUMB"))
123 (and (eq_attr "arch" "t1")
124 (match_test "TARGET_THUMB1"))
127 (and (eq_attr "arch" "t2")
128 (match_test "TARGET_THUMB2"))
131 (and (eq_attr "arch" "32")
132 (match_test "TARGET_32BIT"))
135 (and (eq_attr "arch" "v6")
136 (match_test "TARGET_32BIT && arm_arch6"))
139 (and (eq_attr "arch" "nov6")
140 (match_test "TARGET_32BIT && !arm_arch6"))
143 (and (eq_attr "arch" "avoid_neon_for_64bits")
144 (match_test "TARGET_NEON")
145 (not (match_test "TARGET_PREFER_NEON_64BITS")))
148 (and (eq_attr "arch" "neon_for_64bits")
149 (match_test "TARGET_NEON")
150 (match_test "TARGET_PREFER_NEON_64BITS"))
153 (and (eq_attr "arch" "iwmmxt2")
154 (match_test "TARGET_REALLY_IWMMXT2"))
155 (const_string "yes")]
157 (const_string "no")))
; Restricts an alternative to functions optimized for speed or for size;
; "any" places no restriction.  Feeds the "opt_enabled" attribute, which
; tests optimize_function_for_speed_p / optimize_function_for_size_p.
159 (define_attr "opt" "any,speed,size"
160 (const_string "any"))
162 (define_attr "opt_enabled" "no,yes"
163 (cond [(eq_attr "opt" "any")
166 (and (eq_attr "opt" "speed")
167 (match_test "optimize_function_for_speed_p (cfun)"))
170 (and (eq_attr "opt" "size")
171 (match_test "optimize_function_for_size_p (cfun)"))
172 (const_string "yes")]
173 (const_string "no")))
175 ; Allows an insn to disable certain alternatives for reasons other than
; Per-insn override consumed by the "enabled" attribute: a pattern can set
; this to "no" to disable alternatives for reasons other than those handled
; by "arch"/"opt" (the preceding comment is truncated in this listing —
; NOTE(review): confirm exact wording against upstream arm.md).
177 (define_attr "insn_enabled" "no,yes"
178 (const_string "yes"))
180 ; Enable all alternatives that are both arch_enabled and insn_enabled.
181 (define_attr "enabled" "no,yes"
182 (cond [(eq_attr "insn_enabled" "no")
185 (and (eq_attr "predicable_short_it" "no")
186 (and (eq_attr "predicated" "yes")
187 (match_test "arm_restrict_it")))
190 (and (eq_attr "enabled_for_depr_it" "no")
191 (match_test "arm_restrict_it"))
194 (eq_attr "arch_enabled" "no")
197 (eq_attr "opt_enabled" "no")
199 (const_string "yes")))
201 ; POOL_RANGE is how far away from a constant pool entry that this insn
202 ; can be placed. If the distance is zero, then this insn will never
203 ; reference the pool.
204 ; Note that for Thumb constant pools the PC value is rounded down to the
205 ; nearest multiple of four. Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
206 ; Thumb insns) should be set to <max_range> - 2.
207 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
208 ; before its address. It is set to <max_range> - (8 + <data_size>).
; Per-ISA base values; a default of 0 means the insn never references the
; constant pool.
209 (define_attr "arm_pool_range" "" (const_int 0))
210 (define_attr "thumb2_pool_range" "" (const_int 0))
211 (define_attr "arm_neg_pool_range" "" (const_int 0))
212 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
; Derived attributes: select the Thumb value when generating any Thumb
; code ("is_thumb" == "yes"), otherwise the ARM value.
214 (define_attr "pool_range" ""
215 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
216 (attr "arm_pool_range")))
217 (define_attr "neg_pool_range" ""
218 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
219 (attr "arm_neg_pool_range")))
221 ; An assembler sequence may clobber the condition codes without us knowing.
222 ; If such an insn references the pool, then we have no way of knowing how,
223 ; so use the most conservative value for pool_range.
224 (define_asm_attributes
225 [(set_attr "conds" "clob")
226 (set_attr "length" "4")
; 250 is a deliberately conservative pool range, since we cannot know how
; an arbitrary asm statement references the pool.
227 (set_attr "pool_range" "250")])
229 ; TYPE attribute is used to classify instructions for use in scheduling.
231 ; Instruction classification:
233 ; arlo_imm any arithmetic or logical instruction that doesn't have
234 ; a shifted operand and has an immediate operand. This
235 ; excludes MOV, MVN and RSB(S) immediate.
236 ; arlo_reg any arithmetic or logical instruction that doesn't have
237 ; a shifted or an immediate operand. This excludes
238 ; MOV and MVN but includes MOVT. This is also the default.
239 ; arlo_shift any arithmetic or logical instruction that has a source
240 ; operand shifted by a constant. This excludes
242 ; arlo_shift_reg as arlo_shift, with the shift amount specified in a
244 ; block blockage insn, this blocks all functional units.
246 ; call subroutine call.
247 ; clz count leading zeros (CLZ).
248 ; extend extend instruction (SXTB, SXTH, UXTB, UXTH).
249 ; f_2_r transfer from float to core (no memory needed).
250 ; f_cvt conversion between float and integral.
251 ; f_flag transfer of co-processor flags to the CPSR.
252 ; f_load[d,s] double/single load from memory. Used for VFP unit.
253 ; f_minmax[d,s] double/single floating point minimum/maximum.
254 ; f_rint[d,s]    double/single floating point round to integral.
255 ; f_sel[d,s] double/single floating byte select.
256 ; f_store[d,s] double/single store to memory. Used for VFP unit.
257 ; fadd[d,s] double/single floating-point scalar addition.
258 ; fcmp[d,s] double/single floating-point compare.
259 ; fconst[d,s] double/single load immediate.
260 ; fcpys single precision floating point cpy.
261 ; fdiv[d,s] double/single precision floating point division.
262 ; ffarith[d,s] double/single floating point abs/neg/cpy.
263 ; ffma[d,s] double/single floating point fused multiply-accumulate.
264 ; float floating point arithmetic operation.
265 ; fmac[d,s] double/single floating point multiply-accumulate.
266 ; fmul[d,s] double/single floating point multiply.
267 ; load_byte load byte(s) from memory to arm registers.
268 ; load1 load 1 word from memory to arm registers.
269 ; load2 load 2 words from memory to arm registers.
270 ; load3 load 3 words from memory to arm registers.
271 ; load4 load 4 words from memory to arm registers.
272 ; mla integer multiply accumulate.
273 ; mlas integer multiply accumulate, flag setting.
274 ; mov_imm simple MOV instruction that moves an immediate to
275 ; register. This includes MOVW, but not MOVT.
276 ; mov_reg simple MOV instruction that moves a register to another
277 ; register. This includes MOVW, but not MOVT.
278 ; mov_shift simple MOV instruction, shifted operand by a constant.
279 ; mov_shift_reg simple MOV instruction, shifted operand by a register.
280 ; mul integer multiply.
281 ; muls integer multiply, flag setting.
282 ; mvn_imm inverting move instruction, immediate.
283 ; mvn_reg inverting move instruction, register.
284 ; mvn_shift inverting move instruction, shifted operand by a constant.
285 ; mvn_shift_reg inverting move instruction, shifted operand by a register.
286 ; r_2_f transfer from core to float.
287 ; sdiv signed division.
288 ; shift simple shift operation (LSL, LSR, ASR, ROR) with an
290 ; shift_reg simple shift by a register.
291 ; smlad signed multiply accumulate dual.
292 ; smladx signed multiply accumulate dual reverse.
293 ; smlal signed multiply accumulate long.
294 ; smlald signed multiply accumulate long dual.
295 ; smlals signed multiply accumulate long, flag setting.
296 ; smlalxy signed multiply accumulate, 16x16-bit, 64-bit accumulate.
297 ; smlawx signed multiply accumulate, 32x16-bit, 32-bit accumulate.
298 ; smlawy signed multiply accumulate wide, 32x16-bit,
300 ; smlaxy signed multiply accumulate, 16x16-bit, 32-bit accumulate.
301 ; smlsd signed multiply subtract dual.
302 ; smlsdx signed multiply subtract dual reverse.
303 ; smlsld signed multiply subtract long dual.
304 ; smmla signed most significant word multiply accumulate.
305 ; smmul signed most significant word multiply.
306 ; smmulr signed most significant word multiply, rounded.
307 ; smuad signed dual multiply add.
308 ; smuadx signed dual multiply add reverse.
309 ; smull signed multiply long.
310 ; smulls signed multiply long, flag setting.
311 ; smulwy signed multiply wide, 32x16-bit, 32-bit accumulate.
312 ; smulxy signed multiply, 16x16-bit, 32-bit accumulate.
313 ; smusd signed dual multiply subtract.
314 ; smusdx signed dual multiply subtract reverse.
315 ; store1 store 1 word to memory from arm registers.
316 ; store2 store 2 words to memory from arm registers.
317 ; store3 store 3 words to memory from arm registers.
318 ; store4 store 4 (or more) words to memory from arm registers.
319 ; udiv unsigned division.
320 ; umaal unsigned multiply accumulate accumulate long.
321 ; umlal unsigned multiply accumulate long.
322 ; umlals unsigned multiply accumulate long, flag setting.
323 ; umull unsigned multiply long.
324 ; umulls unsigned multiply long, flag setting.
326 ; The classification below is for instructions used by the Wireless MMX
327 ; Technology. Each attribute value is used to classify an instruction of the
328 ; same name or family.
544 (const_string "arlo_reg"))
546 ; Is this an (integer side) multiply with a 32-bit (or smaller) result?
547 (define_attr "mul32" "no,yes"
550 "smulxy,smlaxy,smulwy,smlawx,mul,muls,mla,mlas,smlawy,smuad,smuadx,\
551 smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,smlald,smlsld")
553 (const_string "no")))
555 ; Is this an (integer side) multiply with a 64-bit result?
556 (define_attr "mul64" "no,yes"
559 "smlalxy,umull,umulls,umaal,umlal,umlals,smull,smulls,smlal,smlals")
561 (const_string "no")))
563 ; Load scheduling, set from the arm_ld_sched variable
564 ; initialized by arm_option_override()
565 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
567 ;; Classification of NEON instructions for scheduling purposes.
568 (define_attr "neon_type"
579 neon_mul_ddd_8_16_qdd_16_8_long_32_16_long,\
580 neon_mul_qqq_8_16_32_ddd_32,\
581 neon_mul_qdd_64_32_long_qqd_16_ddd_32_scalar_64_32_long_scalar,\
582 neon_mla_ddd_8_16_qdd_16_8_long_32_16_long,\
584 neon_mla_ddd_32_qqd_16_ddd_32_scalar_qdd_64_32_long_scalar_qdd_64_32_long,\
585 neon_mla_qqq_32_qqd_32_scalar,\
586 neon_mul_ddd_16_scalar_32_16_long_scalar,\
587 neon_mul_qqd_32_scalar,\
588 neon_mla_ddd_16_scalar_qdd_32_16_long_scalar,\
593 neon_vqshl_vrshl_vqrshl_qqq,\
595 neon_fp_vadd_ddd_vabs_dd,\
596 neon_fp_vadd_qqq_vabs_qq,\
602 neon_fp_vmla_ddd_scalar,\
603 neon_fp_vmla_qqq_scalar,\
604 neon_fp_vrecps_vrsqrts_ddd,\
605 neon_fp_vrecps_vrsqrts_qqq,\
613 neon_vld2_2_regs_vld1_vld2_all_lanes,\
616 neon_vst1_1_2_regs_vst2_2_regs,\
618 neon_vst2_4_regs_vst3_vst4,\
620 neon_vld1_vld2_lane,\
621 neon_vld3_vld4_lane,\
622 neon_vst1_vst2_lane,\
623 neon_vst3_vst4_lane,\
624 neon_vld3_vld4_all_lanes,\
632 (const_string "none"))
634 ; condition codes: this one is used by final_prescan_insn to speed up
635 ; conditionalizing instructions. It saves having to scan the rtl to see if
636 ; it uses or alters the condition codes.
638 ; USE means that the condition codes are used by the insn in the process of
639 ; outputting code, this means (at present) that we can't use the insn in
642 ; SET means that the purpose of the insn is to set the condition codes in a
643 ; well defined manner.
645 ; CLOB means that the condition codes are altered in an undefined manner, if
646 ; they are altered at all
648 ; UNCONDITIONAL means the instruction can not be conditionally executed and
649 ; that the instruction does not use or alter the condition codes.
651 ; NOCOND means that the instruction does not use or alter the condition
652 ; codes but can be converted into a conditionally executed instruction.
654 (define_attr "conds" "use,set,clob,unconditional,nocond"
656 (ior (eq_attr "is_thumb1" "yes")
657 (eq_attr "type" "call"))
658 (const_string "clob")
659 (if_then_else (eq_attr "neon_type" "none")
660 (const_string "nocond")
661 (const_string "unconditional"))))
663 ; Predicable means that the insn can be conditionally executed based on
664 ; an automatically added predicate (additional patterns are generated by
665 ; gen...). We default to 'no' because no Thumb patterns match this rule
666 ; and not all ARM patterns do.
667 (define_attr "predicable" "no,yes" (const_string "no"))
669 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
670 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
671 ; suffer blockages enough to warrant modelling this (and it can adversely
672 ; affect the schedule).
673 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
675 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
676 ; to stall the processor. Used with model_wbuf above.
677 (define_attr "write_conflict" "no,yes"
678 (if_then_else (eq_attr "type"
681 (const_string "no")))
683 ; Classify the insns into those that take one cycle and those that take more
684 ; than one on the main cpu execution unit.
; The listed types — simple ALU ops, shifts, extends, FP arith/divide and
; the iWMMXt (wmmx_*) operations — are treated as single-cycle; every
; other type is classified "multi".
685 (define_attr "core_cycles" "single,multi"
686 (if_then_else (eq_attr "type"
687 "arlo_imm, arlo_reg,\
688 extend, shift, arlo_shift, float, fdivd, fdivs,\
689 wmmx_wor, wmmx_wxor, wmmx_wand, wmmx_wandn, wmmx_wmov, wmmx_tmcrr,\
690 wmmx_tmrrc, wmmx_wldr, wmmx_wstr, wmmx_tmcr, wmmx_tmrc, wmmx_wadd,\
691 wmmx_wsub, wmmx_wmul, wmmx_wmac, wmmx_wavg2, wmmx_tinsr, wmmx_textrm,\
692 wmmx_wshufh, wmmx_wcmpeq, wmmx_wcmpgt, wmmx_wmax, wmmx_wmin, wmmx_wpack,\
693 wmmx_wunpckih, wmmx_wunpckil, wmmx_wunpckeh, wmmx_wunpckel, wmmx_wror,\
694 wmmx_wsra, wmmx_wsrl, wmmx_wsll, wmmx_wmadd, wmmx_tmia, wmmx_tmiaph,\
695 wmmx_tmiaxy, wmmx_tbcst, wmmx_tmovmsk, wmmx_wacc, wmmx_waligni,\
696 wmmx_walignr, wmmx_tandc, wmmx_textrc, wmmx_torc, wmmx_torvsc, wmmx_wsad,\
697 wmmx_wabs, wmmx_wabsdiff, wmmx_waddsubhx, wmmx_wsubaddhx, wmmx_wavg4,\
698 wmmx_wmulw, wmmx_wqmulm, wmmx_wqmulwm, wmmx_waddbhus, wmmx_wqmiaxy,\
699 wmmx_wmiaxy, wmmx_wmiawxy, wmmx_wmerge")
700 (const_string "single")
701 (const_string "multi")))
703 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
704 ;; distant label. Only applicable to Thumb code.
705 (define_attr "far_jump" "yes,no" (const_string "no"))
708 ;; The number of machine instructions this pattern expands to.
709 ;; Used for Thumb-2 conditional execution.
710 (define_attr "ce_count" "" (const_int 1))
712 ;;---------------------------------------------------------------------------
715 (include "unspecs.md")
717 ;;---------------------------------------------------------------------------
720 (include "iterators.md")
722 ;;---------------------------------------------------------------------------
725 (include "predicates.md")
726 (include "constraints.md")
728 ;;---------------------------------------------------------------------------
729 ;; Pipeline descriptions
731 (define_attr "tune_cortexr4" "yes,no"
733 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
735 (const_string "no"))))
737 ;; True if the generic scheduling description should be used.
739 (define_attr "generic_sched" "yes,no"
741 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa5,cortexa7,cortexa8,cortexa9,cortexa15,cortexa53,cortexm4,marvell_pj4")
742 (eq_attr "tune_cortexr4" "yes"))
744 (const_string "yes"))))
746 (define_attr "generic_vfp" "yes,no"
748 (and (eq_attr "fpu" "vfp")
749 (eq_attr "tune" "!arm1020e,arm1022e,cortexa5,cortexa7,cortexa8,cortexa9,cortexa53,cortexm4,marvell_pj4")
750 (eq_attr "tune_cortexr4" "no"))
752 (const_string "no"))))
754 (include "marvell-f-iwmmxt.md")
755 (include "arm-generic.md")
756 (include "arm926ejs.md")
757 (include "arm1020e.md")
758 (include "arm1026ejs.md")
759 (include "arm1136jfs.md")
761 (include "fa606te.md")
762 (include "fa626te.md")
763 (include "fmp626.md")
764 (include "fa726te.md")
765 (include "cortex-a5.md")
766 (include "cortex-a7.md")
767 (include "cortex-a8.md")
768 (include "cortex-a9.md")
769 (include "cortex-a15.md")
770 (include "cortex-a53.md")
771 (include "cortex-r4.md")
772 (include "cortex-r4f.md")
773 (include "cortex-m4.md")
774 (include "cortex-m4-fpu.md")
776 (include "marvell-pj4.md")
779 ;;---------------------------------------------------------------------------
784 ;; Note: For DImode insns, there is normally no reason why operands should
785 ;; not be in the same register, what we don't want is for something being
786 ;; written to partially overlap something that is an input.
788 (define_expand "adddi3"
790 [(set (match_operand:DI 0 "s_register_operand" "")
791 (plus:DI (match_operand:DI 1 "s_register_operand" "")
792 (match_operand:DI 2 "arm_adddi_operand" "")))
793 (clobber (reg:CC CC_REGNUM))])]
798 if (!REG_P (operands[1]))
799 operands[1] = force_reg (DImode, operands[1]);
800 if (!REG_P (operands[2]))
801 operands[2] = force_reg (DImode, operands[2]);
806 (define_insn "*thumb1_adddi3"
807 [(set (match_operand:DI 0 "register_operand" "=l")
808 (plus:DI (match_operand:DI 1 "register_operand" "%0")
809 (match_operand:DI 2 "register_operand" "l")))
810 (clobber (reg:CC CC_REGNUM))
813 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
814 [(set_attr "length" "4")]
817 (define_insn_and_split "*arm_adddi3"
818 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r,&r,&r")
819 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0, r, 0, r")
820 (match_operand:DI 2 "arm_adddi_operand" "r, 0, r, Dd, Dd")))
821 (clobber (reg:CC CC_REGNUM))]
822 "TARGET_32BIT && !TARGET_NEON"
824 "TARGET_32BIT && reload_completed
825 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))"
826 [(parallel [(set (reg:CC_C CC_REGNUM)
827 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
829 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
830 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (match_dup 5))
831 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
834 operands[3] = gen_highpart (SImode, operands[0]);
835 operands[0] = gen_lowpart (SImode, operands[0]);
836 operands[4] = gen_highpart (SImode, operands[1]);
837 operands[1] = gen_lowpart (SImode, operands[1]);
838 operands[5] = gen_highpart_mode (SImode, DImode, operands[2]);
839 operands[2] = gen_lowpart (SImode, operands[2]);
841 [(set_attr "conds" "clob")
842 (set_attr "length" "8")]
845 (define_insn_and_split "*adddi_sesidi_di"
846 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
847 (plus:DI (sign_extend:DI
848 (match_operand:SI 2 "s_register_operand" "r,r"))
849 (match_operand:DI 1 "s_register_operand" "0,r")))
850 (clobber (reg:CC CC_REGNUM))]
853 "TARGET_32BIT && reload_completed"
854 [(parallel [(set (reg:CC_C CC_REGNUM)
855 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
857 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
858 (set (match_dup 3) (plus:SI (plus:SI (ashiftrt:SI (match_dup 2)
861 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
864 operands[3] = gen_highpart (SImode, operands[0]);
865 operands[0] = gen_lowpart (SImode, operands[0]);
866 operands[4] = gen_highpart (SImode, operands[1]);
867 operands[1] = gen_lowpart (SImode, operands[1]);
868 operands[2] = gen_lowpart (SImode, operands[2]);
870 [(set_attr "conds" "clob")
871 (set_attr "length" "8")]
874 (define_insn_and_split "*adddi_zesidi_di"
875 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
876 (plus:DI (zero_extend:DI
877 (match_operand:SI 2 "s_register_operand" "r,r"))
878 (match_operand:DI 1 "s_register_operand" "0,r")))
879 (clobber (reg:CC CC_REGNUM))]
882 "TARGET_32BIT && reload_completed"
883 [(parallel [(set (reg:CC_C CC_REGNUM)
884 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
886 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
887 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (const_int 0))
888 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
891 operands[3] = gen_highpart (SImode, operands[0]);
892 operands[0] = gen_lowpart (SImode, operands[0]);
893 operands[4] = gen_highpart (SImode, operands[1]);
894 operands[1] = gen_lowpart (SImode, operands[1]);
895 operands[2] = gen_lowpart (SImode, operands[2]);
897 [(set_attr "conds" "clob")
898 (set_attr "length" "8")]
901 (define_expand "addsi3"
902 [(set (match_operand:SI 0 "s_register_operand" "")
903 (plus:SI (match_operand:SI 1 "s_register_operand" "")
904 (match_operand:SI 2 "reg_or_int_operand" "")))]
907 if (TARGET_32BIT && CONST_INT_P (operands[2]))
909 arm_split_constant (PLUS, SImode, NULL_RTX,
910 INTVAL (operands[2]), operands[0], operands[1],
911 optimize && can_create_pseudo_p ());
917 ; If there is a scratch available, this will be faster than synthesizing the
920 [(match_scratch:SI 3 "r")
921 (set (match_operand:SI 0 "arm_general_register_operand" "")
922 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
923 (match_operand:SI 2 "const_int_operand" "")))]
925 !(const_ok_for_arm (INTVAL (operands[2]))
926 || const_ok_for_arm (-INTVAL (operands[2])))
927 && const_ok_for_arm (~INTVAL (operands[2]))"
928 [(set (match_dup 3) (match_dup 2))
929 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
933 ;; The r/r/k alternative is required when reloading the address
934 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
935 ;; put the duplicated register first, and not try the commutative version.
936 (define_insn_and_split "*arm_addsi3"
937 [(set (match_operand:SI 0 "s_register_operand" "=rk,l,l ,l ,r ,k ,r,r ,k ,r ,k,k,r ,k ,r")
938 (plus:SI (match_operand:SI 1 "s_register_operand" "%0 ,l,0 ,l ,rk,k ,r,rk,k ,rk,k,r,rk,k ,rk")
939 (match_operand:SI 2 "reg_or_int_operand" "rk ,l,Py,Pd,rI,rI,k,Pj,Pj,L ,L,L,PJ,PJ,?n")))]
954 subw%?\\t%0, %1, #%n2
955 subw%?\\t%0, %1, #%n2
958 && CONST_INT_P (operands[2])
959 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
960 && (reload_completed || !arm_eliminable_register (operands[1]))"
961 [(clobber (const_int 0))]
963 arm_split_constant (PLUS, SImode, curr_insn,
964 INTVAL (operands[2]), operands[0],
968 [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,4,4,4,16")
969 (set_attr "predicable" "yes")
970 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no,no,no,no,no,no,no")
971 (set_attr "arch" "t2,t2,t2,t2,*,*,*,t2,t2,*,*,a,t2,t2,*")
972 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
973 (const_string "arlo_imm")
974 (const_string "arlo_reg")))
978 (define_insn_and_split "*thumb1_addsi3"
979 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,k,l,l,l")
980 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,k,k,0,l,k")
981 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,M,O,Pa,Pb,Pc")))]
984 static const char * const asms[] =
986 \"add\\t%0, %0, %2\",
987 \"sub\\t%0, %0, #%n2\",
988 \"add\\t%0, %1, %2\",
989 \"add\\t%0, %0, %2\",
990 \"add\\t%0, %0, %2\",
991 \"add\\t%0, %1, %2\",
992 \"add\\t%0, %1, %2\",
997 if ((which_alternative == 2 || which_alternative == 6)
998 && CONST_INT_P (operands[2])
999 && INTVAL (operands[2]) < 0)
1000 return \"sub\\t%0, %1, #%n2\";
1001 return asms[which_alternative];
1003 "&& reload_completed && CONST_INT_P (operands[2])
1004 && ((operands[1] != stack_pointer_rtx
1005 && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255))
1006 || (operands[1] == stack_pointer_rtx
1007 && INTVAL (operands[2]) > 1020))"
1008 [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
1009 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
1011 HOST_WIDE_INT offset = INTVAL (operands[2]);
1012 if (operands[1] == stack_pointer_rtx)
1018 else if (offset < -255)
1021 operands[3] = GEN_INT (offset);
1022 operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
1024 [(set_attr "length" "2,2,2,2,2,2,2,4,4,4")]
1027 ;; Reloading and elimination of the frame pointer can
1028 ;; sometimes cause this optimization to be missed.
1030 [(set (match_operand:SI 0 "arm_general_register_operand" "")
1031 (match_operand:SI 1 "const_int_operand" ""))
1033 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
1035 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
1036 && (INTVAL (operands[1]) & 3) == 0"
1037 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
1041 (define_insn "addsi3_compare0"
1042 [(set (reg:CC_NOOV CC_REGNUM)
1044 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
1045 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
1047 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1048 (plus:SI (match_dup 1) (match_dup 2)))]
1052 sub%.\\t%0, %1, #%n2
1054 [(set_attr "conds" "set")
1055 (set_attr "type" "arlo_imm,arlo_imm,*")]
1058 (define_insn "*addsi3_compare0_scratch"
1059 [(set (reg:CC_NOOV CC_REGNUM)
1061 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
1062 (match_operand:SI 1 "arm_add_operand" "I,L, r"))
1069 [(set_attr "conds" "set")
1070 (set_attr "predicable" "yes")
1071 (set_attr "type" "arlo_imm,arlo_imm,*")
1075 (define_insn "*compare_negsi_si"
1076 [(set (reg:CC_Z CC_REGNUM)
1078 (neg:SI (match_operand:SI 0 "s_register_operand" "l,r"))
1079 (match_operand:SI 1 "s_register_operand" "l,r")))]
1082 [(set_attr "conds" "set")
1083 (set_attr "predicable" "yes")
1084 (set_attr "arch" "t2,*")
1085 (set_attr "length" "2,4")
1086 (set_attr "predicable_short_it" "yes,no")]
1089 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
1090 ;; addend is a constant.
1091 (define_insn "cmpsi2_addneg"
1092 [(set (reg:CC CC_REGNUM)
1094 (match_operand:SI 1 "s_register_operand" "r,r")
1095 (match_operand:SI 2 "arm_addimm_operand" "L,I")))
1096 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1097 (plus:SI (match_dup 1)
1098 (match_operand:SI 3 "arm_addimm_operand" "I,L")))]
1099 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
1102 sub%.\\t%0, %1, #%n3"
1103 [(set_attr "conds" "set")]
1106 ;; Convert the sequence
1108 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
1112 ;; bcs dest ((unsigned)rn >= 1)
1113 ;; similarly for the beq variant using bcc.
1114 ;; This is a common looping idiom (while (n--))
1116 [(set (match_operand:SI 0 "arm_general_register_operand" "")
1117 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
1119 (set (match_operand 2 "cc_register" "")
1120 (compare (match_dup 0) (const_int -1)))
1122 (if_then_else (match_operator 3 "equality_operator"
1123 [(match_dup 2) (const_int 0)])
1124 (match_operand 4 "" "")
1125 (match_operand 5 "" "")))]
1126 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
1130 (match_dup 1) (const_int 1)))
1131 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
1133 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
1136 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
1137 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
1140 operands[2], const0_rtx);"
1143 ;; The next four insns work because they compare the result with one of
1144 ;; the operands, and we know that the use of the condition code is
1145 ;; either GEU or LTU, so we can use the carry flag from the addition
1146 ;; instead of doing the compare a second time.
1147 (define_insn "*addsi3_compare_op1"
1148 [(set (reg:CC_C CC_REGNUM)
1150 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
1151 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
1153 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1154 (plus:SI (match_dup 1) (match_dup 2)))]
1158 sub%.\\t%0, %1, #%n2
1160 [(set_attr "conds" "set")
1161 (set_attr "type" "arlo_imm,arlo_imm,*")]
1164 (define_insn "*addsi3_compare_op2"
1165 [(set (reg:CC_C CC_REGNUM)
1167 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
1168 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
1170 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1171 (plus:SI (match_dup 1) (match_dup 2)))]
1176 sub%.\\t%0, %1, #%n2"
1177 [(set_attr "conds" "set")
1178 (set_attr "type" "arlo_imm,arlo_imm,*")]
1181 (define_insn "*compare_addsi2_op0"
1182 [(set (reg:CC_C CC_REGNUM)
1184 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r,r")
1185 (match_operand:SI 1 "arm_add_operand" "Pv,l,I,L,r"))
1194 [(set_attr "conds" "set")
1195 (set_attr "predicable" "yes")
1196 (set_attr "arch" "t2,t2,*,*,*")
1197 (set_attr "predicable_short_it" "yes,yes,no,no,no")
1198 (set_attr "length" "2,2,4,4,4")
1199 (set_attr "type" "arlo_imm,*,arlo_imm,arlo_imm,*")]
1202 (define_insn "*compare_addsi2_op1"
1203 [(set (reg:CC_C CC_REGNUM)
1205 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r,r")
1206 (match_operand:SI 1 "arm_add_operand" "Pv,l,I,L,r"))
1215 [(set_attr "conds" "set")
1216 (set_attr "predicable" "yes")
1217 (set_attr "arch" "t2,t2,*,*,*")
1218 (set_attr "predicable_short_it" "yes,yes,no,no,no")
1219 (set_attr "length" "2,2,4,4,4")
1221 "arlo_imm,*,arlo_imm,arlo_imm,*")]
;; Add-with-carry-in family.  LTUGEU is a code iterator (ltu/geu) and
;; <cnb> the matching CC mode, so each pattern expands to two insns that
;; consume the carry flag in opposite senses.  "conds use" marks them as
;; readers (not setters) of the condition codes.
;; The 'K' alternative (constant whose bitwise complement is a valid ARM
;; immediate) is emitted as SBC with the inverted immediate #%B2.
(define_insn "*addsi3_carryin_<optab>"
1225 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1226 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%l,r,r")
1227 (match_operand:SI 2 "arm_not_operand" "0,rI,K"))
1228 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
1233 sbc%?\\t%0, %1, #%B2"
1234 [(set_attr "conds" "use")
1235 (set_attr "predicable" "yes")
1236 (set_attr "arch" "t2,*,*")
1237 (set_attr "length" "4")
1238 (set_attr "predicable_short_it" "yes,no,no")]
;; Same semantics with the carry term in the first position of the outer
;; plus — combine can present the RTL either way, so both orders need a
;; pattern.
1241 (define_insn "*addsi3_carryin_alt2_<optab>"
1242 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1243 (plus:SI (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
1244 (match_operand:SI 1 "s_register_operand" "%l,r,r"))
1245 (match_operand:SI 2 "arm_rhs_operand" "l,rI,K")))]
1250 sbc%?\\t%0, %1, #%B2"
1251 [(set_attr "conds" "use")
1252 (set_attr "predicable" "yes")
1253 (set_attr "arch" "t2,*,*")
1254 (set_attr "length" "4")
1255 (set_attr "predicable_short_it" "yes,no,no")]
;; ADC where the second addend is a shifted register (%S2 prints the
;; shift).  Type attribute distinguishes immediate-shift from
;; register-shift forms, which cost differently on most cores.
1258 (define_insn "*addsi3_carryin_shift_<optab>"
1259 [(set (match_operand:SI 0 "s_register_operand" "=r")
1261 (match_operator:SI 2 "shift_operator"
1262 [(match_operand:SI 3 "s_register_operand" "r")
1263 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1264 (match_operand:SI 1 "s_register_operand" "r"))
1265 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
1267 "adc%?\\t%0, %1, %3%S2"
1268 [(set_attr "conds" "use")
1269 (set_attr "predicable" "yes")
1270 (set_attr "predicable_short_it" "no")
1271 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1272 (const_string "arlo_shift")
1273 (const_string "arlo_shift_reg")))]
;; ADC variant that also sets the flags (adc%. = ADCS): it both consumes
;; the incoming carry and clobbers CC afterwards.
1276 (define_insn "*addsi3_carryin_clobercc_<optab>"
1277 [(set (match_operand:SI 0 "s_register_operand" "=r")
1278 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
1279 (match_operand:SI 2 "arm_rhs_operand" "rI"))
1280 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))
1281 (clobber (reg:CC CC_REGNUM))]
1283 "adc%.\\t%0, %1, %2"
1284 [(set_attr "conds" "set")]
;; Subtract-with-borrow family.  ARM's carry flag is an inverted borrow,
;; so (ltu CC_C 0) models the borrow consumed by SBC/RSC.
;; Second alternative allows an immediate minuend; the "a" arch attribute
;; restricts that form to ARM state.
(define_insn "*subsi3_carryin"
1288 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1289 (minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I")
1290 (match_operand:SI 2 "s_register_operand" "r,r"))
1291 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1296 [(set_attr "conds" "use")
1297 (set_attr "arch" "*,a")
1298 (set_attr "predicable" "yes")
1299 (set_attr "predicable_short_it" "no")]
;; (x + K) - borrow expressed as SBC with the complemented constant #%B2
;; ('K': constants whose complement is a valid immediate).
1302 (define_insn "*subsi3_carryin_const"
1303 [(set (match_operand:SI 0 "s_register_operand" "=r")
1304 (minus:SI (plus:SI (match_operand:SI 1 "reg_or_int_operand" "r")
1305 (match_operand:SI 2 "arm_not_operand" "K"))
1306 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1308 "sbc\\t%0, %1, #%B2"
1309 [(set_attr "conds" "use")]
;; SBCS: subtract with borrow and set the flags for a following
;; comparison (parallel compare + subtract-with-borrow).
1312 (define_insn "*subsi3_carryin_compare"
1313 [(set (reg:CC CC_REGNUM)
1314 (compare:CC (match_operand:SI 1 "s_register_operand" "r")
1315 (match_operand:SI 2 "s_register_operand" "r")))
1316 (set (match_operand:SI 0 "s_register_operand" "=r")
1317 (minus:SI (minus:SI (match_dup 1)
1319 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1322 [(set_attr "conds" "set")]
;; Constant form of the above, again via the complemented immediate.
1325 (define_insn "*subsi3_carryin_compare_const"
1326 [(set (reg:CC CC_REGNUM)
1327 (compare:CC (match_operand:SI 1 "reg_or_int_operand" "r")
1328 (match_operand:SI 2 "arm_not_operand" "K")))
1329 (set (match_operand:SI 0 "s_register_operand" "=r")
1330 (minus:SI (plus:SI (match_dup 1)
1332 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1334 "sbcs\\t%0, %1, #%B2"
1335 [(set_attr "conds" "set")]
;; SBC with a shifted-register subtrahend (%S2 prints the shift applied
;; to %3).  Type split mirrors *addsi3_carryin_shift_<optab>.
1338 (define_insn "*subsi3_carryin_shift"
1339 [(set (match_operand:SI 0 "s_register_operand" "=r")
1341 (match_operand:SI 1 "s_register_operand" "r")
1342 (match_operator:SI 2 "shift_operator"
1343 [(match_operand:SI 3 "s_register_operand" "r")
1344 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
1345 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1347 "sbc%?\\t%0, %1, %3%S2"
1348 [(set_attr "conds" "use")
1349 (set_attr "predicable" "yes")
1350 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1351 (const_string "arlo_shift")
1352 (const_string "arlo_shift_reg")))]
;; Reverse subtract with carry (RSC) when the shifted register is the
;; minuend.  RSC exists only in ARM state (not Thumb-2).
1355 (define_insn "*rsbsi3_carryin_shift"
1356 [(set (match_operand:SI 0 "s_register_operand" "=r")
1358 (match_operator:SI 2 "shift_operator"
1359 [(match_operand:SI 3 "s_register_operand" "r")
1360 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1361 (match_operand:SI 1 "s_register_operand" "r"))
1362 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1364 "rsc%?\\t%0, %1, %3%S2"
1365 [(set_attr "conds" "use")
1366 (set_attr "predicable" "yes")
1367 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1368 (const_string "arlo_shift")
1369 (const_string "arlo_shift_reg")))]
1372 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
;; Split: rewrite the constant C so the MVN-of-shift form can be used;
;; the C fragment below computes ~(C - 1) into operands[1], which is then
;; loaded into the scratch (operand 3) and shifted/inverted.
1374 [(set (match_operand:SI 0 "s_register_operand" "")
1375 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1376 (match_operand:SI 2 "s_register_operand" ""))
1378 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1380 [(set (match_dup 3) (match_dup 1))
1381 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1383 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
;; FP add expanders: only available with hardware floating point;
;; double precision additionally requires a non-single-precision-only VFP.
(define_expand "addsf3"
1387 [(set (match_operand:SF 0 "s_register_operand" "")
1388 (plus:SF (match_operand:SF 1 "s_register_operand" "")
1389 (match_operand:SF 2 "s_register_operand" "")))]
1390 "TARGET_32BIT && TARGET_HARD_FLOAT"
;; Double-precision variant.
1394 (define_expand "adddf3"
1395 [(set (match_operand:DF 0 "s_register_operand" "")
1396 (plus:DF (match_operand:DF 1 "s_register_operand" "")
1397 (match_operand:DF 2 "s_register_operand" "")))]
1398 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
;; 64-bit subtract expander.  The insn patterns want register operands,
;; so non-register inputs are forced into registers here; the CC clobber
;; accounts for the SUBS/SBC pair the insn will become.
(define_expand "subdi3"
1404 [(set (match_operand:DI 0 "s_register_operand" "")
1405 (minus:DI (match_operand:DI 1 "s_register_operand" "")
1406 (match_operand:DI 2 "s_register_operand" "")))
1407 (clobber (reg:CC CC_REGNUM))])]
1412 if (!REG_P (operands[1]))
1413 operands[1] = force_reg (DImode, operands[1]);
1414 if (!REG_P (operands[2]))
1415 operands[2] = force_reg (DImode, operands[2]);
;; DImode subtract, split after reload into SUBS (low word, setting the
;; borrow) followed by SBC (high word consuming it).  Earlyclobber (&)
;; keeps the destination from overlapping inputs except where tied.
(define_insn_and_split "*arm_subdi3"
1421 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
1422 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
1423 (match_operand:DI 2 "s_register_operand" "r,0,0")))
1424 (clobber (reg:CC CC_REGNUM))]
1425 "TARGET_32BIT && !TARGET_NEON"
1426 "#" ; "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
1427 "&& reload_completed"
1428 [(parallel [(set (reg:CC CC_REGNUM)
1429 (compare:CC (match_dup 1) (match_dup 2)))
1430 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
1431 (set (match_dup 3) (minus:SI (minus:SI (match_dup 4) (match_dup 5))
1432 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
;; Rewrite the DI operands as their SI high/low halves for the split RTL.
1434 operands[3] = gen_highpart (SImode, operands[0]);
1435 operands[0] = gen_lowpart (SImode, operands[0]);
1436 operands[4] = gen_highpart (SImode, operands[1]);
1437 operands[1] = gen_lowpart (SImode, operands[1]);
1438 operands[5] = gen_highpart (SImode, operands[2]);
1439 operands[2] = gen_lowpart (SImode, operands[2]);
1441 [(set_attr "conds" "clob")
1442 (set_attr "length" "8")]
;; Thumb-1 DImode subtract: destination tied to operand 1; emits the
;; SUB/SBC pair directly (no split) using low registers only.
(define_insn "*thumb_subdi3"
1446 [(set (match_operand:DI 0 "register_operand" "=l")
1447 (minus:DI (match_operand:DI 1 "register_operand" "0")
1448 (match_operand:DI 2 "register_operand" "l")))
1449 (clobber (reg:CC CC_REGNUM))]
1451 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
1452 [(set_attr "length" "4")]
;; DI minus zero-extended SI.  After reload: SUBS on the low words, then
;; SBC of the high word against #0 — encoded here as plus of ~0
;; (operands[5] = GEN_INT (~0)) minus the borrow, which is the RTL
;; canonical form of "high1 - 0 - borrow".
(define_insn_and_split "*subdi_di_zesidi"
1456 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1457 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1459 (match_operand:SI 2 "s_register_operand" "r,r"))))
1460 (clobber (reg:CC CC_REGNUM))]
1462 "#" ; "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
1463 "&& reload_completed"
1464 [(parallel [(set (reg:CC CC_REGNUM)
1465 (compare:CC (match_dup 1) (match_dup 2)))
1466 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
1467 (set (match_dup 3) (minus:SI (plus:SI (match_dup 4) (match_dup 5))
1468 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1470 operands[3] = gen_highpart (SImode, operands[0]);
1471 operands[0] = gen_lowpart (SImode, operands[0]);
1472 operands[4] = gen_highpart (SImode, operands[1]);
1473 operands[1] = gen_lowpart (SImode, operands[1]);
1474 operands[5] = GEN_INT (~0);
1476 [(set_attr "conds" "clob")
1477 (set_attr "length" "8")]
;; DI minus sign-extended SI: the high-word correction subtracts
;; (op2 >> 31), i.e. the sign-extension of the subtrahend, via
;; "sbc %R0, %R1, %2, asr #31".
1480 (define_insn_and_split "*subdi_di_sesidi"
1481 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1482 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1484 (match_operand:SI 2 "s_register_operand" "r,r"))))
1485 (clobber (reg:CC CC_REGNUM))]
1487 "#" ; "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
1488 "&& reload_completed"
1489 [(parallel [(set (reg:CC CC_REGNUM)
1490 (compare:CC (match_dup 1) (match_dup 2)))
1491 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
1492 (set (match_dup 3) (minus:SI (minus:SI (match_dup 4)
1493 (ashiftrt:SI (match_dup 2)
1495 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1497 operands[3] = gen_highpart (SImode, operands[0]);
1498 operands[0] = gen_lowpart (SImode, operands[0]);
1499 operands[4] = gen_highpart (SImode, operands[1]);
1500 operands[1] = gen_lowpart (SImode, operands[1]);
1502 [(set_attr "conds" "clob")
1503 (set_attr "length" "8")]
;; Zero-extended SI minus DI: low words via SUBS (reversed operands),
;; high word via RSC computing 0 - high1 - borrow.
(define_insn_and_split "*subdi_zesidi_di"
1507 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1508 (minus:DI (zero_extend:DI
1509 (match_operand:SI 2 "s_register_operand" "r,r"))
1510 (match_operand:DI 1 "s_register_operand" "0,r")))
1511 (clobber (reg:CC CC_REGNUM))]
1513 "#" ; "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
1515 ; "subs\\t%Q0, %2, %Q1\;rsc\\t%R0, %R1, #0"
1516 "&& reload_completed"
1517 [(parallel [(set (reg:CC CC_REGNUM)
1518 (compare:CC (match_dup 2) (match_dup 1)))
1519 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))])
1520 (set (match_dup 3) (minus:SI (minus:SI (const_int 0) (match_dup 4))
1521 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1523 operands[3] = gen_highpart (SImode, operands[0]);
1524 operands[0] = gen_lowpart (SImode, operands[0]);
1525 operands[4] = gen_highpart (SImode, operands[1]);
1526 operands[1] = gen_lowpart (SImode, operands[1]);
1528 [(set_attr "conds" "clob")
1529 (set_attr "length" "8")]
;; Sign-extended SI minus DI: as above, but the high word starts from the
;; sign bits of op2 (op2 asr #31) rather than zero.
1532 (define_insn_and_split "*subdi_sesidi_di"
1533 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1534 (minus:DI (sign_extend:DI
1535 (match_operand:SI 2 "s_register_operand" "r,r"))
1536 (match_operand:DI 1 "s_register_operand" "0,r")))
1537 (clobber (reg:CC CC_REGNUM))]
1539 "#" ; "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
1541 ; "subs\\t%Q0, %2, %Q1\;rsc\\t%R0, %R1, %2, asr #31"
1542 "&& reload_completed"
1543 [(parallel [(set (reg:CC CC_REGNUM)
1544 (compare:CC (match_dup 2) (match_dup 1)))
1545 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))])
1546 (set (match_dup 3) (minus:SI (minus:SI
1547 (ashiftrt:SI (match_dup 2)
1550 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1552 operands[3] = gen_highpart (SImode, operands[0]);
1553 operands[0] = gen_lowpart (SImode, operands[0]);
1554 operands[4] = gen_highpart (SImode, operands[1]);
1555 operands[1] = gen_lowpart (SImode, operands[1]);
1557 [(set_attr "conds" "clob")
1558 (set_attr "length" "8")]
;; Both operands zero-extended SI: low word SUBS, and the high word is
;; "sbc %R0, %1, %1" — x - x - borrow yields 0 or -1, exactly the borrow
;; propagated into the upper half.
(define_insn_and_split "*subdi_zesidi_zesidi"
1562 [(set (match_operand:DI 0 "s_register_operand" "=r")
1563 (minus:DI (zero_extend:DI
1564 (match_operand:SI 1 "s_register_operand" "r"))
1566 (match_operand:SI 2 "s_register_operand" "r"))))
1567 (clobber (reg:CC CC_REGNUM))]
1569 "#" ; "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
1570 "&& reload_completed"
1571 [(parallel [(set (reg:CC CC_REGNUM)
1572 (compare:CC (match_dup 1) (match_dup 2)))
1573 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
1574 (set (match_dup 3) (minus:SI (minus:SI (match_dup 1) (match_dup 1))
1575 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1577 operands[3] = gen_highpart (SImode, operands[0]);
1578 operands[0] = gen_lowpart (SImode, operands[0]);
1580 [(set_attr "conds" "clob")
1581 (set_attr "length" "8")]
;; SImode subtract expander.  A constant minuend is synthesised via
;; arm_split_constant on 32-bit targets; Thumb-1 just forces it into a
;; register.
(define_expand "subsi3"
1585 [(set (match_operand:SI 0 "s_register_operand" "")
1586 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
1587 (match_operand:SI 2 "s_register_operand" "")))]
1590 if (CONST_INT_P (operands[1]))
1594 arm_split_constant (MINUS, SImode, NULL_RTX,
1595 INTVAL (operands[1]), operands[0],
1596 operands[2], optimize && can_create_pseudo_p ());
1599 else /* TARGET_THUMB1 */
1600 operands[1] = force_reg (SImode, operands[1]);
;; Thumb-1 subtract (2 bytes); 'Pd' admits small immediates.  Thumb-1
;; arithmetic always sets the flags, hence "conds set".
1605 (define_insn "thumb1_subsi3_insn"
1606 [(set (match_operand:SI 0 "register_operand" "=l")
1607 (minus:SI (match_operand:SI 1 "register_operand" "l")
1608 (match_operand:SI 2 "reg_or_int_operand" "lPd")))]
1611 [(set_attr "length" "2")
1612 (set_attr "conds" "set")])
1614 ; ??? Check Thumb-2 split length
;; ARM/Thumb-2 subtract.  The last alternative ('?n': any constant) is
;; split post-reload into an arm_split_constant sequence (up to 16 bytes).
1615 (define_insn_and_split "*arm_subsi3_insn"
1616 [(set (match_operand:SI 0 "s_register_operand" "=l,l ,l ,l ,r ,r,r,rk,r")
1617 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "l ,0 ,l ,Pz,rI,r,r,k ,?n")
1618 (match_operand:SI 2 "reg_or_int_operand" "l ,Py,Pd,l ,r ,I,r,r ,r")))]
1630 "&& (CONST_INT_P (operands[1])
1631 && !const_ok_for_arm (INTVAL (operands[1])))"
1632 [(clobber (const_int 0))]
1634 arm_split_constant (MINUS, SImode, curr_insn,
1635 INTVAL (operands[1]), operands[0], operands[2], 0);
1638 [(set_attr "length" "4,4,4,4,4,4,4,4,16")
1639 (set_attr "arch" "t2,t2,t2,t2,*,*,*,*,*")
1640 (set_attr "predicable" "yes")
1641 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no")
1642 (set_attr "type" "*,*,*,*,arlo_imm,arlo_imm,*,*,arlo_imm")]
;; Peephole: C - reg where C is not a valid SUB immediate but ~C is —
;; load the constant into a scratch with MVN, then subtract.
1646 [(match_scratch:SI 3 "r")
1647 (set (match_operand:SI 0 "arm_general_register_operand" "")
1648 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1649 (match_operand:SI 2 "arm_general_register_operand" "")))]
1651 && !const_ok_for_arm (INTVAL (operands[1]))
1652 && const_ok_for_arm (~INTVAL (operands[1]))"
1653 [(set (match_dup 3) (match_dup 1))
1654 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
;; Flag-setting subtracts.  CC_NOOV: comparison result valid only when
;; overflow is irrelevant (EQ/NE and the like).
(define_insn "*subsi3_compare0"
1659 [(set (reg:CC_NOOV CC_REGNUM)
1661 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1662 (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
1664 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1665 (minus:SI (match_dup 1) (match_dup 2)))]
1671 [(set_attr "conds" "set")
1672 (set_attr "type" "arlo_imm,*,*")]
;; Full-CC variant: named (no leading *) so it can be generated directly
;; from C code in the backend.
1675 (define_insn "subsi3_compare"
1676 [(set (reg:CC CC_REGNUM)
1677 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1678 (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
1679 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1680 (minus:SI (match_dup 1) (match_dup 2)))]
1686 [(set_attr "conds" "set")
1687 (set_attr "type" "arlo_imm,*,*")]
;; FP subtract expanders, mirroring addsf3/adddf3 above.
(define_expand "subsf3"
1691 [(set (match_operand:SF 0 "s_register_operand" "")
1692 (minus:SF (match_operand:SF 1 "s_register_operand" "")
1693 (match_operand:SF 2 "s_register_operand" "")))]
1694 "TARGET_32BIT && TARGET_HARD_FLOAT"
;; Double precision requires full (non-single-only) VFP.
1698 (define_expand "subdf3"
1699 [(set (match_operand:DF 0 "s_register_operand" "")
1700 (minus:DF (match_operand:DF 1 "s_register_operand" "")
1701 (match_operand:DF 2 "s_register_operand" "")))]
1702 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1707 ;; Multiplication insns
;; HImode multiply: implemented as SMULBB into an SI temp, then the low
;; half moved to the HI destination (requires the DSP multiply extension).
(define_expand "mulhi3"
1710 [(set (match_operand:HI 0 "s_register_operand" "")
1711 (mult:HI (match_operand:HI 1 "s_register_operand" "")
1712 (match_operand:HI 2 "s_register_operand" "")))]
1713 "TARGET_DSP_MULTIPLY"
1716 rtx result = gen_reg_rtx (SImode);
1717 emit_insn (gen_mulhisi3 (result, operands[1], operands[2]));
1718 emit_move_insn (operands[0], gen_lowpart (HImode, result));
;; SImode multiply expander; the real work is in the insn patterns below.
1723 (define_expand "mulsi3"
1724 [(set (match_operand:SI 0 "s_register_operand" "")
1725 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1726 (match_operand:SI 1 "s_register_operand" "")))]
1731 ;; Use `&' and then `0' to prevent the operands 0 and 1 being the same
;; Pre-v6 MUL forbids Rd == Rm, hence the earlyclobber plus tied-operand
;; trick described above.
(define_insn "*arm_mulsi3"
1733 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1734 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1735 (match_operand:SI 1 "s_register_operand" "%0,r")))]
1736 "TARGET_32BIT && !arm_arch6"
1737 "mul%?\\t%0, %2, %1"
1738 [(set_attr "type" "mul")
1739 (set_attr "predicable" "yes")]
;; v6+ lifts the Rd/Rm restriction; first two alternatives are 16-bit
;; Thumb-2 encodings.
1742 (define_insn "*arm_mulsi3_v6"
1743 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
1744 (mult:SI (match_operand:SI 1 "s_register_operand" "0,l,r")
1745 (match_operand:SI 2 "s_register_operand" "l,0,r")))]
1746 "TARGET_32BIT && arm_arch6"
1747 "mul%?\\t%0, %1, %2"
1748 [(set_attr "type" "mul")
1749 (set_attr "predicable" "yes")
1750 (set_attr "arch" "t2,t2,*")
1751 (set_attr "length" "4")
1752 (set_attr "predicable_short_it" "yes,yes,no")]
1755 ; Unfortunately with the Thumb the '&'/'0' trick can fail when operands
1756 ; 1 and 2 are the same, because reload will make operand 0 match
1757 ; operand 1 without realizing that this conflicts with operand 2. We fix
1758 ; this by adding another alternative to match this case, and then `reload'
1759 ; it ourselves. This alternative must come first.
1760 (define_insn "*thumb_mulsi3"
1761 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1762 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1763 (match_operand:SI 2 "register_operand" "l,l,l")))]
1764 "TARGET_THUMB1 && !arm_arch6"
;; First two alternatives copy op1 into the destination first (the manual
;; "reload"), then multiply-accumulate into it.
1766 if (which_alternative < 2)
1767 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1769 return \"mul\\t%0, %2\";
1771 [(set_attr "length" "4,4,2")
1772 (set_attr "type" "muls")]
;; Thumb-1 on v6: destination may be tied to either input.
1775 (define_insn "*thumb_mulsi3_v6"
1776 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1777 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1778 (match_operand:SI 2 "register_operand" "l,0,0")))]
1779 "TARGET_THUMB1 && arm_arch6"
1784 [(set_attr "length" "2")
1785 (set_attr "type" "muls")]
;; MULS: multiply and set flags (CC_NOOV — overflow not meaningful for
;; MUL).  _nov6 forms keep the Rd != Rm earlyclobber; the v6 forms are
;; only used at -Os since MULS restricts scheduling on newer cores.
(define_insn "*mulsi3_compare0"
1789 [(set (reg:CC_NOOV CC_REGNUM)
1790 (compare:CC_NOOV (mult:SI
1791 (match_operand:SI 2 "s_register_operand" "r,r")
1792 (match_operand:SI 1 "s_register_operand" "%0,r"))
1794 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1795 (mult:SI (match_dup 2) (match_dup 1)))]
1796 "TARGET_ARM && !arm_arch6"
1797 "mul%.\\t%0, %2, %1"
1798 [(set_attr "conds" "set")
1799 (set_attr "type" "muls")]
;; v6, size-optimised only.
1802 (define_insn "*mulsi3_compare0_v6"
1803 [(set (reg:CC_NOOV CC_REGNUM)
1804 (compare:CC_NOOV (mult:SI
1805 (match_operand:SI 2 "s_register_operand" "r")
1806 (match_operand:SI 1 "s_register_operand" "r"))
1808 (set (match_operand:SI 0 "s_register_operand" "=r")
1809 (mult:SI (match_dup 2) (match_dup 1)))]
1810 "TARGET_ARM && arm_arch6 && optimize_size"
1811 "mul%.\\t%0, %2, %1"
1812 [(set_attr "conds" "set")
1813 (set_attr "type" "muls")]
;; Flags-only variants: the product itself is dead, only the comparison
;; result is wanted, so the destination is a scratch.
1816 (define_insn "*mulsi_compare0_scratch"
1817 [(set (reg:CC_NOOV CC_REGNUM)
1818 (compare:CC_NOOV (mult:SI
1819 (match_operand:SI 2 "s_register_operand" "r,r")
1820 (match_operand:SI 1 "s_register_operand" "%0,r"))
1822 (clobber (match_scratch:SI 0 "=&r,&r"))]
1823 "TARGET_ARM && !arm_arch6"
1824 "mul%.\\t%0, %2, %1"
1825 [(set_attr "conds" "set")
1826 (set_attr "type" "muls")]
;; v6 scratch variant, size-optimised only.
1829 (define_insn "*mulsi_compare0_scratch_v6"
1830 [(set (reg:CC_NOOV CC_REGNUM)
1831 (compare:CC_NOOV (mult:SI
1832 (match_operand:SI 2 "s_register_operand" "r")
1833 (match_operand:SI 1 "s_register_operand" "r"))
1835 (clobber (match_scratch:SI 0 "=r"))]
1836 "TARGET_ARM && arm_arch6 && optimize_size"
1837 "mul%.\\t%0, %2, %1"
1838 [(set_attr "conds" "set")
1839 (set_attr "type" "muls")]
1842 ;; Unnamed templates to match MLA instruction.
;; Multiply-accumulate (MLA): pre-v6 forms need earlyclobber on the
;; destination; v6 lifts the register restrictions.
(define_insn "*mulsi3addsi"
1845 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1847 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1848 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1849 (match_operand:SI 3 "s_register_operand" "r,r,0,0")))]
1850 "TARGET_32BIT && !arm_arch6"
1851 "mla%?\\t%0, %2, %1, %3"
1852 [(set_attr "type" "mla")
1853 (set_attr "predicable" "yes")]
;; v6 MLA: single unrestricted alternative.
1856 (define_insn "*mulsi3addsi_v6"
1857 [(set (match_operand:SI 0 "s_register_operand" "=r")
1859 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1860 (match_operand:SI 1 "s_register_operand" "r"))
1861 (match_operand:SI 3 "s_register_operand" "r")))]
1862 "TARGET_32BIT && arm_arch6"
1863 "mla%?\\t%0, %2, %1, %3"
1864 [(set_attr "type" "mla")
1865 (set_attr "predicable" "yes")
1866 (set_attr "predicable_short_it" "no")]
;; MLAS: multiply-accumulate and set flags.
;; NOTE(review): the "arm_arch6" condition here alongside pre-v6-style
;; alternatives looks suspicious, but the listing has gaps — verify
;; against the full source before drawing conclusions.
1869 (define_insn "*mulsi3addsi_compare0"
1870 [(set (reg:CC_NOOV CC_REGNUM)
1873 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1874 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1875 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1877 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1878 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1880 "TARGET_ARM && arm_arch6"
1881 "mla%.\\t%0, %2, %1, %3"
1882 [(set_attr "conds" "set")
1883 (set_attr "type" "mlas")]
;; v6 MLAS, size-optimised only.
1886 (define_insn "*mulsi3addsi_compare0_v6"
1887 [(set (reg:CC_NOOV CC_REGNUM)
1890 (match_operand:SI 2 "s_register_operand" "r")
1891 (match_operand:SI 1 "s_register_operand" "r"))
1892 (match_operand:SI 3 "s_register_operand" "r"))
1894 (set (match_operand:SI 0 "s_register_operand" "=r")
1895 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1897 "TARGET_ARM && arm_arch6 && optimize_size"
1898 "mla%.\\t%0, %2, %1, %3"
1899 [(set_attr "conds" "set")
1900 (set_attr "type" "mlas")]
;; Flags-only MLAS (result dead, scratch destination).
1903 (define_insn "*mulsi3addsi_compare0_scratch"
1904 [(set (reg:CC_NOOV CC_REGNUM)
1907 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1908 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1909 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1911 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1912 "TARGET_ARM && !arm_arch6"
1913 "mla%.\\t%0, %2, %1, %3"
1914 [(set_attr "conds" "set")
1915 (set_attr "type" "mlas")]
;; v6 flags-only MLAS, size-optimised only.
1918 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1919 [(set (reg:CC_NOOV CC_REGNUM)
1922 (match_operand:SI 2 "s_register_operand" "r")
1923 (match_operand:SI 1 "s_register_operand" "r"))
1924 (match_operand:SI 3 "s_register_operand" "r"))
1926 (clobber (match_scratch:SI 0 "=r"))]
1927 "TARGET_ARM && arm_arch6 && optimize_size"
1928 "mla%.\\t%0, %2, %1, %3"
1929 [(set_attr "conds" "set")
1930 (set_attr "type" "mlas")]
;; Multiply-subtract (MLS): op3 - op2*op1; Thumb-2-era instruction.
1933 (define_insn "*mulsi3subsi"
1934 [(set (match_operand:SI 0 "s_register_operand" "=r")
1936 (match_operand:SI 3 "s_register_operand" "r")
1937 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1938 (match_operand:SI 1 "s_register_operand" "r"))))]
1939 "TARGET_32BIT && arm_arch_thumb2"
1940 "mls%?\\t%0, %2, %1, %3"
1941 [(set_attr "type" "mla")
1942 (set_attr "predicable" "yes")
1943 (set_attr "predicable_short_it" "no")]
;; Signed 32x32+64 -> 64 multiply-accumulate (SMLAL).
(define_expand "maddsidi4"
1947 [(set (match_operand:DI 0 "s_register_operand" "")
1950 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1951 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1952 (match_operand:DI 3 "s_register_operand" "")))]
1953 "TARGET_32BIT && arm_arch3m"
;; Pre-v6 SMLAL: accumulator must be tied to the destination ("0") and
;; the destination is earlyclobbered against the multiplicands.
1956 (define_insn "*mulsidi3adddi"
1957 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1960 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1961 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1962 (match_operand:DI 1 "s_register_operand" "0")))]
1963 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1964 "smlal%?\\t%Q0, %R0, %3, %2"
1965 [(set_attr "type" "smlal")
1966 (set_attr "predicable" "yes")]
;; v6 SMLAL: no earlyclobber needed.
1969 (define_insn "*mulsidi3adddi_v6"
1970 [(set (match_operand:DI 0 "s_register_operand" "=r")
1973 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1974 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1975 (match_operand:DI 1 "s_register_operand" "0")))]
1976 "TARGET_32BIT && arm_arch6"
1977 "smlal%?\\t%Q0, %R0, %3, %2"
1978 [(set_attr "type" "smlal")
1979 (set_attr "predicable" "yes")
1980 (set_attr "predicable_short_it" "no")]
1983 ;; 32x32->64 widening multiply.
1984 ;; As with mulsi3, the only difference between the v3-5 and v6+
1985 ;; versions of these patterns is the requirement that the output not
1986 ;; overlap the inputs, but that still means we have to have a named
1987 ;; expander and two different starred insns.
;; Signed widening multiply (SMULL).
(define_expand "mulsidi3"
1990 [(set (match_operand:DI 0 "s_register_operand" "")
1992 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1993 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1994 "TARGET_32BIT && arm_arch3m"
;; Pre-v6: destination earlyclobbered against the inputs.
1998 (define_insn "*mulsidi3_nov6"
1999 [(set (match_operand:DI 0 "s_register_operand" "=&r")
2001 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
2002 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
2003 "TARGET_32BIT && arm_arch3m && !arm_arch6"
2004 "smull%?\\t%Q0, %R0, %1, %2"
2005 [(set_attr "type" "smull")
2006 (set_attr "predicable" "yes")]
;; v6: overlap permitted.
2009 (define_insn "*mulsidi3_v6"
2010 [(set (match_operand:DI 0 "s_register_operand" "=r")
2012 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
2013 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
2014 "TARGET_32BIT && arm_arch6"
2015 "smull%?\\t%Q0, %R0, %1, %2"
2016 [(set_attr "type" "smull")
2017 (set_attr "predicable" "yes")
2018 (set_attr "predicable_short_it" "no")]
;; Unsigned widening multiply (UMULL); structure mirrors mulsidi3.
2021 (define_expand "umulsidi3"
2022 [(set (match_operand:DI 0 "s_register_operand" "")
2024 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
2025 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
2026 "TARGET_32BIT && arm_arch3m"
;; Pre-v6 UMULL with earlyclobber.
2030 (define_insn "*umulsidi3_nov6"
2031 [(set (match_operand:DI 0 "s_register_operand" "=&r")
2033 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
2034 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
2035 "TARGET_32BIT && arm_arch3m && !arm_arch6"
2036 "umull%?\\t%Q0, %R0, %1, %2"
2037 [(set_attr "type" "umull")
2038 (set_attr "predicable" "yes")]
;; v6 UMULL.
2041 (define_insn "*umulsidi3_v6"
2042 [(set (match_operand:DI 0 "s_register_operand" "=r")
2044 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
2045 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
2046 "TARGET_32BIT && arm_arch6"
2047 "umull%?\\t%Q0, %R0, %1, %2"
2048 [(set_attr "type" "umull")
2049 (set_attr "predicable" "yes")
2050 (set_attr "predicable_short_it" "no")]
;; Unsigned 32x32+64 -> 64 multiply-accumulate (UMLAL); mirrors maddsidi4.
(define_expand "umaddsidi4"
2054 [(set (match_operand:DI 0 "s_register_operand" "")
2057 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
2058 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
2059 (match_operand:DI 3 "s_register_operand" "")))]
2060 "TARGET_32BIT && arm_arch3m"
;; Pre-v6 UMLAL: accumulator tied to destination, earlyclobber.
2063 (define_insn "*umulsidi3adddi"
2064 [(set (match_operand:DI 0 "s_register_operand" "=&r")
2067 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
2068 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
2069 (match_operand:DI 1 "s_register_operand" "0")))]
2070 "TARGET_32BIT && arm_arch3m && !arm_arch6"
2071 "umlal%?\\t%Q0, %R0, %3, %2"
2072 [(set_attr "type" "umlal")
2073 (set_attr "predicable" "yes")]
;; v6 UMLAL.
2076 (define_insn "*umulsidi3adddi_v6"
2077 [(set (match_operand:DI 0 "s_register_operand" "=r")
2080 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
2081 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
2082 (match_operand:DI 1 "s_register_operand" "0")))]
2083 "TARGET_32BIT && arm_arch6"
2084 "umlal%?\\t%Q0, %R0, %3, %2"
2085 [(set_attr "type" "umlal")
2086 (set_attr "predicable" "yes")
2087 (set_attr "predicable_short_it" "no")]
;; High 32 bits of a signed 64-bit product: SMULL with the low half
;; discarded into a scratch register.
(define_expand "smulsi3_highpart"
2092 [(set (match_operand:SI 0 "s_register_operand" "")
2096 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
2097 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
2099 (clobber (match_scratch:SI 3 ""))])]
2100 "TARGET_32BIT && arm_arch3m"
;; Pre-v6: earlyclobber on result and scratch.
2104 (define_insn "*smulsi3_highpart_nov6"
2105 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
2109 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
2110 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
2112 (clobber (match_scratch:SI 3 "=&r,&r"))]
2113 "TARGET_32BIT && arm_arch3m && !arm_arch6"
2114 "smull%?\\t%3, %0, %2, %1"
2115 [(set_attr "type" "smull")
2116 (set_attr "predicable" "yes")]
;; v6: no overlap restrictions.
2119 (define_insn "*smulsi3_highpart_v6"
2120 [(set (match_operand:SI 0 "s_register_operand" "=r")
2124 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
2125 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
2127 (clobber (match_scratch:SI 3 "=r"))]
2128 "TARGET_32BIT && arm_arch6"
2129 "smull%?\\t%3, %0, %2, %1"
2130 [(set_attr "type" "smull")
2131 (set_attr "predicable" "yes")
2132 (set_attr "predicable_short_it" "no")]
;; Unsigned high-part multiply (UMULL), mirroring the signed patterns.
2135 (define_expand "umulsi3_highpart"
2137 [(set (match_operand:SI 0 "s_register_operand" "")
2141 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
2142 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
2144 (clobber (match_scratch:SI 3 ""))])]
2145 "TARGET_32BIT && arm_arch3m"
;; Pre-v6 unsigned high-part.
2149 (define_insn "*umulsi3_highpart_nov6"
2150 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
2154 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
2155 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
2157 (clobber (match_scratch:SI 3 "=&r,&r"))]
2158 "TARGET_32BIT && arm_arch3m && !arm_arch6"
2159 "umull%?\\t%3, %0, %2, %1"
2160 [(set_attr "type" "umull")
2161 (set_attr "predicable" "yes")]
;; v6 unsigned high-part.
2164 (define_insn "*umulsi3_highpart_v6"
2165 [(set (match_operand:SI 0 "s_register_operand" "=r")
2169 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
2170 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
2172 (clobber (match_scratch:SI 3 "=r"))]
2173 "TARGET_32BIT && arm_arch6"
2174 "umull%?\\t%3, %0, %2, %1"
2175 [(set_attr "type" "umull")
2176 (set_attr "predicable" "yes")
2177 (set_attr "predicable_short_it" "no")]
;; 16x16 -> 32 signed multiplies (DSP extension).  The b/t suffix picks
;; the bottom or top halfword of each source: a top half appears in RTL
;; as (ashiftrt reg 16), a bottom half as sign_extend of an HI operand.
(define_insn "mulhisi3"
2181 [(set (match_operand:SI 0 "s_register_operand" "=r")
2182 (mult:SI (sign_extend:SI
2183 (match_operand:HI 1 "s_register_operand" "%r"))
2185 (match_operand:HI 2 "s_register_operand" "r"))))]
2186 "TARGET_DSP_MULTIPLY"
2187 "smulbb%?\\t%0, %1, %2"
2188 [(set_attr "type" "smulxy")
2189 (set_attr "predicable" "yes")]
;; top(op1) * bottom(op2).
2192 (define_insn "*mulhisi3tb"
2193 [(set (match_operand:SI 0 "s_register_operand" "=r")
2194 (mult:SI (ashiftrt:SI
2195 (match_operand:SI 1 "s_register_operand" "r")
2198 (match_operand:HI 2 "s_register_operand" "r"))))]
2199 "TARGET_DSP_MULTIPLY"
2200 "smultb%?\\t%0, %1, %2"
2201 [(set_attr "type" "smulxy")
2202 (set_attr "predicable" "yes")
2203 (set_attr "predicable_short_it" "no")]
;; bottom(op1) * top(op2).
2206 (define_insn "*mulhisi3bt"
2207 [(set (match_operand:SI 0 "s_register_operand" "=r")
2208 (mult:SI (sign_extend:SI
2209 (match_operand:HI 1 "s_register_operand" "r"))
2211 (match_operand:SI 2 "s_register_operand" "r")
2213 "TARGET_DSP_MULTIPLY"
2214 "smulbt%?\\t%0, %1, %2"
2215 [(set_attr "type" "smulxy")
2216 (set_attr "predicable" "yes")
2217 (set_attr "predicable_short_it" "no")]
;; top(op1) * top(op2).
2220 (define_insn "*mulhisi3tt"
2221 [(set (match_operand:SI 0 "s_register_operand" "=r")
2222 (mult:SI (ashiftrt:SI
2223 (match_operand:SI 1 "s_register_operand" "r")
2226 (match_operand:SI 2 "s_register_operand" "r")
2228 "TARGET_DSP_MULTIPLY"
2229 "smultt%?\\t%0, %1, %2"
2230 [(set_attr "type" "smulxy")
2231 (set_attr "predicable" "yes")
2232 (set_attr "predicable_short_it" "no")]
2235 (define_insn "maddhisi4"
2236 [(set (match_operand:SI 0 "s_register_operand" "=r")
2237 (plus:SI (mult:SI (sign_extend:SI
2238 (match_operand:HI 1 "s_register_operand" "r"))
2240 (match_operand:HI 2 "s_register_operand" "r")))
2241 (match_operand:SI 3 "s_register_operand" "r")))]
2242 "TARGET_DSP_MULTIPLY"
2243 "smlabb%?\\t%0, %1, %2, %3"
2244 [(set_attr "type" "smlaxy")
2245 (set_attr "predicable" "yes")
2246 (set_attr "predicable_short_it" "no")]
2249 ;; Note: there is no maddhisi4ibt because this one is canonical form
;; SMLATB: multiply-accumulate using the top halfword of operand 1 and the
;; bottom halfword of operand 2, plus operand 3.
2250 (define_insn "*maddhisi4tb"
2251 [(set (match_operand:SI 0 "s_register_operand" "=r")
2252 (plus:SI (mult:SI (ashiftrt:SI
2253 (match_operand:SI 1 "s_register_operand" "r")
2256 (match_operand:HI 2 "s_register_operand" "r")))
2257 (match_operand:SI 3 "s_register_operand" "r")))]
2258 "TARGET_DSP_MULTIPLY"
2259 "smlatb%?\\t%0, %1, %2, %3"
2260 [(set_attr "type" "smlaxy")
2261 (set_attr "predicable" "yes")
2262 (set_attr "predicable_short_it" "no")]
;; SMLATT: multiply-accumulate using the top halfwords of operands 1 and 2,
;; plus operand 3.
2265 (define_insn "*maddhisi4tt"
2266 [(set (match_operand:SI 0 "s_register_operand" "=r")
2267 (plus:SI (mult:SI (ashiftrt:SI
2268 (match_operand:SI 1 "s_register_operand" "r")
2271 (match_operand:SI 2 "s_register_operand" "r")
2273 (match_operand:SI 3 "s_register_operand" "r")))]
2274 "TARGET_DSP_MULTIPLY"
2275 "smlatt%?\\t%0, %1, %2, %3"
2276 [(set_attr "type" "smlaxy")
2277 (set_attr "predicable" "yes")
2278 (set_attr "predicable_short_it" "no")]
;; SMLALBB: 16x16->32 multiply accumulated into a 64-bit value.  Operand 3 is
;; tied to the output ("0" constraint) since SMLAL accumulates in place;
;; %Q0/%R0 print the low/high words of the DI register pair.
2281 (define_insn "maddhidi4"
2282 [(set (match_operand:DI 0 "s_register_operand" "=r")
2284 (mult:DI (sign_extend:DI
2285 (match_operand:HI 1 "s_register_operand" "r"))
2287 (match_operand:HI 2 "s_register_operand" "r")))
2288 (match_operand:DI 3 "s_register_operand" "0")))]
2289 "TARGET_DSP_MULTIPLY"
2290 "smlalbb%?\\t%Q0, %R0, %1, %2"
2291 [(set_attr "type" "smlalxy")
2292 (set_attr "predicable" "yes")
2293 (set_attr "predicable_short_it" "no")])
2295 ;; Note: there is no maddhidi4ibt because this one is canonical form
;; SMLALTB: top halfword of op1 * bottom halfword of op2, accumulated into
;; the 64-bit operand 3 (tied to the output).
2296 (define_insn "*maddhidi4tb"
2297 [(set (match_operand:DI 0 "s_register_operand" "=r")
2299 (mult:DI (sign_extend:DI
2301 (match_operand:SI 1 "s_register_operand" "r")
2304 (match_operand:HI 2 "s_register_operand" "r")))
2305 (match_operand:DI 3 "s_register_operand" "0")))]
2306 "TARGET_DSP_MULTIPLY"
2307 "smlaltb%?\\t%Q0, %R0, %1, %2"
2308 [(set_attr "type" "smlalxy")
2309 (set_attr "predicable" "yes")
2310 (set_attr "predicable_short_it" "no")])
;; SMLALTT: top halfwords of both operands multiplied and accumulated into
;; the 64-bit operand 3 (tied to the output).
2312 (define_insn "*maddhidi4tt"
2313 [(set (match_operand:DI 0 "s_register_operand" "=r")
2315 (mult:DI (sign_extend:DI
2317 (match_operand:SI 1 "s_register_operand" "r")
2321 (match_operand:SI 2 "s_register_operand" "r")
2323 (match_operand:DI 3 "s_register_operand" "0")))]
2324 "TARGET_DSP_MULTIPLY"
2325 "smlaltt%?\\t%Q0, %R0, %1, %2"
2326 [(set_attr "type" "smlalxy")
2327 (set_attr "predicable" "yes")
2328 (set_attr "predicable_short_it" "no")])
;; Single-precision FP multiply; requires hard-float.  The actual insn is
;; matched by the VFP/FPA patterns elsewhere.
2330 (define_expand "mulsf3"
2331 [(set (match_operand:SF 0 "s_register_operand" "")
2332 (mult:SF (match_operand:SF 1 "s_register_operand" "")
2333 (match_operand:SF 2 "s_register_operand" "")))]
2334 "TARGET_32BIT && TARGET_HARD_FLOAT"
;; Double-precision FP multiply; excluded on single-precision-only VFP units.
2338 (define_expand "muldf3"
2339 [(set (match_operand:DF 0 "s_register_operand" "")
2340 (mult:DF (match_operand:DF 1 "s_register_operand" "")
2341 (match_operand:DF 2 "s_register_operand" "")))]
2342 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
;; Single-precision FP divide -- only VFP provides a hardware divide.
2348 (define_expand "divsf3"
2349 [(set (match_operand:SF 0 "s_register_operand" "")
2350 (div:SF (match_operand:SF 1 "s_register_operand" "")
2351 (match_operand:SF 2 "s_register_operand" "")))]
2352 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP"
;; Double-precision FP divide -- requires a double-precision-capable VFP.
2355 (define_expand "divdf3"
2356 [(set (match_operand:DF 0 "s_register_operand" "")
2357 (div:DF (match_operand:DF 1 "s_register_operand" "")
2358 (match_operand:DF 2 "s_register_operand" "")))]
2359 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
2362 ;; Boolean and,ior,xor insns
2364 ;; Split up double word logical operations
2366 ;; Split up simple DImode logical operations. Simply perform the logical
2367 ;; operation on the upper and lower halves of the registers.
;; Split a DImode and/ior/xor into two SImode operations on the low and high
;; register halves.  Skipped when the value lives in a NEON or iWMMXt
;; register, where a native 64-bit op is available.
;; NOTE(review): the "(define_split" opener line (2368) is missing from this
;; excerpt -- verify against upstream arm.md.
2369 [(set (match_operand:DI 0 "s_register_operand" "")
2370 (match_operator:DI 6 "logical_binary_operator"
2371 [(match_operand:DI 1 "s_register_operand" "")
2372 (match_operand:DI 2 "s_register_operand" "")]))]
2373 "TARGET_32BIT && reload_completed
2374 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
2375 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2376 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
2377 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
2380 operands[3] = gen_highpart (SImode, operands[0]);
2381 operands[0] = gen_lowpart (SImode, operands[0]);
2382 operands[4] = gen_highpart (SImode, operands[1]);
2383 operands[1] = gen_lowpart (SImode, operands[1]);
2384 operands[5] = gen_highpart (SImode, operands[2]);
2385 operands[2] = gen_lowpart (SImode, operands[2]);
;; Split a DImode logical op whose second input is a sign-extended SImode
;; value: the low half uses the value directly, the high half uses its sign
;; bits (arithmetic shift right by 31).
;; NOTE(review): the "(define_split" opener (2389) is missing from this excerpt.
2390 [(set (match_operand:DI 0 "s_register_operand" "")
2391 (match_operator:DI 6 "logical_binary_operator"
2392 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2393 (match_operand:DI 1 "s_register_operand" "")]))]
2394 "TARGET_32BIT && reload_completed"
2395 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
2396 (set (match_dup 3) (match_op_dup:SI 6
2397 [(ashiftrt:SI (match_dup 2) (const_int 31))
2401 operands[3] = gen_highpart (SImode, operands[0]);
2402 operands[0] = gen_lowpart (SImode, operands[0]);
2403 operands[4] = gen_highpart (SImode, operands[1]);
2404 operands[1] = gen_lowpart (SImode, operands[1]);
2405 operands[5] = gen_highpart (SImode, operands[2]);
2406 operands[2] = gen_lowpart (SImode, operands[2]);
2410 ;; The zero extend of operand 2 means we can just copy the high part of
2411 ;; operand1 into operand0.
;; Split IOR of a zero-extended SI into a DI: low half is an SImode IOR, and
;; since the extended high half is zero, the high word is a plain copy of
;; operand 1's high word.
;; NOTE(review): the "(define_split" opener (2412) is missing from this excerpt.
2413 [(set (match_operand:DI 0 "s_register_operand" "")
2415 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2416 (match_operand:DI 1 "s_register_operand" "")))]
2417 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
2418 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
2419 (set (match_dup 3) (match_dup 4))]
2422 operands[4] = gen_highpart (SImode, operands[1]);
2423 operands[3] = gen_highpart (SImode, operands[0]);
2424 operands[0] = gen_lowpart (SImode, operands[0]);
2425 operands[1] = gen_lowpart (SImode, operands[1]);
2429 ;; The zero extend of operand 2 means we can just copy the high part of
2430 ;; operand1 into operand0.
;; Same idea for XOR with a zero-extended SI: XOR only the low words, copy
;; operand 1's high word (x ^ 0 == x).
;; NOTE(review): the "(define_split" opener (2431) is missing from this excerpt.
2432 [(set (match_operand:DI 0 "s_register_operand" "")
2434 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2435 (match_operand:DI 1 "s_register_operand" "")))]
2436 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
2437 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
2438 (set (match_dup 3) (match_dup 4))]
2441 operands[4] = gen_highpart (SImode, operands[1]);
2442 operands[3] = gen_highpart (SImode, operands[0]);
2443 operands[0] = gen_lowpart (SImode, operands[0]);
2444 operands[1] = gen_lowpart (SImode, operands[1]);
;; 64-bit AND expander.  Operand 2 may be a NEON invertible-logic immediate
;; (so VBIC can be used) as well as a register.
2448 (define_expand "anddi3"
2449 [(set (match_operand:DI 0 "s_register_operand" "")
2450 (and:DI (match_operand:DI 1 "s_register_operand" "")
2451 (match_operand:DI 2 "neon_inv_logic_op2" "")))]
;; 64-bit AND: NEON VAND alternatives (0,1,6,7) emit a single vector insn;
;; core-register alternatives are split after reload into two SImode ANDs on
;; the low and high halves.
;; NOTE(review): several lines (e.g. cases 2-4 of the switch, 2469-2476) are
;; missing from this excerpt -- verify against upstream arm.md.
2456 (define_insn_and_split "*anddi3_insn"
2457 [(set (match_operand:DI 0 "s_register_operand" "=w,w ,&r,&r,&r,&r,?w,?w")
2458 (and:DI (match_operand:DI 1 "s_register_operand" "%w,0 ,0 ,r ,0 ,r ,w ,0")
2459 (match_operand:DI 2 "arm_anddi_operand_neon" "w ,DL,r ,r ,De,De,w ,DL")))]
2460 "TARGET_32BIT && !TARGET_IWMMXT"
2462 switch (which_alternative)
2464 case 0: /* fall through */
2465 case 6: return "vand\t%P0, %P1, %P2";
2466 case 1: /* fall through */
2467 case 7: return neon_output_logic_immediate ("vand", &operands[2],
2468 DImode, 1, VALID_NEON_QREG_MODE (DImode));
2472 case 5: /* fall through */
2474 default: gcc_unreachable ();
2477 "TARGET_32BIT && !TARGET_IWMMXT && reload_completed
2478 && !(IS_VFP_REGNUM (REGNO (operands[0])))"
2479 [(set (match_dup 3) (match_dup 4))
2480 (set (match_dup 5) (match_dup 6))]
2483 operands[3] = gen_lowpart (SImode, operands[0]);
2484 operands[5] = gen_highpart (SImode, operands[0]);
2486 operands[4] = simplify_gen_binary (AND, SImode,
2487 gen_lowpart (SImode, operands[1]),
2488 gen_lowpart (SImode, operands[2]));
2489 operands[6] = simplify_gen_binary (AND, SImode,
2490 gen_highpart (SImode, operands[1]),
2491 gen_highpart_mode (SImode, DImode, operands[2]));
2494 [(set_attr "neon_type" "neon_int_1,neon_int_1,*,*,*,*,neon_int_1,neon_int_1")
2495 (set_attr "arch" "neon_for_64bits,neon_for_64bits,*,*,*,*,
2496 avoid_neon_for_64bits,avoid_neon_for_64bits")
2497 (set_attr "length" "*,*,8,8,8,8,*,*")
;; AND of a DI with a zero-extended SI.  Split after reload: low half is an
;; SImode AND, high half is simply zeroed (anything AND 0 == 0).
2501 (define_insn_and_split "*anddi_zesidi_di"
2502 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2503 (and:DI (zero_extend:DI
2504 (match_operand:SI 2 "s_register_operand" "r,r"))
2505 (match_operand:DI 1 "s_register_operand" "0,r")))]
2508 "TARGET_32BIT && reload_completed"
2509 ; The zero extend of operand 2 clears the high word of the output
2511 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
2512 (set (match_dup 3) (const_int 0))]
2515 operands[3] = gen_highpart (SImode, operands[0]);
2516 operands[0] = gen_lowpart (SImode, operands[0]);
2517 operands[1] = gen_lowpart (SImode, operands[1]);
2519 [(set_attr "length" "8")]
;; AND of a DI with a sign-extended SI (two-insn sequence, length 8).
;; NOTE(review): the output template and condition lines (2527-2528) are
;; missing from this excerpt.
2522 (define_insn "*anddi_sesdi_di"
2523 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2524 (and:DI (sign_extend:DI
2525 (match_operand:SI 2 "s_register_operand" "r,r"))
2526 (match_operand:DI 1 "s_register_operand" "0,r")))]
2529 [(set_attr "length" "8")]
;; 32-bit AND expander.  For 32-bit targets, constant masks are handled by
;; arm_split_constant (or a zero-extend for 0xff); for Thumb-1, masks of the
;; forms ~x (use BIC), (1<<n)-1 (use extzv), and ~((1<<n)-1) (shift down then
;; up) are special-cased before falling back to a register AND.
;; NOTE(review): this excerpt has gaps (e.g. 2536-2539, 2553-2556) -- verify
;; the full control flow against upstream arm.md.
2532 (define_expand "andsi3"
2533 [(set (match_operand:SI 0 "s_register_operand" "")
2534 (and:SI (match_operand:SI 1 "s_register_operand" "")
2535 (match_operand:SI 2 "reg_or_int_operand" "")))]
2540 if (CONST_INT_P (operands[2]))
2542 if (INTVAL (operands[2]) == 255 && arm_arch6)
2544 operands[1] = convert_to_mode (QImode, operands[1], 1);
2545 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2549 arm_split_constant (AND, SImode, NULL_RTX,
2550 INTVAL (operands[2]), operands[0],
2552 optimize && can_create_pseudo_p ());
2557 else /* TARGET_THUMB1 */
2559 if (!CONST_INT_P (operands[2]))
2561 rtx tmp = force_reg (SImode, operands[2]);
2562 if (rtx_equal_p (operands[0], operands[1]))
2566 operands[2] = operands[1];
2574 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2576 operands[2] = force_reg (SImode,
2577 GEN_INT (~INTVAL (operands[2])));
2579 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2584 for (i = 9; i <= 31; i++)
2586 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
2588 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2592 else if ((((HOST_WIDE_INT) 1) << i) - 1
2593 == ~INTVAL (operands[2]))
2595 rtx shift = GEN_INT (i);
2596 rtx reg = gen_reg_rtx (SImode);
2598 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2599 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2605 operands[2] = force_reg (SImode, operands[2]);
2611 ; ??? Check split length for Thumb-2
;; 32-bit AND insn: AND with an encodable immediate, Thumb short AND, BIC with
;; the complement of the immediate (%B2), register AND, or -- for an arbitrary
;; constant -- a post-reload split via arm_split_constant (length 16).
;; NOTE(review): some template/condition lines are missing from this excerpt.
2612 (define_insn_and_split "*arm_andsi3_insn"
2613 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
2614 (and:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
2615 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
2620 bic%?\\t%0, %1, #%B2
2624 && CONST_INT_P (operands[2])
2625 && !(const_ok_for_arm (INTVAL (operands[2]))
2626 || const_ok_for_arm (~INTVAL (operands[2])))"
2627 [(clobber (const_int 0))]
2629 arm_split_constant (AND, SImode, curr_insn,
2630 INTVAL (operands[2]), operands[0], operands[1], 0);
2633 [(set_attr "length" "4,4,4,4,16")
2634 (set_attr "predicable" "yes")
2635 (set_attr "predicable_short_it" "no,yes,no,no,no")
2637 "arlo_imm,arlo_imm,*,*,arlo_imm")]
;; Thumb-1 two-operand AND: destination is tied to operand 1, flags are set.
2640 (define_insn "*thumb1_andsi3_insn"
2641 [(set (match_operand:SI 0 "register_operand" "=l")
2642 (and:SI (match_operand:SI 1 "register_operand" "%0")
2643 (match_operand:SI 2 "register_operand" "l")))]
2646 [(set_attr "length" "2")
2647 (set_attr "type" "arlo_imm")
2648 (set_attr "conds" "set")])
;; ANDS/BICS: AND that also sets the condition codes (CC_NOOV -- overflow is
;; not meaningful for logical ops) while producing the result.
2650 (define_insn "*andsi3_compare0"
2651 [(set (reg:CC_NOOV CC_REGNUM)
2653 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
2654 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
2656 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2657 (and:SI (match_dup 1) (match_dup 2)))]
2661 bic%.\\t%0, %1, #%B2
2663 [(set_attr "conds" "set")
2664 (set_attr "type" "arlo_imm,arlo_imm,*")]
;; Flag-setting AND where only the condition codes are wanted (TST, or BICS
;; into a scratch for the K-constraint alternative).
2667 (define_insn "*andsi3_compare0_scratch"
2668 [(set (reg:CC_NOOV CC_REGNUM)
2670 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
2671 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
2673 (clobber (match_scratch:SI 2 "=X,r,X"))]
2677 bic%.\\t%2, %0, #%B1
2679 [(set_attr "conds" "set")
2680 (set_attr "type" "arlo_imm,arlo_imm,*")]
;; Compare a zero_extract bitfield against zero using TST with the field mask
;; ((1<<width)-1) << start.  The condition restricts the field so the mask is
;; representable as an ARM immediate.
2683 (define_insn "*zeroextractsi_compare0_scratch"
2684 [(set (reg:CC_NOOV CC_REGNUM)
2685 (compare:CC_NOOV (zero_extract:SI
2686 (match_operand:SI 0 "s_register_operand" "r")
2687 (match_operand 1 "const_int_operand" "n")
2688 (match_operand 2 "const_int_operand" "n"))
2691 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2692 && INTVAL (operands[1]) > 0
2693 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2694 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2696 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2697 << INTVAL (operands[2]));
2698 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2701 [(set_attr "conds" "set")
2702 (set_attr "predicable" "yes")
2703 (set_attr "predicable_short_it" "no")
2704 (set_attr "type" "arlo_imm")]
;; (field != 0) as an SI value: split into ANDS with the field mask followed
;; by a conditional move of 1 when the Z flag is clear.  Clobbers CC.
2707 (define_insn_and_split "*ne_zeroextractsi"
2708 [(set (match_operand:SI 0 "s_register_operand" "=r")
2709 (ne:SI (zero_extract:SI
2710 (match_operand:SI 1 "s_register_operand" "r")
2711 (match_operand:SI 2 "const_int_operand" "n")
2712 (match_operand:SI 3 "const_int_operand" "n"))
2714 (clobber (reg:CC CC_REGNUM))]
2716 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2717 && INTVAL (operands[2]) > 0
2718 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2719 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2722 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2723 && INTVAL (operands[2]) > 0
2724 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2725 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2726 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2727 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2729 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2731 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2732 (match_dup 0) (const_int 1)))]
2734 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2735 << INTVAL (operands[3]));
2737 [(set_attr "conds" "clob")
2738 (set (attr "length")
2739 (if_then_else (eq_attr "is_thumb" "yes")
;; (field starting at bit 0 != 0): implemented by shifting the field into the
;; top bits with a flag-setting shift, then conditionally moving 1 if nonzero.
2744 (define_insn_and_split "*ne_zeroextractsi_shifted"
2745 [(set (match_operand:SI 0 "s_register_operand" "=r")
2746 (ne:SI (zero_extract:SI
2747 (match_operand:SI 1 "s_register_operand" "r")
2748 (match_operand:SI 2 "const_int_operand" "n")
2751 (clobber (reg:CC CC_REGNUM))]
2755 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2756 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2758 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2760 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2761 (match_dup 0) (const_int 1)))]
2763 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2765 [(set_attr "conds" "clob")
2766 (set_attr "length" "8")]
;; if-then-else on (field != 0): ANDS with the field mask, then conditionally
;; select operand 4 when the field is zero.  Operand 0 must not overlap
;; operand 4 because the split writes operand 0 before reading operand 4.
2769 (define_insn_and_split "*ite_ne_zeroextractsi"
2770 [(set (match_operand:SI 0 "s_register_operand" "=r")
2771 (if_then_else:SI (ne (zero_extract:SI
2772 (match_operand:SI 1 "s_register_operand" "r")
2773 (match_operand:SI 2 "const_int_operand" "n")
2774 (match_operand:SI 3 "const_int_operand" "n"))
2776 (match_operand:SI 4 "arm_not_operand" "rIK")
2778 (clobber (reg:CC CC_REGNUM))]
2780 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2781 && INTVAL (operands[2]) > 0
2782 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2783 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2784 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2787 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2788 && INTVAL (operands[2]) > 0
2789 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2790 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2791 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2792 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2793 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2795 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2797 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2798 (match_dup 0) (match_dup 4)))]
2800 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2801 << INTVAL (operands[3]));
2803 [(set_attr "conds" "clob")
2804 (set_attr "length" "8")]
;; if-then-else on (bottom field != 0): flag-setting left shift to test the
;; field, then conditional select of operand 3 on zero.  Same no-overlap
;; requirement between operands 0 and 3.
2807 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2808 [(set (match_operand:SI 0 "s_register_operand" "=r")
2809 (if_then_else:SI (ne (zero_extract:SI
2810 (match_operand:SI 1 "s_register_operand" "r")
2811 (match_operand:SI 2 "const_int_operand" "n")
2814 (match_operand:SI 3 "arm_not_operand" "rIK")
2816 (clobber (reg:CC CC_REGNUM))]
2817 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2819 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2820 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2821 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2823 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2825 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2826 (match_dup 0) (match_dup 3)))]
2828 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2830 [(set_attr "conds" "clob")
2831 (set_attr "length" "8")]
;; Split zero_extract into shift-left then logical-shift-right: left shift by
;; (32 - width - start) moves the field to the top, right shift by
;; (32 - width) brings it to the bottom zero-extended.
;; NOTE(review): the "(define_split" opener (2834) is missing from this excerpt.
2835 [(set (match_operand:SI 0 "s_register_operand" "")
2836 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2837 (match_operand:SI 2 "const_int_operand" "")
2838 (match_operand:SI 3 "const_int_operand" "")))
2839 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2841 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2842 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2844 HOST_WIDE_INT temp = INTVAL (operands[2]);
2846 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2847 operands[3] = GEN_INT (32 - temp);
2851 ;; ??? Use Thumb-2 bitfield insert/extract instructions.
;; Split (op (zero_extract ...) x) into a shift-left into a scratch, then the
;; operator applied to the logical-shift-right of the scratch -- the second
;; shift folds into the ALU operand shifter.
;; NOTE(review): "(define_split" opener (2852) and some lines are missing here.
2853 [(set (match_operand:SI 0 "s_register_operand" "")
2854 (match_operator:SI 1 "shiftable_operator"
2855 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2856 (match_operand:SI 3 "const_int_operand" "")
2857 (match_operand:SI 4 "const_int_operand" ""))
2858 (match_operand:SI 5 "s_register_operand" "")]))
2859 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2861 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2864 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2867 HOST_WIDE_INT temp = INTVAL (operands[3]);
2869 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2870 operands[4] = GEN_INT (32 - temp);
;; Split sign_extract into shift-left then arithmetic-shift-right (sign
;; extension of the extracted field).  No scratch: operand 0 is reused.
;; NOTE(review): the "(define_split" opener (2874) is missing from this excerpt.
2875 [(set (match_operand:SI 0 "s_register_operand" "")
2876 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2877 (match_operand:SI 2 "const_int_operand" "")
2878 (match_operand:SI 3 "const_int_operand" "")))]
2880 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2881 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2883 HOST_WIDE_INT temp = INTVAL (operands[2]);
2885 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2886 operands[3] = GEN_INT (32 - temp);
;; Split (op (sign_extract ...) x): shift-left into a scratch, then apply the
;; operator to the arithmetic-shift-right of the scratch.
;; NOTE(review): "(define_split" opener (2890) and some lines are missing here.
2891 [(set (match_operand:SI 0 "s_register_operand" "")
2892 (match_operator:SI 1 "shiftable_operator"
2893 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2894 (match_operand:SI 3 "const_int_operand" "")
2895 (match_operand:SI 4 "const_int_operand" ""))
2896 (match_operand:SI 5 "s_register_operand" "")]))
2897 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2899 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2902 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2905 HOST_WIDE_INT temp = INTVAL (operands[3]);
2907 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2908 operands[4] = GEN_INT (32 - temp);
2912 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2913 ;;; represented by the bitfield, then this will produce incorrect results.
2914 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2915 ;;; which have a real bit-field insert instruction, the truncation happens
2916 ;;; in the bit-field insert instruction itself. Since arm does not have a
2917 ;;; bit-field insert instruction, we would have to emit code here to truncate
2918 ;;; the value before we insert. This loses some of the advantage of having
2919 ;;; this insv pattern, so this pattern needs to be reevaluated.
;; Bit-field insert expander.  Strategy, in order of preference:
;;   1. Thumb-2 + unaligned access + MEM destination + 16/32-bit byte-aligned
;;      field: use an unaligned halfword/word store.
;;   2. Thumb-2 register destination: BFC for a zero constant, ORR when the
;;      field is all-ones and encodable, otherwise BFI.
;;   3. Generic ARM fallback: mask-and-or sequences, with shift/rotate tricks
;;      when the field sits at bit 0 or ends at bit 31 and the mask is not an
;;      encodable immediate.
;; See the ??? comment above this pattern: operand 3 is not truncated to the
;; field width, so out-of-range constants can give wrong results.
;; NOTE(review): this excerpt is missing many interior lines (braces, else
;; arms, e.g. 2927-2928, 2933, 2939-2941, ...) -- verify against upstream.
2921 (define_expand "insv"
2922 [(set (zero_extract (match_operand 0 "nonimmediate_operand" "")
2923 (match_operand 1 "general_operand" "")
2924 (match_operand 2 "general_operand" ""))
2925 (match_operand 3 "reg_or_int_operand" ""))]
2926 "TARGET_ARM || arm_arch_thumb2"
2929 int start_bit = INTVAL (operands[2]);
2930 int width = INTVAL (operands[1]);
2931 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2932 rtx target, subtarget;
2934 if (arm_arch_thumb2)
2936 if (unaligned_access && MEM_P (operands[0])
2937 && s_register_operand (operands[3], GET_MODE (operands[3]))
2938 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
2942 if (BYTES_BIG_ENDIAN)
2943 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
2948 base_addr = adjust_address (operands[0], SImode,
2949 start_bit / BITS_PER_UNIT);
2950 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
2954 rtx tmp = gen_reg_rtx (HImode);
2956 base_addr = adjust_address (operands[0], HImode,
2957 start_bit / BITS_PER_UNIT);
2958 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
2959 emit_insn (gen_unaligned_storehi (base_addr, tmp));
2963 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
2965 bool use_bfi = TRUE;
2967 if (CONST_INT_P (operands[3]))
2969 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2973 emit_insn (gen_insv_zero (operands[0], operands[1],
2978 /* See if the set can be done with a single orr instruction. */
2979 if (val == mask && const_ok_for_arm (val << start_bit))
2985 if (!REG_P (operands[3]))
2986 operands[3] = force_reg (SImode, operands[3]);
2988 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2997 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
3000 target = copy_rtx (operands[0]);
3001 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
3002 subreg as the final target. */
3003 if (GET_CODE (target) == SUBREG)
3005 subtarget = gen_reg_rtx (SImode);
3006 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
3007 < GET_MODE_SIZE (SImode))
3008 target = SUBREG_REG (target);
3013 if (CONST_INT_P (operands[3]))
3015 /* Since we are inserting a known constant, we may be able to
3016 reduce the number of bits that we have to clear so that
3017 the mask becomes simple. */
3018 /* ??? This code does not check to see if the new mask is actually
3019 simpler. It may not be. */
3020 rtx op1 = gen_reg_rtx (SImode);
3021 /* ??? Truncate operand3 to fit in the bitfield. See comment before
3022 start of this pattern. */
3023 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
3024 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
3026 emit_insn (gen_andsi3 (op1, operands[0],
3027 gen_int_mode (~mask2, SImode)));
3028 emit_insn (gen_iorsi3 (subtarget, op1,
3029 gen_int_mode (op3_value << start_bit, SImode)));
3031 else if (start_bit == 0
3032 && !(const_ok_for_arm (mask)
3033 || const_ok_for_arm (~mask)))
3035 /* A Trick, since we are setting the bottom bits in the word,
3036 we can shift operand[3] up, operand[0] down, OR them together
3037 and rotate the result back again. This takes 3 insns, and
3038 the third might be mergeable into another op. */
3039 /* The shift up copes with the possibility that operand[3] is
3040 wider than the bitfield. */
3041 rtx op0 = gen_reg_rtx (SImode);
3042 rtx op1 = gen_reg_rtx (SImode);
3044 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
3045 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
3046 emit_insn (gen_iorsi3 (op1, op1, op0));
3047 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
3049 else if ((width + start_bit == 32)
3050 && !(const_ok_for_arm (mask)
3051 || const_ok_for_arm (~mask)))
3053 /* Similar trick, but slightly less efficient. */
3055 rtx op0 = gen_reg_rtx (SImode);
3056 rtx op1 = gen_reg_rtx (SImode);
3058 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
3059 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
3060 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
3061 emit_insn (gen_iorsi3 (subtarget, op1, op0));
3065 rtx op0 = gen_int_mode (mask, SImode);
3066 rtx op1 = gen_reg_rtx (SImode);
3067 rtx op2 = gen_reg_rtx (SImode);
3069 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
3071 rtx tmp = gen_reg_rtx (SImode);
3073 emit_insn (gen_movsi (tmp, op0));
3077 /* Mask out any bits in operand[3] that are not needed. */
3078 emit_insn (gen_andsi3 (op1, operands[3], op0))<
3080 if (CONST_INT_P (op0)
3081 && (const_ok_for_arm (mask << start_bit)
3082 || const_ok_for_arm (~(mask << start_bit))))
3084 op0 = gen_int_mode (~(mask << start_bit), SImode);
3085 emit_insn (gen_andsi3 (op2, operands[0], op0));
3089 if (CONST_INT_P (op0))
3091 rtx tmp = gen_reg_rtx (SImode);
3093 emit_insn (gen_movsi (tmp, op0));
3098 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
3100 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
3104 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
3106 emit_insn (gen_iorsi3 (subtarget, op1, op2));
3109 if (subtarget != target)
3111 /* If TARGET is still a SUBREG, then it must be wider than a word,
3112 so we must be careful only to set the subword we were asked to. */
3113 if (GET_CODE (target) == SUBREG)
3114 emit_move_insn (target, subtarget);
3116 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
;; Clear a bitfield to zero (BFC).  Destination is read-modify-write ("+r");
;; "M" constrains the width/start to valid immediate ranges.
3123 (define_insn "insv_zero"
3124 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3125 (match_operand:SI 1 "const_int_operand" "M")
3126 (match_operand:SI 2 "const_int_operand" "M"))
3130 [(set_attr "length" "4")
3131 (set_attr "predicable" "yes")
3132 (set_attr "predicable_short_it" "no")]
;; Thumb-2 bitfield insert (BFI): copy the low bits of operand 3 into the
;; field [start %2, width %1] of operand 0 (read-modify-write).
3135 (define_insn "insv_t2"
3136 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3137 (match_operand:SI 1 "const_int_operand" "M")
3138 (match_operand:SI 2 "const_int_operand" "M"))
3139 (match_operand:SI 3 "s_register_operand" "r"))]
3141 "bfi%?\t%0, %3, %2, %1"
3142 [(set_attr "length" "4")
3143 (set_attr "predicable" "yes")
3144 (set_attr "predicable_short_it" "no")]
3147 ; constants for op 2 will never be given to these patterns.
;; 64-bit AND-NOT (two BICs after splitting): split into SImode
;; (and (not x) y) on the low and high halves, except when the value is in a
;; NEON or iWMMXt register.
3148 (define_insn_and_split "*anddi_notdi_di"
3149 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3150 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "0,r"))
3151 (match_operand:DI 2 "s_register_operand" "r,0")))]
3154 "TARGET_32BIT && reload_completed
3155 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
3156 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
3157 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
3158 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
3161 operands[3] = gen_highpart (SImode, operands[0]);
3162 operands[0] = gen_lowpart (SImode, operands[0]);
3163 operands[4] = gen_highpart (SImode, operands[1]);
3164 operands[1] = gen_lowpart (SImode, operands[1]);
3165 operands[5] = gen_highpart (SImode, operands[2]);
3166 operands[2] = gen_lowpart (SImode, operands[2]);
3168 [(set_attr "length" "8")
3169 (set_attr "predicable" "yes")]
;; AND of a DI with the NOT of a zero-extended SI.  (not (zero_extend x)) has
;; all-ones in the high word, so the high word of the result is just operand
;; 1's high word: low half is BIC, high half is a move.
3172 (define_insn_and_split "*anddi_notzesidi_di"
3173 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3174 (and:DI (not:DI (zero_extend:DI
3175 (match_operand:SI 2 "s_register_operand" "r,r")))
3176 (match_operand:DI 1 "s_register_operand" "0,?r")))]
3179 bic%?\\t%Q0, %Q1, %2
3181 ; (not (zero_extend ...)) allows us to just copy the high word from
3182 ; operand1 to operand0.
3185 && operands[0] != operands[1]"
3186 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
3187 (set (match_dup 3) (match_dup 4))]
3190 operands[3] = gen_highpart (SImode, operands[0]);
3191 operands[0] = gen_lowpart (SImode, operands[0]);
3192 operands[4] = gen_highpart (SImode, operands[1]);
3193 operands[1] = gen_lowpart (SImode, operands[1]);
3195 [(set_attr "length" "4,8")
3196 (set_attr "predicable" "yes")
3197 (set_attr "predicable_short_it" "no")]
;; AND of a DI with the NOT of a sign-extended SI: low half is BIC with the
;; value, high half is BIC with its sign bits (ashiftrt by 31).
3200 (define_insn_and_split "*anddi_notsesidi_di"
3201 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3202 (and:DI (not:DI (sign_extend:DI
3203 (match_operand:SI 2 "s_register_operand" "r,r")))
3204 (match_operand:DI 1 "s_register_operand" "0,r")))]
3207 "TARGET_32BIT && reload_completed"
3208 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
3209 (set (match_dup 3) (and:SI (not:SI
3210 (ashiftrt:SI (match_dup 2) (const_int 31)))
3214 operands[3] = gen_highpart (SImode, operands[0]);
3215 operands[0] = gen_lowpart (SImode, operands[0]);
3216 operands[4] = gen_highpart (SImode, operands[1]);
3217 operands[1] = gen_lowpart (SImode, operands[1]);
3219 [(set_attr "length" "8")
3220 (set_attr "predicable" "yes")
3221 (set_attr "predicable_short_it" "no")]
;; BIC: operand 0 = operand 1 AND NOT operand 2.  Named so the insv expander
;; above can generate it directly.
3224 (define_insn "andsi_notsi_si"
3225 [(set (match_operand:SI 0 "s_register_operand" "=r")
3226 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3227 (match_operand:SI 1 "s_register_operand" "r")))]
3229 "bic%?\\t%0, %1, %2"
3230 [(set_attr "predicable" "yes")
3231 (set_attr "predicable_short_it" "no")]
;; Thumb-1 BIC: destination tied to operand 2 (the value being masked);
;; sets the condition codes.
3234 (define_insn "thumb1_bicsi3"
3235 [(set (match_operand:SI 0 "register_operand" "=l")
3236 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
3237 (match_operand:SI 2 "register_operand" "0")))]
3240 [(set_attr "length" "2")
3241 (set_attr "conds" "set")])
;; BIC with a shifted second operand (%S4 prints the shift): type depends on
;; whether the shift amount is an immediate or a register.
3243 (define_insn "andsi_not_shiftsi_si"
3244 [(set (match_operand:SI 0 "s_register_operand" "=r")
3245 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
3246 [(match_operand:SI 2 "s_register_operand" "r")
3247 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
3248 (match_operand:SI 1 "s_register_operand" "r")))]
3250 "bic%?\\t%0, %1, %2%S4"
3251 [(set_attr "predicable" "yes")
3252 (set_attr "shift" "2")
3253 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
3254 (const_string "arlo_shift")
3255 (const_string "arlo_shift_reg")))]
;; BICS: AND-NOT that also sets the condition codes, keeping the result.
3258 (define_insn "*andsi_notsi_si_compare0"
3259 [(set (reg:CC_NOOV CC_REGNUM)
3261 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3262 (match_operand:SI 1 "s_register_operand" "r"))
3264 (set (match_operand:SI 0 "s_register_operand" "=r")
3265 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
3267 "bic%.\\t%0, %1, %2"
3268 [(set_attr "conds" "set")]
;; BICS into a scratch when only the flags are needed.
3271 (define_insn "*andsi_notsi_si_compare0_scratch"
3272 [(set (reg:CC_NOOV CC_REGNUM)
3274 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3275 (match_operand:SI 1 "s_register_operand" "r"))
3277 (clobber (match_scratch:SI 0 "=r"))]
3279 "bic%.\\t%0, %1, %2"
3280 [(set_attr "conds" "set")]
;; 64-bit IOR expander.  Operand 2 may be a NEON logic-op immediate (VORR).
3283 (define_expand "iordi3"
3284 [(set (match_operand:DI 0 "s_register_operand" "")
3285 (ior:DI (match_operand:DI 1 "s_register_operand" "")
3286 (match_operand:DI 2 "neon_logic_op2" "")))]
;; 64-bit IOR: mirrors *anddi3_insn -- NEON VORR alternatives (0,1,6,7) emit
;; one vector insn; core-register alternatives split after reload into two
;; SImode IORs on the low/high halves.
;; NOTE(review): some switch cases (3304-3308) are missing from this excerpt.
3291 (define_insn_and_split "*iordi3_insn"
3292 [(set (match_operand:DI 0 "s_register_operand" "=w,w ,&r,&r,&r,&r,?w,?w")
3293 (ior:DI (match_operand:DI 1 "s_register_operand" "%w,0 ,0 ,r ,0 ,r ,w ,0")
3294 (match_operand:DI 2 "arm_iordi_operand_neon" "w ,Dl,r ,r ,Df,Df,w ,Dl")))]
3295 "TARGET_32BIT && !TARGET_IWMMXT"
3297 switch (which_alternative)
3299 case 0: /* fall through */
3300 case 6: return "vorr\t%P0, %P1, %P2";
3301 case 1: /* fall through */
3302 case 7: return neon_output_logic_immediate ("vorr", &operands[2],
3303 DImode, 0, VALID_NEON_QREG_MODE (DImode));
3309 default: gcc_unreachable ();
3312 "TARGET_32BIT && !TARGET_IWMMXT && reload_completed
3313 && !(IS_VFP_REGNUM (REGNO (operands[0])))"
3314 [(set (match_dup 3) (match_dup 4))
3315 (set (match_dup 5) (match_dup 6))]
3318 operands[3] = gen_lowpart (SImode, operands[0]);
3319 operands[5] = gen_highpart (SImode, operands[0]);
3321 operands[4] = simplify_gen_binary (IOR, SImode,
3322 gen_lowpart (SImode, operands[1]),
3323 gen_lowpart (SImode, operands[2]));
3324 operands[6] = simplify_gen_binary (IOR, SImode,
3325 gen_highpart (SImode, operands[1]),
3326 gen_highpart_mode (SImode, DImode, operands[2]));
3329 [(set_attr "neon_type" "neon_int_1,neon_int_1,*,*,*,*,neon_int_1,neon_int_1")
3330 (set_attr "length" "*,*,8,8,8,8,*,*")
3331 (set_attr "arch" "neon_for_64bits,neon_for_64bits,*,*,*,*,avoid_neon_for_64bits,avoid_neon_for_64bits")]
;; DI |= zero_extend (SI).  The zero-extended high word is 0, so only the
;; low word needs an ORR ("orr%?\t%Q0, %Q1, %2"); the second alternative
;; (dest not tied to op1, length 8) presumably also copies the high word
;; -- that part of the template is elided in this extract.
3334 (define_insn "*iordi_zesidi_di"
3335 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3336 (ior:DI (zero_extend:DI
3337 (match_operand:SI 2 "s_register_operand" "r,r"))
3338 (match_operand:DI 1 "s_register_operand" "0,?r")))]
3341 orr%?\\t%Q0, %Q1, %2
3343 [(set_attr "length" "4,8")
3344 (set_attr "predicable" "yes")
3345 (set_attr "predicable_short_it" "no")]
;; DI |= sign_extend (SI).  Always length 8 (two instructions): the high
;; word depends on the sign bits of operand 2.  The output template is
;; elided in this extract -- presumably ORR low word plus ORR of
;; %2 asr #31 into the high word; confirm against the full file.
3348 (define_insn "*iordi_sesidi_di"
3349 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3350 (ior:DI (sign_extend:DI
3351 (match_operand:SI 2 "s_register_operand" "r,r"))
3352 (match_operand:DI 1 "s_register_operand" "0,r")))]
3355 [(set_attr "length" "8")
3356 (set_attr "predicable" "yes")]
3359 (define_expand "iorsi3"
3360 [(set (match_operand:SI 0 "s_register_operand" "")
3361 (ior:SI (match_operand:SI 1 "s_register_operand" "")
3362 (match_operand:SI 2 "reg_or_int_operand" "")))]
3365 if (CONST_INT_P (operands[2]))
3369 arm_split_constant (IOR, SImode, NULL_RTX,
3370 INTVAL (operands[2]), operands[0], operands[1],
3371 optimize && can_create_pseudo_p ());
3374 else /* TARGET_THUMB1 */
3376 rtx tmp = force_reg (SImode, operands[2]);
3377 if (rtx_equal_p (operands[0], operands[1]))
3381 operands[2] = operands[1];
3389 (define_insn_and_split "*iorsi3_insn"
3390 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
3391 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
3392 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
3397 orn%?\\t%0, %1, #%B2
3401 && CONST_INT_P (operands[2])
3402 && !(const_ok_for_arm (INTVAL (operands[2]))
3403 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
3404 [(clobber (const_int 0))]
3406 arm_split_constant (IOR, SImode, curr_insn,
3407 INTVAL (operands[2]), operands[0], operands[1], 0);
3410 [(set_attr "length" "4,4,4,4,16")
3411 (set_attr "arch" "32,t2,t2,32,32")
3412 (set_attr "predicable" "yes")
3413 (set_attr "predicable_short_it" "no,yes,no,no,no")
3414 (set_attr "type" "arlo_imm,*,arlo_imm,*,*")]
;; Thumb-1 two-operand ORR: destination tied to operand 1 ("%0", with %
;; marking the operands commutative) and both registers restricted to lo
;; regs ("l").  16-bit encoding (length 2); always sets the flags.
3417 (define_insn "*thumb1_iorsi3_insn"
3418 [(set (match_operand:SI 0 "register_operand" "=l")
3419 (ior:SI (match_operand:SI 1 "register_operand" "%0")
3420 (match_operand:SI 2 "register_operand" "l")))]
3423 [(set_attr "length" "2")
3424 (set_attr "conds" "set")])
3427 [(match_scratch:SI 3 "r")
3428 (set (match_operand:SI 0 "arm_general_register_operand" "")
3429 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
3430 (match_operand:SI 2 "const_int_operand" "")))]
3432 && !const_ok_for_arm (INTVAL (operands[2]))
3433 && const_ok_for_arm (~INTVAL (operands[2]))"
3434 [(set (match_dup 3) (match_dup 2))
3435 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
;; Flag-setting OR: one ORRS writes op0 = op1 | op2 and the condition
;; codes together.  CC_NOOV because V is meaningless after a logical op.
;; Alternative 0 takes an ARM-encodable immediate ("I", type arlo_imm).
3439 (define_insn "*iorsi3_compare0"
3440 [(set (reg:CC_NOOV CC_REGNUM)
3441 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r")
3442 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3444 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3445 (ior:SI (match_dup 1) (match_dup 2)))]
3447 "orr%.\\t%0, %1, %2"
3448 [(set_attr "conds" "set")
3449 (set_attr "type" "arlo_imm,*")]
;; As *iorsi3_compare0 but only the flags are live: the ORRS result goes
;; to a scratch register and is discarded.
3452 (define_insn "*iorsi3_compare0_scratch"
3453 [(set (reg:CC_NOOV CC_REGNUM)
3454 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r")
3455 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3457 (clobber (match_scratch:SI 0 "=r,r"))]
3459 "orr%.\\t%0, %1, %2"
3460 [(set_attr "conds" "set")
3461 (set_attr "type" "arlo_imm,*")]
;; 64-bit exclusive-OR expander; operand 2 uses the "arm_xordi_operand"
;; predicate (cf. the Dg constraint in *xordi3_insn below).
3464 (define_expand "xordi3"
3465 [(set (match_operand:DI 0 "s_register_operand" "")
3466 (xor:DI (match_operand:DI 1 "s_register_operand" "")
3467 (match_operand:DI 2 "arm_xordi_operand" "")))]
3472 (define_insn_and_split "*xordi3_insn"
3473 [(set (match_operand:DI 0 "s_register_operand" "=w,&r,&r,&r,&r,?w")
3474 (xor:DI (match_operand:DI 1 "s_register_operand" "w ,%0,r ,0 ,r ,w")
3475 (match_operand:DI 2 "arm_xordi_operand" "w ,r ,r ,Dg,Dg,w")))]
3476 "TARGET_32BIT && !TARGET_IWMMXT"
3478 switch (which_alternative)
3483 case 4: /* fall through */
3485 case 0: /* fall through */
3486 case 5: return "veor\t%P0, %P1, %P2";
3487 default: gcc_unreachable ();
3490 "TARGET_32BIT && !TARGET_IWMMXT && reload_completed
3491 && !(IS_VFP_REGNUM (REGNO (operands[0])))"
3492 [(set (match_dup 3) (match_dup 4))
3493 (set (match_dup 5) (match_dup 6))]
3496 operands[3] = gen_lowpart (SImode, operands[0]);
3497 operands[5] = gen_highpart (SImode, operands[0]);
3499 operands[4] = simplify_gen_binary (XOR, SImode,
3500 gen_lowpart (SImode, operands[1]),
3501 gen_lowpart (SImode, operands[2]));
3502 operands[6] = simplify_gen_binary (XOR, SImode,
3503 gen_highpart (SImode, operands[1]),
3504 gen_highpart_mode (SImode, DImode, operands[2]));
3507 [(set_attr "length" "*,8,8,8,8,*")
3508 (set_attr "neon_type" "neon_int_1,*,*,*,*,neon_int_1")
3509 (set_attr "arch" "neon_for_64bits,*,*,*,*,avoid_neon_for_64bits")]
;; DI ^= zero_extend (SI).  XOR with the zero high word is a no-op, so
;; only the low word needs an EOR ("eor%?\t%Q0, %Q1, %2"); the second
;; alternative (length 8) presumably also moves the high word -- that
;; part of the template is elided in this extract.
3512 (define_insn "*xordi_zesidi_di"
3513 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3514 (xor:DI (zero_extend:DI
3515 (match_operand:SI 2 "s_register_operand" "r,r"))
3516 (match_operand:DI 1 "s_register_operand" "0,?r")))]
3519 eor%?\\t%Q0, %Q1, %2
3521 [(set_attr "length" "4,8")
3522 (set_attr "predicable" "yes")
3523 (set_attr "predicable_short_it" "no")]
;; DI ^= sign_extend (SI).  Always two instructions (length 8) since the
;; high word depends on operand 2's sign bits.  Output template elided in
;; this extract -- presumably EOR low word plus EOR of %2 asr #31 into
;; the high word; confirm against the full file.
3526 (define_insn "*xordi_sesidi_di"
3527 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3528 (xor:DI (sign_extend:DI
3529 (match_operand:SI 2 "s_register_operand" "r,r"))
3530 (match_operand:DI 1 "s_register_operand" "0,r")))]
3533 [(set_attr "length" "8")
3534 (set_attr "predicable" "yes")]
3537 (define_expand "xorsi3"
3538 [(set (match_operand:SI 0 "s_register_operand" "")
3539 (xor:SI (match_operand:SI 1 "s_register_operand" "")
3540 (match_operand:SI 2 "reg_or_int_operand" "")))]
3542 "if (CONST_INT_P (operands[2]))
3546 arm_split_constant (XOR, SImode, NULL_RTX,
3547 INTVAL (operands[2]), operands[0], operands[1],
3548 optimize && can_create_pseudo_p ());
3551 else /* TARGET_THUMB1 */
3553 rtx tmp = force_reg (SImode, operands[2]);
3554 if (rtx_equal_p (operands[0], operands[1]))
3558 operands[2] = operands[1];
3565 (define_insn_and_split "*arm_xorsi3"
3566 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r")
3567 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r")
3568 (match_operand:SI 2 "reg_or_int_operand" "I,l,r,?n")))]
3576 && CONST_INT_P (operands[2])
3577 && !const_ok_for_arm (INTVAL (operands[2]))"
3578 [(clobber (const_int 0))]
3580 arm_split_constant (XOR, SImode, curr_insn,
3581 INTVAL (operands[2]), operands[0], operands[1], 0);
3584 [(set_attr "length" "4,4,4,16")
3585 (set_attr "predicable" "yes")
3586 (set_attr "predicable_short_it" "no,yes,no,no")
3587 (set_attr "type" "arlo_imm,*,*,*")]
;; Thumb-1 two-operand EOR: destination tied to operand 1, lo regs only;
;; 16-bit encoding, sets the flags.  Output template elided in this
;; extract.
3590 (define_insn "*thumb1_xorsi3_insn"
3591 [(set (match_operand:SI 0 "register_operand" "=l")
3592 (xor:SI (match_operand:SI 1 "register_operand" "%0")
3593 (match_operand:SI 2 "register_operand" "l")))]
3596 [(set_attr "length" "2")
3597 (set_attr "conds" "set")
3598 (set_attr "type" "arlo_imm")]
;; Flag-setting XOR: one EORS writes op0 = op1 ^ op2 and the condition
;; codes together (CC_NOOV -- V undefined after a logical op).
3601 (define_insn "*xorsi3_compare0"
3602 [(set (reg:CC_NOOV CC_REGNUM)
3603 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
3604 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3606 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3607 (xor:SI (match_dup 1) (match_dup 2)))]
3609 "eor%.\\t%0, %1, %2"
3610 [(set_attr "conds" "set")
3611 (set_attr "type" "arlo_imm,*")]
;; Flags-only XOR compare: note the operands here are 0 and 1 (no
;; destination register in the RTL).  The output template is elided in
;; this extract -- presumably a TEQ; confirm against the full file.
3614 (define_insn "*xorsi3_compare0_scratch"
3615 [(set (reg:CC_NOOV CC_REGNUM)
3616 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
3617 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
3621 [(set_attr "conds" "set")
3622 (set_attr "type" "arlo_imm,*")]
3625 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
3626 ; (NOT D) we can sometimes merge the final NOT into one of the following
3630 [(set (match_operand:SI 0 "s_register_operand" "")
3631 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
3632 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
3633 (match_operand:SI 3 "arm_rhs_operand" "")))
3634 (clobber (match_operand:SI 4 "s_register_operand" ""))]
3636 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
3637 (not:SI (match_dup 3))))
3638 (set (match_dup 0) (not:SI (match_dup 4)))]
3642 (define_insn_and_split "*andsi_iorsi3_notsi"
3643 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
3644 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
3645 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
3646 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
3648 "#" ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
3649 "&& reload_completed"
3650 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
3651 (set (match_dup 0) (and:SI (not:SI (match_dup 3)) (match_dup 0)))]
3653 [(set_attr "length" "8")
3654 (set_attr "ce_count" "2")
3655 (set_attr "predicable" "yes")
3656 (set_attr "predicable_short_it" "no")]
3659 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
3660 ; insns are available?
3662 [(set (match_operand:SI 0 "s_register_operand" "")
3663 (match_operator:SI 1 "logical_binary_operator"
3664 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3665 (match_operand:SI 3 "const_int_operand" "")
3666 (match_operand:SI 4 "const_int_operand" ""))
3667 (match_operator:SI 9 "logical_binary_operator"
3668 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3669 (match_operand:SI 6 "const_int_operand" ""))
3670 (match_operand:SI 7 "s_register_operand" "")])]))
3671 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3673 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3674 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3677 [(ashift:SI (match_dup 2) (match_dup 4))
3681 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3684 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3688 [(set (match_operand:SI 0 "s_register_operand" "")
3689 (match_operator:SI 1 "logical_binary_operator"
3690 [(match_operator:SI 9 "logical_binary_operator"
3691 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3692 (match_operand:SI 6 "const_int_operand" ""))
3693 (match_operand:SI 7 "s_register_operand" "")])
3694 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3695 (match_operand:SI 3 "const_int_operand" "")
3696 (match_operand:SI 4 "const_int_operand" ""))]))
3697 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3699 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3700 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3703 [(ashift:SI (match_dup 2) (match_dup 4))
3707 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3710 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3714 [(set (match_operand:SI 0 "s_register_operand" "")
3715 (match_operator:SI 1 "logical_binary_operator"
3716 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3717 (match_operand:SI 3 "const_int_operand" "")
3718 (match_operand:SI 4 "const_int_operand" ""))
3719 (match_operator:SI 9 "logical_binary_operator"
3720 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3721 (match_operand:SI 6 "const_int_operand" ""))
3722 (match_operand:SI 7 "s_register_operand" "")])]))
3723 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3725 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3726 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3729 [(ashift:SI (match_dup 2) (match_dup 4))
3733 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3736 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3740 [(set (match_operand:SI 0 "s_register_operand" "")
3741 (match_operator:SI 1 "logical_binary_operator"
3742 [(match_operator:SI 9 "logical_binary_operator"
3743 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3744 (match_operand:SI 6 "const_int_operand" ""))
3745 (match_operand:SI 7 "s_register_operand" "")])
3746 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3747 (match_operand:SI 3 "const_int_operand" "")
3748 (match_operand:SI 4 "const_int_operand" ""))]))
3749 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3751 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3752 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3755 [(ashift:SI (match_dup 2) (match_dup 4))
3759 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3762 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3766 ;; Minimum and maximum insns
3768 (define_expand "smaxsi3"
3770 (set (match_operand:SI 0 "s_register_operand" "")
3771 (smax:SI (match_operand:SI 1 "s_register_operand" "")
3772 (match_operand:SI 2 "arm_rhs_operand" "")))
3773 (clobber (reg:CC CC_REGNUM))])]
3776 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3778 /* No need for a clobber of the condition code register here. */
3779 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3780 gen_rtx_SMAX (SImode, operands[1],
;; smax (x, 0) without a compare: x asr #31 is all-ones exactly when
;; x < 0, so BIC x, x, x asr #31 clears x to 0 for negative inputs and
;; leaves non-negative inputs unchanged.  Single predicable instruction.
3786 (define_insn "*smax_0"
3787 [(set (match_operand:SI 0 "s_register_operand" "=r")
3788 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3791 "bic%?\\t%0, %1, %1, asr #31"
3792 [(set_attr "predicable" "yes")
3793 (set_attr "predicable_short_it" "no")]
;; smax (x, -1) without a compare: ORRing x with x asr #31 forces the
;; result to -1 when x is negative and leaves non-negative x unchanged.
3796 (define_insn "*smax_m1"
3797 [(set (match_operand:SI 0 "s_register_operand" "=r")
3798 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3801 "orr%?\\t%0, %1, %1, asr #31"
3802 [(set_attr "predicable" "yes")
3803 (set_attr "predicable_short_it" "no")]
3806 (define_insn_and_split "*arm_smax_insn"
3807 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3808 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3809 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3810 (clobber (reg:CC CC_REGNUM))]
3813 ; cmp\\t%1, %2\;movlt\\t%0, %2
3814 ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3816 [(set (reg:CC CC_REGNUM)
3817 (compare:CC (match_dup 1) (match_dup 2)))
3819 (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
3823 [(set_attr "conds" "clob")
3824 (set_attr "length" "8,12")]
3827 (define_expand "sminsi3"
3829 (set (match_operand:SI 0 "s_register_operand" "")
3830 (smin:SI (match_operand:SI 1 "s_register_operand" "")
3831 (match_operand:SI 2 "arm_rhs_operand" "")))
3832 (clobber (reg:CC CC_REGNUM))])]
3835 if (operands[2] == const0_rtx)
3837 /* No need for a clobber of the condition code register here. */
3838 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3839 gen_rtx_SMIN (SImode, operands[1],
;; smin (x, 0) without a compare: ANDing x with x asr #31 keeps x when it
;; is negative (mask all-ones) and yields 0 otherwise.
3845 (define_insn "*smin_0"
3846 [(set (match_operand:SI 0 "s_register_operand" "=r")
3847 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3850 "and%?\\t%0, %1, %1, asr #31"
3851 [(set_attr "predicable" "yes")
3852 (set_attr "predicable_short_it" "no")]
3855 (define_insn_and_split "*arm_smin_insn"
3856 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3857 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3858 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3859 (clobber (reg:CC CC_REGNUM))]
3862 ; cmp\\t%1, %2\;movge\\t%0, %2
3863 ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3865 [(set (reg:CC CC_REGNUM)
3866 (compare:CC (match_dup 1) (match_dup 2)))
3868 (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
3872 [(set_attr "conds" "clob")
3873 (set_attr "length" "8,12")]
3876 (define_expand "umaxsi3"
3878 (set (match_operand:SI 0 "s_register_operand" "")
3879 (umax:SI (match_operand:SI 1 "s_register_operand" "")
3880 (match_operand:SI 2 "arm_rhs_operand" "")))
3881 (clobber (reg:CC CC_REGNUM))])]
3886 (define_insn_and_split "*arm_umaxsi3"
3887 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3888 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3889 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3890 (clobber (reg:CC CC_REGNUM))]
3893 ; cmp\\t%1, %2\;movcc\\t%0, %2
3894 ; cmp\\t%1, %2\;movcs\\t%0, %1
3895 ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3897 [(set (reg:CC CC_REGNUM)
3898 (compare:CC (match_dup 1) (match_dup 2)))
3900 (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
3904 [(set_attr "conds" "clob")
3905 (set_attr "length" "8,8,12")]
3908 (define_expand "uminsi3"
3910 (set (match_operand:SI 0 "s_register_operand" "")
3911 (umin:SI (match_operand:SI 1 "s_register_operand" "")
3912 (match_operand:SI 2 "arm_rhs_operand" "")))
3913 (clobber (reg:CC CC_REGNUM))])]
3918 (define_insn_and_split "*arm_uminsi3"
3919 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3920 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3921 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3922 (clobber (reg:CC CC_REGNUM))]
3925 ; cmp\\t%1, %2\;movcs\\t%0, %2
3926 ; cmp\\t%1, %2\;movcc\\t%0, %1
3927 ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3929 [(set (reg:CC CC_REGNUM)
3930 (compare:CC (match_dup 1) (match_dup 2)))
3932 (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
3936 [(set_attr "conds" "clob")
3937 (set_attr "length" "8,8,12")]
3940 (define_insn "*store_minmaxsi"
3941 [(set (match_operand:SI 0 "memory_operand" "=m")
3942 (match_operator:SI 3 "minmax_operator"
3943 [(match_operand:SI 1 "s_register_operand" "r")
3944 (match_operand:SI 2 "s_register_operand" "r")]))
3945 (clobber (reg:CC CC_REGNUM))]
3946 "TARGET_32BIT && optimize_insn_for_size_p()"
3948 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3949 operands[1], operands[2]);
3950 output_asm_insn (\"cmp\\t%1, %2\", operands);
3952 output_asm_insn (\"ite\t%d3\", operands);
3953 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3954 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3957 [(set_attr "conds" "clob")
3958 (set (attr "length")
3959 (if_then_else (eq_attr "is_thumb" "yes")
3962 (set_attr "type" "store1")]
3965 ; Reject the frame pointer in operand[1], since reloading this after
3966 ; it has been eliminated can cause carnage.
3967 (define_insn "*minmax_arithsi"
3968 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3969 (match_operator:SI 4 "shiftable_operator"
3970 [(match_operator:SI 5 "minmax_operator"
3971 [(match_operand:SI 2 "s_register_operand" "r,r")
3972 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3973 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3974 (clobber (reg:CC CC_REGNUM))]
3975 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && !arm_restrict_it"
3978 enum rtx_code code = GET_CODE (operands[4]);
3981 if (which_alternative != 0 || operands[3] != const0_rtx
3982 || (code != PLUS && code != IOR && code != XOR))
3987 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3988 operands[2], operands[3]);
3989 output_asm_insn (\"cmp\\t%2, %3\", operands);
3993 output_asm_insn (\"ite\\t%d5\", operands);
3995 output_asm_insn (\"it\\t%d5\", operands);
3997 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3999 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
4002 [(set_attr "conds" "clob")
4003 (set (attr "length")
4004 (if_then_else (eq_attr "is_thumb" "yes")
4009 ; Reject the frame pointer in operand[1], since reloading this after
4010 ; it has been eliminated can cause carnage.
4011 (define_insn_and_split "*minmax_arithsi_non_canon"
4012 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
4014 (match_operand:SI 1 "s_register_operand" "0,?Ts")
4015 (match_operator:SI 4 "minmax_operator"
4016 [(match_operand:SI 2 "s_register_operand" "Ts,Ts")
4017 (match_operand:SI 3 "arm_rhs_operand" "TsI,TsI")])))
4018 (clobber (reg:CC CC_REGNUM))]
4019 "TARGET_32BIT && !arm_eliminable_register (operands[1])
4020 && !(arm_restrict_it && CONST_INT_P (operands[3]))"
4022 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
4023 [(set (reg:CC CC_REGNUM)
4024 (compare:CC (match_dup 2) (match_dup 3)))
4026 (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
4028 (minus:SI (match_dup 1)
4030 (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
4034 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
4035 operands[2], operands[3]);
4036 enum rtx_code rc = minmax_code (operands[4]);
4037 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
4038 operands[2], operands[3]);
4040 if (mode == CCFPmode || mode == CCFPEmode)
4041 rc = reverse_condition_maybe_unordered (rc);
4043 rc = reverse_condition (rc);
4044 operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
4045 if (CONST_INT_P (operands[3]))
4046 operands[6] = plus_constant (SImode, operands[1], -INTVAL (operands[3]));
4048 operands[6] = gen_rtx_MINUS (SImode, operands[1], operands[3]);
4050 [(set_attr "conds" "clob")
4051 (set (attr "length")
4052 (if_then_else (eq_attr "is_thumb" "yes")
;; Iterators/attributes for recognizing SSAT/USAT saturation idioms.
;; SAT and SATrev both range over {smin, smax}; the patterns below
;; require <SAT:CODE> != <SATrev:CODE>, so together they match both
;; smin (smax (...)) and smax (smin (...)) nestings.  SATlo/SAThi map
;; each code to the operand number holding the lower/upper bound.
4057 (define_code_iterator SAT [smin smax])
4058 (define_code_iterator SATrev [smin smax])
4059 (define_code_attr SATlo [(smin "1") (smax "2")])
4060 (define_code_attr SAThi [(smin "2") (smax "1")])
4062 (define_insn "*satsi_<SAT:code>"
4063 [(set (match_operand:SI 0 "s_register_operand" "=r")
4064 (SAT:SI (SATrev:SI (match_operand:SI 3 "s_register_operand" "r")
4065 (match_operand:SI 1 "const_int_operand" "i"))
4066 (match_operand:SI 2 "const_int_operand" "i")))]
4067 "TARGET_32BIT && arm_arch6 && <SAT:CODE> != <SATrev:CODE>
4068 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
4072 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
4073 &mask, &signed_sat))
4076 operands[1] = GEN_INT (mask);
4078 return "ssat%?\t%0, %1, %3";
4080 return "usat%?\t%0, %1, %3";
4082 [(set_attr "predicable" "yes")
4083 (set_attr "predicable_short_it" "no")]
4086 (define_insn "*satsi_<SAT:code>_shift"
4087 [(set (match_operand:SI 0 "s_register_operand" "=r")
4088 (SAT:SI (SATrev:SI (match_operator:SI 3 "sat_shift_operator"
4089 [(match_operand:SI 4 "s_register_operand" "r")
4090 (match_operand:SI 5 "const_int_operand" "i")])
4091 (match_operand:SI 1 "const_int_operand" "i"))
4092 (match_operand:SI 2 "const_int_operand" "i")))]
4093 "TARGET_32BIT && arm_arch6 && <SAT:CODE> != <SATrev:CODE>
4094 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
4098 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
4099 &mask, &signed_sat))
4102 operands[1] = GEN_INT (mask);
4104 return "ssat%?\t%0, %1, %4%S3";
4106 return "usat%?\t%0, %1, %4%S3";
4108 [(set_attr "predicable" "yes")
4109 (set_attr "predicable_short_it" "no")
4110 (set_attr "shift" "3")
4111 (set_attr "type" "arlo_shift")])
4113 ;; Shift and rotation insns
4115 (define_expand "ashldi3"
4116 [(set (match_operand:DI 0 "s_register_operand" "")
4117 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
4118 (match_operand:SI 2 "general_operand" "")))]
4123 /* Delay the decision whether to use NEON or core-regs until
4124 register allocation. */
4125 emit_insn (gen_ashldi3_neon (operands[0], operands[1], operands[2]));
4130 /* Only the NEON case can handle in-memory shift counts. */
4131 if (!reg_or_int_operand (operands[2], SImode))
4132 operands[2] = force_reg (SImode, operands[2]);
4135 if (!CONST_INT_P (operands[2]) && TARGET_REALLY_IWMMXT)
4136 ; /* No special preparation statements; expand pattern as above. */
4139 rtx scratch1, scratch2;
4141 if (CONST_INT_P (operands[2])
4142 && (HOST_WIDE_INT) INTVAL (operands[2]) == 1)
4144 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
4148 /* Ideally we should use iwmmxt here if we could know that operands[1]
4149 ends up already living in an iwmmxt register. Otherwise it's
4150 cheaper to have the alternate code being generated than moving
4151 values to iwmmxt regs and back. */
4153 /* If we're optimizing for size, we prefer the libgcc calls. */
4154 if (optimize_function_for_size_p (cfun))
4157 /* Expand operation using core-registers.
4158 'FAIL' would achieve the same thing, but this is a bit smarter. */
4159 scratch1 = gen_reg_rtx (SImode);
4160 scratch2 = gen_reg_rtx (SImode);
4161 arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
4162 operands[2], scratch1, scratch2);
4168 (define_insn_and_split "arm_ashldi3_1bit"
4169 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
4170 (ashift:DI (match_operand:DI 1 "s_register_operand" "0,r")
4172 (clobber (reg:CC CC_REGNUM))]
4174 "#" ; "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
4175 "&& reload_completed"
4176 [(parallel [(set (reg:CC CC_REGNUM)
4177 (compare:CC (ashift:SI (match_dup 1) (const_int 1))
4179 (set (match_dup 0) (ashift:SI (match_dup 1) (const_int 1)))])
4180 (set (match_dup 2) (plus:SI (plus:SI (match_dup 3) (match_dup 3))
4181 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
4183 operands[2] = gen_highpart (SImode, operands[0]);
4184 operands[0] = gen_lowpart (SImode, operands[0]);
4185 operands[3] = gen_highpart (SImode, operands[1]);
4186 operands[1] = gen_lowpart (SImode, operands[1]);
4188 [(set_attr "conds" "clob")
4189 (set_attr "length" "8")]
4192 (define_expand "ashlsi3"
4193 [(set (match_operand:SI 0 "s_register_operand" "")
4194 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
4195 (match_operand:SI 2 "arm_rhs_operand" "")))]
4198 if (CONST_INT_P (operands[2])
4199 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
4201 emit_insn (gen_movsi (operands[0], const0_rtx));
;; Thumb-1 left shift: alternative 0 takes an immediate count ("N"),
;; alternative 1 a lo-reg count with the destination tied to operand 1.
;; 16-bit encoding; sets the flags.  Output template elided in this
;; extract.
4207 (define_insn "*thumb1_ashlsi3"
4208 [(set (match_operand:SI 0 "register_operand" "=l,l")
4209 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
4210 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
4213 [(set_attr "length" "2")
4214 (set_attr "type" "shift,shift_reg")
4215 (set_attr "conds" "set")])
4217 (define_expand "ashrdi3"
4218 [(set (match_operand:DI 0 "s_register_operand" "")
4219 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
4220 (match_operand:SI 2 "reg_or_int_operand" "")))]
4225 /* Delay the decision whether to use NEON or core-regs until
4226 register allocation. */
4227 emit_insn (gen_ashrdi3_neon (operands[0], operands[1], operands[2]));
4231 if (!CONST_INT_P (operands[2]) && TARGET_REALLY_IWMMXT)
4232 ; /* No special preparation statements; expand pattern as above. */
4235 rtx scratch1, scratch2;
4237 if (CONST_INT_P (operands[2])
4238 && (HOST_WIDE_INT) INTVAL (operands[2]) == 1)
4240 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
4244 /* Ideally we should use iwmmxt here if we could know that operands[1]
4245 ends up already living in an iwmmxt register. Otherwise it's
4246 cheaper to have the alternate code being generated than moving
4247 values to iwmmxt regs and back. */
4249 /* If we're optimizing for size, we prefer the libgcc calls. */
4250 if (optimize_function_for_size_p (cfun))
4253 /* Expand operation using core-registers.
4254 'FAIL' would achieve the same thing, but this is a bit smarter. */
4255 scratch1 = gen_reg_rtx (SImode);
4256 scratch2 = gen_reg_rtx (SImode);
4257 arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
4258 operands[2], scratch1, scratch2);
4264 (define_insn_and_split "arm_ashrdi3_1bit"
4265 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
4266 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
4268 (clobber (reg:CC CC_REGNUM))]
4270 "#" ; "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
4271 "&& reload_completed"
4272 [(parallel [(set (reg:CC CC_REGNUM)
4273 (compare:CC (ashiftrt:SI (match_dup 3) (const_int 1))
4275 (set (match_dup 2) (ashiftrt:SI (match_dup 3) (const_int 1)))])
4276 (set (match_dup 0) (unspec:SI [(match_dup 1)
4277 (reg:CC_C CC_REGNUM)]
4280 operands[2] = gen_highpart (SImode, operands[0]);
4281 operands[0] = gen_lowpart (SImode, operands[0]);
4282 operands[3] = gen_highpart (SImode, operands[1]);
4283 operands[1] = gen_lowpart (SImode, operands[1]);
4285 [(set_attr "conds" "clob")
4286 (set_attr "length" "8")]
4290 [(set (match_operand:SI 0 "s_register_operand" "=r")
4291 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
4292 (reg:CC_C CC_REGNUM)]
4296 [(set_attr "conds" "use")
4297 (set_attr "type" "mov_shift")]
4300 (define_expand "ashrsi3"
4301 [(set (match_operand:SI 0 "s_register_operand" "")
4302 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
4303 (match_operand:SI 2 "arm_rhs_operand" "")))]
4306 if (CONST_INT_P (operands[2])
4307 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
4308 operands[2] = GEN_INT (31);
;; Thumb-1 arithmetic right shift; same alternative structure as
;; *thumb1_ashlsi3 (immediate or tied lo-reg count), 16-bit, sets flags.
;; Output template elided in this extract.
4312 (define_insn "*thumb1_ashrsi3"
4313 [(set (match_operand:SI 0 "register_operand" "=l,l")
4314 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
4315 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
4318 [(set_attr "length" "2")
4319 (set_attr "type" "shift,shift_reg")
4320 (set_attr "conds" "set")])
4322 (define_expand "lshrdi3"
4323 [(set (match_operand:DI 0 "s_register_operand" "")
4324 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
4325 (match_operand:SI 2 "reg_or_int_operand" "")))]
4330 /* Delay the decision whether to use NEON or core-regs until
4331 register allocation. */
4332 emit_insn (gen_lshrdi3_neon (operands[0], operands[1], operands[2]));
4336 if (!CONST_INT_P (operands[2]) && TARGET_REALLY_IWMMXT)
4337 ; /* No special preparation statements; expand pattern as above. */
4340 rtx scratch1, scratch2;
4342 if (CONST_INT_P (operands[2])
4343 && (HOST_WIDE_INT) INTVAL (operands[2]) == 1)
4345 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
4349 /* Ideally we should use iwmmxt here if we could know that operands[1]
4350 ends up already living in an iwmmxt register. Otherwise it's
4351 cheaper to have the alternate code being generated than moving
4352 values to iwmmxt regs and back. */
4354 /* If we're optimizing for size, we prefer the libgcc calls. */
4355 if (optimize_function_for_size_p (cfun))
4358 /* Expand operation using core-registers.
4359 'FAIL' would achieve the same thing, but this is a bit smarter. */
4360 scratch1 = gen_reg_rtx (SImode);
4361 scratch2 = gen_reg_rtx (SImode);
4362 arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
4363 operands[2], scratch1, scratch2);
4369 (define_insn_and_split "arm_lshrdi3_1bit"
4370 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
4371 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
4373 (clobber (reg:CC CC_REGNUM))]
4375 "#" ; "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
4376 "&& reload_completed"
4377 [(parallel [(set (reg:CC CC_REGNUM)
4378 (compare:CC (lshiftrt:SI (match_dup 3) (const_int 1))
4380 (set (match_dup 2) (lshiftrt:SI (match_dup 3) (const_int 1)))])
4381 (set (match_dup 0) (unspec:SI [(match_dup 1)
4382 (reg:CC_C CC_REGNUM)]
4385 operands[2] = gen_highpart (SImode, operands[0]);
4386 operands[0] = gen_lowpart (SImode, operands[0]);
4387 operands[3] = gen_highpart (SImode, operands[1]);
4388 operands[1] = gen_lowpart (SImode, operands[1]);
4390 [(set_attr "conds" "clob")
4391 (set_attr "length" "8")]
4394 (define_expand "lshrsi3"
4395 [(set (match_operand:SI 0 "s_register_operand" "")
4396 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
4397 (match_operand:SI 2 "arm_rhs_operand" "")))]
4400 if (CONST_INT_P (operands[2])
4401 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
4403 emit_insn (gen_movsi (operands[0], const0_rtx));
;; Thumb-1 logical right shift; same alternative structure as
;; *thumb1_ashlsi3 (immediate or tied lo-reg count), 16-bit, sets flags.
;; Output template elided in this extract.
4409 (define_insn "*thumb1_lshrsi3"
4410 [(set (match_operand:SI 0 "register_operand" "=l,l")
4411 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
4412 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
4415 [(set_attr "length" "2")
4416 (set_attr "type" "shift,shift_reg")
4417 (set_attr "conds" "set")])
;; Rotate-left expander.  ARM has no rotate-left instruction, so the RTL
;; body is rotatert and the count is complemented: constant n becomes
;; (32 - n) % 32; for a variable count a temporary 32 - n is computed
;; via gen_subsi3 (part of that branch is elided in this extract).
4419 (define_expand "rotlsi3"
4420 [(set (match_operand:SI 0 "s_register_operand" "")
4421 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
4422 (match_operand:SI 2 "reg_or_int_operand" "")))]
4425 if (CONST_INT_P (operands[2]))
4426 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
4429 rtx reg = gen_reg_rtx (SImode);
4430 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
4436 (define_expand "rotrsi3"
4437 [(set (match_operand:SI 0 "s_register_operand" "")
4438 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
4439 (match_operand:SI 2 "arm_rhs_operand" "")))]
4444 if (CONST_INT_P (operands[2])
4445 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
4446 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
4448 else /* TARGET_THUMB1 */
4450 if (CONST_INT_P (operands [2]))
4451 operands [2] = force_reg (SImode, operands[2]);
4456 (define_insn "*thumb1_rotrsi3"
4457 [(set (match_operand:SI 0 "register_operand" "=l")
4458 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
4459 (match_operand:SI 2 "register_operand" "l")))]
4462 [(set_attr "type" "shift_reg")
4463 (set_attr "length" "2")]
;; Generic SImode shift (any shift_operator) with register or immediate
;; amount; assembly is produced by arm_output_shift (second argument 0 =
;; do not set flags).  First alternative is the Thumb-2 short form.
4466 (define_insn "*arm_shiftsi3"
4467 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
4468 (match_operator:SI 3 "shift_operator"
4469 [(match_operand:SI 1 "s_register_operand" "0,r,r")
4470 (match_operand:SI 2 "reg_or_int_operand" "l,M,r")]))]
4472 "* return arm_output_shift(operands, 0);"
4473 [(set_attr "predicable" "yes")
4474 (set_attr "arch" "t2,*,*")
4475 (set_attr "predicable_short_it" "yes,no,no")
4476 (set_attr "length" "4")
4477 (set_attr "shift" "1")
4478 (set_attr "type" "arlo_shift_reg,arlo_shift,arlo_shift_reg")]
;; Flag-setting shift: performs the shift, writes the result, and sets
;; the full condition codes from the comparison (arm_output_shift with
;; flag argument 1 emits the S-suffixed form).
4481 (define_insn "*shiftsi3_compare"
4482 [(set (reg:CC CC_REGNUM)
4483 (compare:CC (match_operator:SI 3 "shift_operator"
4484 [(match_operand:SI 1 "s_register_operand" "r,r")
4485 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4487 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4488 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
4490 "* return arm_output_shift(operands, 1);"
4491 [(set_attr "conds" "set")
4492 (set_attr "shift" "1")
4493 (set_attr "type" "arlo_shift,arlo_shift_reg")]
;; As above, but in CC_NOOV mode (comparison result where overflow is
;; not meaningful).
4496 (define_insn "*shiftsi3_compare0"
4497 [(set (reg:CC_NOOV CC_REGNUM)
4498 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
4499 [(match_operand:SI 1 "s_register_operand" "r,r")
4500 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4502 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4503 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
4505 "* return arm_output_shift(operands, 1);"
4506 [(set_attr "conds" "set")
4507 (set_attr "shift" "1")
4508 (set_attr "type" "arlo_shift,arlo_shift_reg")]
;; Flag-setting shift where only the condition codes are wanted; the
;; shifted value itself goes to a scratch register.
4511 (define_insn "*shiftsi3_compare0_scratch"
4512 [(set (reg:CC_NOOV CC_REGNUM)
4513 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
4514 [(match_operand:SI 1 "s_register_operand" "r,r")
4515 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4517 (clobber (match_scratch:SI 0 "=r,r"))]
4519 "* return arm_output_shift(operands, 1);"
4520 [(set_attr "conds" "set")
4521 (set_attr "shift" "1")
4522 (set_attr "type" "shift,shift_reg")]
;; Bitwise NOT of a shifted operand (MVN with a shifted source), taking
;; advantage of the ARM barrel shifter in the second operand.
4525 (define_insn "*not_shiftsi"
4526 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4527 (not:SI (match_operator:SI 3 "shift_operator"
4528 [(match_operand:SI 1 "s_register_operand" "r,r")
4529 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
4532 [(set_attr "predicable" "yes")
4533 (set_attr "predicable_short_it" "no")
4534 (set_attr "shift" "1")
4535 (set_attr "arch" "32,a")
4536 (set_attr "type" "mvn_shift,mvn_shift_reg")])
;; As above but also setting the condition codes (CC_NOOV) from the
;; result, which is written to operand 0.
4538 (define_insn "*not_shiftsi_compare0"
4539 [(set (reg:CC_NOOV CC_REGNUM)
4541 (not:SI (match_operator:SI 3 "shift_operator"
4542 [(match_operand:SI 1 "s_register_operand" "r,r")
4543 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
4545 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4546 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
4549 [(set_attr "conds" "set")
4550 (set_attr "shift" "1")
4551 (set_attr "arch" "32,a")
4552 (set_attr "type" "mvn_shift,mvn_shift_reg")])
;; Flags-only variant: the MVN result is discarded into a scratch
;; register, leaving just the condition-code effect.
4554 (define_insn "*not_shiftsi_compare0_scratch"
4555 [(set (reg:CC_NOOV CC_REGNUM)
4557 (not:SI (match_operator:SI 3 "shift_operator"
4558 [(match_operand:SI 1 "s_register_operand" "r,r")
4559 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
4561 (clobber (match_scratch:SI 0 "=r,r"))]
4564 [(set_attr "conds" "set")
4565 (set_attr "shift" "1")
4566 (set_attr "arch" "32,a")
4567 (set_attr "type" "mvn_shift,mvn_shift_reg")])
4569 ;; We don't really have extzv, but defining this using shifts helps
4570 ;; to reduce register pressure later on.
;; Zero-extract expander.  Operand 2 is the field width, operand 3 the
;; bit position (both constants).  On Thumb-2, byte-aligned 16/32-bit
;; fields from memory become unaligned loads when unaligned_access is
;; enabled; register sources use the UBFX pattern (extzv_t2).  On
;; Thumb-1 the field is extracted with a shift-left / shift-right pair
;; (extzv_t1), or a single lshrsi3 where the left shift is not needed.
4572 (define_expand "extzv"
4573 [(set (match_operand 0 "s_register_operand" "")
4574 (zero_extract (match_operand 1 "nonimmediate_operand" "")
4575 (match_operand 2 "const_int_operand" "")
4576 (match_operand 3 "const_int_operand" "")))]
4577 "TARGET_THUMB1 || arm_arch_thumb2"
4580 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
4581 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
4583 if (arm_arch_thumb2)
4585 HOST_WIDE_INT width = INTVAL (operands[2]);
4586 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
;; Byte-aligned 16/32-bit memory field: use the unaligned-access insns.
4588 if (unaligned_access && MEM_P (operands[1])
4589 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
;; On big-endian targets the bit position counts from the other end.
4593 if (BYTES_BIG_ENDIAN)
4594 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
4599 base_addr = adjust_address (operands[1], SImode,
4600 bitpos / BITS_PER_UNIT);
4601 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4605 rtx dest = operands[0];
4606 rtx tmp = gen_reg_rtx (SImode);
4608 /* We may get a paradoxical subreg here. Strip it off. */
4609 if (GET_CODE (dest) == SUBREG
4610 && GET_MODE (dest) == SImode
4611 && GET_MODE (SUBREG_REG (dest)) == HImode)
4612 dest = SUBREG_REG (dest);
4614 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4617 base_addr = adjust_address (operands[1], HImode,
4618 bitpos / BITS_PER_UNIT);
4619 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
4620 emit_move_insn (gen_lowpart (SImode, dest), tmp);
;; Register source on Thumb-2: a single UBFX does the extraction.
4624 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
4626 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
;; Thumb-1 fallback: extract via shifts (see extzv_t1 below).
4634 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4637 operands[3] = GEN_INT (rshift);
4641 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
4645 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
4646 operands[3], gen_reg_rtx (SImode)));
4651 ;; Helper for extzv, for the Thumb-1 register-shifts case.
;; Shifts the field to the top of the word (ashift by operand 2, via
;; scratch operand 4), then back down zero-filled (lshiftrt by operand 3).
4653 (define_expand "extzv_t1"
4654 [(set (match_operand:SI 4 "s_register_operand" "")
4655 (ashift:SI (match_operand:SI 1 "nonimmediate_operand" "")
4656 (match_operand:SI 2 "const_int_operand" "")))
4657 (set (match_operand:SI 0 "s_register_operand" "")
4658 (lshiftrt:SI (match_dup 4)
4659 (match_operand:SI 3 "const_int_operand" "")))]
;; Sign-extract expander.  Operand 2 is the field width, operand 3 the
;; bit position (both constants).  Byte-aligned 16/32-bit memory fields
;; become unaligned (sign-extending) loads when unaligned_access is on;
;; SImode register sources go through extv_regsi (SBFX).
4663 (define_expand "extv"
4664 [(set (match_operand 0 "s_register_operand" "")
4665 (sign_extract (match_operand 1 "nonimmediate_operand" "")
4666 (match_operand 2 "const_int_operand" "")
4667 (match_operand 3 "const_int_operand" "")))]
4670 HOST_WIDE_INT width = INTVAL (operands[2]);
4671 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4673 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
4674 && (bitpos % BITS_PER_UNIT) == 0)
;; On big-endian targets the bit position counts from the other end.
4678 if (BYTES_BIG_ENDIAN)
4679 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
4683 base_addr = adjust_address (operands[1], SImode,
4684 bitpos / BITS_PER_UNIT);
4685 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4689 rtx dest = operands[0];
4690 rtx tmp = gen_reg_rtx (SImode);
4692 /* We may get a paradoxical subreg here. Strip it off. */
4693 if (GET_CODE (dest) == SUBREG
4694 && GET_MODE (dest) == SImode
4695 && GET_MODE (SUBREG_REG (dest)) == HImode)
4696 dest = SUBREG_REG (dest);
4698 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4701 base_addr = adjust_address (operands[1], HImode,
4702 bitpos / BITS_PER_UNIT);
4703 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
4704 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4709 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4711 else if (GET_MODE (operands[0]) == SImode
4712 && GET_MODE (operands[1]) == SImode)
4714 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
4722 ; Helper to expand register forms of extv with the proper modes.
4724 (define_expand "extv_regsi"
4725 [(set (match_operand:SI 0 "s_register_operand" "")
4726 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
4727 (match_operand 2 "const_int_operand" "")
4728 (match_operand 3 "const_int_operand" "")))]
4733 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
;; Each pattern wraps the access in an UNSPEC so the optimizers cannot
;; transform it back into an ordinary (alignment-assuming) memory access.
;; First alternative is the short Thumb-2 encoding (Uw constraint / l regs).
;; Unaligned 32-bit load (LDR).
4735 (define_insn "unaligned_loadsi"
4736 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4737 (unspec:SI [(match_operand:SI 1 "memory_operand" "Uw,m")]
4738 UNSPEC_UNALIGNED_LOAD))]
4739 "unaligned_access && TARGET_32BIT"
4740 "ldr%?\t%0, %1\t@ unaligned"
4741 [(set_attr "arch" "t2,any")
4742 (set_attr "length" "2,4")
4743 (set_attr "predicable" "yes")
4744 (set_attr "predicable_short_it" "yes,no")
4745 (set_attr "type" "load1")])
;; Unaligned 16-bit load, sign-extended to SImode (LDRSH).
4747 (define_insn "unaligned_loadhis"
4748 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4750 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uw,m")]
4751 UNSPEC_UNALIGNED_LOAD)))]
4752 "unaligned_access && TARGET_32BIT"
4753 "ldr%(sh%)\t%0, %1\t@ unaligned"
4754 [(set_attr "arch" "t2,any")
4755 (set_attr "length" "2,4")
4756 (set_attr "predicable" "yes")
4757 (set_attr "predicable_short_it" "yes,no")
4758 (set_attr "type" "load_byte")])
;; Unaligned 16-bit load, zero-extended to SImode (LDRH).
4760 (define_insn "unaligned_loadhiu"
4761 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4763 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uw,m")]
4764 UNSPEC_UNALIGNED_LOAD)))]
4765 "unaligned_access && TARGET_32BIT"
4766 "ldr%(h%)\t%0, %1\t@ unaligned"
4767 [(set_attr "arch" "t2,any")
4768 (set_attr "length" "2,4")
4769 (set_attr "predicable" "yes")
4770 (set_attr "predicable_short_it" "yes,no")
4771 (set_attr "type" "load_byte")])
;; Unaligned 32-bit store (STR).
4773 (define_insn "unaligned_storesi"
4774 [(set (match_operand:SI 0 "memory_operand" "=Uw,m")
4775 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,r")]
4776 UNSPEC_UNALIGNED_STORE))]
4777 "unaligned_access && TARGET_32BIT"
4778 "str%?\t%1, %0\t@ unaligned"
4779 [(set_attr "arch" "t2,any")
4780 (set_attr "length" "2,4")
4781 (set_attr "predicable" "yes")
4782 (set_attr "predicable_short_it" "yes,no")
4783 (set_attr "type" "store1")])
;; Unaligned 16-bit store (STRH).
4785 (define_insn "unaligned_storehi"
4786 [(set (match_operand:HI 0 "memory_operand" "=Uw,m")
4787 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,r")]
4788 UNSPEC_UNALIGNED_STORE))]
4789 "unaligned_access && TARGET_32BIT"
4790 "str%(h%)\t%1, %0\t@ unaligned"
4791 [(set_attr "arch" "t2,any")
4792 (set_attr "length" "2,4")
4793 (set_attr "predicable" "yes")
4794 (set_attr "predicable_short_it" "yes,no")
4795 (set_attr "type" "store1")])
4797 ;; Unaligned double-word load and store.
4798 ;; Split after reload into two unaligned single-word accesses.
4799 ;; It prevents lower_subreg from splitting some other aligned
4800 ;; double-word accesses too early. Used for internal memcpy.
4802 (define_insn_and_split "unaligned_loaddi"
4803 [(set (match_operand:DI 0 "s_register_operand" "=l,r")
4804 (unspec:DI [(match_operand:DI 1 "memory_operand" "o,o")]
4805 UNSPEC_UNALIGNED_LOAD))]
4806 "unaligned_access && TARGET_32BIT"
4808 "&& reload_completed"
4809 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_UNALIGNED_LOAD))
4810 (set (match_dup 2) (unspec:SI [(match_dup 3)] UNSPEC_UNALIGNED_LOAD))]
;; Decompose the DI operands into low ([0]/[1]) and high ([2]/[3]) words.
4812 operands[2] = gen_highpart (SImode, operands[0]);
4813 operands[0] = gen_lowpart (SImode, operands[0]);
4814 operands[3] = gen_highpart (SImode, operands[1]);
4815 operands[1] = gen_lowpart (SImode, operands[1]);
4817 /* If the first destination register overlaps with the base address,
4818 swap the order in which the loads are emitted. */
4819 if (reg_overlap_mentioned_p (operands[0], operands[1]))
4821 rtx tmp = operands[1];
4822 operands[1] = operands[3];
4825 operands[0] = operands[2];
4829 [(set_attr "arch" "t2,any")
4830 (set_attr "length" "4,8")
4831 (set_attr "predicable" "yes")
4832 (set_attr "type" "load2")])
;; Store counterpart: split into two unaligned SImode stores after
;; reload (no overlap hazard is possible for stores, so no swap here).
4834 (define_insn_and_split "unaligned_storedi"
4835 [(set (match_operand:DI 0 "memory_operand" "=o,o")
4836 (unspec:DI [(match_operand:DI 1 "s_register_operand" "l,r")]
4837 UNSPEC_UNALIGNED_STORE))]
4838 "unaligned_access && TARGET_32BIT"
4840 "&& reload_completed"
4841 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_UNALIGNED_STORE))
4842 (set (match_dup 2) (unspec:SI [(match_dup 3)] UNSPEC_UNALIGNED_STORE))]
4844 operands[2] = gen_highpart (SImode, operands[0]);
4845 operands[0] = gen_lowpart (SImode, operands[0]);
4846 operands[3] = gen_highpart (SImode, operands[1]);
4847 operands[1] = gen_lowpart (SImode, operands[1]);
4849 [(set_attr "arch" "t2,any")
4850 (set_attr "length" "4,8")
4851 (set_attr "predicable" "yes")
4852 (set_attr "type" "store2")])
;; Signed bitfield extract from a register: SBFX dest, src, lsb, width.
;; Operand 2 is the width, operand 3 the least-significant bit position.
4855 (define_insn "*extv_reg"
4856 [(set (match_operand:SI 0 "s_register_operand" "=r")
4857 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4858 (match_operand:SI 2 "const_int_operand" "M")
4859 (match_operand:SI 3 "const_int_operand" "M")))]
4861 "sbfx%?\t%0, %1, %3, %2"
4862 [(set_attr "length" "4")
4863 (set_attr "predicable" "yes")
4864 (set_attr "predicable_short_it" "no")]
;; Unsigned bitfield extract from a register: UBFX dest, src, lsb, width.
4867 (define_insn "extzv_t2"
4868 [(set (match_operand:SI 0 "s_register_operand" "=r")
4869 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4870 (match_operand:SI 2 "const_int_operand" "M")
4871 (match_operand:SI 3 "const_int_operand" "M")))]
4873 "ubfx%?\t%0, %1, %3, %2"
4874 [(set_attr "length" "4")
4875 (set_attr "predicable" "yes")
4876 (set_attr "predicable_short_it" "no")]
4880 ;; Division instructions
;; Signed 32-bit hardware divide (SDIV).
4881 (define_insn "divsi3"
4882 [(set (match_operand:SI 0 "s_register_operand" "=r")
4883 (div:SI (match_operand:SI 1 "s_register_operand" "r")
4884 (match_operand:SI 2 "s_register_operand" "r")))]
4886 "sdiv%?\t%0, %1, %2"
4887 [(set_attr "predicable" "yes")
4888 (set_attr "predicable_short_it" "no")
4889 (set_attr "type" "sdiv")]
;; Unsigned 32-bit hardware divide (UDIV).
4892 (define_insn "udivsi3"
4893 [(set (match_operand:SI 0 "s_register_operand" "=r")
4894 (udiv:SI (match_operand:SI 1 "s_register_operand" "r")
4895 (match_operand:SI 2 "s_register_operand" "r")))]
4897 "udiv%?\t%0, %1, %2"
4898 [(set_attr "predicable" "yes")
4899 (set_attr "predicable_short_it" "no")
4900 (set_attr "type" "udiv")]
4904 ;; Unary arithmetic insns
;; DImode negation expander; dispatches to the Neon implementation when
;; available (see the gen_negdi2_neon call), otherwise falls through to
;; the core-register patterns below.  Clobbers CC.
4906 (define_expand "negdi2"
4908 [(set (match_operand:DI 0 "s_register_operand" "")
4909 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
4910 (clobber (reg:CC CC_REGNUM))])]
4915 emit_insn (gen_negdi2_neon (operands[0], operands[1]));
4921 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
4922 ;; The first alternative allows the common case of a *full* overlap.
;; ARM-mode DImode negation: splits after reload into RSBS of the low
;; word (0 - lo, setting carry) followed by RSC of the high word.
4923 (define_insn_and_split "*arm_negdi2"
4924 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
4925 (neg:DI (match_operand:DI 1 "s_register_operand" "0,r")))
4926 (clobber (reg:CC CC_REGNUM))]
4928 "#" ; "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
4929 "&& reload_completed"
4930 [(parallel [(set (reg:CC CC_REGNUM)
4931 (compare:CC (const_int 0) (match_dup 1)))
4932 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1)))])
4933 (set (match_dup 2) (minus:SI (minus:SI (const_int 0) (match_dup 3))
4934 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
4936 operands[2] = gen_highpart (SImode, operands[0]);
4937 operands[0] = gen_lowpart (SImode, operands[0]);
4938 operands[3] = gen_highpart (SImode, operands[1]);
4939 operands[1] = gen_lowpart (SImode, operands[1]);
4941 [(set_attr "conds" "clob")
4942 (set_attr "length" "8")]
;; Thumb-1 DImode negation: mov #0 / neg / sbc sequence (3 x 2 bytes).
4945 (define_insn "*thumb1_negdi2"
4946 [(set (match_operand:DI 0 "register_operand" "=&l")
4947 (neg:DI (match_operand:DI 1 "register_operand" "l")))
4948 (clobber (reg:CC CC_REGNUM))]
4950 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
4951 [(set_attr "length" "6")]
;; SImode negation expander.
4954 (define_expand "negsi2"
4955 [(set (match_operand:SI 0 "s_register_operand" "")
4956 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
;; 32-bit negation via reverse subtract from zero (RSB ..., #0).
4961 (define_insn "*arm_negsi2"
4962 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4963 (neg:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
4965 "rsb%?\\t%0, %1, #0"
4966 [(set_attr "predicable" "yes")
4967 (set_attr "predicable_short_it" "yes,no")
4968 (set_attr "arch" "t2,*")
4969 (set_attr "length" "4")]
;; Thumb-1 SImode negation (2-byte encoding).
4972 (define_insn "*thumb1_negsi2"
4973 [(set (match_operand:SI 0 "register_operand" "=l")
4974 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
4977 [(set_attr "length" "2")]
;; Single-precision FP negation expander (VFP).
4980 (define_expand "negsf2"
4981 [(set (match_operand:SF 0 "s_register_operand" "")
4982 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
4983 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP"
;; Double-precision FP negation expander (VFP with double support).
4987 (define_expand "negdf2"
4988 [(set (match_operand:DF 0 "s_register_operand" "")
4989 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
4990 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
4993 ;; Negate an extended 32-bit value.
;; neg(sign_extend(x)): splits after reload into a flag-setting RSB of
;; the low word and an arithmetic-shift-right of the result by 31 for
;; the high word (the commented template shows the sequence).  The
;; flag-setting form is used so Thumb-2 can pick the short encoding.
4994 (define_insn_and_split "*negdi_extendsidi"
4995 [(set (match_operand:DI 0 "s_register_operand" "=r,&r,l,&l")
4996 (neg:DI (sign_extend:DI (match_operand:SI 1 "s_register_operand" "0,r,0,l"))))
4997 (clobber (reg:CC CC_REGNUM))]
4999 "#" ; rsb\\t%Q0, %1, #0\;asr\\t%R0, %Q0, #31
5000 "&& reload_completed"
5003 operands[2] = gen_highpart (SImode, operands[0]);
5004 operands[0] = gen_lowpart (SImode, operands[0]);
5005 rtx tmp = gen_rtx_SET (VOIDmode,
5007 gen_rtx_MINUS (SImode,
5016 /* Set the flags, to emit the short encoding in Thumb2. */
5017 rtx flags = gen_rtx_SET (VOIDmode,
5018 gen_rtx_REG (CCmode, CC_REGNUM),
5019 gen_rtx_COMPARE (CCmode,
5022 emit_insn (gen_rtx_PARALLEL (VOIDmode,
5027 emit_insn (gen_rtx_SET (VOIDmode,
5029 gen_rtx_ASHIFTRT (SImode,
5034 [(set_attr "length" "8,8,4,4")
5035 (set_attr "arch" "a,a,t2,t2")]
;; neg(zero_extend(x)): RSBS of the low word, then SBC of a register
;; with itself to materialize 0 or -1 in the high word from the carry.
5038 (define_insn_and_split "*negdi_zero_extendsidi"
5039 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
5040 (neg:DI (zero_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))))
5041 (clobber (reg:CC CC_REGNUM))]
5043 "#" ; "rsbs\\t%Q0, %1, #0\;sbc\\t%R0,%R0,%R0"
5044 ;; Don't care what register is input to sbc,
5045 ;; since we just need to propagate the carry.
5046 "&& reload_completed"
5047 [(parallel [(set (reg:CC CC_REGNUM)
5048 (compare:CC (const_int 0) (match_dup 1)))
5049 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1)))])
5050 (set (match_dup 2) (minus:SI (minus:SI (match_dup 2) (match_dup 2))
5051 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
5053 operands[2] = gen_highpart (SImode, operands[0]);
5054 operands[0] = gen_lowpart (SImode, operands[0]);
5056 [(set_attr "conds" "clob")
5057 (set_attr "length" "8")] ;; length in thumb is 4
5060 ;; abssi2 doesn't really clobber the condition codes if a different register
5061 ;; is being set. To keep things simple, assume during rtl manipulations that
5062 ;; it does, but tell the final scan operator the truth. Similarly for
;; SImode absolute value expander.  Operand 2 is either a scratch or the
;; CC register depending on the target path selected below.
5065 (define_expand "abssi2"
5067 [(set (match_operand:SI 0 "s_register_operand" "")
5068 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
5069 (clobber (match_dup 2))])]
5073 operands[2] = gen_rtx_SCRATCH (SImode);
5075 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
;; ARM-mode abs.  Two strategies after reload, chosen by whether the
;; destination overlaps the source: (a) compare with 0 plus conditional
;; RSB (cmp/rsblt), or (b) the branchless eor/sub-with-asr-#31 pair.
5078 (define_insn_and_split "*arm_abssi2"
5079 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
5080 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
5081 (clobber (reg:CC CC_REGNUM))]
5084 "&& reload_completed"
5087 /* if (which_alternative == 0) */
5088 if (REGNO(operands[0]) == REGNO(operands[1]))
5090 /* Emit the pattern:
5091 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
5092 [(set (reg:CC CC_REGNUM)
5093 (compare:CC (match_dup 0) (const_int 0)))
5094 (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
5095 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
5097 emit_insn (gen_rtx_SET (VOIDmode,
5098 gen_rtx_REG (CCmode, CC_REGNUM),
5099 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
5100 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5101 (gen_rtx_LT (SImode,
5102 gen_rtx_REG (CCmode, CC_REGNUM),
5104 (gen_rtx_SET (VOIDmode,
5106 (gen_rtx_MINUS (SImode,
5113 /* Emit the pattern:
5114 alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
5116 (xor:SI (match_dup 1)
5117 (ashiftrt:SI (match_dup 1) (const_int 31))))
5119 (minus:SI (match_dup 0)
5120 (ashiftrt:SI (match_dup 1) (const_int 31))))]
5122 emit_insn (gen_rtx_SET (VOIDmode,
5124 gen_rtx_XOR (SImode,
5125 gen_rtx_ASHIFTRT (SImode,
5129 emit_insn (gen_rtx_SET (VOIDmode,
5131 gen_rtx_MINUS (SImode,
5133 gen_rtx_ASHIFTRT (SImode,
5139 [(set_attr "conds" "clob,*")
5140 (set_attr "shift" "1")
5141 (set_attr "predicable" "no, yes")
5142 (set_attr "length" "8")]
;; Thumb-1 abs: branchless three-insn sequence via a scratch register —
;; s = x >> 31 (arithmetic); d = x + s; d = d ^ s.
5145 (define_insn_and_split "*thumb1_abssi2"
5146 [(set (match_operand:SI 0 "s_register_operand" "=l")
5147 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
5148 (clobber (match_scratch:SI 2 "=&l"))]
5151 "TARGET_THUMB1 && reload_completed"
5152 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
5153 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
5154 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
5156 [(set_attr "length" "6")]
;; ARM-mode negated abs (-|x|).  Mirrors *arm_abssi2: either cmp plus
;; conditional RSB (rsbgt) when destination and source coincide, or the
;; branchless eor/rsb-with-asr-#31 pair otherwise.
5159 (define_insn_and_split "*arm_neg_abssi2"
5160 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
5161 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
5162 (clobber (reg:CC CC_REGNUM))]
5165 "&& reload_completed"
5168 /* if (which_alternative == 0) */
5169 if (REGNO (operands[0]) == REGNO (operands[1]))
5171 /* Emit the pattern:
5172 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
5174 emit_insn (gen_rtx_SET (VOIDmode,
5175 gen_rtx_REG (CCmode, CC_REGNUM),
5176 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
5177 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5179 gen_rtx_REG (CCmode, CC_REGNUM),
5181 gen_rtx_SET (VOIDmode,
5183 (gen_rtx_MINUS (SImode,
5189 /* Emit the pattern:
5190 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
5192 emit_insn (gen_rtx_SET (VOIDmode,
5194 gen_rtx_XOR (SImode,
5195 gen_rtx_ASHIFTRT (SImode,
5199 emit_insn (gen_rtx_SET (VOIDmode,
5201 gen_rtx_MINUS (SImode,
5202 gen_rtx_ASHIFTRT (SImode,
5209 [(set_attr "conds" "clob,*")
5210 (set_attr "shift" "1")
5211 (set_attr "predicable" "no, yes")
5212 (set_attr "length" "8")]
;; Thumb-1 negated abs: s = x >> 31; d = s - x; d = d ^ s.
5215 (define_insn_and_split "*thumb1_neg_abssi2"
5216 [(set (match_operand:SI 0 "s_register_operand" "=l")
5217 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
5218 (clobber (match_scratch:SI 2 "=&l"))]
5221 "TARGET_THUMB1 && reload_completed"
5222 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
5223 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
5224 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
5226 [(set_attr "length" "6")]
;; Single-precision FP absolute value expander.
5229 (define_expand "abssf2"
5230 [(set (match_operand:SF 0 "s_register_operand" "")
5231 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
5232 "TARGET_32BIT && TARGET_HARD_FLOAT"
;; Double-precision FP absolute value expander.
5235 (define_expand "absdf2"
5236 [(set (match_operand:DF 0 "s_register_operand" "")
5237 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
5238 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
;; Single-precision FP square root expander (VFP).
5241 (define_expand "sqrtsf2"
5242 [(set (match_operand:SF 0 "s_register_operand" "")
5243 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
5244 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP"
;; Double-precision FP square root expander (VFP with double support).
5247 (define_expand "sqrtdf2"
5248 [(set (match_operand:DF 0 "s_register_operand" "")
5249 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
5250 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
;; DImode one's complement.  Neon alternatives handle it directly; the
;; core-register alternatives split after reload into two SImode NOTs.
5253 (define_insn_and_split "one_cmpldi2"
5254 [(set (match_operand:DI 0 "s_register_operand" "=w,&r,&r,?w")
5255 (not:DI (match_operand:DI 1 "s_register_operand" " w, 0, r, w")))]
5262 "TARGET_32BIT && reload_completed
5263 && arm_general_register_operand (operands[0], DImode)"
5264 [(set (match_dup 0) (not:SI (match_dup 1)))
5265 (set (match_dup 2) (not:SI (match_dup 3)))]
5268 operands[2] = gen_highpart (SImode, operands[0]);
5269 operands[0] = gen_lowpart (SImode, operands[0]);
5270 operands[3] = gen_highpart (SImode, operands[1]);
5271 operands[1] = gen_lowpart (SImode, operands[1]);
5273 [(set_attr "length" "*,8,8,*")
5274 (set_attr "predicable" "no,yes,yes,no")
5275 (set_attr "neon_type" "neon_int_1,*,*,neon_int_1")
5276 (set_attr "arch" "neon_for_64bits,*,*,avoid_neon_for_64bits")]
;; SImode one's complement expander.
5279 (define_expand "one_cmplsi2"
5280 [(set (match_operand:SI 0 "s_register_operand" "")
5281 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
;; 32-bit MVN; first alternative is the short Thumb-2 encoding.
5286 (define_insn "*arm_one_cmplsi2"
5287 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
5288 (not:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
5291 [(set_attr "predicable" "yes")
5292 (set_attr "predicable_short_it" "yes,no")
5293 (set_attr "arch" "t2,*")
5294 (set_attr "length" "4")
5295 (set_attr "type" "mvn_reg")]
;; Thumb-1 MVN (2-byte encoding).
5298 (define_insn "*thumb1_one_cmplsi2"
5299 [(set (match_operand:SI 0 "register_operand" "=l")
5300 (not:SI (match_operand:SI 1 "register_operand" "l")))]
5303 [(set_attr "length" "2")
5304 (set_attr "type" "mvn_reg")]
;; Flag-setting MVN (MVNS): result written to operand 0 and CC_NOOV set.
5307 (define_insn "*notsi_compare0"
5308 [(set (reg:CC_NOOV CC_REGNUM)
5309 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
5311 (set (match_operand:SI 0 "s_register_operand" "=r")
5312 (not:SI (match_dup 1)))]
5315 [(set_attr "conds" "set")
5316 (set_attr "type" "mvn_reg")]
;; Flags-only MVN: the complemented value is discarded into a scratch.
5319 (define_insn "*notsi_compare0_scratch"
5320 [(set (reg:CC_NOOV CC_REGNUM)
5321 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
5323 (clobber (match_scratch:SI 0 "=r"))]
5326 [(set_attr "conds" "set")
5327 (set_attr "type" "mvn_reg")]
5330 ;; Fixed <--> Floating conversion insns
;; SImode -> HFmode: no direct path; go through SFmode and then narrow.
5332 (define_expand "floatsihf2"
5333 [(set (match_operand:HF 0 "general_operand" "")
5334 (float:HF (match_operand:SI 1 "general_operand" "")))]
5338 rtx op1 = gen_reg_rtx (SFmode);
5339 expand_float (op1, operands[1], 0);
5340 op1 = convert_to_mode (HFmode, op1, 0);
5341 emit_move_insn (operands[0], op1);
;; DImode -> HFmode: same SFmode-intermediate strategy as floatsihf2.
5346 (define_expand "floatdihf2"
5347 [(set (match_operand:HF 0 "general_operand" "")
5348 (float:HF (match_operand:DI 1 "general_operand" "")))]
5352 rtx op1 = gen_reg_rtx (SFmode);
5353 expand_float (op1, operands[1], 0);
5354 op1 = convert_to_mode (HFmode, op1, 0);
5355 emit_move_insn (operands[0], op1);
;; SImode -> SFmode conversion expander.
5360 (define_expand "floatsisf2"
5361 [(set (match_operand:SF 0 "s_register_operand" "")
5362 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
5363 "TARGET_32BIT && TARGET_HARD_FLOAT"
;; SImode -> DFmode conversion expander.
5367 (define_expand "floatsidf2"
5368 [(set (match_operand:DF 0 "s_register_operand" "")
5369 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
5370 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
;; HFmode -> SImode truncating fix: widen to SFmode first, then fix.
5374 (define_expand "fix_trunchfsi2"
5375 [(set (match_operand:SI 0 "general_operand" "")
5376 (fix:SI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
5380 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
5381 expand_fix (operands[0], op1, 0);
;; HFmode -> DImode truncating fix: widen to SFmode first, then fix.
5386 (define_expand "fix_trunchfdi2"
5387 [(set (match_operand:DI 0 "general_operand" "")
5388 (fix:DI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
5392 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
5393 expand_fix (operands[0], op1, 0);
;; SFmode -> SImode truncating fix expander.
5398 (define_expand "fix_truncsfsi2"
5399 [(set (match_operand:SI 0 "s_register_operand" "")
5400 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
5401 "TARGET_32BIT && TARGET_HARD_FLOAT"
;; DFmode -> SImode truncating fix expander.
5405 (define_expand "fix_truncdfsi2"
5406 [(set (match_operand:SI 0 "s_register_operand" "")
5407 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
5408 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
;; DFmode -> SFmode narrowing expander.
5414 (define_expand "truncdfsf2"
5415 [(set (match_operand:SF 0 "s_register_operand" "")
5417 (match_operand:DF 1 "s_register_operand" "")))]
5418 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5422 /* DFmode -> HFmode conversions have to go through SFmode. */
5423 (define_expand "truncdfhf2"
5424 [(set (match_operand:HF 0 "general_operand" "")
5426 (match_operand:DF 1 "general_operand" "")))]
5431 op1 = convert_to_mode (SFmode, operands[1], 0);
5432 op1 = convert_to_mode (HFmode, op1, 0);
5433 emit_move_insn (operands[0], op1);
5438 ;; Zero and sign extension instructions.
;; Zero extend QI/HI/SI to DImode; mode-iterator pattern whose predicate,
;; constraints, and condition come from the qhs_zextenddi_* attributes.
5440 (define_insn "zero_extend<mode>di2"
5441 [(set (match_operand:DI 0 "s_register_operand" "=w,r,?r,w")
5442 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>"
5443 "<qhs_zextenddi_cstr>")))]
5444 "TARGET_32BIT <qhs_zextenddi_cond>"
5446 [(set_attr "length" "8,4,8,8")
5447 (set_attr "arch" "neon_for_64bits,*,*,avoid_neon_for_64bits")
5448 (set_attr "ce_count" "2")
5449 (set_attr "predicable" "yes")]
;; Sign extend QI/HI/SI to DImode; same mode-iterator scheme as above.
5452 (define_insn "extend<mode>di2"
5453 [(set (match_operand:DI 0 "s_register_operand" "=w,r,?r,?r,w")
5454 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>"
5455 "<qhs_extenddi_cstr>")))]
5456 "TARGET_32BIT <qhs_sextenddi_cond>"
5458 [(set_attr "length" "8,4,8,8,8")
5459 (set_attr "ce_count" "2")
5460 (set_attr "shift" "1")
5461 (set_attr "predicable" "yes")
5462 (set_attr "arch" "neon_for_64bits,*,a,t,avoid_neon_for_64bits")]
5465 ;; Splits for all extensions to DImode
;; Anonymous split for zero extension to DImode (core registers only):
;; materialize the low word (move or SImode zero-extend), then set the
;; high word to zero.  The emit_clobber avoids a false liveness of the
;; old DI value when source and destination do not overlap.
5467 [(set (match_operand:DI 0 "s_register_operand" "")
5468 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5469 "TARGET_32BIT && reload_completed && !IS_VFP_REGNUM (REGNO (operands[0]))"
5470 [(set (match_dup 0) (match_dup 1))]
5472 rtx lo_part = gen_lowpart (SImode, operands[0]);
5473 enum machine_mode src_mode = GET_MODE (operands[1]);
5475 if (REG_P (operands[0])
5476 && !reg_overlap_mentioned_p (operands[0], operands[1]))
5477 emit_clobber (operands[0]);
5478 if (!REG_P (lo_part) || src_mode != SImode
5479 || !rtx_equal_p (lo_part, operands[1]))
5481 if (src_mode == SImode)
5482 emit_move_insn (lo_part, operands[1]);
5484 emit_insn (gen_rtx_SET (VOIDmode, lo_part,
5485 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
5486 operands[1] = lo_part;
5488 operands[0] = gen_highpart (SImode, operands[0]);
5489 operands[1] = const0_rtx;
;; Anonymous split for sign extension to DImode (core registers only):
;; materialize the low word, then fill the high word with the sign via
;; an arithmetic shift right by 31.
5493 [(set (match_operand:DI 0 "s_register_operand" "")
5494 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5495 "TARGET_32BIT && reload_completed && !IS_VFP_REGNUM (REGNO (operands[0]))"
5496 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
5498 rtx lo_part = gen_lowpart (SImode, operands[0]);
5499 enum machine_mode src_mode = GET_MODE (operands[1]);
5501 if (REG_P (operands[0])
5502 && !reg_overlap_mentioned_p (operands[0], operands[1]))
5503 emit_clobber (operands[0]);
5505 if (!REG_P (lo_part) || src_mode != SImode
5506 || !rtx_equal_p (lo_part, operands[1]))
5508 if (src_mode == SImode)
5509 emit_move_insn (lo_part, operands[1]);
5511 emit_insn (gen_rtx_SET (VOIDmode, lo_part,
5512 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
5513 operands[1] = lo_part;
5515 operands[0] = gen_highpart (SImode, operands[0]);
;; HImode -> SImode zero extension expander.  Pre-ARMv4 ARM cannot load
;; a halfword directly (movhi_bytes path); pre-ARMv6 register sources
;; use a shift-left-16 / shift-right-16 pair.
5518 (define_expand "zero_extendhisi2"
5519 [(set (match_operand:SI 0 "s_register_operand" "")
5520 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
5523 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
5525 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
5528 if (!arm_arch6 && !MEM_P (operands[1]))
5530 rtx t = gen_lowpart (SImode, operands[1]);
5531 rtx tmp = gen_reg_rtx (SImode);
5532 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5533 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
;; Anonymous split implementing the pre-ARMv6 shift-pair form above.
5539 [(set (match_operand:SI 0 "s_register_operand" "")
5540 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
5541 "!TARGET_THUMB2 && !arm_arch6"
5542 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5543 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
5545 operands[2] = gen_lowpart (SImode, operands[1]);
5548 (define_insn "*thumb1_zero_extendhisi2"
5549 [(set (match_operand:SI 0 "register_operand" "=l,l")
5550 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
5555 if (which_alternative == 0 && arm_arch6)
5556 return "uxth\t%0, %1";
5557 if (which_alternative == 0)
5560 mem = XEXP (operands[1], 0);
5562 if (GET_CODE (mem) == CONST)
5563 mem = XEXP (mem, 0);
5565 if (GET_CODE (mem) == PLUS)
5567 rtx a = XEXP (mem, 0);
5569 /* This can happen due to bugs in reload. */
5570 if (REG_P (a) && REGNO (a) == SP_REGNUM)
5573 ops[0] = operands[0];
5576 output_asm_insn ("mov\t%0, %1", ops);
5578 XEXP (mem, 0) = operands[0];
5582 return "ldrh\t%0, %1";
5584 [(set_attr_alternative "length"
5585 [(if_then_else (eq_attr "is_arch6" "yes")
5586 (const_int 2) (const_int 4))
5588 (set_attr "type" "extend,load_byte")]
;; ARM-mode HImode zero-extend for arm_arch4 (but not v6): alternative 0
;; takes a register source (type arlo_shift), alternative 1 loads the
;; halfword from memory (type load_byte, i.e. an ldrh).
5591 (define_insn "*arm_zero_extendhisi2"
5592 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5593 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5594 "TARGET_ARM && arm_arch4 && !arm_arch6"
5598 [(set_attr "type" "arlo_shift,load_byte")
5599 (set_attr "predicable" "yes")]
;; ARMv6+ HImode zero-extend: register alternative can use the single
;; "extend"-type instruction (uxth); memory alternative is a load_byte.
5602 (define_insn "*arm_zero_extendhisi2_v6"
5603 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5604 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5605 "TARGET_ARM && arm_arch6"
5609 [(set_attr "predicable" "yes")
5610 (set_attr "type" "extend,load_byte")]
;; Fused zero-extend-halfword-and-add, matched to a single UXTAH:
;; op0 = op2 + zero_extend(op1).  Predicable in ARM state, but not as a
;; short IT instruction.
5613 (define_insn "*arm_zero_extendhisi2addsi"
5614 [(set (match_operand:SI 0 "s_register_operand" "=r")
5615 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5616 (match_operand:SI 2 "s_register_operand" "r")))]
5618 "uxtah%?\\t%0, %2, %1"
5619 [(set_attr "type" "arlo_shift")
5620 (set_attr "predicable" "yes")
5621 (set_attr "predicable_short_it" "no")]
;; Expand QImode -> SImode zero-extension.
;; Pre-v6 ARM register sources are masked with AND #255 (andsi3 with the
;; low part); the remaining pre-v6 (non-ARM) register path uses a
;; lsl #24 / lsr #24 shift pair.  Memory sources fall through to the
;; ldrb-based insn patterns below.
5624 (define_expand "zero_extendqisi2"
5625 [(set (match_operand:SI 0 "s_register_operand" "")
5626 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
5629 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
5631 emit_insn (gen_andsi3 (operands[0],
5632 gen_lowpart (SImode, operands[1]),
5636 if (!arm_arch6 && !MEM_P (operands[1]))
5638 rtx t = gen_lowpart (SImode, operands[1]);
5639 rtx tmp = gen_reg_rtx (SImode);
5640 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5641 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
5647 [(set (match_operand:SI 0 "s_register_operand" "")
5648 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
5650 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5651 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
5653 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5656 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
5661 (define_insn "*thumb1_zero_extendqisi2"
5662 [(set (match_operand:SI 0 "register_operand" "=l,l")
5663 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
5664 "TARGET_THUMB1 && !arm_arch6"
5668 [(set_attr "length" "4,2")
5669 (set_attr "type" "arlo_shift,load_byte")
5670 (set_attr "pool_range" "*,32")]
5673 (define_insn "*thumb1_zero_extendqisi2_v6"
5674 [(set (match_operand:SI 0 "register_operand" "=l,l")
5675 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
5676 "TARGET_THUMB1 && arm_arch6"
5680 [(set_attr "length" "2")
5681 (set_attr "type" "extend,load_byte")]
;; Pre-v6 ARM QImode zero-extend: register alternative is an 8-byte
;; two-instruction sequence (length 8); memory alternative is a single
;; ldrb (length 4, type load_byte).
5684 (define_insn "*arm_zero_extendqisi2"
5685 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5686 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
5687 "TARGET_ARM && !arm_arch6"
5690 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
5691 [(set_attr "length" "8,4")
5692 (set_attr "type" "arlo_shift,load_byte")
5693 (set_attr "predicable" "yes")]
5696 (define_insn "*arm_zero_extendqisi2_v6"
5697 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5698 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
5699 "TARGET_ARM && arm_arch6"
5702 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
5703 [(set_attr "type" "extend,load_byte")
5704 (set_attr "predicable" "yes")]
;; Fused zero-extend-byte-and-add, matched to a single UXTAB:
;; op0 = op2 + zero_extend(op1).  Not usable inside a short IT block.
5707 (define_insn "*arm_zero_extendqisi2addsi"
5708 [(set (match_operand:SI 0 "s_register_operand" "=r")
5709 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5710 (match_operand:SI 2 "s_register_operand" "r")))]
5712 "uxtab%?\\t%0, %2, %1"
5713 [(set_attr "predicable" "yes")
5714 (set_attr "predicable_short_it" "no")
5715 (set_attr "type" "arlo_shift")]
5719 [(set (match_operand:SI 0 "s_register_operand" "")
5720 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
5721 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5722 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
5723 [(set (match_dup 2) (match_dup 1))
5724 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5729 [(set (match_operand:SI 0 "s_register_operand" "")
5730 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
5731 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5732 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
5733 [(set (match_dup 2) (match_dup 1))
5734 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5740 [(set (match_operand:SI 0 "s_register_operand" "")
5741 (ior_xor:SI (and:SI (ashift:SI
5742 (match_operand:SI 1 "s_register_operand" "")
5743 (match_operand:SI 2 "const_int_operand" ""))
5744 (match_operand:SI 3 "const_int_operand" ""))
5746 (match_operator 5 "subreg_lowpart_operator"
5747 [(match_operand:SI 4 "s_register_operand" "")]))))]
5749 && ((unsigned HOST_WIDE_INT) INTVAL (operands[3])
5750 == (GET_MODE_MASK (GET_MODE (operands[5]))
5751 & (GET_MODE_MASK (GET_MODE (operands[5]))
5752 << (INTVAL (operands[2])))))"
5753 [(set (match_dup 0) (ior_xor:SI (ashift:SI (match_dup 1) (match_dup 2))
5755 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
5756 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
5759 (define_insn "*compareqi_eq0"
5760 [(set (reg:CC_Z CC_REGNUM)
5761 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
5765 [(set_attr "conds" "set")
5766 (set_attr "predicable" "yes")
5767 (set_attr "predicable_short_it" "no")]
;; Expand HImode -> SImode sign-extension.
;; Visible cases: Thumb-1 goes through thumb1_extendhisi2; pre-ARMv4 ARM
;; memory sources go through extendhisi2_mem (no ldrsh before v4);
;; pre-v6 register sources use a lsl #16 / asr #16 shift pair.
5770 (define_expand "extendhisi2"
5771 [(set (match_operand:SI 0 "s_register_operand" "")
5772 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
5777 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
5780 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
5782 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
5786 if (!arm_arch6 && !MEM_P (operands[1]))
5788 rtx t = gen_lowpart (SImode, operands[1]);
5789 rtx tmp = gen_reg_rtx (SImode);
5790 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5791 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
5798 [(set (match_operand:SI 0 "register_operand" "")
5799 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
5800 (clobber (match_scratch:SI 2 ""))])]
5802 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5803 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5805 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5808 ;; We used to have an early-clobber on the scratch register here.
5809 ;; However, there's a bug somewhere in reload which means that this
5810 ;; can be partially ignored during spill allocation if the memory
5811 ;; address also needs reloading; this causes us to die later on when
5812 ;; we try to verify the operands. Fortunately, we don't really need
5813 ;; the early-clobber: we can always use operand 0 if operand 2
5814 ;; overlaps the address.
5815 (define_insn "thumb1_extendhisi2"
5816 [(set (match_operand:SI 0 "register_operand" "=l,l")
5817 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
5818 (clobber (match_scratch:SI 2 "=X,l"))]
5825 if (which_alternative == 0 && !arm_arch6)
5827 if (which_alternative == 0)
5828 return \"sxth\\t%0, %1\";
5830 mem = XEXP (operands[1], 0);
5832 /* This code used to try to use 'V', and fix the address only if it was
5833 offsettable, but this fails for e.g. REG+48 because 48 is outside the
5834 range of QImode offsets, and offsettable_address_p does a QImode
5837 if (GET_CODE (mem) == CONST)
5838 mem = XEXP (mem, 0);
5840 if (GET_CODE (mem) == LABEL_REF)
5841 return \"ldr\\t%0, %1\";
5843 if (GET_CODE (mem) == PLUS)
5845 rtx a = XEXP (mem, 0);
5846 rtx b = XEXP (mem, 1);
5848 if (GET_CODE (a) == LABEL_REF
5850 return \"ldr\\t%0, %1\";
5853 return \"ldrsh\\t%0, %1\";
5861 ops[2] = const0_rtx;
5864 gcc_assert (REG_P (ops[1]));
5866 ops[0] = operands[0];
5867 if (reg_mentioned_p (operands[2], ops[1]))
5870 ops[3] = operands[2];
5871 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
5874 [(set_attr_alternative "length"
5875 [(if_then_else (eq_attr "is_arch6" "yes")
5876 (const_int 2) (const_int 4))
5878 (set_attr "type" "extend,load_byte")
5879 (set_attr "pool_range" "*,1018")]
5882 ;; This pattern will only be used when ldsh is not available
5883 (define_expand "extendhisi2_mem"
5884 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5886 (zero_extend:SI (match_dup 7)))
5887 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
5888 (set (match_operand:SI 0 "" "")
5889 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
5894 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5896 mem1 = change_address (operands[1], QImode, addr);
5897 mem2 = change_address (operands[1], QImode,
5898 plus_constant (Pmode, addr, 1));
5899 operands[0] = gen_lowpart (SImode, operands[0]);
5901 operands[2] = gen_reg_rtx (SImode);
5902 operands[3] = gen_reg_rtx (SImode);
5903 operands[6] = gen_reg_rtx (SImode);
5906 if (BYTES_BIG_ENDIAN)
5908 operands[4] = operands[2];
5909 operands[5] = operands[3];
5913 operands[4] = operands[3];
5914 operands[5] = operands[2];
5920 [(set (match_operand:SI 0 "register_operand" "")
5921 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
5923 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5924 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5926 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
;; ARMv4 (pre-v6) HImode sign-extend: register alternative is a two-insn
;; shift sequence (length 8), memory alternative an ldrsh (length 4).
;; pool_range 256 reflects the short offset range of ldrsh addressing.
5929 (define_insn "*arm_extendhisi2"
5930 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5931 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5932 "TARGET_ARM && arm_arch4 && !arm_arch6"
5936 [(set_attr "length" "8,4")
5937 (set_attr "type" "arlo_shift,load_byte")
5938 (set_attr "predicable" "yes")
5939 (set_attr "pool_range" "*,256")
5940 (set_attr "neg_pool_range" "*,244")]
5943 ;; ??? Check Thumb-2 pool range
5944 (define_insn "*arm_extendhisi2_v6"
5945 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5946 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5947 "TARGET_32BIT && arm_arch6"
5951 [(set_attr "type" "extend,load_byte")
5952 (set_attr "predicable" "yes")
5953 (set_attr "predicable_short_it" "no")
5954 (set_attr "pool_range" "*,256")
5955 (set_attr "neg_pool_range" "*,244")]
5958 (define_insn "*arm_extendhisi2addsi"
5959 [(set (match_operand:SI 0 "s_register_operand" "=r")
5960 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5961 (match_operand:SI 2 "s_register_operand" "r")))]
5963 "sxtah%?\\t%0, %2, %1"
5966 (define_expand "extendqihi2"
5968 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
5970 (set (match_operand:HI 0 "s_register_operand" "")
5971 (ashiftrt:SI (match_dup 2)
5976 if (arm_arch4 && MEM_P (operands[1]))
5978 emit_insn (gen_rtx_SET (VOIDmode,
5980 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
5983 if (!s_register_operand (operands[1], QImode))
5984 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5985 operands[0] = gen_lowpart (SImode, operands[0]);
5986 operands[1] = gen_lowpart (SImode, operands[1]);
5987 operands[2] = gen_reg_rtx (SImode);
;; QImode -> HImode sign-extending load for ARMv4+: a single ldrsb from a
;; 'Uq'-constrained (ldrsb-addressable) memory operand.  The short
;; pool_range/neg_pool_range match ldrsb's limited offset field.
5991 (define_insn "*arm_extendqihi_insn"
5992 [(set (match_operand:HI 0 "s_register_operand" "=r")
5993 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
5994 "TARGET_ARM && arm_arch4"
5995 "ldr%(sb%)\\t%0, %1"
5996 [(set_attr "type" "load_byte")
5997 (set_attr "predicable" "yes")
5998 (set_attr "pool_range" "256")
5999 (set_attr "neg_pool_range" "244")]
;; Expand QImode -> SImode sign-extension.
;; Pre-v4 memory sources are first copied into a register (no ldrsb);
;; pre-v6 register sources are widened with a lsl #24 / asr #24 pair.
6002 (define_expand "extendqisi2"
6003 [(set (match_operand:SI 0 "s_register_operand" "")
6004 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")))]
6007 if (!arm_arch4 && MEM_P (operands[1]))
6008 operands[1] = copy_to_mode_reg (QImode, operands[1]);
6010 if (!arm_arch6 && !MEM_P (operands[1]))
6012 rtx t = gen_lowpart (SImode, operands[1]);
6013 rtx tmp = gen_reg_rtx (SImode);
6014 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
6015 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
6021 [(set (match_operand:SI 0 "register_operand" "")
6022 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
6024 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
6025 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
6027 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
6030 (define_insn "*arm_extendqisi"
6031 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
6032 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
6033 "TARGET_ARM && arm_arch4 && !arm_arch6"
6037 [(set_attr "length" "8,4")
6038 (set_attr "type" "arlo_shift,load_byte")
6039 (set_attr "predicable" "yes")
6040 (set_attr "pool_range" "*,256")
6041 (set_attr "neg_pool_range" "*,244")]
6044 (define_insn "*arm_extendqisi_v6"
6045 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
6047 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
6048 "TARGET_ARM && arm_arch6"
6052 [(set_attr "type" "extend,load_byte")
6053 (set_attr "predicable" "yes")
6054 (set_attr "pool_range" "*,256")
6055 (set_attr "neg_pool_range" "*,244")]
;; Fused sign-extend-byte-and-add, matched to a single SXTAB:
;; op0 = op2 + sign_extend(op1).  Not usable inside a short IT block.
6058 (define_insn "*arm_extendqisi2addsi"
6059 [(set (match_operand:SI 0 "s_register_operand" "=r")
6060 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
6061 (match_operand:SI 2 "s_register_operand" "r")))]
6063 "sxtab%?\\t%0, %2, %1"
6064 [(set_attr "type" "arlo_shift")
6065 (set_attr "predicable" "yes")
6066 (set_attr "predicable_short_it" "no")]
6070 [(set (match_operand:SI 0 "register_operand" "")
6071 (sign_extend:SI (match_operand:QI 1 "memory_operand" "")))]
6072 "TARGET_THUMB1 && reload_completed"
6073 [(set (match_dup 0) (match_dup 2))
6074 (set (match_dup 0) (sign_extend:SI (match_dup 3)))]
6076 rtx addr = XEXP (operands[1], 0);
6078 if (GET_CODE (addr) == CONST)
6079 addr = XEXP (addr, 0);
6081 if (GET_CODE (addr) == PLUS
6082 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
6083 /* No split necessary. */
6086 if (GET_CODE (addr) == PLUS
6087 && !REG_P (XEXP (addr, 0)) && !REG_P (XEXP (addr, 1)))
6090 if (reg_overlap_mentioned_p (operands[0], addr))
6092 rtx t = gen_lowpart (QImode, operands[0]);
6093 emit_move_insn (t, operands[1]);
6094 emit_insn (gen_thumb1_extendqisi2 (operands[0], t));
6100 addr = gen_rtx_PLUS (Pmode, addr, operands[0]);
6101 operands[2] = const0_rtx;
6103 else if (GET_CODE (addr) != PLUS)
6105 else if (REG_P (XEXP (addr, 0)))
6107 operands[2] = XEXP (addr, 1);
6108 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 0), operands[0]);
6112 operands[2] = XEXP (addr, 0);
6113 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 1), operands[0]);
6116 operands[3] = change_address (operands[1], QImode, addr);
6120 [(set (match_operand:SI 0 "register_operand" "")
6121 (plus:SI (match_dup 0) (match_operand 1 "const_int_operand")))
6122 (set (match_operand:SI 2 "register_operand" "") (const_int 0))
6123 (set (match_operand:SI 3 "register_operand" "")
6124 (sign_extend:SI (match_operand:QI 4 "memory_operand" "")))]
6126 && GET_CODE (XEXP (operands[4], 0)) == PLUS
6127 && rtx_equal_p (operands[0], XEXP (XEXP (operands[4], 0), 0))
6128 && rtx_equal_p (operands[2], XEXP (XEXP (operands[4], 0), 1))
6129 && (peep2_reg_dead_p (3, operands[0])
6130 || rtx_equal_p (operands[0], operands[3]))
6131 && (peep2_reg_dead_p (3, operands[2])
6132 || rtx_equal_p (operands[2], operands[3]))"
6133 [(set (match_dup 2) (match_dup 1))
6134 (set (match_dup 3) (sign_extend:SI (match_dup 4)))]
6136 rtx addr = gen_rtx_PLUS (Pmode, operands[0], operands[2]);
6137 operands[4] = change_address (operands[4], QImode, addr);
6140 (define_insn "thumb1_extendqisi2"
6141 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
6142 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
6147 if (which_alternative == 0 && arm_arch6)
6148 return "sxtb\\t%0, %1";
6149 if (which_alternative == 0)
6152 addr = XEXP (operands[1], 0);
6153 if (GET_CODE (addr) == PLUS
6154 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
6155 return "ldrsb\\t%0, %1";
6159 [(set_attr_alternative "length"
6160 [(if_then_else (eq_attr "is_arch6" "yes")
6161 (const_int 2) (const_int 4))
6163 (if_then_else (eq_attr "is_arch6" "yes")
6164 (const_int 4) (const_int 6))])
6165 (set_attr "type" "extend,load_byte,load_byte")]
;; SFmode -> DFmode float extension; only enabled when hardware FP with
;; double-precision support is available (excludes single-only VFP).
6168 (define_expand "extendsfdf2"
6169 [(set (match_operand:DF 0 "s_register_operand" "")
6170 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
6171 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6175 /* HFmode -> DFmode conversions have to go through SFmode. */
;; There is no direct HF->DF instruction path here: the value is first
;; converted HF->SF, then SF->DF, and finally moved into the DF operand.
6176 (define_expand "extendhfdf2"
6177 [(set (match_operand:DF 0 "general_operand" "")
6178 (float_extend:DF (match_operand:HF 1 "general_operand" "")))]
6183 op1 = convert_to_mode (SFmode, operands[1], 0);
6184 op1 = convert_to_mode (DFmode, op1, 0);
6185 emit_insn (gen_movdf (operands[0], op1));
6190 ;; Move insns (including loads and stores)
6192 ;; XXX Just some ideas about movti.
6193 ;; I don't think these are a good idea on the arm, there just aren't enough
6195 ;;(define_expand "loadti"
6196 ;; [(set (match_operand:TI 0 "s_register_operand" "")
6197 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
6200 ;;(define_expand "storeti"
6201 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
6202 ;; (match_operand:TI 1 "s_register_operand" ""))]
6205 ;;(define_expand "movti"
6206 ;; [(set (match_operand:TI 0 "general_operand" "")
6207 ;; (match_operand:TI 1 "general_operand" ""))]
6213 ;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
6214 ;; operands[1] = copy_to_reg (operands[1]);
6215 ;; if (MEM_P (operands[0]))
6216 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
6217 ;; else if (MEM_P (operands[1]))
6218 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
6222 ;; emit_insn (insn);
6226 ;; Recognize garbage generated above.
6229 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
6230 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
6234 ;; register mem = (which_alternative < 3);
6235 ;; register const char *template;
6237 ;; operands[mem] = XEXP (operands[mem], 0);
6238 ;; switch (which_alternative)
6240 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
6241 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
6242 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
6243 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
6244 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
6245 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
6247 ;; output_asm_insn (template, operands);
;; DImode move expander.  Before reload (can_create_pseudo_p), a source
;; being stored to a non-register destination is forced into a register
;; so no alternative has to handle mem-to-mem or constant-to-mem moves.
6251 (define_expand "movdi"
6252 [(set (match_operand:DI 0 "general_operand" "")
6253 (match_operand:DI 1 "general_operand" ""))]
6256 if (can_create_pseudo_p ())
6258 if (!REG_P (operands[0]))
6259 operands[1] = force_reg (DImode, operands[1]);
;; Core-register DImode move (disabled when hard-float VFP handles DI).
;; One operand must be a register.  The memory alternatives defer to
;; output_move_double; Da/Db/Dc constraints select increasingly expensive
;; constant materializations (lengths 8/12/16).
;; NOTE(review): the asm-output C body is partially elided in this extract.
6264 (define_insn "*arm_movdi"
6265 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, q, m")
6266 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,q"))]
6268 && !(TARGET_HARD_FLOAT && TARGET_VFP)
6270 && ( register_operand (operands[0], DImode)
6271 || register_operand (operands[1], DImode))"
6273 switch (which_alternative)
6280 return output_move_double (operands, true, NULL);
6283 [(set_attr "length" "8,12,16,8,8")
6284 (set_attr "type" "*,*,*,load2,store2")
6285 (set_attr "arm_pool_range" "*,*,*,1020,*")
6286 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6287 (set_attr "thumb2_pool_range" "*,*,*,4094,*")
6288 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6292 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
6293 (match_operand:ANY64 1 "const_double_operand" ""))]
6296 && (arm_const_double_inline_cost (operands[1])
6297 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
6300 arm_split_constant (SET, SImode, curr_insn,
6301 INTVAL (gen_lowpart (SImode, operands[1])),
6302 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
6303 arm_split_constant (SET, SImode, curr_insn,
6304 INTVAL (gen_highpart_mode (SImode,
6305 GET_MODE (operands[0]),
6307 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
6312 ; If optimizing for size, or if we have load delay slots, then
6313 ; we want to split the constant into two separate operations.
6314 ; In both cases this may split a trivial part into a single data op
6315 ; leaving a single complex constant to load. We can also get longer
6316 ; offsets in a LDR which means we get better chances of sharing the pool
6317 ; entries. Finally, we can normally do a better job of scheduling
6318 ; LDR instructions than we can with LDM.
6319 ; This pattern will only match if the one above did not.
6321 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
6322 (match_operand:ANY64 1 "const_double_operand" ""))]
6323 "TARGET_ARM && reload_completed
6324 && arm_const_double_by_parts (operands[1])"
6325 [(set (match_dup 0) (match_dup 1))
6326 (set (match_dup 2) (match_dup 3))]
6328 operands[2] = gen_highpart (SImode, operands[0]);
6329 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
6331 operands[0] = gen_lowpart (SImode, operands[0]);
6332 operands[1] = gen_lowpart (SImode, operands[1]);
6337 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
6338 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
6339 "TARGET_EITHER && reload_completed"
6340 [(set (match_dup 0) (match_dup 1))
6341 (set (match_dup 2) (match_dup 3))]
6343 operands[2] = gen_highpart (SImode, operands[0]);
6344 operands[3] = gen_highpart (SImode, operands[1]);
6345 operands[0] = gen_lowpart (SImode, operands[0]);
6346 operands[1] = gen_lowpart (SImode, operands[1]);
6348 /* Handle a partial overlap. */
6349 if (rtx_equal_p (operands[0], operands[3]))
6351 rtx tmp0 = operands[0];
6352 rtx tmp1 = operands[1];
6354 operands[0] = operands[2];
6355 operands[1] = operands[3];
6362 ;; We can't actually do base+index doubleword loads if the index and
6363 ;; destination overlap. Split here so that we at least have chance to
6366 [(set (match_operand:DI 0 "s_register_operand" "")
6367 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
6368 (match_operand:SI 2 "s_register_operand" ""))))]
6370 && reg_overlap_mentioned_p (operands[0], operands[1])
6371 && reg_overlap_mentioned_p (operands[0], operands[2])"
6373 (plus:SI (match_dup 1)
6376 (mem:DI (match_dup 4)))]
6378 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
6382 ;;; ??? This should have alternatives for constants.
6383 ;;; ??? This was originally identical to the movdf_insn pattern.
6384 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
6385 ;;; thumb_reorg with a memory reference.
6386 (define_insn "*thumb1_movdi_insn"
6387 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
6388 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
6390 && ( register_operand (operands[0], DImode)
6391 || register_operand (operands[1], DImode))"
6394 switch (which_alternative)
6398 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6399 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
6400 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
6402 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
6404 operands[1] = GEN_INT (- INTVAL (operands[1]));
6405 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
6407 return \"ldmia\\t%1, {%0, %H0}\";
6409 return \"stmia\\t%0, {%1, %H1}\";
6411 return thumb_load_double_from_address (operands);
6413 operands[2] = gen_rtx_MEM (SImode,
6414 plus_constant (Pmode, XEXP (operands[0], 0), 4));
6415 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
6418 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6419 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
6420 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
6423 [(set_attr "length" "4,4,6,2,2,6,4,4")
6424 (set_attr "type" "*,mov_reg,*,load2,store2,load2,store2,mov_reg")
6425 (set_attr "pool_range" "*,*,*,*,*,1018,*,*")]
6428 (define_expand "movsi"
6429 [(set (match_operand:SI 0 "general_operand" "")
6430 (match_operand:SI 1 "general_operand" ""))]
6434 rtx base, offset, tmp;
6438 /* Everything except mem = const or mem = mem can be done easily. */
6439 if (MEM_P (operands[0]))
6440 operands[1] = force_reg (SImode, operands[1]);
6441 if (arm_general_register_operand (operands[0], SImode)
6442 && CONST_INT_P (operands[1])
6443 && !(const_ok_for_arm (INTVAL (operands[1]))
6444 || const_ok_for_arm (~INTVAL (operands[1]))))
6446 arm_split_constant (SET, SImode, NULL_RTX,
6447 INTVAL (operands[1]), operands[0], NULL_RTX,
6448 optimize && can_create_pseudo_p ());
6452 else /* TARGET_THUMB1... */
6454 if (can_create_pseudo_p ())
6456 if (!REG_P (operands[0]))
6457 operands[1] = force_reg (SImode, operands[1]);
6461 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
6463 split_const (operands[1], &base, &offset);
6464 if (GET_CODE (base) == SYMBOL_REF
6465 && !offset_within_block_p (base, INTVAL (offset)))
6467 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
6468 emit_move_insn (tmp, base);
6469 emit_insn (gen_addsi3 (operands[0], tmp, offset));
6474 /* Recognize the case where operand[1] is a reference to thread-local
6475 data and load its address to a register. */
6476 if (arm_tls_referenced_p (operands[1]))
6478 rtx tmp = operands[1];
6481 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
6483 addend = XEXP (XEXP (tmp, 0), 1);
6484 tmp = XEXP (XEXP (tmp, 0), 0);
6487 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
6488 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
6490 tmp = legitimize_tls_address (tmp,
6491 !can_create_pseudo_p () ? operands[0] : 0);
6494 tmp = gen_rtx_PLUS (SImode, tmp, addend);
6495 tmp = force_operand (tmp, operands[0]);
6500 && (CONSTANT_P (operands[1])
6501 || symbol_mentioned_p (operands[1])
6502 || label_mentioned_p (operands[1])))
6503 operands[1] = legitimize_pic_address (operands[1], SImode,
6504 (!can_create_pseudo_p ()
6511 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
6512 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
6513 ;; so this does not matter.
;; MOVT: writes the upper 16 bits of op0 with #:upper16: of the constant,
;; keeping the low half (operand 1 is tied to operand 0 via "0").  See the
;; comment above about ARM's inverted LO_SUM/HIGH convention.
6514 (define_insn "*arm_movt"
6515 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
6516 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
6517 (match_operand:SI 2 "general_operand" "i")))]
6519 "movt%?\t%0, #:upper16:%c2"
6520 [(set_attr "predicable" "yes")
6521 (set_attr "predicable_short_it" "no")
6522 (set_attr "length" "4")]
;; Basic ARM-state SImode move (excluded when iWMMXt or hard-float VFP
;; provide their own movsi patterns).  Alternatives cover reg-reg copy,
;; mov/mvn-encodable immediates (I/K), movw-encodable immediates (j),
;; literal-pool/memory loads, and stores; one operand must be a register.
6525 (define_insn "*arm_movsi_insn"
6526 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
6527 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
6528 "TARGET_ARM && ! TARGET_IWMMXT
6529 && !(TARGET_HARD_FLOAT && TARGET_VFP)
6530 && ( register_operand (operands[0], SImode)
6531 || register_operand (operands[1], SImode))"
6539 [(set_attr "type" "mov_reg,mov_imm,mvn_imm,mov_imm,load1,store1")
6540 (set_attr "predicable" "yes")
6541 (set_attr "pool_range" "*,*,*,*,4096,*")
6542 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
6546 [(set (match_operand:SI 0 "arm_general_register_operand" "")
6547 (match_operand:SI 1 "const_int_operand" ""))]
6549 && (!(const_ok_for_arm (INTVAL (operands[1]))
6550 || const_ok_for_arm (~INTVAL (operands[1]))))"
6551 [(clobber (const_int 0))]
6553 arm_split_constant (SET, SImode, NULL_RTX,
6554 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
6559 ;; Split symbol_refs at the later stage (after cprop), instead of generating
6560 ;; movt/movw pair directly at expand. Otherwise corresponding high_sum
6561 ;; and lo_sum would be merged back into memory load at cprop. However,
6562 ;; if the default is to prefer movt/movw rather than a load from the constant
6563 ;; pool, the performance is better.
6565 [(set (match_operand:SI 0 "arm_general_register_operand" "")
6566 (match_operand:SI 1 "general_operand" ""))]
6568 && TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
6569 && !flag_pic && !target_word_relocations
6570 && !arm_tls_referenced_p (operands[1])"
6571 [(clobber (const_int 0))]
6573 arm_emit_movpair (operands[0], operands[1]);
6577 (define_insn "*thumb1_movsi_insn"
6578 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*l*h*k")
6579 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*l*h*k"))]
6581 && ( register_operand (operands[0], SImode)
6582 || register_operand (operands[1], SImode))"
6593 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
6594 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
6595 (set_attr "pool_range" "*,*,*,*,*,*,1018,*,*")
6596 (set_attr "conds" "set,clob,*,*,nocond,nocond,nocond,nocond,nocond")])
6599 [(set (match_operand:SI 0 "register_operand" "")
6600 (match_operand:SI 1 "const_int_operand" ""))]
6601 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
6602 [(set (match_dup 2) (match_dup 1))
6603 (set (match_dup 0) (neg:SI (match_dup 2)))]
6606 operands[1] = GEN_INT (- INTVAL (operands[1]));
6607 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
6612 [(set (match_operand:SI 0 "register_operand" "")
6613 (match_operand:SI 1 "const_int_operand" ""))]
6614 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
6615 [(set (match_dup 2) (match_dup 1))
6616 (set (match_dup 0) (ashift:SI (match_dup 2) (match_dup 3)))]
6619 unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
6620 unsigned HOST_WIDE_INT mask = 0xff;
6623 for (i = 0; i < 25; i++)
6624 if ((val & (mask << i)) == val)
6627 /* Don't split if the shift is zero. */
6631 operands[1] = GEN_INT (val >> i);
6632 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
6633 operands[3] = GEN_INT (i);
6637 ;; For thumb1 split imm move [256-510] into mov [1-255] and add #255
6639 [(set (match_operand:SI 0 "register_operand" "")
6640 (match_operand:SI 1 "const_int_operand" ""))]
6641 "TARGET_THUMB1 && satisfies_constraint_Pe (operands[1])"
6642 [(set (match_dup 2) (match_dup 1))
6643 (set (match_dup 0) (plus:SI (match_dup 2) (match_dup 3)))]
6646 operands[1] = GEN_INT (INTVAL (operands[1]) - 255);
6647 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
6648 operands[3] = GEN_INT (255);
6652 ;; When generating pic, we need to load the symbol offset into a register.
6653 ;; So that the optimizer does not confuse this with a normal symbol load
6654 ;; we use an unspec. The offset will be loaded from a constant pool entry,
6655 ;; since that is the only type of relocation we can use.
6657 ;; Wrap calculation of the whole PIC address in a single pattern for the
6658 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
6659 ;; a PIC address involves two loads from memory, so we want to CSE it
6660 ;; as often as possible.
6661 ;; This pattern will be split into one of the pic_load_addr_* patterns
6662 ;; and a move after GCSE optimizations.
6664 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
6665 (define_expand "calculate_pic_address"
6666 [(set (match_operand:SI 0 "register_operand" "")
6667 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
6668 (unspec:SI [(match_operand:SI 2 "" "")]
6673 ;; Split calculate_pic_address into pic_load_addr_* and a move.
6675 [(set (match_operand:SI 0 "register_operand" "")
6676 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
6677 (unspec:SI [(match_operand:SI 2 "" "")]
6680 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
6681 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
6682 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
6685 ;; operand1 is the memory address to go into
6686 ;; pic_load_addr_32bit.
6687 ;; operand2 is the PIC label to be emitted
6688 ;; from pic_add_dot_plus_eight.
6689 ;; We do this to allow hoisting of the entire insn.
;; Load a full PIC address in one insn so GCSE/hoisting can move the whole
;; computation; after reload it splits into a constant-pool load of the
;; symbol (UNSPEC_PIC_SYM) followed by the pc-relative add (UNSPEC_PIC_BASE).
;; Operand 3 is the pc read-ahead correction: 4 in Thumb state, 8 in ARM.
;; Alternatives cover ARM ("a"), Thumb-2 ("t2") and Thumb-1 ("t1"), hence
;; the per-alternative lengths 8/6/4 and pool ranges below.
;; NOTE(review): fragment — the insn condition and template lines (orig.
;; 6695-6696) and the closing paren were elided here; confirm against
;; upstream arm.md before editing.
6690 (define_insn_and_split "pic_load_addr_unified"
6691 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
6692 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
6693 (match_operand:SI 2 "" "")]
6694 UNSPEC_PIC_UNIFIED))]
6697 "&& reload_completed"
6698 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
6699 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
6700 (match_dup 2)] UNSPEC_PIC_BASE))]
6701 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
6702 [(set_attr "type" "load1,load1,load1")
6703 (set_attr "pool_range" "4096,4094,1022")
6704 (set_attr "neg_pool_range" "4084,0,0")
6705 (set_attr "arch" "a,t2,t1")
6706 (set_attr "length" "8,6,4")]
6709 ;; The rather odd constraints on the following are to force reload to leave
6710 ;; the insn alone, and to force the minipool generation pass to then move
6711 ;; the GOT symbol to memory.
;; Load the address of a PIC symbol from the constant pool (UNSPEC_PIC_SYM)
;; on 32-bit (ARM/Thumb-2) targets.  pool_range/neg_pool_range are selected
;; per-state via is_thumb below.
;; NOTE(review): fragment — the asm template (orig. 6717) and the const_int
;; arms of the two if_then_else attributes (orig. 6721-6722, 6725-6726) were
;; elided here; confirm against upstream arm.md.
6713 (define_insn "pic_load_addr_32bit"
6714 [(set (match_operand:SI 0 "s_register_operand" "=r")
6715 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6716 "TARGET_32BIT && flag_pic"
6718 [(set_attr "type" "load1")
6719 (set (attr "pool_range")
6720 (if_then_else (eq_attr "is_thumb" "no")
6723 (set (attr "neg_pool_range")
6724 (if_then_else (eq_attr "is_thumb" "no")
;; Thumb-1 variant of the PIC symbol-address load: destination restricted to
;; low registers ("l"), forward-only pool range of 1018 bytes.
;; NOTE(review): fragment — the asm template (orig. 6733) and closing paren
;; were elided here; confirm against upstream arm.md.
6729 (define_insn "pic_load_addr_thumb1"
6730 [(set (match_operand:SI 0 "s_register_operand" "=l")
6731 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6732 "TARGET_THUMB1 && flag_pic"
6734 [(set_attr "type" "load1")
6735 (set (attr "pool_range") (const_int 1018))]
;; Finish a Thumb PIC address calculation: emit the local label LPIC<N>
;; (N = operand 2) at this point and add pc to the partial address already
;; in operand 0 (tied to operand 1 via the "0" constraint).  The "four" in
;; the name reflects the Thumb pc read-ahead.
;; NOTE(review): fragment — the UNSPEC operand, insn condition and opening
;; of the output C block (orig. 6741, 6743-6745) were elided here.
6738 (define_insn "pic_add_dot_plus_four"
6739 [(set (match_operand:SI 0 "register_operand" "=r")
6740 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
6742 (match_operand 2 "" "")]
6746 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6747 INTVAL (operands[2]));
6748 return \"add\\t%0, %|pc\";
6750 [(set_attr "length" "2")]
;; ARM-state counterpart of pic_add_dot_plus_four: emit label LPIC<N> and
;; compute %0 = pc + %1 (pc reads ahead by 8 in ARM state, hence "eight").
;; Predicable, so it can be conditionalized by the ccfsm pass.
;; NOTE(review): fragment — the UNSPEC operand, insn condition and opening
;; of the output C block (orig. 6756, 6758-6760) were elided here.
6753 (define_insn "pic_add_dot_plus_eight"
6754 [(set (match_operand:SI 0 "register_operand" "=r")
6755 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6757 (match_operand 2 "" "")]
6761 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6762 INTVAL (operands[2]));
6763 return \"add%?\\t%0, %|pc, %1\";
6765 [(set_attr "predicable" "yes")]
;; Fused form of pic_add_dot_plus_eight followed by a load: emit label
;; LPIC<N> and load %0 from [pc + %1] in a single ldr.  Produced by the
;; peephole below for TLS/PIC references to local variables.
;; NOTE(review): fragment — the UNSPEC operand, insn condition and opening
;; of the output C block (orig. 6771, 6773-6775) were elided here.
6768 (define_insn "tls_load_dot_plus_eight"
6769 [(set (match_operand:SI 0 "register_operand" "=r")
6770 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6772 (match_operand 2 "" "")]
6776 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6777 INTVAL (operands[2]));
6778 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
6780 [(set_attr "predicable" "yes")]
6783 ;; PIC references to local variables can generate pic_add_dot_plus_eight
6784 ;; followed by a load. These sequences can be crunched down to
6785 ;; tls_load_dot_plus_eight by a peephole.
6788 [(set (match_operand:SI 0 "register_operand" "")
6789 (unspec:SI [(match_operand:SI 3 "register_operand" "")
6791 (match_operand 1 "" "")]
6793 (set (match_operand:SI 2 "arm_general_register_operand" "")
6794 (mem:SI (match_dup 0)))]
6795 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
6797 (mem:SI (unspec:SI [(match_dup 3)
;; VxWorks RTP PIC: load a value at a PIC offset (UNSPEC_PIC_OFFSET) from
;; the base register in operand 1 with a single register+register ldr.
;; Only enabled for ARM state under -fpic on VxWorks RTP.
6804 (define_insn "pic_offset_arm"
6805 [(set (match_operand:SI 0 "register_operand" "=r")
6806 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
6807 (unspec:SI [(match_operand:SI 2 "" "X")]
6808 UNSPEC_PIC_OFFSET))))]
6809 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
6810 "ldr%?\\t%0, [%1,%2]"
6811 [(set_attr "type" "load1")]
;; Re-establish the PIC register on return from a builtin setjmp.  r3 is
;; used as the scratch (1UL << 3) because it is clobbered by set/longjmp
;; anyway, per the comment below.
;; NOTE(review): fragment — the expander condition and surrounding C block
;; lines (orig. 6816-6818, 6820, 6823-) were elided here.
6814 (define_expand "builtin_setjmp_receiver"
6815 [(label_ref (match_operand 0 "" ""))]
6819 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
6821 if (arm_pic_register != INVALID_REGNUM)
6822 arm_load_pic_register (1UL << 3);
6826 ;; If copying one reg to another we can set the condition codes according to
6827 ;; its value. Such a move is common after a return from subroutine and the
6828 ;; result is being tested against zero.
6830 (define_insn "*movsi_compare0"
6831 [(set (reg:CC CC_REGNUM)
6832 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
6834 (set (match_operand:SI 0 "s_register_operand" "=r,r")
6840 [(set_attr "conds" "set")
6841 (set_attr "type" "arlo_imm,arlo_imm")]
6844 ;; Subroutine to store a half word from a register into memory.
6845 ;; Operand 0 is the source register (HImode)
6846 ;; Operand 1 is the destination address in a register (SImode)
6848 ;; In both this routine and the next, we must be careful not to spill
6849 ;; a memory address of reg+large_const into a separate PLUS insn, since this
6850 ;; can generate unrecognizable rtl.
;; Expand an HImode store as two QImode stores (low byte at offset 0, high
;; byte at offset 1 — little-endian layout).  A reg+non-constant address is
;; forced into a register first so adjust_address cannot create
;; unrecognizable reg+large_const RTL (see comment above this pattern).
;; operands[2] holds the shifted-down high byte in an SImode temporary.
;; NOTE(review): fragment — orig. lines 6856, 6860-6862, 6866, 6868, 6870
;; (set of match_dup 2, condition, part of the address check) were elided.
6852 (define_expand "storehi"
6853 [;; store the low byte
6854 (set (match_operand 1 "" "") (match_dup 3))
6855 ;; extract the high byte
6857 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6858 ;; store the high byte
6859 (set (match_dup 4) (match_dup 5))]
6863 rtx op1 = operands[1];
6864 rtx addr = XEXP (op1, 0);
6865 enum rtx_code code = GET_CODE (addr);
6867 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6869 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
6871 operands[4] = adjust_address (op1, QImode, 1);
6872 operands[1] = adjust_address (operands[1], QImode, 0);
6873 operands[3] = gen_lowpart (QImode, operands[0]);
6874 operands[0] = gen_lowpart (SImode, operands[0]);
6875 operands[2] = gen_reg_rtx (SImode);
6876 operands[5] = gen_lowpart (QImode, operands[2]);
;; Big-endian counterpart of storehi: the byte at offset 1 (operand 4) is
;; written first and the low-address byte last; operand setup otherwise
;; mirrors storehi above.
;; NOTE(review): fragment — orig. lines 6882, 6885-6887, 6891, 6893, 6895
;; were elided; confirm against upstream arm.md.
6880 (define_expand "storehi_bigend"
6881 [(set (match_dup 4) (match_dup 3))
6883 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6884 (set (match_operand 1 "" "") (match_dup 5))]
6888 rtx op1 = operands[1];
6889 rtx addr = XEXP (op1, 0);
6890 enum rtx_code code = GET_CODE (addr);
6892 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6894 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
6896 operands[4] = adjust_address (op1, QImode, 1);
6897 operands[1] = adjust_address (operands[1], QImode, 0);
6898 operands[3] = gen_lowpart (QImode, operands[0]);
6899 operands[0] = gen_lowpart (SImode, operands[0]);
6900 operands[2] = gen_reg_rtx (SImode);
6901 operands[5] = gen_lowpart (QImode, operands[2]);
6905 ;; Subroutine to store a half word integer constant into memory.
;; Store an HImode integer CONSTANT to memory as two byte stores.  The two
;; byte values are materialized in SImode registers; when both bytes of the
;; constant are equal the same register is reused for both stores (the
;; "(value & 255) == ((value >> 8) & 255)" checks).  Byte order of the two
;; movsi's depends on BYTES_BIG_ENDIAN.
;; NOTE(review): fragment — orig. lines 6910-6912, 6917, 6919, 6921, 6924,
;; 6928-6929, 6932-6935, 6939-6940, 6943-6945 (condition, else-branches,
;; closing braces) were elided; confirm against upstream arm.md.
6906 (define_expand "storeinthi"
6907 [(set (match_operand 0 "" "")
6908 (match_operand 1 "" ""))
6909 (set (match_dup 3) (match_dup 2))]
6913 HOST_WIDE_INT value = INTVAL (operands[1]);
6914 rtx addr = XEXP (operands[0], 0);
6915 rtx op0 = operands[0];
6916 enum rtx_code code = GET_CODE (addr);
6918 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6920 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
6922 operands[1] = gen_reg_rtx (SImode);
6923 if (BYTES_BIG_ENDIAN)
6925 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
6926 if ((value & 255) == ((value >> 8) & 255))
6927 operands[2] = operands[1];
6930 operands[2] = gen_reg_rtx (SImode);
6931 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
6936 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
6937 if ((value & 255) == ((value >> 8) & 255))
6938 operands[2] = operands[1];
6941 operands[2] = gen_reg_rtx (SImode);
6942 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
6946 operands[3] = adjust_address (op0, QImode, 1);
6947 operands[0] = adjust_address (operands[0], QImode, 0);
6948 operands[2] = gen_lowpart (QImode, operands[2]);
6949 operands[1] = gen_lowpart (QImode, operands[1]);
;; Single-instruction HImode store (strh), available when the architecture
;; has halfword loads/stores (TARGET_32BIT && arm_arch4).  The source is
;; forced into a register first since strh cannot take an immediate.
;; NOTE(review): closing lines of the C block were elided here.
6953 (define_expand "storehi_single_op"
6954 [(set (match_operand:HI 0 "memory_operand" "")
6955 (match_operand:HI 1 "general_operand" ""))]
6956 "TARGET_32BIT && arm_arch4"
6958 if (!s_register_operand (operands[1], HImode))
6959 operands[1] = copy_to_mode_reg (HImode, operands[1]);
6963 (define_expand "movhi"
6964 [(set (match_operand:HI 0 "general_operand" "")
6965 (match_operand:HI 1 "general_operand" ""))]
6970 if (can_create_pseudo_p ())
6972 if (MEM_P (operands[0]))
6976 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
6979 if (CONST_INT_P (operands[1]))
6980 emit_insn (gen_storeinthi (operands[0], operands[1]));
6983 if (MEM_P (operands[1]))
6984 operands[1] = force_reg (HImode, operands[1]);
6985 if (BYTES_BIG_ENDIAN)
6986 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
6988 emit_insn (gen_storehi (operands[1], operands[0]));
6992 /* Sign extend a constant, and keep it in an SImode reg. */
6993 else if (CONST_INT_P (operands[1]))
6995 rtx reg = gen_reg_rtx (SImode);
6996 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6998 /* If the constant is already valid, leave it alone. */
6999 if (!const_ok_for_arm (val))
7001 /* If setting all the top bits will make the constant
7002 loadable in a single instruction, then set them.
7003 Otherwise, sign extend the number. */
7005 if (const_ok_for_arm (~(val | ~0xffff)))
7007 else if (val & 0x8000)
7011 emit_insn (gen_movsi (reg, GEN_INT (val)));
7012 operands[1] = gen_lowpart (HImode, reg);
7014 else if (arm_arch4 && optimize && can_create_pseudo_p ()
7015 && MEM_P (operands[1]))
7017 rtx reg = gen_reg_rtx (SImode);
7019 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
7020 operands[1] = gen_lowpart (HImode, reg);
7022 else if (!arm_arch4)
7024 if (MEM_P (operands[1]))
7027 rtx offset = const0_rtx;
7028 rtx reg = gen_reg_rtx (SImode);
7030 if ((REG_P (base = XEXP (operands[1], 0))
7031 || (GET_CODE (base) == PLUS
7032 && (CONST_INT_P (offset = XEXP (base, 1)))
7033 && ((INTVAL(offset) & 1) != 1)
7034 && REG_P (base = XEXP (base, 0))))
7035 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
7039 new_rtx = widen_memory_access (operands[1], SImode,
7040 ((INTVAL (offset) & ~3)
7041 - INTVAL (offset)));
7042 emit_insn (gen_movsi (reg, new_rtx));
7043 if (((INTVAL (offset) & 2) != 0)
7044 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
7046 rtx reg2 = gen_reg_rtx (SImode);
7048 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
7053 emit_insn (gen_movhi_bytes (reg, operands[1]));
7055 operands[1] = gen_lowpart (HImode, reg);
7059 /* Handle loading a large integer during reload. */
7060 else if (CONST_INT_P (operands[1])
7061 && !const_ok_for_arm (INTVAL (operands[1]))
7062 && !const_ok_for_arm (~INTVAL (operands[1])))
7064 /* Writing a constant to memory needs a scratch, which should
7065 be handled with SECONDARY_RELOADs. */
7066 gcc_assert (REG_P (operands[0]));
7068 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
7069 emit_insn (gen_movsi (operands[0], operands[1]));
7073 else if (TARGET_THUMB2)
7075 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
7076 if (can_create_pseudo_p ())
7078 if (!REG_P (operands[0]))
7079 operands[1] = force_reg (HImode, operands[1]);
7080 /* Zero extend a constant, and keep it in an SImode reg. */
7081 else if (CONST_INT_P (operands[1]))
7083 rtx reg = gen_reg_rtx (SImode);
7084 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
7086 emit_insn (gen_movsi (reg, GEN_INT (val)));
7087 operands[1] = gen_lowpart (HImode, reg);
7091 else /* TARGET_THUMB1 */
7093 if (can_create_pseudo_p ())
7095 if (CONST_INT_P (operands[1]))
7097 rtx reg = gen_reg_rtx (SImode);
7099 emit_insn (gen_movsi (reg, operands[1]));
7100 operands[1] = gen_lowpart (HImode, reg);
7103 /* ??? We shouldn't really get invalid addresses here, but this can
7104 happen if we are passed a SP (never OK for HImode/QImode) or
7105 virtual register (also rejected as illegitimate for HImode/QImode)
7106 relative address. */
7107 /* ??? This should perhaps be fixed elsewhere, for instance, in
7108 fixup_stack_1, by checking for other kinds of invalid addresses,
7109 e.g. a bare reference to a virtual register. This may confuse the
7110 alpha though, which must handle this case differently. */
7111 if (MEM_P (operands[0])
7112 && !memory_address_p (GET_MODE (operands[0]),
7113 XEXP (operands[0], 0)))
7115 = replace_equiv_address (operands[0],
7116 copy_to_reg (XEXP (operands[0], 0)));
7118 if (MEM_P (operands[1])
7119 && !memory_address_p (GET_MODE (operands[1]),
7120 XEXP (operands[1], 0)))
7122 = replace_equiv_address (operands[1],
7123 copy_to_reg (XEXP (operands[1], 0)));
7125 if (MEM_P (operands[1]) && optimize > 0)
7127 rtx reg = gen_reg_rtx (SImode);
7129 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
7130 operands[1] = gen_lowpart (HImode, reg);
7133 if (MEM_P (operands[0]))
7134 operands[1] = force_reg (HImode, operands[1]);
7136 else if (CONST_INT_P (operands[1])
7137 && !satisfies_constraint_I (operands[1]))
7139 /* Handle loading a large integer during reload. */
7141 /* Writing a constant to memory needs a scratch, which should
7142 be handled with SECONDARY_RELOADs. */
7143 gcc_assert (REG_P (operands[0]));
7145 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
7146 emit_insn (gen_movsi (operands[0], operands[1]));
7153 (define_insn "*thumb1_movhi_insn"
7154 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
7155 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
7157 && ( register_operand (operands[0], HImode)
7158 || register_operand (operands[1], HImode))"
7160 switch (which_alternative)
7162 case 0: return \"add %0, %1, #0\";
7163 case 2: return \"strh %1, %0\";
7164 case 3: return \"mov %0, %1\";
7165 case 4: return \"mov %0, %1\";
7166 case 5: return \"mov %0, %1\";
7167 default: gcc_unreachable ();
7169 /* The stack pointer can end up being taken as an index register.
7170 Catch this case here and deal with it. */
7171 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
7172 && REG_P (XEXP (XEXP (operands[1], 0), 0))
7173 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
7176 ops[0] = operands[0];
7177 ops[1] = XEXP (XEXP (operands[1], 0), 0);
7179 output_asm_insn (\"mov %0, %1\", ops);
7181 XEXP (XEXP (operands[1], 0), 0) = operands[0];
7184 return \"ldrh %0, %1\";
7186 [(set_attr "length" "2,4,2,2,2,2")
7187 (set_attr "type" "*,load1,store1,*,*,*")
7188 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
7191 (define_expand "movhi_bytes"
7192 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
7194 (zero_extend:SI (match_dup 6)))
7195 (set (match_operand:SI 0 "" "")
7196 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
7201 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
7203 mem1 = change_address (operands[1], QImode, addr);
7204 mem2 = change_address (operands[1], QImode,
7205 plus_constant (Pmode, addr, 1));
7206 operands[0] = gen_lowpart (SImode, operands[0]);
7208 operands[2] = gen_reg_rtx (SImode);
7209 operands[3] = gen_reg_rtx (SImode);
7212 if (BYTES_BIG_ENDIAN)
7214 operands[4] = operands[2];
7215 operands[5] = operands[3];
7219 operands[4] = operands[3];
7220 operands[5] = operands[2];
;; Big-endian HImode load: rotate the containing SImode word so the
;; halfword lands in the low bits, arithmetic-shift right by 16, then take
;; the HImode lowpart (operand 4) of the SImode temporary (operand 3).
;; NOTE(review): fragment — orig. lines 7226, 7228-7229, 7232-7234, 7238
;; (first set, rotate amount, final set, condition) were elided here.
7225 (define_expand "movhi_bigend"
7227 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
7230 (ashiftrt:SI (match_dup 2) (const_int 16)))
7231 (set (match_operand:HI 0 "s_register_operand" "")
7235 operands[2] = gen_reg_rtx (SImode);
7236 operands[3] = gen_reg_rtx (SImode);
7237 operands[4] = gen_lowpart (HImode, operands[3]);
7241 ;; Pattern to recognize insn generated default case above
;; HImode move for arch4+ (has ldrh/strh): mov from reg/immediate, mvn of
;; an inverted immediate (constraint "K", printed with %B1), strh to
;; memory, ldrh from memory.  The "type" attribute distinguishes mov_imm
;; from mov_reg for the first alternative via const_int_operand.
;; NOTE(review): fragment — orig. lines 7245-7246, 7249 (condition opening
;; and template header) were elided here.
7242 (define_insn "*movhi_insn_arch4"
7243 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
7244 (match_operand:HI 1 "general_operand" "rI,K,r,mi"))]
7247 && (register_operand (operands[0], HImode)
7248 || register_operand (operands[1], HImode))"
7250 mov%?\\t%0, %1\\t%@ movhi
7251 mvn%?\\t%0, #%B1\\t%@ movhi
7252 str%(h%)\\t%1, %0\\t%@ movhi
7253 ldr%(h%)\\t%0, %1\\t%@ movhi"
7254 [(set_attr "predicable" "yes")
7255 (set_attr "pool_range" "*,*,*,256")
7256 (set_attr "neg_pool_range" "*,*,*,244")
7257 (set_attr_alternative "type"
7258 [(if_then_else (match_operand 1 "const_int_operand" "")
7259 (const_string "mov_imm" )
7260 (const_string "mov_reg"))
7261 (const_string "mvn_imm")
7262 (const_string "store1")
7263 (const_string "load1")])]
;; Register/immediate HImode move (no memory alternatives): plain mov for
;; "I" immediates and registers, mvn with the bitwise complement (%B1) for
;; "K" immediates.
;; NOTE(review): fragment — the insn condition and template header (orig.
;; 7269-7270) were elided here.
7266 (define_insn "*movhi_bytes"
7267 [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
7268 (match_operand:HI 1 "arm_rhs_operand" "I,r,K"))]
7271 mov%?\\t%0, %1\\t%@ movhi
7272 mov%?\\t%0, %1\\t%@ movhi
7273 mvn%?\\t%0, #%B1\\t%@ movhi"
7274 [(set_attr "predicable" "yes")
7275 (set_attr "type" "mov_imm,mov_reg,mvn_imm")]
7278 (define_expand "thumb_movhi_clobber"
7279 [(set (match_operand:HI 0 "memory_operand" "")
7280 (match_operand:HI 1 "register_operand" ""))
7281 (clobber (match_operand:DI 2 "register_operand" ""))]
7284 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
7285 && REGNO (operands[1]) <= LAST_LO_REGNUM)
7287 emit_insn (gen_movhi (operands[0], operands[1]));
7290 /* XXX Fixme, need to handle other cases here as well. */
7295 ;; We use a DImode scratch because we may occasionally need an additional
7296 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
7297 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
7298 (define_expand "reload_outhi"
7299 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
7300 (match_operand:HI 1 "s_register_operand" "r")
7301 (match_operand:DI 2 "s_register_operand" "=&l")])]
7304 arm_reload_out_hi (operands);
7306 thumb_reload_out_hi (operands);
7311 (define_expand "reload_inhi"
7312 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
7313 (match_operand:HI 1 "arm_reload_memory_operand" "o")
7314 (match_operand:DI 2 "s_register_operand" "=&r")])]
7318 arm_reload_in_hi (operands);
7320 thumb_reload_out_hi (operands);
7324 (define_expand "movqi"
7325 [(set (match_operand:QI 0 "general_operand" "")
7326 (match_operand:QI 1 "general_operand" ""))]
7329 /* Everything except mem = const or mem = mem can be done easily */
7331 if (can_create_pseudo_p ())
7333 if (CONST_INT_P (operands[1]))
7335 rtx reg = gen_reg_rtx (SImode);
7337 /* For thumb we want an unsigned immediate, then we are more likely
7338 to be able to use a movs insn. */
7340 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
7342 emit_insn (gen_movsi (reg, operands[1]));
7343 operands[1] = gen_lowpart (QImode, reg);
7348 /* ??? We shouldn't really get invalid addresses here, but this can
7349 happen if we are passed a SP (never OK for HImode/QImode) or
7350 virtual register (also rejected as illegitimate for HImode/QImode)
7351 relative address. */
7352 /* ??? This should perhaps be fixed elsewhere, for instance, in
7353 fixup_stack_1, by checking for other kinds of invalid addresses,
7354 e.g. a bare reference to a virtual register. This may confuse the
7355 alpha though, which must handle this case differently. */
7356 if (MEM_P (operands[0])
7357 && !memory_address_p (GET_MODE (operands[0]),
7358 XEXP (operands[0], 0)))
7360 = replace_equiv_address (operands[0],
7361 copy_to_reg (XEXP (operands[0], 0)));
7362 if (MEM_P (operands[1])
7363 && !memory_address_p (GET_MODE (operands[1]),
7364 XEXP (operands[1], 0)))
7366 = replace_equiv_address (operands[1],
7367 copy_to_reg (XEXP (operands[1], 0)));
7370 if (MEM_P (operands[1]) && optimize > 0)
7372 rtx reg = gen_reg_rtx (SImode);
7374 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
7375 operands[1] = gen_lowpart (QImode, reg);
7378 if (MEM_P (operands[0]))
7379 operands[1] = force_reg (QImode, operands[1]);
7381 else if (TARGET_THUMB
7382 && CONST_INT_P (operands[1])
7383 && !satisfies_constraint_I (operands[1]))
7385 /* Handle loading a large integer during reload. */
7387 /* Writing a constant to memory needs a scratch, which should
7388 be handled with SECONDARY_RELOADs. */
7389 gcc_assert (REG_P (operands[0]));
7391 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
7392 emit_insn (gen_movsi (operands[0], operands[1]));
7398 (define_insn "*arm_movqi_insn"
7399 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,r,l,Uu,r,m")
7400 (match_operand:QI 1 "general_operand" "r,r,I,Py,K,Uu,l,m,r"))]
7402 && ( register_operand (operands[0], QImode)
7403 || register_operand (operands[1], QImode))"
7414 [(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load1,store1,load1,store1")
7415 (set_attr "predicable" "yes")
7416 (set_attr "predicable_short_it" "yes,yes,yes,no,no,no,no,no,no")
7417 (set_attr "arch" "t2,any,any,t2,any,t2,t2,any,any")
7418 (set_attr "length" "2,4,4,2,4,2,2,4,4")]
7421 (define_insn "*thumb1_movqi_insn"
7422 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
7423 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
7425 && ( register_operand (operands[0], QImode)
7426 || register_operand (operands[1], QImode))"
7434 [(set_attr "length" "2")
7435 (set_attr "type" "arlo_imm,load1,store1,mov_reg,mov_imm,mov_imm")
7436 (set_attr "pool_range" "*,32,*,*,*,*")
7437 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
7440 (define_expand "movhf"
7441 [(set (match_operand:HF 0 "general_operand" "")
7442 (match_operand:HF 1 "general_operand" ""))]
7447 if (MEM_P (operands[0]))
7448 operands[1] = force_reg (HFmode, operands[1]);
7450 else /* TARGET_THUMB1 */
7452 if (can_create_pseudo_p ())
7454 if (!REG_P (operands[0]))
7455 operands[1] = force_reg (HFmode, operands[1]);
7461 (define_insn "*arm32_movhf"
7462 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
7463 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
7464 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_FP16) && !arm_restrict_it
7465 && ( s_register_operand (operands[0], HFmode)
7466 || s_register_operand (operands[1], HFmode))"
7468 switch (which_alternative)
7470 case 0: /* ARM register from memory */
7471 return \"ldr%(h%)\\t%0, %1\\t%@ __fp16\";
7472 case 1: /* memory from ARM register */
7473 return \"str%(h%)\\t%1, %0\\t%@ __fp16\";
7474 case 2: /* ARM register from ARM register */
7475 return \"mov%?\\t%0, %1\\t%@ __fp16\";
7476 case 3: /* ARM register from constant */
7482 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
7483 bits = real_to_target (NULL, &r, HFmode);
7484 ops[0] = operands[0];
7485 ops[1] = GEN_INT (bits);
7486 ops[2] = GEN_INT (bits & 0xff00);
7487 ops[3] = GEN_INT (bits & 0x00ff);
7489 if (arm_arch_thumb2)
7490 output_asm_insn (\"movw%?\\t%0, %1\", ops);
7492 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
7499 [(set_attr "conds" "unconditional")
7500 (set_attr "type" "load1,store1,mov_reg,mov_reg")
7501 (set_attr "length" "4,4,4,8")
7502 (set_attr "predicable" "yes")]
7505 (define_insn "*thumb1_movhf"
7506 [(set (match_operand:HF 0 "nonimmediate_operand" "=l,l,m,*r,*h")
7507 (match_operand:HF 1 "general_operand" "l,mF,l,*h,*r"))]
7509 && ( s_register_operand (operands[0], HFmode)
7510 || s_register_operand (operands[1], HFmode))"
7512 switch (which_alternative)
7517 gcc_assert (MEM_P (operands[1]));
7518 addr = XEXP (operands[1], 0);
7519 if (GET_CODE (addr) == LABEL_REF
7520 || (GET_CODE (addr) == CONST
7521 && GET_CODE (XEXP (addr, 0)) == PLUS
7522 && GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
7523 && CONST_INT_P (XEXP (XEXP (addr, 0), 1))))
7525 /* Constant pool entry. */
7526 return \"ldr\\t%0, %1\";
7528 return \"ldrh\\t%0, %1\";
7530 case 2: return \"strh\\t%1, %0\";
7531 default: return \"mov\\t%0, %1\";
7534 [(set_attr "length" "2")
7535 (set_attr "type" "mov_reg,load1,store1,mov_reg,mov_reg")
7536 (set_attr "pool_range" "*,1018,*,*,*")
7537 (set_attr "conds" "clob,nocond,nocond,nocond,nocond")])
7539 (define_expand "movsf"
7540 [(set (match_operand:SF 0 "general_operand" "")
7541 (match_operand:SF 1 "general_operand" ""))]
7546 if (MEM_P (operands[0]))
7547 operands[1] = force_reg (SFmode, operands[1]);
7549 else /* TARGET_THUMB1 */
7551 if (can_create_pseudo_p ())
7553 if (!REG_P (operands[0]))
7554 operands[1] = force_reg (SFmode, operands[1]);
7560 ;; Transform a floating-point move of a constant into a core register into
7561 ;; an SImode operation.
7563 [(set (match_operand:SF 0 "arm_general_register_operand" "")
7564 (match_operand:SF 1 "immediate_operand" ""))]
7567 && CONST_DOUBLE_P (operands[1])"
7568 [(set (match_dup 2) (match_dup 3))]
7570 operands[2] = gen_lowpart (SImode, operands[0]);
7571 operands[3] = gen_lowpart (SImode, operands[1]);
7572 if (operands[2] == 0 || operands[3] == 0)
7577 (define_insn "*arm_movsf_soft_insn"
7578 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
7579 (match_operand:SF 1 "general_operand" "r,mE,r"))]
7581 && TARGET_SOFT_FLOAT
7582 && (!MEM_P (operands[0])
7583 || register_operand (operands[1], SFmode))"
7586 ldr%?\\t%0, %1\\t%@ float
7587 str%?\\t%1, %0\\t%@ float"
7588 [(set_attr "predicable" "yes")
7589 (set_attr "predicable_short_it" "no")
7590 (set_attr "type" "mov_reg,load1,store1")
7591 (set_attr "arm_pool_range" "*,4096,*")
7592 (set_attr "thumb2_pool_range" "*,4094,*")
7593 (set_attr "arm_neg_pool_range" "*,4084,*")
7594 (set_attr "thumb2_neg_pool_range" "*,0,*")]
7597 ;;; ??? This should have alternatives for constants.
7598 (define_insn "*thumb1_movsf_insn"
7599 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
7600 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
7602 && ( register_operand (operands[0], SFmode)
7603 || register_operand (operands[1], SFmode))"
7612 [(set_attr "length" "2")
7613 (set_attr "type" "*,load1,store1,load1,store1,mov_reg,mov_reg")
7614 (set_attr "pool_range" "*,*,*,1018,*,*,*")
7615 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond,nocond")]
7618 (define_expand "movdf"
7619 [(set (match_operand:DF 0 "general_operand" "")
7620 (match_operand:DF 1 "general_operand" ""))]
7625 if (MEM_P (operands[0]))
7626 operands[1] = force_reg (DFmode, operands[1]);
7628 else /* TARGET_THUMB */
7630 if (can_create_pseudo_p ())
7632 if (!REG_P (operands[0]))
7633 operands[1] = force_reg (DFmode, operands[1]);
7639 ;; Reloading a df mode value stored in integer regs to memory can require a
7641 (define_expand "reload_outdf"
7642 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
7643 (match_operand:DF 1 "s_register_operand" "r")
7644 (match_operand:SI 2 "s_register_operand" "=&r")]
7648 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
7651 operands[2] = XEXP (operands[0], 0);
7652 else if (code == POST_INC || code == PRE_DEC)
7654 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
7655 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
7656 emit_insn (gen_movdi (operands[0], operands[1]));
7659 else if (code == PRE_INC)
7661 rtx reg = XEXP (XEXP (operands[0], 0), 0);
7663 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
7666 else if (code == POST_DEC)
7667 operands[2] = XEXP (XEXP (operands[0], 0), 0);
7669 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
7670 XEXP (XEXP (operands[0], 0), 1)));
7672 emit_insn (gen_rtx_SET (VOIDmode,
7673 replace_equiv_address (operands[0], operands[2]),
7676 if (code == POST_DEC)
7677 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
7683 (define_insn "*movdf_soft_insn"
7684 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,q,m")
7685 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,q"))]
7686 "TARGET_32BIT && TARGET_SOFT_FLOAT
7687 && ( register_operand (operands[0], DFmode)
7688 || register_operand (operands[1], DFmode))"
7690 switch (which_alternative)
7697 return output_move_double (operands, true, NULL);
7700 [(set_attr "length" "8,12,16,8,8")
7701 (set_attr "type" "*,*,*,load2,store2")
7702 (set_attr "arm_pool_range" "*,*,*,1020,*")
7703 (set_attr "thumb2_pool_range" "*,*,*,1018,*")
7704 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
7705 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
7708 ;;; ??? This should have alternatives for constants.
7709 ;;; ??? This was originally identical to the movdi_insn pattern.
7710 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
7711 ;;; thumb_reorg with a memory reference.
7712 (define_insn "*thumb_movdf_insn"
7713 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
7714 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
7716 && ( register_operand (operands[0], DFmode)
7717 || register_operand (operands[1], DFmode))"
7719 switch (which_alternative)
7723 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
7724 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
7725 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
7727 return \"ldmia\\t%1, {%0, %H0}\";
7729 return \"stmia\\t%0, {%1, %H1}\";
7731 return thumb_load_double_from_address (operands);
7733 operands[2] = gen_rtx_MEM (SImode,
7734 plus_constant (Pmode,
7735 XEXP (operands[0], 0), 4));
7736 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
7739 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
7740 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
7741 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
7744 [(set_attr "length" "4,2,2,6,4,4")
7745 (set_attr "type" "*,load2,store2,load2,store2,mov_reg")
7746 (set_attr "pool_range" "*,*,*,1018,*,*")]
7750 ;; load- and store-multiple insns
7751 ;; The arm can load/store any set of registers, provided that they are in
7752 ;; ascending order, but these expanders assume a contiguous set.
;; Expand a load-multiple (ldm) of INTVAL(operands[2]) consecutive core
;; registers starting at REGNO(operands[0]) from the memory in operands[1].
;; Bails out (FAIL path elided) unless: the count is a constant in [2,14],
;; the destination is a register low enough that the run stays within
;; the core registers (<= LAST_ARM_REGNUM), and operand 1 is a MEM.
;; NOTE(review): fragment — orig. lines 7758-7759, 7761, 7770-7772, 7777-
;; 7778 (condition, FAIL, DONE, closing braces) were elided here.
7754 (define_expand "load_multiple"
7755 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7756 (match_operand:SI 1 "" ""))
7757 (use (match_operand:SI 2 "" ""))])]
7760 HOST_WIDE_INT offset = 0;
7762 /* Support only fixed point registers. */
7763 if (!CONST_INT_P (operands[2])
7764 || INTVAL (operands[2]) > 14
7765 || INTVAL (operands[2]) < 2
7766 || !MEM_P (operands[1])
7767 || !REG_P (operands[0])
7768 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
7769 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM
7773 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
7774 INTVAL (operands[2]),
7775 force_reg (SImode, XEXP (operands[1], 0)),
7776 FALSE, operands[1], &offset);
;; Expand a store-multiple (stm): mirror image of load_multiple above, with
;; operand 1 the first register to store and operand 0 the destination MEM.
;; Same validity checks: constant count in [2,14] and a register run that
;; stays within the core registers.
;; NOTE(review): fragment — orig. lines 7783-7784, 7786, 7795-7797, 7802-
;; 7803 (condition, FAIL, DONE, closing braces) were elided here.
7779 (define_expand "store_multiple"
7780 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7781 (match_operand:SI 1 "" ""))
7782 (use (match_operand:SI 2 "" ""))])]
7785 HOST_WIDE_INT offset = 0;
7787 /* Support only fixed point registers. */
7788 if (!CONST_INT_P (operands[2])
7789 || INTVAL (operands[2]) > 14
7790 || INTVAL (operands[2]) < 2
7791 || !REG_P (operands[1])
7792 || !MEM_P (operands[0])
7793 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
7794 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM
7798 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
7799 INTVAL (operands[2]),
7800 force_reg (SImode, XEXP (operands[0], 0)),
7801 FALSE, operands[0], &offset)
7805 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
7806 ;; We could let this apply for blocks of less than this, but it clobbers so
7807 ;; many registers that there is then probably a better way.
7809 (define_expand "movmemqi"
7810 [(match_operand:BLK 0 "general_operand" "")
7811 (match_operand:BLK 1 "general_operand" "")
7812 (match_operand:SI 2 "const_int_operand" "")
7813 (match_operand:SI 3 "const_int_operand" "")]
7818 if (TARGET_LDRD && current_tune->prefer_ldrd_strd
7819 && !optimize_function_for_size_p (cfun))
7821 if (gen_movmem_ldrd_strd (operands))
7826 if (arm_gen_movmemqi (operands))
7830 else /* TARGET_THUMB1 */
7832 if ( INTVAL (operands[3]) != 4
7833 || INTVAL (operands[2]) > 48)
7836 thumb_expand_movmemqi (operands);
7842 ;; Thumb block-move insns
;; Thumb block move of 12 bytes: copies three SImode words from the address
;; in operand 3 to the address in operand 2, then sets operands 0/1 to the
;; post-incremented (+12) destination/source pointers.  Needs three low-reg
;; scratches; output is produced by thumb_output_move_mem_multiple (3, ...).
;; The attrs deliberately model it as a store for scheduling (see comment).
;; NOTE(review): fragment — the insn condition (orig. 7858) and closing
;; paren were elided here.
7844 (define_insn "movmem12b"
7845 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
7846 (mem:SI (match_operand:SI 3 "register_operand" "1")))
7847 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
7848 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
7849 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
7850 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
7851 (set (match_operand:SI 0 "register_operand" "=l")
7852 (plus:SI (match_dup 2) (const_int 12)))
7853 (set (match_operand:SI 1 "register_operand" "=l")
7854 (plus:SI (match_dup 3) (const_int 12)))
7855 (clobber (match_scratch:SI 4 "=&l"))
7856 (clobber (match_scratch:SI 5 "=&l"))
7857 (clobber (match_scratch:SI 6 "=&l"))]
7859 "* return thumb_output_move_mem_multiple (3, operands);"
7860 [(set_attr "length" "4")
7861 ; This isn't entirely accurate... It loads as well, but in terms of
7862 ; scheduling the following insn it is better to consider it as a store
7863 (set_attr "type" "store3")]
;; Thumb block move of 8 bytes: two-word variant of movmem12b above —
;; copies two SImode words, advances both pointers by 8, needs two low-reg
;; scratches, and emits via thumb_output_move_mem_multiple (2, ...).
;; NOTE(review): fragment — the insn condition (orig. 7877) and closing
;; paren were elided here.
7866 (define_insn "movmem8b"
7867 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
7868 (mem:SI (match_operand:SI 3 "register_operand" "1")))
7869 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
7870 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
7871 (set (match_operand:SI 0 "register_operand" "=l")
7872 (plus:SI (match_dup 2) (const_int 8)))
7873 (set (match_operand:SI 1 "register_operand" "=l")
7874 (plus:SI (match_dup 3) (const_int 8)))
7875 (clobber (match_scratch:SI 4 "=&l"))
7876 (clobber (match_scratch:SI 5 "=&l"))]
7878 "* return thumb_output_move_mem_multiple (2, operands);"
7879 [(set_attr "length" "4")
7880 ; This isn't entirely accurate... It loads as well, but in terms of
7881 ; scheduling the following insn it is better to consider it as a store
7882 (set_attr "type" "store2")]
7887 ;; Compare & branch insns
7888 ;; The range calculations are based as follows:
7889 ;; For forward branches, the address calculation returns the address of
7890 ;; the next instruction. This is 2 beyond the branch instruction.
7891 ;; For backward branches, the address calculation returns the address of
7892 ;; the first instruction in this pattern (cmp). This is 2 before the branch
7893 ;; instruction for the shortest sequence, and 4 before the branch instruction
7894 ;; if we have to jump around an unconditional branch.
7895 ;; To the basic branch range the PC offset must be added (this is +4).
7896 ;; So for forward branches we have
7897 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
7898 ;; And for backward branches we have
7899 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
7901 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
7902 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
;; Expand an SImode compare-and-branch.  Comparisons that
;; arm_validize_comparison rejects are routed through cbranch_cc.
;; On the Thumb-1 path, an operand acceptable to
;; thumb1_cmpneg_operand uses the cbranchsi4_scratch pattern (ADD of
;; the negated constant), and any remaining operand that is not a
;; valid Thumb-1 compare operand is forced into a register.
7904 (define_expand "cbranchsi4"
7905 [(set (pc) (if_then_else
7906 (match_operator 0 "expandable_comparison_operator"
7907 [(match_operand:SI 1 "s_register_operand" "")
7908 (match_operand:SI 2 "nonmemory_operand" "")])
7909 (label_ref (match_operand 3 "" ""))
7915 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7917 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7921 if (thumb1_cmpneg_operand (operands[2], SImode))
7923 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
7924 operands[3], operands[0]))
7927 if (!thumb1_cmp_operand (operands[2], SImode))
7928 operands[2] = force_reg (SImode, operands[2]);
7931 ;; A pattern to recognize a special situation and optimize for it.
7932 ;; On the thumb, zero-extension from memory is preferable to sign-extension
7933 ;; due to the available addressing modes. Hence, convert a signed comparison
7934 ;; with zero into an unsigned comparison with 127 if possible.
;; QImode signed compare against zero on a memory operand:
;; re-expressed as an unsigned SImode comparison with 127 after a
;; zero-extending load (GE becomes LEU, LT becomes GTU), then
;; re-expanded through cbranchsi4.
7935 (define_expand "cbranchqi4"
7936 [(set (pc) (if_then_else
7937 (match_operator 0 "lt_ge_comparison_operator"
7938 [(match_operand:QI 1 "memory_operand" "")
7939 (match_operand:QI 2 "const0_operand" "")])
7940 (label_ref (match_operand 3 "" ""))
7945 xops[1] = gen_reg_rtx (SImode);
7946 emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
7947 xops[2] = GEN_INT (127);
7948 xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
7949 VOIDmode, xops[1], xops[2]);
7950 xops[3] = operands[3];
7951 emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
;; SFmode compare-and-branch for 32-bit hard-float targets:
;; delegates directly to cbranch_cc.
7955 (define_expand "cbranchsf4"
7956 [(set (pc) (if_then_else
7957 (match_operator 0 "expandable_comparison_operator"
7958 [(match_operand:SF 1 "s_register_operand" "")
7959 (match_operand:SF 2 "arm_float_compare_operand" "")])
7960 (label_ref (match_operand 3 "" ""))
7962 "TARGET_32BIT && TARGET_HARD_FLOAT"
7963 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7964 operands[3])); DONE;"
;; DFmode compare-and-branch: as cbranchsf4, but additionally
;; requires double-precision VFP (!TARGET_VFP_SINGLE).
7967 (define_expand "cbranchdf4"
7968 [(set (pc) (if_then_else
7969 (match_operator 0 "expandable_comparison_operator"
7970 [(match_operand:DF 1 "s_register_operand" "")
7971 (match_operand:DF 2 "arm_float_compare_operand" "")])
7972 (label_ref (match_operand 3 "" ""))
7974 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7975 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7976 operands[3])); DONE;"
;; DImode compare-and-branch: validates the comparison with
;; arm_validize_comparison, then emits it through cbranch_cc.
7979 (define_expand "cbranchdi4"
7980 [(set (pc) (if_then_else
7981 (match_operator 0 "expandable_comparison_operator"
7982 [(match_operand:DI 1 "s_register_operand" "")
7983 (match_operand:DI 2 "cmpdi_operand" "")])
7984 (label_ref (match_operand 3 "" ""))
7988 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7990 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
;; Thumb-1 compare-and-branch.  The output template tracks the most
;; recent flag-setting insn in cfun->machine->thumb1_cc_* so a
;; redundant "cmp %1, %2" can be skipped when the flags already
;; reflect these operands (subject to the recorded CC mode being
;; compatible with the comparison).  The length attribute selects a
;; short conditional branch, a conditional skip around an
;; unconditional "b" (long jump), or a skip around "bl" (far jump),
;; based on the branch distance ranges documented above.
7996 (define_insn "cbranchsi4_insn"
7997 [(set (pc) (if_then_else
7998 (match_operator 0 "arm_comparison_operator"
7999 [(match_operand:SI 1 "s_register_operand" "l,l*h")
8000 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
8001 (label_ref (match_operand 3 "" ""))
8005 rtx t = cfun->machine->thumb1_cc_insn;
8008 if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
8009 || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
8011 if (cfun->machine->thumb1_cc_mode == CC_NOOVmode)
8013 if (!noov_comparison_operator (operands[0], VOIDmode))
8016 else if (cfun->machine->thumb1_cc_mode != CCmode)
8021 output_asm_insn ("cmp\t%1, %2", operands);
8022 cfun->machine->thumb1_cc_insn = insn;
8023 cfun->machine->thumb1_cc_op0 = operands[1];
8024 cfun->machine->thumb1_cc_op1 = operands[2];
8025 cfun->machine->thumb1_cc_mode = CCmode;
8028 /* Ensure we emit the right type of condition code on the jump. */
8029 XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
8032 switch (get_attr_length (insn))
8034 case 4: return \"b%d0\\t%l3\";
8035 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
8036 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
8039 [(set (attr "far_jump")
8041 (eq_attr "length" "8")
8042 (const_string "yes")
8043 (const_string "no")))
8044 (set (attr "length")
8046 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
8047 (le (minus (match_dup 3) (pc)) (const_int 256)))
8050 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
8051 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Thumb-1 compare-and-branch against a negatable immediate: instead
;; of a cmp, performs "add scratch, %1, #-imm" into the clobbered low
;; register, which sets the flags for the subsequent branch.  Length
;; selection (short / long / far jump) mirrors cbranchsi4_insn.
8056 (define_insn "cbranchsi4_scratch"
8057 [(set (pc) (if_then_else
8058 (match_operator 4 "arm_comparison_operator"
8059 [(match_operand:SI 1 "s_register_operand" "l,0")
8060 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
8061 (label_ref (match_operand 3 "" ""))
8063 (clobber (match_scratch:SI 0 "=l,l"))]
8066 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
8068 switch (get_attr_length (insn))
8070 case 4: return \"b%d4\\t%l3\";
8071 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
8072 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
8075 [(set (attr "far_jump")
8077 (eq_attr "length" "8")
8078 (const_string "yes")
8079 (const_string "no")))
8080 (set (attr "length")
8082 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
8083 (le (minus (match_dup 3) (pc)) (const_int 256)))
8086 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
8087 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Equality compare-and-branch where the second operand is negated:
;; uses "cmn %1, %2" (compare-negative) so the negation needs no
;; separate instruction.  Branch length selection as above.
8092 (define_insn "*negated_cbranchsi4"
8095 (match_operator 0 "equality_operator"
8096 [(match_operand:SI 1 "s_register_operand" "l")
8097 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
8098 (label_ref (match_operand 3 "" ""))
8102 output_asm_insn (\"cmn\\t%1, %2\", operands);
8103 switch (get_attr_length (insn))
8105 case 4: return \"b%d0\\t%l3\";
8106 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
8107 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
8110 [(set (attr "far_jump")
8112 (eq_attr "length" "8")
8113 (const_string "yes")
8114 (const_string "no")))
8115 (set (attr "length")
8117 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
8118 (le (minus (match_dup 3) (pc)) (const_int 256)))
8121 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
8122 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Branch on a single extracted bit: shifts the tested bit of
;; operand 1 into the sign position with "lsl scratch, %1, 31 - bit"
;; (the shift count computed as 32 - 1 - INTVAL (operands[2])), then
;; branches on the resulting flags.  Branch length selection as above.
8127 (define_insn "*tbit_cbranch"
8130 (match_operator 0 "equality_operator"
8131 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
8133 (match_operand:SI 2 "const_int_operand" "i"))
8135 (label_ref (match_operand 3 "" ""))
8137 (clobber (match_scratch:SI 4 "=l"))]
8142 op[0] = operands[4];
8143 op[1] = operands[1];
8144 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
8146 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
8147 switch (get_attr_length (insn))
8149 case 4: return \"b%d0\\t%l3\";
8150 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
8151 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
8154 [(set (attr "far_jump")
8156 (eq_attr "length" "8")
8157 (const_string "yes")
8158 (const_string "no")))
8159 (set (attr "length")
8161 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
8162 (le (minus (match_dup 3) (pc)) (const_int 256)))
8165 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
8166 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Branch on the low N bits of a register: shifts them to the top
;; with "lsl scratch, %1, 32 - N" (N = INTVAL (operands[2])) so the
;; flags reflect whether any of those bits are set, then branches.
;; Branch length selection as above.
8171 (define_insn "*tlobits_cbranch"
8174 (match_operator 0 "equality_operator"
8175 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
8176 (match_operand:SI 2 "const_int_operand" "i")
8179 (label_ref (match_operand 3 "" ""))
8181 (clobber (match_scratch:SI 4 "=l"))]
8186 op[0] = operands[4];
8187 op[1] = operands[1];
8188 op[2] = GEN_INT (32 - INTVAL (operands[2]));
8190 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
8191 switch (get_attr_length (insn))
8193 case 4: return \"b%d0\\t%l3\";
8194 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
8195 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
8198 [(set (attr "far_jump")
8200 (eq_attr "length" "8")
8201 (const_string "yes")
8202 (const_string "no")))
8203 (set (attr "length")
8205 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
8206 (le (minus (match_dup 3) (pc)) (const_int 256)))
8209 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
8210 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Branch on (reg AND reg): emits "tst %0, %1" and then the
;; conditional branch; no result register is written.  Branch length
;; selection as above.
8215 (define_insn "*tstsi3_cbranch"
8218 (match_operator 3 "equality_operator"
8219 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
8220 (match_operand:SI 1 "s_register_operand" "l"))
8222 (label_ref (match_operand 2 "" ""))
8227 output_asm_insn (\"tst\\t%0, %1\", operands);
8228 switch (get_attr_length (insn))
8230 case 4: return \"b%d3\\t%l2\";
8231 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
8232 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
8235 [(set (attr "far_jump")
8237 (eq_attr "length" "8")
8238 (const_string "yes")
8239 (const_string "no")))
8240 (set (attr "length")
8242 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
8243 (le (minus (match_dup 2) (pc)) (const_int 256)))
8246 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
8247 (le (minus (match_dup 2) (pc)) (const_int 2048)))
;; Decrement-and-branch: stores operand 2 minus one into operand 0
;; and branches on an equality test of operand 2.  The emitted
;; comparison is rebuilt against const1_rtx (NE is mapped through
;; gen_rtx_fmt_ee) so the flags from the subtract can be reused.
;; Alternatives: 0 = subtract directly into a lo reg; 1 = subtract
;; into scratch then mov to a hi reg (reload cannot handle output
;; reloads on a jump, and mov lo->hi does not clobber the flags);
;; 2/3 = subtract into scratch then str to memory.  The length /
;; far_jump attributes account for the extra mov/str instruction in
;; alternatives 1-3.
8252 (define_insn "*cbranchne_decr1"
8254 (if_then_else (match_operator 3 "equality_operator"
8255 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
8257 (label_ref (match_operand 4 "" ""))
8259 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
8260 (plus:SI (match_dup 2) (const_int -1)))
8261 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
8266 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
8268 VOIDmode, operands[2], const1_rtx);
8269 cond[1] = operands[4];
8271 if (which_alternative == 0)
8272 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
8273 else if (which_alternative == 1)
8275 /* We must provide an alternative for a hi reg because reload
8276 cannot handle output reloads on a jump instruction, but we
8277 can't subtract into that. Fortunately a mov from lo to hi
8278 does not clobber the condition codes. */
8279 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
8280 output_asm_insn (\"mov\\t%0, %1\", operands);
8284 /* Similarly, but the target is memory. */
8285 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
8286 output_asm_insn (\"str\\t%1, %0\", operands);
8289 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
8292 output_asm_insn (\"b%d0\\t%l1\", cond);
8295 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
8296 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
8298 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
8299 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
8303 [(set (attr "far_jump")
8305 (ior (and (eq (symbol_ref ("which_alternative"))
8307 (eq_attr "length" "8"))
8308 (eq_attr "length" "10"))
8309 (const_string "yes")
8310 (const_string "no")))
8311 (set_attr_alternative "length"
8315 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
8316 (le (minus (match_dup 4) (pc)) (const_int 256)))
8319 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
8320 (le (minus (match_dup 4) (pc)) (const_int 2048)))
8325 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
8326 (le (minus (match_dup 4) (pc)) (const_int 256)))
8329 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
8330 (le (minus (match_dup 4) (pc)) (const_int 2048)))
8335 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
8336 (le (minus (match_dup 4) (pc)) (const_int 256)))
8339 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
8340 (le (minus (match_dup 4) (pc)) (const_int 2048)))
8345 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
8346 (le (minus (match_dup 4) (pc)) (const_int 256)))
8349 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
8350 (le (minus (match_dup 4) (pc)) (const_int 2048)))
;; Add-and-branch: stores operand 2 plus operand 3 into operand 0
;; and branches on the flags of the addition; only EQ/NE/GE/LT are
;; accepted by the insn condition.  A negative constant is emitted
;; as "sub ... #%n2" instead of "add".  Alternatives 0-1 add
;; directly into the destination; 2-3 add into scratch then mov;
;; 4-5 add into scratch then str to memory.  The extra instruction
;; in alternatives >= 2 is reflected in the length computation.
8355 (define_insn "*addsi3_cbranch"
8358 (match_operator 4 "arm_comparison_operator"
8360 (match_operand:SI 2 "s_register_operand" "%0,l,*l,1,1,1")
8361 (match_operand:SI 3 "reg_or_int_operand" "IJ,lL,*l,lIJ,lIJ,lIJ"))
8363 (label_ref (match_operand 5 "" ""))
8366 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
8367 (plus:SI (match_dup 2) (match_dup 3)))
8368 (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
8370 && (GET_CODE (operands[4]) == EQ
8371 || GET_CODE (operands[4]) == NE
8372 || GET_CODE (operands[4]) == GE
8373 || GET_CODE (operands[4]) == LT)"
8378 cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
8379 cond[1] = operands[2];
8380 cond[2] = operands[3];
8382 if (CONST_INT_P (cond[2]) && INTVAL (cond[2]) < 0)
8383 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
8385 output_asm_insn (\"add\\t%0, %1, %2\", cond);
8387 if (which_alternative >= 2
8388 && which_alternative < 4)
8389 output_asm_insn (\"mov\\t%0, %1\", operands);
8390 else if (which_alternative >= 4)
8391 output_asm_insn (\"str\\t%1, %0\", operands);
8393 switch (get_attr_length (insn) - ((which_alternative >= 2) ? 2 : 0))
8396 return \"b%d4\\t%l5\";
8398 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
8400 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
8404 [(set (attr "far_jump")
8406 (ior (and (lt (symbol_ref ("which_alternative"))
8408 (eq_attr "length" "8"))
8409 (eq_attr "length" "10"))
8410 (const_string "yes")
8411 (const_string "no")))
8412 (set (attr "length")
8414 (lt (symbol_ref ("which_alternative"))
8417 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
8418 (le (minus (match_dup 5) (pc)) (const_int 256)))
8421 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
8422 (le (minus (match_dup 5) (pc)) (const_int 2048)))
8426 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
8427 (le (minus (match_dup 5) (pc)) (const_int 256)))
8430 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
8431 (le (minus (match_dup 5) (pc)) (const_int 2048)))
;; Add-and-branch where only the flags matter (result may be
;; discarded into a scratch).  Only EQ/NE/GE/LT are accepted.
;; Alternatives: 0 = "cmp %1, #-imm" (constant negated); 1 = "cmn"
;; with a register; 2 = add/sub into the scratch; 3 = add/sub in
;; place into the tied operand.  Branch length selection as above.
8436 (define_insn "*addsi3_cbranch_scratch"
8439 (match_operator 3 "arm_comparison_operator"
8441 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
8442 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
8444 (label_ref (match_operand 4 "" ""))
8446 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
8448 && (GET_CODE (operands[3]) == EQ
8449 || GET_CODE (operands[3]) == NE
8450 || GET_CODE (operands[3]) == GE
8451 || GET_CODE (operands[3]) == LT)"
8454 switch (which_alternative)
8457 output_asm_insn (\"cmp\t%1, #%n2\", operands);
8460 output_asm_insn (\"cmn\t%1, %2\", operands);
8463 if (INTVAL (operands[2]) < 0)
8464 output_asm_insn (\"sub\t%0, %1, %2\", operands);
8466 output_asm_insn (\"add\t%0, %1, %2\", operands);
8469 if (INTVAL (operands[2]) < 0)
8470 output_asm_insn (\"sub\t%0, %0, %2\", operands);
8472 output_asm_insn (\"add\t%0, %0, %2\", operands);
8476 switch (get_attr_length (insn))
8479 return \"b%d3\\t%l4\";
8481 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
8483 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
8487 [(set (attr "far_jump")
8489 (eq_attr "length" "8")
8490 (const_string "yes")
8491 (const_string "no")))
8492 (set (attr "length")
8494 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
8495 (le (minus (match_dup 4) (pc)) (const_int 256)))
8498 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
8499 (le (minus (match_dup 4) (pc)) (const_int 2048)))
8505 ;; Comparison and test insns
;; SImode compare setting the CC register.  The first two
;; alternatives are Thumb-2 16-bit encodings (length 2); the last
;; two are 32-bit forms, with the final one taking an "L" negatable
;; constant (classified as arlo_imm).
8507 (define_insn "*arm_cmpsi_insn"
8508 [(set (reg:CC CC_REGNUM)
8509 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r")
8510 (match_operand:SI 1 "arm_add_operand" "Py,r,rI,L")))]
8517 [(set_attr "conds" "set")
8518 (set_attr "arch" "t2,t2,any,any")
8519 (set_attr "length" "2,2,4,4")
8520 (set_attr "predicable" "yes")
8521 (set_attr "type" "*,*,*,arlo_imm")]
;; Compare a register against a shifted register
;; (shift amount immediate or, ARM-state only, a register).
8524 (define_insn "*cmpsi_shiftsi"
8525 [(set (reg:CC CC_REGNUM)
8526 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
8527 (match_operator:SI 3 "shift_operator"
8528 [(match_operand:SI 1 "s_register_operand" "r,r")
8529 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
8532 [(set_attr "conds" "set")
8533 (set_attr "shift" "1")
8534 (set_attr "arch" "32,a")
8535 (set_attr "type" "arlo_shift,arlo_shift_reg")])
;; As *cmpsi_shiftsi but with the operands swapped, so the result is
;; recorded in CC_SWP mode (condition must be interpreted reversed).
8537 (define_insn "*cmpsi_shiftsi_swp"
8538 [(set (reg:CC_SWP CC_REGNUM)
8539 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
8540 [(match_operand:SI 1 "s_register_operand" "r,r")
8541 (match_operand:SI 2 "shift_amount_operand" "M,rM")])
8542 (match_operand:SI 0 "s_register_operand" "r,r")))]
8545 [(set_attr "conds" "set")
8546 (set_attr "shift" "1")
8547 (set_attr "arch" "32,a")
8548 (set_attr "type" "arlo_shift,arlo_shift_reg")])
;; Zero test of a register against a negated shifted register; the
;; type attribute distinguishes immediate vs register shift counts.
8550 (define_insn "*arm_cmpsi_negshiftsi_si"
8551 [(set (reg:CC_Z CC_REGNUM)
8553 (neg:SI (match_operator:SI 1 "shift_operator"
8554 [(match_operand:SI 2 "s_register_operand" "r")
8555 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
8556 (match_operand:SI 0 "s_register_operand" "r")))]
8559 [(set_attr "conds" "set")
8560 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
8561 (const_string "arlo_shift")
8562 (const_string "arlo_shift_reg")))
8563 (set_attr "predicable" "yes")]
8566 ;; DImode comparisons. The generic code generates branches that
8567 ;; if-conversion cannot reduce to a conditional compare, so we do
;; DImode signed compare (CC_NCV): kept as one insn until after
;; reload, then split into a low-word compare followed by a
;; high-word subtract-with-carry into the scratch.  For a constant
;; second operand the high part is bitwise-negated and the high-word
;; operation becomes a PLUS instead of a MINUS.
8570 (define_insn_and_split "*arm_cmpdi_insn"
8571 [(set (reg:CC_NCV CC_REGNUM)
8572 (compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
8573 (match_operand:DI 1 "arm_di_operand" "rDi")))
8574 (clobber (match_scratch:SI 2 "=r"))]
8576 "#" ; "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
8577 "&& reload_completed"
8578 [(set (reg:CC CC_REGNUM)
8579 (compare:CC (match_dup 0) (match_dup 1)))
8580 (parallel [(set (reg:CC CC_REGNUM)
8581 (compare:CC (match_dup 3) (match_dup 4)))
8583 (minus:SI (match_dup 5)
8584 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))])]
8586 operands[3] = gen_highpart (SImode, operands[0]);
8587 operands[0] = gen_lowpart (SImode, operands[0]);
8588 if (CONST_INT_P (operands[1]))
8590 operands[4] = GEN_INT (~INTVAL (gen_highpart_mode (SImode,
8593 operands[5] = gen_rtx_PLUS (SImode, operands[3], operands[4]);
8597 operands[4] = gen_highpart (SImode, operands[1]);
8598 operands[5] = gen_rtx_MINUS (SImode, operands[3], operands[4]);
8600 operands[1] = gen_lowpart (SImode, operands[1]);
8601 operands[2] = gen_lowpart (SImode, operands[2]);
8603 [(set_attr "conds" "set")
8604 (set_attr "length" "8")]
;; DImode unsigned compare (CC_CZ): split after reload into a
;; compare of the high words followed by a conditionally executed
;; compare of the low words when the high words were equal
;; ("cmp ... ; it eq ; cmpeq ...").  The first two alternatives are
;; Thumb-2 short forms; the arch/enabled attributes keep the long
;; form away from deprecated-IT-block targets.
8607 (define_insn_and_split "*arm_cmpdi_unsigned"
8608 [(set (reg:CC_CZ CC_REGNUM)
8609 (compare:CC_CZ (match_operand:DI 0 "s_register_operand" "l,r,r")
8610 (match_operand:DI 1 "arm_di_operand" "Py,r,rDi")))]
8613 "#" ; "cmp\\t%R0, %R1\;it eq\;cmpeq\\t%Q0, %Q1"
8614 "&& reload_completed"
8615 [(set (reg:CC CC_REGNUM)
8616 (compare:CC (match_dup 2) (match_dup 3)))
8617 (cond_exec (eq:SI (reg:CC CC_REGNUM) (const_int 0))
8618 (set (reg:CC CC_REGNUM)
8619 (compare:CC (match_dup 0) (match_dup 1))))]
8621 operands[2] = gen_highpart (SImode, operands[0]);
8622 operands[0] = gen_lowpart (SImode, operands[0]);
8623 if (CONST_INT_P (operands[1]))
8624 operands[3] = gen_highpart_mode (SImode, DImode, operands[1]);
8626 operands[3] = gen_highpart (SImode, operands[1]);
8627 operands[1] = gen_lowpart (SImode, operands[1]);
8629 [(set_attr "conds" "set")
8630 (set_attr "enabled_for_depr_it" "yes,yes,no")
8631 (set_attr "arch" "t2,t2,*")
8632 (set_attr "length" "6,6,8")]
;; DImode compare against zero: ORs the two halves into a scratch
;; with the S suffix so the Z flag reflects whether the whole value
;; is zero.
8635 (define_insn "*arm_cmpdi_zero"
8636 [(set (reg:CC_Z CC_REGNUM)
8637 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "r")
8639 (clobber (match_scratch:SI 1 "=r"))]
8641 "orr%.\\t%1, %Q0, %R0"
8642 [(set_attr "conds" "set")]
;; Thumb variant of *arm_cmpdi_zero: a 2-byte "orr" of both halves
;; into a low-register scratch sets the Z flag.
8645 (define_insn "*thumb_cmpdi_zero"
8646 [(set (reg:CC_Z CC_REGNUM)
8647 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "l")
8649 (clobber (match_scratch:SI 1 "=l"))]
8651 "orr\\t%1, %Q0, %R0"
8652 [(set_attr "conds" "set")
8653 (set_attr "length" "2")]
8656 ; This insn allows redundant compares to be removed by cse, nothing should
8657 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
8658 ; is deleted later on. The match_dup will match the mode here, so that
8659 ; mode changes of the condition codes aren't lost by this even though we don't
8660 ; specify what they are.
;; Zero-length placeholder: emits only an assembler comment; exists
;; so CSE can eliminate redundant compares (see comment above).
8662 (define_insn "*deleted_compare"
8663 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
8665 "\\t%@ deleted compare"
8666 [(set_attr "conds" "set")
8667 (set_attr "length" "0")]
8671 ;; Conditional branch insns
;; Generic conditional-branch expander: materializes the comparison
;; into a CC register with arm_gen_compare_reg and rewrites the
;; branch as a test of that register against zero.
8673 (define_expand "cbranch_cc"
8675 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
8676 (match_operand 2 "" "")])
8677 (label_ref (match_operand 3 "" ""))
8680 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
8681 operands[1], operands[2], NULL_RTX);
8682 operands[2] = const0_rtx;"
8686 ;; Patterns to match conditional branch insns.
8689 (define_insn "arm_cond_branch"
8691 (if_then_else (match_operator 1 "arm_comparison_operator"
8692 [(match_operand 2 "cc_register" "") (const_int 0)])
8693 (label_ref (match_operand 0 "" ""))
8697 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8699 arm_ccfsm_state += 2;
8702 return \"b%d1\\t%l0\";
8704 [(set_attr "conds" "use")
8705 (set_attr "type" "branch")
8706 (set (attr "length")
8708 (and (match_test "TARGET_THUMB2")
8709 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
8710 (le (minus (match_dup 0) (pc)) (const_int 256))))
;; As arm_cond_branch, but the label is in the else-arm, so the
;; branch is emitted with the reversed condition (%D1 instead of
;; %d1).
8715 (define_insn "*arm_cond_branch_reversed"
8717 (if_then_else (match_operator 1 "arm_comparison_operator"
8718 [(match_operand 2 "cc_register" "") (const_int 0)])
8720 (label_ref (match_operand 0 "" ""))))]
8723 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8725 arm_ccfsm_state += 2;
8728 return \"b%D1\\t%l0\";
8730 [(set_attr "conds" "use")
8731 (set_attr "type" "branch")
8732 (set (attr "length")
8734 (and (match_test "TARGET_THUMB2")
8735 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
8736 (le (minus (match_dup 0) (pc)) (const_int 256))))
;; Generic condition-store expander: materializes the comparison
;; into a CC register (arm_gen_compare_reg) and rewrites the store
;; as a test of that register against zero.
8745 (define_expand "cstore_cc"
8746 [(set (match_operand:SI 0 "s_register_operand" "")
8747 (match_operator:SI 1 "" [(match_operand 2 "" "")
8748 (match_operand 3 "" "")]))]
8750 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
8751 operands[2], operands[3], NULL_RTX);
8752 operands[3] = const0_rtx;"
;; Store comparison result as 0/1: split into a conditional move
;; (the commented template shows the intended mov%D1/mov%d1 pair).
8755 (define_insn_and_split "*mov_scc"
8756 [(set (match_operand:SI 0 "s_register_operand" "=r")
8757 (match_operator:SI 1 "arm_comparison_operator"
8758 [(match_operand 2 "cc_register" "") (const_int 0)]))]
8760 "#" ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
8763 (if_then_else:SI (match_dup 1)
8767 [(set_attr "conds" "use")
8768 (set_attr "length" "8")]
;; Store negated comparison result as 0/-1: operands[3] is built as
;; ~0 for the true arm of the split if_then_else.
8771 (define_insn_and_split "*mov_negscc"
8772 [(set (match_operand:SI 0 "s_register_operand" "=r")
8773 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
8774 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8776 "#" ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
8779 (if_then_else:SI (match_dup 1)
8783 operands[3] = GEN_INT (~0);
8785 [(set_attr "conds" "use")
8786 (set_attr "length" "8")]
;; Store bitwise-NOT of a comparison result (~1 or ~0): the split
;; builds both constants via GEN_INT for the if_then_else arms.
8789 (define_insn_and_split "*mov_notscc"
8790 [(set (match_operand:SI 0 "s_register_operand" "=r")
8791 (not:SI (match_operator:SI 1 "arm_comparison_operator"
8792 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8794 "#" ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
8797 (if_then_else:SI (match_dup 1)
8801 operands[3] = GEN_INT (~1);
8802 operands[4] = GEN_INT (~0);
8804 [(set_attr "conds" "use")
8805 (set_attr "length" "8")]
;; Expand an SImode condition-store.  On 32-bit targets this just
;; funnels into cstore_cc.  On Thumb-1 each comparison code gets an
;; open-coded, flag-free sequence built from add/ior/shift/not and
;; the helper patterns below (cstoresi_eq0_thumb1,
;; cstoresi_ne0_thumb1, thumb1_addsi3_addgeu, cstoresi_ltu_thumb1);
;; comparisons with no good sequence fall through to generic code.
8808 (define_expand "cstoresi4"
8809 [(set (match_operand:SI 0 "s_register_operand" "")
8810 (match_operator:SI 1 "expandable_comparison_operator"
8811 [(match_operand:SI 2 "s_register_operand" "")
8812 (match_operand:SI 3 "reg_or_int_operand" "")]))]
8813 "TARGET_32BIT || TARGET_THUMB1"
8815 rtx op3, scratch, scratch2;
8819 if (!arm_add_operand (operands[3], SImode))
8820 operands[3] = force_reg (SImode, operands[3]);
8821 emit_insn (gen_cstore_cc (operands[0], operands[1],
8822 operands[2], operands[3]));
;; Thumb-1: comparisons against zero first.
8826 if (operands[3] == const0_rtx)
8828 switch (GET_CODE (operands[1]))
8831 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
8835 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
;; LE 0: sign bit of ((x - 1) | x).
8839 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
8840 NULL_RTX, 0, OPTAB_WIDEN);
8841 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
8842 NULL_RTX, 0, OPTAB_WIDEN);
8843 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8844 operands[0], 1, OPTAB_WIDEN);
;; GE 0: sign bit of ~x.
8848 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
8850 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8851 NULL_RTX, 1, OPTAB_WIDEN);
8855 scratch = expand_binop (SImode, ashr_optab, operands[2],
8856 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
8857 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
8858 NULL_RTX, 0, OPTAB_WIDEN);
8859 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
8863 /* LT is handled by generic code. No good sequences for unsigned with 0. */
;; Non-zero second operand.
8870 switch (GET_CODE (operands[1]))
8873 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8874 NULL_RTX, 0, OPTAB_WIDEN);
8875 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
8879 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8880 NULL_RTX, 0, OPTAB_WIDEN);
8881 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
8885 op3 = force_reg (SImode, operands[3]);
8887 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
8888 NULL_RTX, 1, OPTAB_WIDEN);
8889 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
8890 NULL_RTX, 0, OPTAB_WIDEN);
8891 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8897 if (!thumb1_cmp_operand (op3, SImode))
8898 op3 = force_reg (SImode, op3);
8899 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
8900 NULL_RTX, 0, OPTAB_WIDEN);
8901 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
8902 NULL_RTX, 1, OPTAB_WIDEN);
8903 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8908 op3 = force_reg (SImode, operands[3]);
8909 scratch = force_reg (SImode, const0_rtx);
8910 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8916 if (!thumb1_cmp_operand (op3, SImode))
8917 op3 = force_reg (SImode, op3);
8918 scratch = force_reg (SImode, const0_rtx);
8919 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8925 if (!thumb1_cmp_operand (op3, SImode))
8926 op3 = force_reg (SImode, op3);
8927 scratch = gen_reg_rtx (SImode);
8928 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
8932 op3 = force_reg (SImode, operands[3]);
8933 scratch = gen_reg_rtx (SImode);
8934 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
8937 /* No good sequences for GT, LT. */
;; SFmode condition-store for 32-bit hard-float targets: delegates
;; to cstore_cc.
8944 (define_expand "cstoresf4"
8945 [(set (match_operand:SI 0 "s_register_operand" "")
8946 (match_operator:SI 1 "expandable_comparison_operator"
8947 [(match_operand:SF 2 "s_register_operand" "")
8948 (match_operand:SF 3 "arm_float_compare_operand" "")]))]
8949 "TARGET_32BIT && TARGET_HARD_FLOAT"
8950 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8951 operands[2], operands[3])); DONE;"
;; DFmode condition-store: as cstoresf4, but requires
;; double-precision VFP (!TARGET_VFP_SINGLE).
8954 (define_expand "cstoredf4"
8955 [(set (match_operand:SI 0 "s_register_operand" "")
8956 (match_operator:SI 1 "expandable_comparison_operator"
8957 [(match_operand:DF 2 "s_register_operand" "")
8958 (match_operand:DF 3 "arm_float_compare_operand" "")]))]
8959 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
8960 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8961 operands[2], operands[3])); DONE;"
;; DImode condition-store: validates the comparison with
;; arm_validize_comparison, then delegates to cstore_cc.
8964 (define_expand "cstoredi4"
8965 [(set (match_operand:SI 0 "s_register_operand" "")
8966 (match_operator:SI 1 "expandable_comparison_operator"
8967 [(match_operand:DI 2 "s_register_operand" "")
8968 (match_operand:DI 3 "cmpdi_operand" "")]))]
8971 if (!arm_validize_comparison (&operands[1],
8975 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
;; Thumb-1 helper expander for (x == 0): allocates the scratch
;; register that the *cstoresi_eq0_thumb1_insn pattern clobbers.
8981 (define_expand "cstoresi_eq0_thumb1"
8983 [(set (match_operand:SI 0 "s_register_operand" "")
8984 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8986 (clobber (match_dup:SI 2))])]
8988 "operands[2] = gen_reg_rtx (SImode);"
;; Thumb-1 helper expander for (x != 0): allocates the scratch
;; register that the *cstoresi_ne0_thumb1_insn pattern clobbers.
8991 (define_expand "cstoresi_ne0_thumb1"
8993 [(set (match_operand:SI 0 "s_register_operand" "")
8994 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8996 (clobber (match_dup:SI 2))])]
8998 "operands[2] = gen_reg_rtx (SImode);"
;; Thumb-1 (x == 0) as a flag-free neg/adc pair; the second
;; alternative ties the input to the output and uses the scratch.
9001 (define_insn "*cstoresi_eq0_thumb1_insn"
9002 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
9003 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
9005 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
9008 neg\\t%0, %1\;adc\\t%0, %0, %1
9009 neg\\t%2, %1\;adc\\t%0, %1, %2"
9010 [(set_attr "length" "4")]
;; Thumb-1 (x != 0) via "sub %2, %1, #1 ; sbc %0, %1, %2" — the
;; borrow from the subtract yields the 0/1 result without a branch.
9013 (define_insn "*cstoresi_ne0_thumb1_insn"
9014 [(set (match_operand:SI 0 "s_register_operand" "=l")
9015 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
9017 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
9019 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
9020 [(set_attr "length" "4")]
9023 ;; Used as part of the expansion of thumb ltu and gtu sequences
9024 (define_insn "cstoresi_nltu_thumb1"
9025 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
9026 (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
9027 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
9029 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
9030 [(set_attr "length" "4")]
;; (op1 <u op2) as 0/1: split into the negated form above followed
;; by a negation of the intermediate result in a fresh register.
9033 (define_insn_and_split "cstoresi_ltu_thumb1"
9034 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
9035 (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
9036 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
9041 (neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
9042 (set (match_dup 0) (neg:SI (match_dup 3)))]
9043 "operands[3] = gen_reg_rtx (SImode);"
9044 [(set_attr "length" "4")]
9047 ;; Used as part of the expansion of thumb les sequence.
9048 (define_insn "thumb1_addsi3_addgeu"
9049 [(set (match_operand:SI 0 "s_register_operand" "=l")
9050 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
9051 (match_operand:SI 2 "s_register_operand" "l"))
9052 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
9053 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
9055 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
9056 [(set_attr "length" "4")]
9060 ;; Conditional move insns
;; SImode conditional move expander: validates the comparison,
;; materializes it into a CC register with arm_gen_compare_reg, and
;; rewrites operand 1 as a test of that register against zero.
9062 (define_expand "movsicc"
9063 [(set (match_operand:SI 0 "s_register_operand" "")
9064 (if_then_else:SI (match_operand 1 "expandable_comparison_operator" "")
9065 (match_operand:SI 2 "arm_not_operand" "")
9066 (match_operand:SI 3 "arm_not_operand" "")))]
9073 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
9074 &XEXP (operands[1], 1)))
9077 code = GET_CODE (operands[1]);
9078 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
9079 XEXP (operands[1], 1), NULL_RTX);
9080 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
9084 (define_expand "movsfcc"
9085 [(set (match_operand:SF 0 "s_register_operand" "")
9086 (if_then_else:SF (match_operand 1 "arm_cond_move_operator" "")
9087 (match_operand:SF 2 "s_register_operand" "")
9088 (match_operand:SF 3 "s_register_operand" "")))]
9089 "TARGET_32BIT && TARGET_HARD_FLOAT"
9092 enum rtx_code code = GET_CODE (operands[1]);
9095 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
9096 &XEXP (operands[1], 1)))
9099 code = GET_CODE (operands[1]);
9100 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
9101 XEXP (operands[1], 1), NULL_RTX);
9102 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
9106 (define_expand "movdfcc"
9107 [(set (match_operand:DF 0 "s_register_operand" "")
9108 (if_then_else:DF (match_operand 1 "arm_cond_move_operator" "")
9109 (match_operand:DF 2 "s_register_operand" "")
9110 (match_operand:DF 3 "s_register_operand" "")))]
9111 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
9114 enum rtx_code code = GET_CODE (operands[1]);
9117 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
9118 &XEXP (operands[1], 1)))
9120 code = GET_CODE (operands[1]);
9121 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
9122 XEXP (operands[1], 1), NULL_RTX);
9123 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
9127 (define_insn "*cmov<mode>"
9128 [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
9129 (if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
9130 [(match_operand 2 "cc_register" "") (const_int 0)])
9131 (match_operand:SDF 3 "s_register_operand"
9133 (match_operand:SDF 4 "s_register_operand"
9134 "<F_constraint>")))]
9135 "TARGET_HARD_FLOAT && TARGET_FPU_ARMV8 <vfp_double_cond>"
9138 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
9145 return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
9150 return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
9156 [(set_attr "conds" "use")
9157 (set_attr "type" "f_sel<vfp_type>")]
;; NOTE(review): subsampled extract of an ARM GCC machine description;
;; interior lines of each definition are missing -- comments only.
;; SImode conditional move via predicated mov/mvn pairs.  After reload the
;; insn splits into two cond_exec SETs; the reverse condition is computed
;; with reverse_condition_maybe_unordered for FP CC modes (where plain
;; reversal is wrong in the presence of NaNs).
9160 (define_insn_and_split "*movsicc_insn"
9161 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
9163 (match_operator 3 "arm_comparison_operator"
9164 [(match_operand 4 "cc_register" "") (const_int 0)])
9165 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
9166 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
9177 ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
9178 ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
9179 ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
9180 ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
9181 "&& reload_completed"
9184 enum rtx_code rev_code;
9185 enum machine_mode mode;
9188 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9190 gen_rtx_SET (VOIDmode,
9194 rev_code = GET_CODE (operands[3]);
9195 mode = GET_MODE (operands[4]);
9196 if (mode == CCFPmode || mode == CCFPEmode)
9197 rev_code = reverse_condition_maybe_unordered (rev_code);
9199 rev_code = reverse_condition (rev_code);
9201 rev_cond = gen_rtx_fmt_ee (rev_code,
9205 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9207 gen_rtx_SET (VOIDmode,
9212 [(set_attr "length" "4,4,4,4,8,8,8,8")
9213 (set_attr "conds" "use")
9214 (set_attr_alternative "type"
9215 [(if_then_else (match_operand 2 "const_int_operand" "")
9216 (const_string "mov_imm")
9217 (const_string "mov_reg"))
9218 (const_string "mvn_imm")
9219 (if_then_else (match_operand 1 "const_int_operand" "")
9220 (const_string "mov_imm")
9221 (const_string "mov_reg"))
9222 (const_string "mvn_imm")
9223 (const_string "mov_reg")
9224 (const_string "mov_reg")
9225 (const_string "mov_reg")
9226 (const_string "mov_reg")])]
;; Soft-float SFmode conditional move: the SF value lives in core registers,
;; so a predicated integer mov suffices (ARM mode only).
9229 (define_insn "*movsfcc_soft_insn"
9230 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
9231 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
9232 [(match_operand 4 "cc_register" "") (const_int 0)])
9233 (match_operand:SF 1 "s_register_operand" "0,r")
9234 (match_operand:SF 2 "s_register_operand" "r,0")))]
9235 "TARGET_ARM && TARGET_SOFT_FLOAT"
9239 [(set_attr "conds" "use")
9240 (set_attr "type" "mov_reg")]
;; NOTE(review): subsampled extract of an ARM GCC machine description;
;; interior lines of each definition are missing -- comments only.
;; Unconditional jump patterns.  The ccfsm_state manipulation cooperates with
;; the ARM conditional-execution state machine in the backend; the length
;; attributes model short-vs-long branch encodings for Thumb.
9244 ;; Jump and linkage insns
9246 (define_expand "jump"
9248 (label_ref (match_operand 0 "" "")))]
9253 (define_insn "*arm_jump"
9255 (label_ref (match_operand 0 "" "")))]
9259 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
9261 arm_ccfsm_state += 2;
9264 return \"b%?\\t%l0\";
9267 [(set_attr "predicable" "yes")
9268 (set (attr "length")
9270 (and (match_test "TARGET_THUMB2")
9271 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
9272 (le (minus (match_dup 0) (pc)) (const_int 2048))))
;; Thumb-1 jump: falls back to "bl" as a far jump when the +-2KB short-branch
;; range (checked via the length attribute below) is exceeded.
9277 (define_insn "*thumb_jump"
9279 (label_ref (match_operand 0 "" "")))]
9282 if (get_attr_length (insn) == 2)
9284 return \"bl\\t%l0\\t%@ far jump\";
9286 [(set (attr "far_jump")
9288 (eq_attr "length" "4")
9289 (const_string "yes")
9290 (const_string "no")))
9291 (set (attr "length")
9293 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
9294 (le (minus (match_dup 0) (pc)) (const_int 2048)))
;; NOTE(review): subsampled extract of an ARM GCC machine description;
;; interior lines of each definition are missing -- comments only.
;; Plain (no return value) call expansion and insns.  The expander forces
;; long-call targets into a register; the insns are split by architecture:
;; armv5 (blx), pre-v5 ARM (output_call / output_call_mem helper sequences),
;; and Thumb-1 with/without interworking veneers.
9299 (define_expand "call"
9300 [(parallel [(call (match_operand 0 "memory_operand" "")
9301 (match_operand 1 "general_operand" ""))
9302 (use (match_operand 2 "" ""))
9303 (clobber (reg:SI LR_REGNUM))])]
9309 /* In an untyped call, we can get NULL for operand 2. */
9310 if (operands[2] == NULL_RTX)
9311 operands[2] = const0_rtx;
9313 /* Decide if we should generate indirect calls by loading the
9314 32-bit address of the callee into a register before performing the
9316 callee = XEXP (operands[0], 0);
9317 if (GET_CODE (callee) == SYMBOL_REF
9318 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
9320 XEXP (operands[0], 0) = force_reg (Pmode, callee);
9322 pat = gen_call_internal (operands[0], operands[1], operands[2]);
9323 arm_emit_call_insn (pat, XEXP (operands[0], 0));
9328 (define_expand "call_internal"
9329 [(parallel [(call (match_operand 0 "memory_operand" "")
9330 (match_operand 1 "general_operand" ""))
9331 (use (match_operand 2 "" ""))
9332 (clobber (reg:SI LR_REGNUM))])])
9334 (define_insn "*call_reg_armv5"
9335 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
9336 (match_operand 1 "" ""))
9337 (use (match_operand 2 "" ""))
9338 (clobber (reg:SI LR_REGNUM))]
9339 "TARGET_ARM && arm_arch5 && !SIBLING_CALL_P (insn)"
9341 [(set_attr "type" "call")]
9344 (define_insn "*call_reg_arm"
9345 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
9346 (match_operand 1 "" ""))
9347 (use (match_operand 2 "" ""))
9348 (clobber (reg:SI LR_REGNUM))]
9349 "TARGET_ARM && !arm_arch5 && !SIBLING_CALL_P (insn)"
9351 return output_call (operands);
9353 ;; length is worst case, normally it is only two
9354 [(set_attr "length" "12")
9355 (set_attr "type" "call")]
9359 ;; Note: not used for armv5+ because the sequence used (ldr pc, ...) is not
9360 ;; considered a function call by the branch predictor of some cores (PR40887).
9361 ;; Falls back to blx rN (*call_reg_armv5).
9363 (define_insn "*call_mem"
9364 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
9365 (match_operand 1 "" ""))
9366 (use (match_operand 2 "" ""))
9367 (clobber (reg:SI LR_REGNUM))]
9368 "TARGET_ARM && !arm_arch5 && !SIBLING_CALL_P (insn)"
9370 return output_call_mem (operands);
9372 [(set_attr "length" "12")
9373 (set_attr "type" "call")]
9376 (define_insn "*call_reg_thumb1_v5"
9377 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
9378 (match_operand 1 "" ""))
9379 (use (match_operand 2 "" ""))
9380 (clobber (reg:SI LR_REGNUM))]
9381 "TARGET_THUMB1 && arm_arch5 && !SIBLING_CALL_P (insn)"
9383 [(set_attr "length" "2")
9384 (set_attr "type" "call")]
;; Pre-v5 Thumb-1: when caller interworking is needed, route through the
;; libgcc __interwork_*_call_via_rN veneers chosen by frame-pointer usage.
9387 (define_insn "*call_reg_thumb1"
9388 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
9389 (match_operand 1 "" ""))
9390 (use (match_operand 2 "" ""))
9391 (clobber (reg:SI LR_REGNUM))]
9392 "TARGET_THUMB1 && !arm_arch5 && !SIBLING_CALL_P (insn)"
9395 if (!TARGET_CALLER_INTERWORKING)
9396 return thumb_call_via_reg (operands[0]);
9397 else if (operands[1] == const0_rtx)
9398 return \"bl\\t%__interwork_call_via_%0\";
9399 else if (frame_pointer_needed)
9400 return \"bl\\t%__interwork_r7_call_via_%0\";
9402 return \"bl\\t%__interwork_r11_call_via_%0\";
9404 [(set_attr "type" "call")]
;; NOTE(review): subsampled extract of an ARM GCC machine description;
;; interior lines of each definition are missing -- comments only.
;; Value-returning call patterns.  These mirror the plain "call" family above
;; with operand indices shifted by one (operand 0 is the result).
9407 (define_expand "call_value"
9408 [(parallel [(set (match_operand 0 "" "")
9409 (call (match_operand 1 "memory_operand" "")
9410 (match_operand 2 "general_operand" "")))
9411 (use (match_operand 3 "" ""))
9412 (clobber (reg:SI LR_REGNUM))])]
9418 /* In an untyped call, we can get NULL for operand 2. */
9419 if (operands[3] == 0)
9420 operands[3] = const0_rtx;
9422 /* Decide if we should generate indirect calls by loading the
9423 32-bit address of the callee into a register before performing the
9425 callee = XEXP (operands[1], 0);
9426 if (GET_CODE (callee) == SYMBOL_REF
9427 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
9429 XEXP (operands[1], 0) = force_reg (Pmode, callee);
9431 pat = gen_call_value_internal (operands[0], operands[1],
9432 operands[2], operands[3]);
9433 arm_emit_call_insn (pat, XEXP (operands[1], 0));
9438 (define_expand "call_value_internal"
9439 [(parallel [(set (match_operand 0 "" "")
9440 (call (match_operand 1 "memory_operand" "")
9441 (match_operand 2 "general_operand" "")))
9442 (use (match_operand 3 "" ""))
9443 (clobber (reg:SI LR_REGNUM))])])
9445 (define_insn "*call_value_reg_armv5"
9446 [(set (match_operand 0 "" "")
9447 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
9448 (match_operand 2 "" "")))
9449 (use (match_operand 3 "" ""))
9450 (clobber (reg:SI LR_REGNUM))]
9451 "TARGET_ARM && arm_arch5 && !SIBLING_CALL_P (insn)"
9453 [(set_attr "type" "call")]
9456 (define_insn "*call_value_reg_arm"
9457 [(set (match_operand 0 "" "")
9458 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
9459 (match_operand 2 "" "")))
9460 (use (match_operand 3 "" ""))
9461 (clobber (reg:SI LR_REGNUM))]
9462 "TARGET_ARM && !arm_arch5 && !SIBLING_CALL_P (insn)"
9464 return output_call (&operands[1]);
9466 [(set_attr "length" "12")
9467 (set_attr "type" "call")]
9470 ;; Note: see *call_mem
9472 (define_insn "*call_value_mem"
9473 [(set (match_operand 0 "" "")
9474 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
9475 (match_operand 2 "" "")))
9476 (use (match_operand 3 "" ""))
9477 (clobber (reg:SI LR_REGNUM))]
9478 "TARGET_ARM && !arm_arch5 && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))
9479 && !SIBLING_CALL_P (insn)"
9481 return output_call_mem (&operands[1]);
9483 [(set_attr "length" "12")
9484 (set_attr "type" "call")]
9487 (define_insn "*call_value_reg_thumb1_v5"
9488 [(set (match_operand 0 "" "")
9489 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
9490 (match_operand 2 "" "")))
9491 (use (match_operand 3 "" ""))
9492 (clobber (reg:SI LR_REGNUM))]
9493 "TARGET_THUMB1 && arm_arch5"
9495 [(set_attr "length" "2")
9496 (set_attr "type" "call")]
;; Pre-v5 Thumb-1 value call: same interworking-veneer selection as
;; *call_reg_thumb1, with operand numbering shifted by one.
9499 (define_insn "*call_value_reg_thumb1"
9500 [(set (match_operand 0 "" "")
9501 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
9502 (match_operand 2 "" "")))
9503 (use (match_operand 3 "" ""))
9504 (clobber (reg:SI LR_REGNUM))]
9505 "TARGET_THUMB1 && !arm_arch5"
9508 if (!TARGET_CALLER_INTERWORKING)
9509 return thumb_call_via_reg (operands[1]);
9510 else if (operands[2] == const0_rtx)
9511 return \"bl\\t%__interwork_call_via_%1\";
9512 else if (frame_pointer_needed)
9513 return \"bl\\t%__interwork_r7_call_via_%1\";
9515 return \"bl\\t%__interwork_r11_call_via_%1\";
9517 [(set_attr "type" "call")]
;; NOTE(review): subsampled extract of an ARM GCC machine description;
;; interior lines of each definition are missing -- comments only.
;; Direct calls to SYMBOL_REFs.  The %a output modifier prints the operand as
;; an address (no '#'), and PLT relocation suffixes are added when needed.
9520 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
9521 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
9523 (define_insn "*call_symbol"
9524 [(call (mem:SI (match_operand:SI 0 "" ""))
9525 (match_operand 1 "" ""))
9526 (use (match_operand 2 "" ""))
9527 (clobber (reg:SI LR_REGNUM))]
9529 && !SIBLING_CALL_P (insn)
9530 && (GET_CODE (operands[0]) == SYMBOL_REF)
9531 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
9534 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
9536 [(set_attr "type" "call")]
9539 (define_insn "*call_value_symbol"
9540 [(set (match_operand 0 "" "")
9541 (call (mem:SI (match_operand:SI 1 "" ""))
9542 (match_operand:SI 2 "" "")))
9543 (use (match_operand 3 "" ""))
9544 (clobber (reg:SI LR_REGNUM))]
9546 && !SIBLING_CALL_P (insn)
9547 && (GET_CODE (operands[1]) == SYMBOL_REF)
9548 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
9551 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
9553 [(set_attr "type" "call")]
9556 (define_insn "*call_insn"
9557 [(call (mem:SI (match_operand:SI 0 "" ""))
9558 (match_operand:SI 1 "" ""))
9559 (use (match_operand 2 "" ""))
9560 (clobber (reg:SI LR_REGNUM))]
9562 && GET_CODE (operands[0]) == SYMBOL_REF
9563 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
9565 [(set_attr "length" "4")
9566 (set_attr "type" "call")]
9569 (define_insn "*call_value_insn"
9570 [(set (match_operand 0 "" "")
9571 (call (mem:SI (match_operand 1 "" ""))
9572 (match_operand 2 "" "")))
9573 (use (match_operand 3 "" ""))
9574 (clobber (reg:SI LR_REGNUM))]
9576 && GET_CODE (operands[1]) == SYMBOL_REF
9577 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
9579 [(set_attr "length" "4")
9580 (set_attr "type" "call")]
;; NOTE(review): subsampled extract of an ARM GCC machine description;
;; interior lines of each definition are missing -- comments only.
;; Sibling (tail) call patterns, 32-bit only.  Non-register, non-symbol call
;; targets are forced into a register; operand targets emit either a direct
;; branch (symbol alternative), bx (v5/v4t), or a mov to pc (older cores).
9583 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
9584 (define_expand "sibcall"
9585 [(parallel [(call (match_operand 0 "memory_operand" "")
9586 (match_operand 1 "general_operand" ""))
9588 (use (match_operand 2 "" ""))])]
9592 if (!REG_P (XEXP (operands[0], 0))
9593 && (GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF))
9594 XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
9596 if (operands[2] == NULL_RTX)
9597 operands[2] = const0_rtx;
9601 (define_expand "sibcall_value"
9602 [(parallel [(set (match_operand 0 "" "")
9603 (call (match_operand 1 "memory_operand" "")
9604 (match_operand 2 "general_operand" "")))
9606 (use (match_operand 3 "" ""))])]
9610 if (!REG_P (XEXP (operands[1], 0)) &&
9611 (GET_CODE (XEXP (operands[1],0)) != SYMBOL_REF))
9612 XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
9614 if (operands[3] == NULL_RTX)
9615 operands[3] = const0_rtx;
9619 (define_insn "*sibcall_insn"
9620 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs, US"))
9621 (match_operand 1 "" ""))
9623 (use (match_operand 2 "" ""))]
9624 "TARGET_32BIT && SIBLING_CALL_P (insn)"
9626 if (which_alternative == 1)
9627 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
9630 if (arm_arch5 || arm_arch4t)
9631 return \"bx%?\\t%0\\t%@ indirect register sibling call\";
9633 return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
9636 [(set_attr "type" "call")]
9639 (define_insn "*sibcall_value_insn"
9640 [(set (match_operand 0 "" "")
9641 (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,US"))
9642 (match_operand 2 "" "")))
9644 (use (match_operand 3 "" ""))]
9645 "TARGET_32BIT && SIBLING_CALL_P (insn)"
9647 if (which_alternative == 1)
9648 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
9651 if (arm_arch5 || arm_arch4t)
9652 return \"bx%?\\t%1\";
9654 return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
9657 [(set_attr "type" "call")]
;; NOTE(review): subsampled extract of an ARM GCC machine description;
;; interior lines of each definition are missing -- comments only.
;; Function return patterns (plain, conditional, inverted-condition, simple)
;; generated through output_return_instruction; iterator substitutions
;; (<return_str> etc.) produce the return/simple_return variants.
9660 (define_expand "<return_str>return"
9662 "(TARGET_ARM || (TARGET_THUMB2
9663 && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
9664 && !IS_STACKALIGN (arm_current_func_type ())))
9665 <return_cond_false>"
9670 thumb2_expand_return (<return_simple_p>);
9677 ;; Often the return insn will be the same as loading from memory, so set attr
9678 (define_insn "*arm_return"
9680 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
9683 if (arm_ccfsm_state == 2)
9685 arm_ccfsm_state += 2;
9688 return output_return_instruction (const_true_rtx, true, false, false);
9690 [(set_attr "type" "load1")
9691 (set_attr "length" "12")
9692 (set_attr "predicable" "yes")]
9695 (define_insn "*cond_<return_str>return"
9697 (if_then_else (match_operator 0 "arm_comparison_operator"
9698 [(match_operand 1 "cc_register" "") (const_int 0)])
9701 "TARGET_ARM <return_cond_true>"
9704 if (arm_ccfsm_state == 2)
9706 arm_ccfsm_state += 2;
9709 return output_return_instruction (operands[0], true, false,
9712 [(set_attr "conds" "use")
9713 (set_attr "length" "12")
9714 (set_attr "type" "load1")]
9717 (define_insn "*cond_<return_str>return_inverted"
9719 (if_then_else (match_operator 0 "arm_comparison_operator"
9720 [(match_operand 1 "cc_register" "") (const_int 0)])
9723 "TARGET_ARM <return_cond_true>"
9726 if (arm_ccfsm_state == 2)
9728 arm_ccfsm_state += 2;
9731 return output_return_instruction (operands[0], true, true,
9734 [(set_attr "conds" "use")
9735 (set_attr "length" "12")
9736 (set_attr "type" "load1")]
9739 (define_insn "*arm_simple_return"
9744 if (arm_ccfsm_state == 2)
9746 arm_ccfsm_state += 2;
9749 return output_return_instruction (const_true_rtx, true, false, true);
9751 [(set_attr "type" "branch")
9752 (set_attr "length" "4")
9753 (set_attr "predicable" "yes")]
;; 26-bit vs 32-bit mode probe: teq pc,pc sets Z only in 32-bit mode, and the
;; result selects the return-address mask (0x03fffffc strips the PSR bits on
;; 26-bit cores).
9756 ;; Generate a sequence of instructions to determine if the processor is
9757 ;; in 26-bit or 32-bit mode, and return the appropriate return address
9760 (define_expand "return_addr_mask"
9762 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
9764 (set (match_operand:SI 0 "s_register_operand" "")
9765 (if_then_else:SI (eq (match_dup 1) (const_int 0))
9767 (const_int 67108860)))] ; 0x03fffffc
9770 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
9773 (define_insn "*check_arch2"
9774 [(set (match_operand:CC_NOOV 0 "cc_register" "")
9775 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
9778 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
9779 [(set_attr "length" "8")
9780 (set_attr "conds" "set")]
;; NOTE(review): subsampled extract of an ARM GCC machine description;
;; interior lines of each definition are missing -- comments only.
;; __builtin_apply support: call a function of unknown signature, spill all
;; possible return registers (r0-r3 as one TImode store-multiple, plus FP
;; regs) into a result block, then reload them for untyped_return.
9783 ;; Call subroutine returning any type.
9785 (define_expand "untyped_call"
9786 [(parallel [(call (match_operand 0 "" "")
9788 (match_operand 1 "" "")
9789 (match_operand 2 "" "")])]
9794 rtx par = gen_rtx_PARALLEL (VOIDmode,
9795 rtvec_alloc (XVECLEN (operands[2], 0)));
9796 rtx addr = gen_reg_rtx (Pmode);
9800 emit_move_insn (addr, XEXP (operands[1], 0));
9801 mem = change_address (operands[1], BLKmode, addr);
9803 for (i = 0; i < XVECLEN (operands[2], 0); i++)
9805 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
9807 /* Default code only uses r0 as a return value, but we could
9808 be using anything up to 4 registers. */
9809 if (REGNO (src) == R0_REGNUM)
9810 src = gen_rtx_REG (TImode, R0_REGNUM);
9812 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
9814 size += GET_MODE_SIZE (GET_MODE (src));
9817 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
9822 for (i = 0; i < XVECLEN (par, 0); i++)
9824 HOST_WIDE_INT offset = 0;
9825 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
9828 emit_move_insn (addr, plus_constant (Pmode, addr, size));
9830 mem = change_address (mem, GET_MODE (reg), NULL);
9831 if (REGNO (reg) == R0_REGNUM)
9833 /* On thumb we have to use a write-back instruction. */
9834 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
9835 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
9836 size = TARGET_ARM ? 16 : 0;
9840 emit_move_insn (mem, reg);
9841 size = GET_MODE_SIZE (GET_MODE (reg));
9845 /* The optimizer does not know that the call sets the function value
9846 registers we stored in the result block. We avoid problems by
9847 claiming that all hard registers are used and clobbered at this
9849 emit_insn (gen_blockage ());
;; Inverse of untyped_call: reload the saved return-value block back into the
;; result registers, emit USEs so they stay live, then a naked return.
9855 (define_expand "untyped_return"
9856 [(match_operand:BLK 0 "memory_operand" "")
9857 (match_operand 1 "" "")]
9862 rtx addr = gen_reg_rtx (Pmode);
9866 emit_move_insn (addr, XEXP (operands[0], 0));
9867 mem = change_address (operands[0], BLKmode, addr);
9869 for (i = 0; i < XVECLEN (operands[1], 0); i++)
9871 HOST_WIDE_INT offset = 0;
9872 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
9875 emit_move_insn (addr, plus_constant (Pmode, addr, size));
9877 mem = change_address (mem, GET_MODE (reg), NULL);
9878 if (REGNO (reg) == R0_REGNUM)
9880 /* On thumb we have to use a write-back instruction. */
9881 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
9882 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
9883 size = TARGET_ARM ? 16 : 0;
9887 emit_move_insn (reg, mem);
9888 size = GET_MODE_SIZE (GET_MODE (reg));
9892 /* Emit USE insns before the return. */
9893 for (i = 0; i < XVECLEN (operands[1], 0); i++)
9894 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
9896 /* Construct the return. */
9897 expand_naked_return ();
9903 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
9904 ;; all of memory. This blocks insns from being moved across this point.
9906 (define_insn "blockage"
9907 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
9910 [(set_attr "length" "0")
9911 (set_attr "type" "block")]
;; NOTE(review): subsampled extract of an ARM GCC machine description;
;; interior lines of each definition are missing -- comments only.
;; Switch-statement dispatch.  The expander biases the index by the lower
;; bound, then dispatches to the per-subtarget internal casesi pattern
;; (ARM / Thumb-1 PIC / Thumb-2 PIC / Thumb-2).
9914 (define_expand "casesi"
9915 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
9916 (match_operand:SI 1 "const_int_operand" "") ; lower bound
9917 (match_operand:SI 2 "const_int_operand" "") ; total range
9918 (match_operand:SI 3 "" "") ; table label
9919 (match_operand:SI 4 "" "")] ; Out of range label
9920 "TARGET_32BIT || optimize_size || flag_pic"
9923 enum insn_code code;
9924 if (operands[1] != const0_rtx)
9926 rtx reg = gen_reg_rtx (SImode);
9928 emit_insn (gen_addsi3 (reg, operands[0],
9929 gen_int_mode (-INTVAL (operands[1]),
9935 code = CODE_FOR_arm_casesi_internal;
9936 else if (TARGET_THUMB1)
9937 code = CODE_FOR_thumb1_casesi_internal_pic;
9939 code = CODE_FOR_thumb2_casesi_internal_pic;
9941 code = CODE_FOR_thumb2_casesi_internal;
9943 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
9944 operands[2] = force_reg (SImode, operands[2]);
9946 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
9947 operands[3], operands[4]));
9952 ;; The USE in this pattern is needed to tell flow analysis that this is
9953 ;; a CASESI insn. It has no other purpose.
9954 (define_insn "arm_casesi_internal"
9955 [(parallel [(set (pc)
9957 (leu (match_operand:SI 0 "s_register_operand" "r")
9958 (match_operand:SI 1 "arm_rhs_operand" "rI"))
9959 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
9960 (label_ref (match_operand 2 "" ""))))
9961 (label_ref (match_operand 3 "" ""))))
9962 (clobber (reg:CC CC_REGNUM))
9963 (use (label_ref (match_dup 2)))])]
9967 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
9968 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
9970 [(set_attr "conds" "clob")
9971 (set_attr "length" "12")]
;; Thumb-1 PIC dispatch: bounds-check via cbranchsi4, copy the index into r0,
;; then jump through the out-of-line thumb1_output_casesi helper.
9974 (define_expand "thumb1_casesi_internal_pic"
9975 [(match_operand:SI 0 "s_register_operand" "")
9976 (match_operand:SI 1 "thumb1_cmp_operand" "")
9977 (match_operand 2 "" "")
9978 (match_operand 3 "" "")]
9982 rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
9983 emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
9985 reg0 = gen_rtx_REG (SImode, 0);
9986 emit_move_insn (reg0, operands[0]);
9987 emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
9992 (define_insn "thumb1_casesi_dispatch"
9993 [(parallel [(set (pc) (unspec [(reg:SI 0)
9994 (label_ref (match_operand 0 "" ""))
9995 ;; (label_ref (match_operand 1 "" ""))
9997 UNSPEC_THUMB1_CASESI))
9998 (clobber (reg:SI IP_REGNUM))
9999 (clobber (reg:SI LR_REGNUM))])]
10001 "* return thumb1_output_casesi(operands);"
10002 [(set_attr "length" "4")]
;; NOTE(review): subsampled extract of an ARM GCC machine description;
;; interior lines of each definition are missing -- comments only.
;; Indirect jumps.  The expander ORs in bit 0 of the target for Thumb-2 (bx
;; needs the Thumb state bit); the ARM insns deliberately avoid BX so the
;; jump never switches instruction set.
10005 (define_expand "indirect_jump"
10007 (match_operand:SI 0 "s_register_operand" ""))]
10010 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
10011 address and use bx. */
10015 tmp = gen_reg_rtx (SImode);
10016 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
10022 ;; NB Never uses BX.
10023 (define_insn "*arm_indirect_jump"
10025 (match_operand:SI 0 "s_register_operand" "r"))]
10027 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
10028 [(set_attr "predicable" "yes")]
10031 (define_insn "*load_indirect_jump"
10033 (match_operand:SI 0 "memory_operand" "m"))]
10035 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
10036 [(set_attr "type" "load1")
10037 (set_attr "pool_range" "4096")
10038 (set_attr "neg_pool_range" "4084")
10039 (set_attr "predicable" "yes")]
10042 ;; NB Never uses BX.
10043 (define_insn "*thumb1_indirect_jump"
10045 (match_operand:SI 0 "register_operand" "l*r"))]
10048 [(set_attr "conds" "clob")
10049 (set_attr "length" "2")]
;; Fragment below (original lines 10059+) appears to be the tail of a nop
;; pattern -- presumably "nop"; its define_insn header is missing from this
;; extract, so treat it as incomplete.
10059 if (TARGET_UNIFIED_ASM)
10062 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
10063 return \"mov\\tr8, r8\";
10065 [(set (attr "length")
10066 (if_then_else (eq_attr "is_thumb" "yes")
;; NOTE(review): subsampled extract of an ARM GCC machine description;
;; interior lines of each definition are missing -- comments only.
;; Shifted-operand arithmetic: combine a shift with an ALU op in one insn
;; ("%i1%?\t%0, %2, %4%S3" prints op, dest, reg operand, shifted operand),
;; plus compare-setting (%.) and scratch-compare variants, and the
;; corresponding minus-with-shift forms.
10072 ;; Patterns to allow combination of arithmetic, cond code and shifts
10074 (define_insn "*arith_shiftsi"
10075 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10076 (match_operator:SI 1 "shiftable_operator"
10077 [(match_operator:SI 3 "shift_operator"
10078 [(match_operand:SI 4 "s_register_operand" "r,r,r,r")
10079 (match_operand:SI 5 "shift_amount_operand" "M,M,M,r")])
10080 (match_operand:SI 2 "s_register_operand" "rk,rk,r,rk")]))]
10082 "%i1%?\\t%0, %2, %4%S3"
10083 [(set_attr "predicable" "yes")
10084 (set_attr "shift" "4")
10085 (set_attr "arch" "a,t2,t2,a")
10086 ;; Thumb2 doesn't allow the stack pointer to be used for
10087 ;; operand1 for all operations other than add and sub. In this case
10088 ;; the minus operation is a candidate for an rsub and hence needs
10090 ;; We have to make sure to disable the fourth alternative if
10091 ;; the shift_operator is MULT, since otherwise the insn will
10092 ;; also match a multiply_accumulate pattern and validate_change
10093 ;; will allow a replacement of the constant with a register
10094 ;; despite the checks done in shift_operator.
10095 (set_attr_alternative "insn_enabled"
10096 [(const_string "yes")
10098 (match_operand:SI 1 "add_operator" "")
10099 (const_string "yes") (const_string "no"))
10100 (const_string "yes")
10102 (match_operand:SI 3 "mult_operator" "")
10103 (const_string "no") (const_string "yes"))])
10104 (set_attr "type" "arlo_shift,arlo_shift,arlo_shift,arlo_shift_reg")])
;; Splitter: two nested shiftable ops with an inner shift are rewritten
;; through the scratch operand 8 so each half matches *arith_shiftsi.
10107 [(set (match_operand:SI 0 "s_register_operand" "")
10108 (match_operator:SI 1 "shiftable_operator"
10109 [(match_operator:SI 2 "shiftable_operator"
10110 [(match_operator:SI 3 "shift_operator"
10111 [(match_operand:SI 4 "s_register_operand" "")
10112 (match_operand:SI 5 "reg_or_int_operand" "")])
10113 (match_operand:SI 6 "s_register_operand" "")])
10114 (match_operand:SI 7 "arm_rhs_operand" "")]))
10115 (clobber (match_operand:SI 8 "s_register_operand" ""))]
10117 [(set (match_dup 8)
10118 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
10121 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
10124 (define_insn "*arith_shiftsi_compare0"
10125 [(set (reg:CC_NOOV CC_REGNUM)
10127 (match_operator:SI 1 "shiftable_operator"
10128 [(match_operator:SI 3 "shift_operator"
10129 [(match_operand:SI 4 "s_register_operand" "r,r")
10130 (match_operand:SI 5 "shift_amount_operand" "M,r")])
10131 (match_operand:SI 2 "s_register_operand" "r,r")])
10133 (set (match_operand:SI 0 "s_register_operand" "=r,r")
10134 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
10137 "%i1%.\\t%0, %2, %4%S3"
10138 [(set_attr "conds" "set")
10139 (set_attr "shift" "4")
10140 (set_attr "arch" "32,a")
10141 (set_attr "type" "arlo_shift,arlo_shift_reg")])
10143 (define_insn "*arith_shiftsi_compare0_scratch"
10144 [(set (reg:CC_NOOV CC_REGNUM)
10146 (match_operator:SI 1 "shiftable_operator"
10147 [(match_operator:SI 3 "shift_operator"
10148 [(match_operand:SI 4 "s_register_operand" "r,r")
10149 (match_operand:SI 5 "shift_amount_operand" "M,r")])
10150 (match_operand:SI 2 "s_register_operand" "r,r")])
10152 (clobber (match_scratch:SI 0 "=r,r"))]
10154 "%i1%.\\t%0, %2, %4%S3"
10155 [(set_attr "conds" "set")
10156 (set_attr "shift" "4")
10157 (set_attr "arch" "32,a")
10158 (set_attr "type" "arlo_shift,arlo_shift_reg")])
10160 (define_insn "*sub_shiftsi"
10161 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10162 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
10163 (match_operator:SI 2 "shift_operator"
10164 [(match_operand:SI 3 "s_register_operand" "r,r")
10165 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
10167 "sub%?\\t%0, %1, %3%S2"
10168 [(set_attr "predicable" "yes")
10169 (set_attr "shift" "3")
10170 (set_attr "arch" "32,a")
10171 (set_attr "type" "arlo_shift,arlo_shift_reg")])
10173 (define_insn "*sub_shiftsi_compare0"
10174 [(set (reg:CC_NOOV CC_REGNUM)
10176 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
10177 (match_operator:SI 2 "shift_operator"
10178 [(match_operand:SI 3 "s_register_operand" "r,r")
10179 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
10181 (set (match_operand:SI 0 "s_register_operand" "=r,r")
10182 (minus:SI (match_dup 1)
10183 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
10185 "sub%.\\t%0, %1, %3%S2"
10186 [(set_attr "conds" "set")
10187 (set_attr "shift" "3")
10188 (set_attr "arch" "32,a")
10189 (set_attr "type" "arlo_shift,arlo_shift_reg")])
10191 (define_insn "*sub_shiftsi_compare0_scratch"
10192 [(set (reg:CC_NOOV CC_REGNUM)
10194 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
10195 (match_operator:SI 2 "shift_operator"
10196 [(match_operand:SI 3 "s_register_operand" "r,r")
10197 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
10199 (clobber (match_scratch:SI 0 "=r,r"))]
10201 "sub%.\\t%0, %1, %3%S2"
10202 [(set_attr "conds" "set")
10203 (set_attr "shift" "3")
10204 (set_attr "arch" "32,a")
10205 (set_attr "type" "arlo_shift,arlo_shift_reg")])
;; NOTE(review): subsampled extract of an ARM GCC machine description;
;; interior lines of each definition are missing -- comments only.
;; AND/IOR of a condition result with a register, as insn-and-split pairs.
;; Both build operands[4] (same condition, VOIDmode) and operands[5] (the
;; reversed condition -- via reverse_condition_maybe_unordered for FP CC
;; modes) and split into two cond_exec SETs after reload.
10208 (define_insn_and_split "*and_scc"
10209 [(set (match_operand:SI 0 "s_register_operand" "=r")
10210 (and:SI (match_operator:SI 1 "arm_comparison_operator"
10211 [(match_operand 2 "cc_register" "") (const_int 0)])
10212 (match_operand:SI 3 "s_register_operand" "r")))]
10214 "#" ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
10215 "&& reload_completed"
10216 [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
10217 (cond_exec (match_dup 4) (set (match_dup 0)
10218 (and:SI (match_dup 3) (const_int 1))))]
10220 enum machine_mode mode = GET_MODE (operands[2]);
10221 enum rtx_code rc = GET_CODE (operands[1]);
10223 /* Note that operands[4] is the same as operands[1],
10224 but with VOIDmode as the result. */
10225 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
10226 if (mode == CCFPmode || mode == CCFPEmode)
10227 rc = reverse_condition_maybe_unordered (rc);
10229 rc = reverse_condition (rc);
10230 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
10232 [(set_attr "conds" "use")
10233 (set_attr "type" "mov_reg")
10234 (set_attr "length" "8")]
10237 (define_insn_and_split "*ior_scc"
10238 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10239 (ior:SI (match_operator:SI 1 "arm_comparison_operator"
10240 [(match_operand 2 "cc_register" "") (const_int 0)])
10241 (match_operand:SI 3 "s_register_operand" "0,?r")))]
10244 orr%d1\\t%0, %3, #1
10246 "&& reload_completed
10247 && REGNO (operands [0]) != REGNO (operands[3])"
10248 ;; && which_alternative == 1
10249 ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
10250 [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
10251 (cond_exec (match_dup 4) (set (match_dup 0)
10252 (ior:SI (match_dup 3) (const_int 1))))]
10254 enum machine_mode mode = GET_MODE (operands[2]);
10255 enum rtx_code rc = GET_CODE (operands[1]);
10257 /* Note that operands[4] is the same as operands[1],
10258 but with VOIDmode as the result. */
10259 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
10260 if (mode == CCFPmode || mode == CCFPEmode)
10261 rc = reverse_condition_maybe_unordered (rc);
10263 rc = reverse_condition (rc);
10264 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
10266 [(set_attr "conds" "use")
10267 (set_attr "length" "4,8")]
10270 ; A series of splitters for the compare_scc pattern below.  Note that
10271 ; order is important.
;; NOTE(review): the (define_split ...) opener lines themselves (10272,
;; 10280, 10289, 10303, ...) are missing from this excerpt; only the match
;; patterns and replacement RTL are visible.  Keep the ordering intact.
;; Splitter 1: x < 0 is just the sign bit -- a single logical shift right
;; by 31 (no flags needed).
10273   [(set (match_operand:SI 0 "s_register_operand" "")
10274 	(lt:SI (match_operand:SI 1 "s_register_operand" "")
10276    (clobber (reg:CC CC_REGNUM))]
10277   "TARGET_32BIT && reload_completed"
10278   [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
;; Splitter 2: x >= 0 is the complement of the sign bit: MVN then LSR #31.
10281   [(set (match_operand:SI 0 "s_register_operand" "")
10282 	(ge:SI (match_operand:SI 1 "s_register_operand" "")
10284    (clobber (reg:CC CC_REGNUM))]
10285   "TARGET_32BIT && reload_completed"
10286   [(set (match_dup 0) (not:SI (match_dup 1)))
10287    (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
;; Splitter 3: x == 0 via a compare against 1 and conditional execution on
;; the carry result (LTU zeroes the destination).
10290   [(set (match_operand:SI 0 "s_register_operand" "")
10291 	(eq:SI (match_operand:SI 1 "s_register_operand" "")
10293    (clobber (reg:CC CC_REGNUM))]
10294   "TARGET_32BIT && reload_completed"
10296   [(set (reg:CC CC_REGNUM)
10297 	(compare:CC (const_int 1) (match_dup 1)))
10299 		   (minus:SI (const_int 1) (match_dup 1)))])
10300    (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
10301 	      (set (match_dup 0) (const_int 0)))])
;; Splitter 4: x != const -- add the negated constant so the flags and the
;; provisional result come from one SUBS-like insn, then force 1 if NE.
10304   [(set (match_operand:SI 0 "s_register_operand" "")
10305 	(ne:SI (match_operand:SI 1 "s_register_operand" "")
10306 	       (match_operand:SI 2 "const_int_operand" "")))
10307    (clobber (reg:CC CC_REGNUM))]
10308   "TARGET_32BIT && reload_completed"
10310   [(set (reg:CC CC_REGNUM)
10311 	(compare:CC (match_dup 1) (match_dup 2)))
10312 	       (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
10313    (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
10314 	      (set (match_dup 0) (const_int 1)))]
;; operands[3] is the negated comparison constant used by the PLUS above.
10316   operands[3] = GEN_INT (-INTVAL (operands[2]));
;; Splitter 5: general x != y -- SUBS produces both the flags and the
;; difference; overwrite with 1 when the result is non-zero.
10320   [(set (match_operand:SI 0 "s_register_operand" "")
10321 	(ne:SI (match_operand:SI 1 "s_register_operand" "")
10322 	       (match_operand:SI 2 "arm_add_operand" "")))
10323    (clobber (reg:CC CC_REGNUM))]
10324   "TARGET_32BIT && reload_completed"
10326   [(set (reg:CC_NOOV CC_REGNUM)
10327 	(compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
10329 	       (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
10330    (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
10331 	      (set (match_dup 0) (const_int 1)))])
;; *compare_scc: store the 1/0 truth value of CMP %2, %3 in register %0.
;; Split after reload into a compare plus two conditionally executed moves
;; (false -> 0 via operands[4], true -> 1 via operands[5]); the splitters
;; above catch the cheaper special cases first.
;; NOTE(review): the insn condition / template lines (10339-10340, 10345-
;; 10346, and the tail after 10358) are missing from this excerpt.
10333 (define_insn_and_split "*compare_scc"
10334   [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
10335 	(match_operator:SI 1 "arm_comparison_operator"
10336 	 [(match_operand:SI 2 "s_register_operand" "r,r")
10337 	  (match_operand:SI 3 "arm_add_operand" "rI,L")]))
10338    (clobber (reg:CC CC_REGNUM))]
10341   "&& reload_completed"
10342   [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
10343    (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
10344    (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
;; Pick the CC mode appropriate for this comparison, then build the
;; straight (operands[5]) and reversed (operands[4]) condition rtxes.
10347     enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10348 					     operands[2], operands[3]);
10349     enum rtx_code rc = GET_CODE (operands[1]);
10351     tmp1 = gen_rtx_REG (mode, CC_REGNUM);
10353     operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
;; FP compares must use the unordered-aware reversal.
10354     if (mode == CCFPmode || mode == CCFPEmode)
10355       rc = reverse_condition_maybe_unordered (rc);
10357       rc = reverse_condition (rc);
10358     operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
10361 ;; Attempt to improve the sequence generated by the compare_scc splitters
10362 ;; not to use conditional execution.
;; Rewrites CMP + two cond_exec moves into SUBS / RSBS(-like) / ADC
;; arithmetic that materialises the 0/1 without predication, using the
;; scratch register (operand 3).  NOTE(review): the define_peephole2 opener
;; and its condition (10363, 10372-10373, etc.) are missing from this view.
10364   [(set (reg:CC CC_REGNUM)
10365 	(compare:CC (match_operand:SI 1 "register_operand" "")
10366 		    (match_operand:SI 2 "arm_rhs_operand" "")))
10367    (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
10368 	      (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
10369    (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
10370 	      (set (match_dup 0) (const_int 1)))
10371    (match_scratch:SI 3 "r")]
;; Replacement: flags-setting subtract into the scratch ...
10374   [(set (reg:CC CC_REGNUM)
10375 	(compare:CC (match_dup 1) (match_dup 2)))
10376    (set (match_dup 3) (minus:SI (match_dup 1) (match_dup 2)))])
;; ... negate it while comparing against zero ...
10378   [(set (reg:CC CC_REGNUM)
10379 	(compare:CC (const_int 0) (match_dup 3)))
10380    (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
;; ... and fold in the carry (GEU) to leave exactly 0 or 1 in operand 0.
10382   [(set (match_dup 0)
10383 	(plus:SI (plus:SI (match_dup 0) (match_dup 3))
10384 		 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))
10385    (clobber (reg:CC CC_REGNUM))])])
;; *cond_move: conditional select between two rhs operands based on an
;; equality test of an existing CC-register condition (operand 4 in 5).
;; Emits one or two predicated MOVs; alternatives 0/1 tie an input to the
;; destination so only one MOV is needed (length 4,4,8).
;; NOTE(review): insn condition/template lines around 10392 and 10395-10410
;; are missing from this excerpt.
10387 (define_insn "*cond_move"
10388   [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10389 	(if_then_else:SI (match_operator 3 "equality_operator"
10390 			  [(match_operator 4 "arm_comparison_operator"
10391 			    [(match_operand 5 "cc_register" "") (const_int 0)])
10393 			 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
10394 			 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
;; NE swaps which arm is taken on the true condition relative to EQ.
10397     if (GET_CODE (operands[3]) == NE)
10399         if (which_alternative != 1)
10400           output_asm_insn (\"mov%D4\\t%0, %2\", operands);
10401         if (which_alternative != 0)
10402           output_asm_insn (\"mov%d4\\t%0, %1\", operands);
10405         if (which_alternative != 0)
10406           output_asm_insn (\"mov%D4\\t%0, %1\", operands);
10407         if (which_alternative != 1)
10408           output_asm_insn (\"mov%d4\\t%0, %2\", operands);
10411   [(set_attr "conds" "use")
10412    (set_attr "type" "mov_reg")
10413    (set_attr "length" "4,4,8")]
;; *cond_arith: apply a shiftable operator to a register and the 1/0 value
;; of a comparison, clobbering the flags.  LT-against-zero is special-cased
;; as an <op> with the sign bit (lsr #31), avoiding the compare entirely.
;; NOTE(review): the insn condition/template lines around 10424-10425 and
;; the closing brace region 10437 are missing from this excerpt.
10416 (define_insn "*cond_arith"
10417   [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10418         (match_operator:SI 5 "shiftable_operator" 
10419 	 [(match_operator:SI 4 "arm_comparison_operator"
10420            [(match_operand:SI 2 "s_register_operand" "r,r")
10421 	    (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
10422           (match_operand:SI 1 "s_register_operand" "0,?r")]))
10423    (clobber (reg:CC CC_REGNUM))]
10426     if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
10427       return \"%i5\\t%0, %1, %2, lsr #31\";
10429     output_asm_insn (\"cmp\\t%2, %3\", operands);
;; AND needs a 0 false-arm; MINUS needs the negated register; otherwise a
;; plain conditional move covers the non-tied alternative.
10430     if (GET_CODE (operands[5]) == AND)
10431       output_asm_insn (\"mov%D4\\t%0, #0\", operands);
10432     else if (GET_CODE (operands[5]) == MINUS)
10433       output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
10434     else if (which_alternative != 0)
10435       output_asm_insn (\"mov%D4\\t%0, %1\", operands);
10436     return \"%i5%d4\\t%0, %1, #1\";
10438   [(set_attr "conds" "clob")
10439    (set_attr "length" "12")]
;; *cond_sub: subtract the 1/0 result of a comparison from a register.
;; Emits CMP then a conditional SUB #1 (plus a MOV when the destination is
;; not tied to operand 1); clobbers the flags.
;; NOTE(review): insn condition/template lines around 10449-10450 and the
;; closing region 10455 are missing from this excerpt.
10442 (define_insn "*cond_sub"
10443   [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10444         (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
10445 		  (match_operator:SI 4 "arm_comparison_operator"
10446                    [(match_operand:SI 2 "s_register_operand" "r,r")
10447 		    (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
10448    (clobber (reg:CC CC_REGNUM))]
10451     output_asm_insn (\"cmp\\t%2, %3\", operands);
10452     if (which_alternative != 0)
10453       output_asm_insn (\"mov%D4\\t%0, %1\", operands);
10454     return \"sub%d4\\t%0, %1, #1\";
10456   [(set_attr "conds" "clob")
10457    (set_attr "length" "8,12")]
;; *cmp_ite0: set a dominance CC register from an if-then-else of two
;; comparisons (the "...else 0" form).  Emits a compare + conditional
;; compare pair (CMP/CMN chosen per-operand by cmp_idx), with an IT block
;; on Thumb-2.  `swap` records whether op5 dominates op4 so the pair can
;; be emitted in either order.
;; NOTE(review): several interior lines (the if_then_else wrapper around
;; 10462-10463, parts of cmp2/ite tables, and the length attribute arms)
;; are missing from this excerpt.
10460 (define_insn "*cmp_ite0"
10461   [(set (match_operand 6 "dominant_cc_register" "")
10464 	   (match_operator 4 "arm_comparison_operator"
10465 	    [(match_operand:SI 0 "s_register_operand"
10466 	        "l,l,l,r,r,r,r,r,r")
10467 	     (match_operand:SI 1 "arm_add_operand"
10468 	        "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
10469 	   (match_operator:SI 5 "arm_comparison_operator"
10470 	    [(match_operand:SI 2 "s_register_operand"
10471 	        "l,r,r,l,l,r,r,r,r")
10472 	     (match_operand:SI 3 "arm_add_operand"
10473 	        "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
;; cmp1: the predicated second compare, per CMP/CMN combination and swap.
10479     static const char * const cmp1[NUM_OF_COND_CMP][2] =
10481       {\"cmp%d5\\t%0, %1\",
10482        \"cmp%d4\\t%2, %3\"},
10483       {\"cmn%d5\\t%0, #%n1\",
10484        \"cmp%d4\\t%2, %3\"},
10485       {\"cmp%d5\\t%0, %1\",
10486        \"cmn%d4\\t%2, #%n3\"},
10487       {\"cmn%d5\\t%0, #%n1\",
10488        \"cmn%d4\\t%2, #%n3\"}
;; cmp2: the unconditional first compare (table partially elided here).
10490     static const char * const cmp2[NUM_OF_COND_CMP][2] =
10495        \"cmn\\t%0, #%n1\"},
10496       {\"cmn\\t%2, #%n3\",
10498       {\"cmn\\t%2, #%n3\",
10499        \"cmn\\t%0, #%n1\"}
10501     static const char * const ite[2] =
;; cmp_idx maps the 9 constraint alternatives onto CMP/CMN pairings.
10506     static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10507                                    CMP_CMP, CMN_CMP, CMP_CMP,
10508                                    CMN_CMP, CMP_CMN, CMN_CMN};
10510       comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10512     output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10513     if (TARGET_THUMB2) {
10514       output_asm_insn (ite[swap], operands);
10516     output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10519   [(set_attr "conds" "set")
10520    (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10521    (set_attr_alternative "length"
10527       (if_then_else (eq_attr "is_thumb" "no")
10530       (if_then_else (eq_attr "is_thumb" "no")
10533       (if_then_else (eq_attr "is_thumb" "no")
10536       (if_then_else (eq_attr "is_thumb" "no")
;; *cmp_ite1: companion to *cmp_ite0 for the "...else 1" if-then-else of
;; two comparisons.  Here the first compare is unconditional (cmp1) and
;; the second is predicated on %d4/%D5 (cmp2); dominance is tested against
;; the REVERSED first condition.
;; NOTE(review): interior lines (wrapper around 10543-10544, parts of the
;; cmp1/ite tables, and length attribute arms) are missing from this view.
10541 (define_insn "*cmp_ite1"
10542   [(set (match_operand 6 "dominant_cc_register" "")
10545 	   (match_operator 4 "arm_comparison_operator"
10546 	    [(match_operand:SI 0 "s_register_operand"
10547 	        "l,l,l,r,r,r,r,r,r")
10548 	     (match_operand:SI 1 "arm_add_operand"
10549 	        "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
10550 	   (match_operator:SI 5 "arm_comparison_operator"
10551 	    [(match_operand:SI 2 "s_register_operand"
10552 	        "l,r,r,l,l,r,r,r,r")
10553 	     (match_operand:SI 3 "arm_add_operand"
10554 	        "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
;; cmp1: unconditional CMP/CMN (table partially elided in this excerpt).
10560     static const char * const cmp1[NUM_OF_COND_CMP][2] =
10564       {\"cmn\\t%0, #%n1\",
10567        \"cmn\\t%2, #%n3\"},
10568       {\"cmn\\t%0, #%n1\",
10569        \"cmn\\t%2, #%n3\"}
;; cmp2: the predicated compare; %d4 on swap=0, %D5 on swap=1.
10571     static const char * const cmp2[NUM_OF_COND_CMP][2] =
10573       {\"cmp%d4\\t%2, %3\",
10574        \"cmp%D5\\t%0, %1\"},
10575       {\"cmp%d4\\t%2, %3\",
10576        \"cmn%D5\\t%0, #%n1\"},
10577       {\"cmn%d4\\t%2, #%n3\",
10578        \"cmp%D5\\t%0, %1\"},
10579       {\"cmn%d4\\t%2, #%n3\",
10580        \"cmn%D5\\t%0, #%n1\"}
10582     static const char * const ite[2] =
10587     static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10588                                    CMP_CMP, CMN_CMP, CMP_CMP,
10589                                    CMN_CMP, CMP_CMN, CMN_CMN};
10591       comparison_dominates_p (GET_CODE (operands[5]),
10592 			      reverse_condition (GET_CODE (operands[4])));
10594     output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10595     if (TARGET_THUMB2) {
10596       output_asm_insn (ite[swap], operands);
10598     output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10601   [(set_attr "conds" "set")
10602    (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10603    (set_attr_alternative "length"
10609       (if_then_else (eq_attr "is_thumb" "no")
10612       (if_then_else (eq_attr "is_thumb" "no")
10615       (if_then_else (eq_attr "is_thumb" "no")
10618       (if_then_else (eq_attr "is_thumb" "no")
;; *cmp_and: set a dominance CC register from the AND of two comparisons.
;; Same compare/conditional-compare emission scheme as *cmp_ite0 (cmp2
;; unconditional first, cmp1 predicated, IT block on Thumb-2).
;; NOTE(review): interior lines (the and wrapper around 10625-10626, parts
;; of the cmp2/ite tables, and length arms) are missing from this excerpt.
10623 (define_insn "*cmp_and"
10624   [(set (match_operand 6 "dominant_cc_register" "")
10627 	   (match_operator 4 "arm_comparison_operator"
10628 	    [(match_operand:SI 0 "s_register_operand" 
10629 	        "l,l,l,r,r,r,r,r,r")
10630 	     (match_operand:SI 1 "arm_add_operand" 
10631 	        "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
10632 	   (match_operator:SI 5 "arm_comparison_operator"
10633 	    [(match_operand:SI 2 "s_register_operand" 
10634 	        "l,r,r,l,l,r,r,r,r")
10635 	     (match_operand:SI 3 "arm_add_operand" 
10636 	        "lPy,rI,L,lPy,lPy,rI,rI,L,L")]))
10641     static const char *const cmp1[NUM_OF_COND_CMP][2] =
10643       {\"cmp%d5\\t%0, %1\",
10644        \"cmp%d4\\t%2, %3\"},
10645       {\"cmn%d5\\t%0, #%n1\",
10646        \"cmp%d4\\t%2, %3\"},
10647       {\"cmp%d5\\t%0, %1\",
10648        \"cmn%d4\\t%2, #%n3\"},
10649       {\"cmn%d5\\t%0, #%n1\",
10650        \"cmn%d4\\t%2, #%n3\"}
10652     static const char *const cmp2[NUM_OF_COND_CMP][2] =
10657        \"cmn\\t%0, #%n1\"},
10658       {\"cmn\\t%2, #%n3\",
10660       {\"cmn\\t%2, #%n3\",
10661        \"cmn\\t%0, #%n1\"}
10663     static const char *const ite[2] =
10668     static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10669                                    CMP_CMP, CMN_CMP, CMP_CMP,
10670                                    CMN_CMP, CMP_CMN, CMN_CMN};
10672       comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10674     output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10675     if (TARGET_THUMB2) {
10676       output_asm_insn (ite[swap], operands);
10678     output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10681   [(set_attr "conds" "set")
10682    (set_attr "predicable" "no")
10683    (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10684    (set_attr_alternative "length"
10690       (if_then_else (eq_attr "is_thumb" "no")
10693       (if_then_else (eq_attr "is_thumb" "no")
10696       (if_then_else (eq_attr "is_thumb" "no")
10699       (if_then_else (eq_attr "is_thumb" "no")
;; *cmp_ior: set a dominance CC register from the IOR of two comparisons.
;; Like *cmp_ite1, the second compare runs under the INVERSE (%D4/%D5) of
;; the first condition -- only needed when the first compare failed.
;; NOTE(review): interior lines (the ior wrapper around 10706-10707, parts
;; of the cmp1/ite tables, and length arms) are missing from this excerpt.
10704 (define_insn "*cmp_ior"
10705   [(set (match_operand 6 "dominant_cc_register" "")
10708 	   (match_operator 4 "arm_comparison_operator"
10709 	    [(match_operand:SI 0 "s_register_operand"
10710 	        "l,l,l,r,r,r,r,r,r")
10711 	     (match_operand:SI 1 "arm_add_operand"
10712 	        "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
10713 	   (match_operator:SI 5 "arm_comparison_operator"
10714 	    [(match_operand:SI 2 "s_register_operand"
10715 	        "l,r,r,l,l,r,r,r,r")
10716 	     (match_operand:SI 3 "arm_add_operand"
10717 	        "lPy,rI,L,lPy,lPy,rI,rI,L,L")]))
10722     static const char *const cmp1[NUM_OF_COND_CMP][2] =
10726       {\"cmn\\t%0, #%n1\",
10729        \"cmn\\t%2, #%n3\"},
10730       {\"cmn\\t%0, #%n1\",
10731        \"cmn\\t%2, #%n3\"}
10733     static const char *const cmp2[NUM_OF_COND_CMP][2] =
10735       {\"cmp%D4\\t%2, %3\",
10736        \"cmp%D5\\t%0, %1\"},
10737       {\"cmp%D4\\t%2, %3\",
10738        \"cmn%D5\\t%0, #%n1\"},
10739       {\"cmn%D4\\t%2, #%n3\",
10740        \"cmp%D5\\t%0, %1\"},
10741       {\"cmn%D4\\t%2, #%n3\",
10742        \"cmn%D5\\t%0, #%n1\"}
10744     static const char *const ite[2] =
10749     static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10750                                    CMP_CMP, CMN_CMP, CMP_CMP,
10751                                    CMN_CMP, CMP_CMN, CMN_CMN};
10753       comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10755     output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10756     if (TARGET_THUMB2) {
10757       output_asm_insn (ite[swap], operands);
10759     output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10763   [(set_attr "conds" "set")
10764    (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10765    (set_attr_alternative "length"
10771       (if_then_else (eq_attr "is_thumb" "no")
10774       (if_then_else (eq_attr "is_thumb" "no")
10777       (if_then_else (eq_attr "is_thumb" "no")
10780       (if_then_else (eq_attr "is_thumb" "no")
;; *ior_scc_scc: materialise (cmp1 || cmp2) as 0/1 in a register.  Only
;; matched when the two comparisons share a dominance CC mode; splits into
;; a dominance-CC set followed by an NE test of that CC register.
;; NOTE(review): lines 10794, 10796-10797, 10800-10801, 10804, 10806,
;; 10808-10809 (condition, split wrapper, prep tail) are missing here.
10785 (define_insn_and_split "*ior_scc_scc"
10786   [(set (match_operand:SI 0 "s_register_operand" "=Ts")
10787 	(ior:SI (match_operator:SI 3 "arm_comparison_operator"
10788 		 [(match_operand:SI 1 "s_register_operand" "r")
10789 		  (match_operand:SI 2 "arm_add_operand" "rIL")])
10790 		(match_operator:SI 6 "arm_comparison_operator"
10791 		 [(match_operand:SI 4 "s_register_operand" "r")
10792 		  (match_operand:SI 5 "arm_add_operand" "rIL")])))
10793    (clobber (reg:CC CC_REGNUM))]
10795    && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
10798   "TARGET_32BIT && reload_completed"
10799   [(set (match_dup 7)
10802 	  (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10803 	  (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10805    (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
10807     = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
10810   [(set_attr "conds" "clob")
10811    (set_attr "length" "16")])
10813 ; If the above pattern is followed by a CMP insn, then the compare is
10814 ; redundant, since we can rework the conditional instruction that follows.
;; *ior_scc_scc_cmp: combined form that sets both the dominance CC register
;; (operand 0) and the 0/1 result (operand 7), so the trailing compare can
;; be dropped.  NOTE(review): lines 10823, 10827-10828, 10831-10832, 10835,
;; 10837 are missing from this excerpt.
10815 (define_insn_and_split "*ior_scc_scc_cmp"
10816   [(set (match_operand 0 "dominant_cc_register" "")
10817 	(compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
10818 			  [(match_operand:SI 1 "s_register_operand" "r")
10819 			   (match_operand:SI 2 "arm_add_operand" "rIL")])
10820 			 (match_operator:SI 6 "arm_comparison_operator"
10821 			  [(match_operand:SI 4 "s_register_operand" "r")
10822 			   (match_operand:SI 5 "arm_add_operand" "rIL")]))
10824    (set (match_operand:SI 7 "s_register_operand" "=Ts")
10825 	(ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10826 		(match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
10829   "TARGET_32BIT && reload_completed"
10830   [(set (match_dup 0)
10833 	  (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10834 	  (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10836    (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
10838   [(set_attr "conds" "set")
10839    (set_attr "length" "16")])
;; *and_scc_scc: materialise (cmp1 && cmp2) as 0/1, AND analogue of
;; *ior_scc_scc; requires a DOM_CC_X_AND_Y dominance CC mode and splits
;; into the dominance-CC set plus an NE test.
;; NOTE(review): lines 10850, 10852-10853, 10856, 10858-10859, 10862,
;; 10864, 10866-10867 are missing from this excerpt.
10841 (define_insn_and_split "*and_scc_scc"
10842   [(set (match_operand:SI 0 "s_register_operand" "=Ts")
10843 	(and:SI (match_operator:SI 3 "arm_comparison_operator"
10844 		 [(match_operand:SI 1 "s_register_operand" "r")
10845 		  (match_operand:SI 2 "arm_add_operand" "rIL")])
10846 		(match_operator:SI 6 "arm_comparison_operator"
10847 		 [(match_operand:SI 4 "s_register_operand" "r")
10848 		  (match_operand:SI 5 "arm_add_operand" "rIL")])))
10849    (clobber (reg:CC CC_REGNUM))]
10851    && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10854   "TARGET_32BIT && reload_completed
10855    && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10857   [(set (match_dup 7)
10860 	  (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10861 	  (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10863    (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
10865     = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
10868   [(set_attr "conds" "clob")
10869    (set_attr "length" "16")])
10871 ; If the above pattern is followed by a CMP insn, then the compare is
10872 ; redundant, since we can rework the conditional instruction that follows.
;; *and_scc_scc_cmp: AND analogue of *ior_scc_scc_cmp -- produces both the
;; dominance CC result and the 0/1 register value in one pattern.
;; NOTE(review): lines 10881, 10885-10886, 10889-10890, 10893, 10895 are
;; missing from this excerpt.
10873 (define_insn_and_split "*and_scc_scc_cmp"
10874   [(set (match_operand 0 "dominant_cc_register" "")
10875 	(compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
10876 			  [(match_operand:SI 1 "s_register_operand" "r")
10877 			   (match_operand:SI 2 "arm_add_operand" "rIL")])
10878 			 (match_operator:SI 6 "arm_comparison_operator"
10879 			  [(match_operand:SI 4 "s_register_operand" "r")
10880 			   (match_operand:SI 5 "arm_add_operand" "rIL")]))
10882    (set (match_operand:SI 7 "s_register_operand" "=Ts")
10883 	(and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10884 		(match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
10887   "TARGET_32BIT && reload_completed"
10888   [(set (match_dup 0)
10891 	  (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10892 	  (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10894    (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
10896   [(set_attr "conds" "set")
10897    (set_attr "length" "16")])
10899 ;; If there is no dominance in the comparison, then we can still save an
10900 ;; instruction in the AND case, since we can know that the second compare
10901 ;; need only zero the value if false (if true, then the value is already
;; *and_scc_scc_nodom: AND of two comparisons with NO dominance CC mode.
;; Splits into: scc for the first compare, a real compare for the second,
;; then a conditional zeroing of the result.  Destination is earlyclobber.
;; NOTE(review): lines 10902, 10912, 10914-10915, 10921, 10923-10924,
;; 10927, 10929 are missing from this excerpt.
10903 (define_insn_and_split "*and_scc_scc_nodom"
10904   [(set (match_operand:SI 0 "s_register_operand" "=&Ts,&Ts,&Ts")
10905 	(and:SI (match_operator:SI 3 "arm_comparison_operator"
10906 		 [(match_operand:SI 1 "s_register_operand" "r,r,0")
10907 		  (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
10908 		(match_operator:SI 6 "arm_comparison_operator"
10909 		 [(match_operand:SI 4 "s_register_operand" "r,r,r")
10910 		  (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
10911    (clobber (reg:CC CC_REGNUM))]
10913    && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10916   "TARGET_32BIT && reload_completed"
10917   [(parallel [(set (match_dup 0)
10918 		   (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
10919 	      (clobber (reg:CC CC_REGNUM))])
10920    (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
10922 	(if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
;; operands[7] is the CC register in the mode chosen for the second
;; comparison; operands[8] is the corresponding COMPARE rtx.
10925   "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
10926 					      operands[4], operands[5]),
10928    operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
10930   [(set_attr "conds" "clob")
10931    (set_attr "length" "20")])
;; Two splitters for a CC_NOOV compare of (ior (and reg ...) (comparison)):
;; compute the IOR of the scc value into the scratch (operand 4), then
;; re-test bit 0 of the scratch.  The second splitter is the commuted form
;; (comparison first, AND second).
;; NOTE(review): the define_split opener lines (10933, 10952) and several
;; interior lines (10937, 10941, 10943, 10946, 10949-10951, ...) are
;; missing from this excerpt.
10934   [(set (reg:CC_NOOV CC_REGNUM)
10935 	(compare:CC_NOOV (ior:SI
10936 			  (and:SI (match_operand:SI 0 "s_register_operand" "")
10938 			  (match_operator:SI 1 "arm_comparison_operator"
10939 			   [(match_operand:SI 2 "s_register_operand" "")
10940 			    (match_operand:SI 3 "arm_add_operand" "")]))
10942    (clobber (match_operand:SI 4 "s_register_operand" ""))]
10944   [(set (match_dup 4)
10945 	(ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
10947    (set (reg:CC_NOOV CC_REGNUM)
10948 	(compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
;; Commuted variant: (ior (comparison) (and reg ...)).
10953   [(set (reg:CC_NOOV CC_REGNUM)
10954 	(compare:CC_NOOV (ior:SI
10955 			  (match_operator:SI 1 "arm_comparison_operator"
10956 			   [(match_operand:SI 2 "s_register_operand" "")
10957 			    (match_operand:SI 3 "arm_add_operand" "")])
10958 			  (and:SI (match_operand:SI 0 "s_register_operand" "")
10961    (clobber (match_operand:SI 4 "s_register_operand" ""))]
10963   [(set (match_dup 4)
10964 	(ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
10966    (set (reg:CC_NOOV CC_REGNUM)
10967 	(compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
10970 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
;; *negscc: set %0 to -(comparison %1 %2), i.e. 0 or all-ones.  The split
;; emits one of three sequences: ASR #31 for LT-against-zero, SUBS + MVNne
;; for NE, or CMP + two conditional moves for the general case.
;; NOTE(review): many interior lines of the emitted-RTL C code (10978-10979,
;; 10981-10984, 10986, 10989-10994, 10996, 11001, 11003, 11006-11014,
;; 11017, 11020, 11024-11026, 11031-11040) are missing from this excerpt.
10972 (define_insn_and_split "*negscc"
10973   [(set (match_operand:SI 0 "s_register_operand" "=r")
10974 	(neg:SI (match_operator 3 "arm_comparison_operator"
10975 		 [(match_operand:SI 1 "s_register_operand" "r")
10976 		  (match_operand:SI 2 "arm_rhs_operand" "rI")])))
10977    (clobber (reg:CC CC_REGNUM))]
10980   "&& reload_completed"
10983     rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
;; Case 1: LT against zero -- arithmetic shift right by 31 replicates the
;; sign bit into every bit of the destination.
10985     if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
10987         /* Emit mov\\t%0, %1, asr #31 */
10988         emit_insn (gen_rtx_SET (VOIDmode,
10990                                 gen_rtx_ASHIFTRT (SImode,
;; Case 2: NE -- flags-setting subtract, then MVN #0 when non-equal.
10995     else if (GET_CODE (operands[3]) == NE)
10997         /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
10998         if (CONST_INT_P (operands[2]))
10999           emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
11000                                         GEN_INT (- INTVAL (operands[2]))));
11002           emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
11004         emit_insn (gen_rtx_COND_EXEC (VOIDmode,
11005                                       gen_rtx_NE (SImode,
11008                                       gen_rtx_SET (SImode,
;; Case 3 (general): compare, clear on the reversed condition, set all-ones
;; on the original condition.
11015         /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
11016         emit_insn (gen_rtx_SET (VOIDmode,
11018                                 gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
11019         enum rtx_code rc = GET_CODE (operands[3]);
11021           rc = reverse_condition (rc);
11022         emit_insn (gen_rtx_COND_EXEC (VOIDmode,
11023                                       gen_rtx_fmt_ee (rc,
11027                                       gen_rtx_SET (VOIDmode, operands[0], const0_rtx)));
11028         rc = GET_CODE (operands[3]);
11029         emit_insn (gen_rtx_COND_EXEC (VOIDmode,
11030                                       gen_rtx_fmt_ee (rc,
11034                                       gen_rtx_SET (VOIDmode,
11041   [(set_attr "conds" "clob")
11042    (set_attr "length" "12")]
;; movcond: general conditional move (if_then_else on a fresh comparison
;; %3 <op5> %4), clobbering the flags.  Special-cases LT/GE against zero
;; using AND/BIC with the sign bit (asr #31 / flag-setting asr #32) to
;; skip the compare; otherwise emits CMP-or-CMN plus predicated MOVs.
;; NOTE(review): interior lines (the if_then_else wrapper 11047, insn
;; condition/template around 11054-11055, several braces, and the tail
;; 11102-11103) are missing from this excerpt.
11045 (define_insn "movcond"
11046   [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11048 	 (match_operator 5 "arm_comparison_operator"
11049 	  [(match_operand:SI 3 "s_register_operand" "r,r,r")
11050 	   (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
11051 	 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
11052 	 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
11053    (clobber (reg:CC CC_REGNUM))]
;; LT x, #0: select with the sign bit directly.
11056   if (GET_CODE (operands[5]) == LT
11057       && (operands[4] == const0_rtx))
11059       if (which_alternative != 1 && REG_P (operands[1]))
11061 	  if (operands[2] == const0_rtx)
11062 	    return \"and\\t%0, %1, %3, asr #31\";
11063 	  return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
11065       else if (which_alternative != 0 && REG_P (operands[2]))
11067 	  if (operands[1] == const0_rtx)
11068 	    return \"bic\\t%0, %2, %3, asr #31\";
11069 	  return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
11071       /* The only case that falls through to here is when both ops 1 & 2
;; GE x, #0: mirror image of the LT case (BIC/AND roles swapped).
11075   if (GET_CODE (operands[5]) == GE
11076       && (operands[4] == const0_rtx))
11078       if (which_alternative != 1 && REG_P (operands[1]))
11080 	  if (operands[2] == const0_rtx)
11081 	    return \"bic\\t%0, %1, %3, asr #31\";
11082 	  return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
11084       else if (which_alternative != 0 && REG_P (operands[2]))
11086 	  if (operands[1] == const0_rtx)
11087 	    return \"and\\t%0, %2, %3, asr #31\";
11088 	  return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
11090       /* The only case that falls through to here is when both ops 1 & 2
;; General case: CMN when the constant's negation is encodable, else CMP,
;; then one predicated MOV per untied arm.
11093   if (CONST_INT_P (operands[4])
11094       && !const_ok_for_arm (INTVAL (operands[4])))
11095     output_asm_insn (\"cmn\\t%3, #%n4\", operands);
11097     output_asm_insn (\"cmp\\t%3, %4\", operands);
11098   if (which_alternative != 0)
11099     output_asm_insn (\"mov%d5\\t%0, %1\", operands);
11100   if (which_alternative != 1)
11101     output_asm_insn (\"mov%D5\\t%0, %2\", operands);
11104   [(set_attr "conds" "clob")
11105    (set_attr "length" "8,8,12")]
11108 ;; ??? The patterns below need checking for Thumb-2 usefulness.
;; *ifcompare_plus_move: if (%4 <op6> %5) then %2 + %3 else %1, with a
;; fresh compare (flags clobbered).  The insn template and condition lines
;; (11115, 11120-11121) are missing from this excerpt -- presumably the
;; usual "#"/combine-splitter arrangement; verify against the full file.
11110 (define_insn "*ifcompare_plus_move"
11111   [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11112 	(if_then_else:SI (match_operator 6 "arm_comparison_operator"
11113 			  [(match_operand:SI 4 "s_register_operand" "r,r")
11114 			   (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11116 			  (match_operand:SI 2 "s_register_operand" "r,r")
11117 			  (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
11118 			 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
11119    (clobber (reg:CC CC_REGNUM))]
11122   [(set_attr "conds" "clob")
11123    (set_attr "length" "8,12")]
;; *if_plus_move: same selection as *ifcompare_plus_move but the condition
;; is already in a CC register, so only predicated ADD/SUB (and possibly a
;; MOV for the untied alternatives) are emitted; flags are only read.
;; NOTE(review): lines 11128, 11131, 11135-11136, 11148 are missing.
11126 (define_insn "*if_plus_move"
11127   [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
11129 	 (match_operator 4 "arm_comparison_operator"
11130 	  [(match_operand 5 "cc_register" "") (const_int 0)])
11132 	  (match_operand:SI 2 "s_register_operand" "r,r,r,r")
11133 	  (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
11134 	 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
11137    add%d4\\t%0, %2, %3
11138    sub%d4\\t%0, %2, #%n3
11139    add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
11140    sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
11141   [(set_attr "conds" "use")
11142    (set_attr "length" "4,4,8,8")
11143    (set_attr_alternative "type"
11144                          [(if_then_else (match_operand 3 "const_int_operand" "")
11145                                         (const_string "arlo_imm" )
11146                                         (const_string "*"))
11147                           (const_string "arlo_imm")
11149                           (const_string "*")])]
;; *ifcompare_move_plus: mirror of *ifcompare_plus_move with the arms
;; swapped -- if (%4 <op6> %5) then %1 else %2 + %3; fresh compare, flags
;; clobbered.  NOTE(review): lines 11158, 11162-11163 (the plus wrapper and
;; insn condition/template) are missing from this excerpt.
11152 (define_insn "*ifcompare_move_plus"
11153   [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11154 	(if_then_else:SI (match_operator 6 "arm_comparison_operator"
11155 			  [(match_operand:SI 4 "s_register_operand" "r,r")
11156 			   (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11157 			 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
11159 			  (match_operand:SI 2 "s_register_operand" "r,r")
11160 			  (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
11161    (clobber (reg:CC CC_REGNUM))]
11164   [(set_attr "conds" "clob")
11165    (set_attr "length" "8,12")]
;; *if_move_plus: CC-register form of *ifcompare_move_plus -- predicated
;; ADD/SUB on the FALSE condition (%D4), plus a MOV on the true condition
;; for the untied alternatives; flags only read.
;; NOTE(review): lines 11170, 11174, 11177-11178, 11190 are missing.
11168 (define_insn "*if_move_plus"
11169   [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
11171 	 (match_operator 4 "arm_comparison_operator"
11172 	  [(match_operand 5 "cc_register" "") (const_int 0)])
11173 	 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
11175 	  (match_operand:SI 2 "s_register_operand" "r,r,r,r")
11176 	  (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
11179    add%D4\\t%0, %2, %3
11180    sub%D4\\t%0, %2, #%n3
11181    add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
11182    sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
11183   [(set_attr "conds" "use")
11184    (set_attr "length" "4,4,8,8")
11185    (set_attr_alternative "type"
11186                          [(if_then_else (match_operand 3 "const_int_operand" "")
11187                                         (const_string "arlo_imm" )
11188                                         (const_string "*"))
11189                           (const_string "arlo_imm")
11191                           (const_string "*")])]
;; *ifcompare_arith_arith: select between two shiftable-operator results
;; based on a fresh comparison of %5 against %6; flags clobbered, 12 bytes
;; (compare plus two predicated ALU ops).
;; NOTE(review): the insn condition/template lines 11206-11207 are missing.
11194 (define_insn "*ifcompare_arith_arith"
11195   [(set (match_operand:SI 0 "s_register_operand" "=r")
11196 	(if_then_else:SI (match_operator 9 "arm_comparison_operator"
11197 			  [(match_operand:SI 5 "s_register_operand" "r")
11198 			   (match_operand:SI 6 "arm_add_operand" "rIL")])
11199 			 (match_operator:SI 8 "shiftable_operator"
11200 			  [(match_operand:SI 1 "s_register_operand" "r")
11201 			   (match_operand:SI 2 "arm_rhs_operand" "rI")])
11202 			 (match_operator:SI 7 "shiftable_operator"
11203 			  [(match_operand:SI 3 "s_register_operand" "r")
11204 			   (match_operand:SI 4 "arm_rhs_operand" "rI")])))
11205    (clobber (reg:CC CC_REGNUM))]
11208   [(set_attr "conds" "clob")
11209    (set_attr "length" "12")]
;; *if_arith_arith: CC-register form of the above -- two predicated ALU
;; ops back to back (%d5 then %D5), no compare needed; flags only read.
;; NOTE(review): the insn condition line 11222 is missing from this view.
11212 (define_insn "*if_arith_arith"
11213   [(set (match_operand:SI 0 "s_register_operand" "=r")
11214 	(if_then_else:SI (match_operator 5 "arm_comparison_operator"
11215 			  [(match_operand 8 "cc_register" "") (const_int 0)])
11216 			 (match_operator:SI 6 "shiftable_operator"
11217 			  [(match_operand:SI 1 "s_register_operand" "r")
11218 			   (match_operand:SI 2 "arm_rhs_operand" "rI")])
11219 			 (match_operator:SI 7 "shiftable_operator"
11220 			  [(match_operand:SI 3 "s_register_operand" "r")
11221 			   (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
11223   "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
11224   [(set_attr "conds" "use")
11225    (set_attr "length" "8")]
;; *ifcompare_arith_move: if (%2 <op6> %3) then (%4 <op7> %5) else %1.
;; When comparing against zero with LT/GE, an identity operator, and the
;; right register tying, the sign-bit AND/BIC trick does it in two insns;
;; otherwise CMP/CMN plus a predicated ALU op (and MOV for alternative 1).
;; NOTE(review): lines 11238-11239, 11249, 11254, 11258, 11263-11264 are
;; missing from this excerpt (condition, braces, final return).
11228 (define_insn "*ifcompare_arith_move"
11229   [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11230 	(if_then_else:SI (match_operator 6 "arm_comparison_operator"
11231 			  [(match_operand:SI 2 "s_register_operand" "r,r")
11232 			   (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
11233 			 (match_operator:SI 7 "shiftable_operator"
11234 			  [(match_operand:SI 4 "s_register_operand" "r,r")
11235 			   (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
11236 			 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
11237    (clobber (reg:CC CC_REGNUM))]
11240   /* If we have an operation where (op x 0) is the identity operation and
11241      the conditional operator is LT or GE and we are comparing against zero and
11242      everything is in registers then we can do this in two instructions.  */
11243   if (operands[3] == const0_rtx
11244       && GET_CODE (operands[7]) != AND
11245       && REG_P (operands[5])
11246       && REG_P (operands[1])
11247       && REGNO (operands[1]) == REGNO (operands[4])
11248       && REGNO (operands[4]) != REGNO (operands[0]))
11250       if (GET_CODE (operands[6]) == LT)
11251 	return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
11252       else if (GET_CODE (operands[6]) == GE)
11253 	return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
;; Fall back: CMN when the negated constant encodes, else CMP.
11255   if (CONST_INT_P (operands[3])
11256       && !const_ok_for_arm (INTVAL (operands[3])))
11257     output_asm_insn (\"cmn\\t%2, #%n3\", operands);
11259     output_asm_insn (\"cmp\\t%2, %3\", operands);
11260   output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
11261   if (which_alternative != 0)
11262     return \"mov%D6\\t%0, %1\";
11265   [(set_attr "conds" "clob")
11266    (set_attr "length" "8,12")]
;; *if_arith_move: CC-register form -- predicated ALU op on the true
;; condition, plus a MOV on the false condition for the untied alternative;
;; flags only read.  NOTE(review): lines 11277-11278 (condition/template
;; header) are missing from this excerpt.
11269 (define_insn "*if_arith_move"
11270   [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11271 	(if_then_else:SI (match_operator 4 "arm_comparison_operator"
11272 			  [(match_operand 6 "cc_register" "") (const_int 0)])
11273 			 (match_operator:SI 5 "shiftable_operator"
11274 			  [(match_operand:SI 2 "s_register_operand" "r,r")
11275 			   (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
11276 			 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
11279    %I5%d4\\t%0, %2, %3
11280    %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
11281   [(set_attr "conds" "use")
11282    (set_attr "length" "4,8")
11283    (set_attr "type" "*,*")]
;; *ifcompare_move_arith: arm-swapped twin of *ifcompare_arith_move --
;; if (%4 <op6> %5) then %1 else (%2 <op7> %3).  Note the GE/LT roles are
;; inverted relative to that pattern because the ALU op sits in the ELSE
;; arm.  NOTE(review): lines 11296-11297, 11307, 11312-11313, 11317, 11319,
;; 11323 are missing from this excerpt.
11286 (define_insn "*ifcompare_move_arith"
11287   [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11288 	(if_then_else:SI (match_operator 6 "arm_comparison_operator"
11289 			  [(match_operand:SI 4 "s_register_operand" "r,r")
11290 			   (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11291 			 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
11292 			 (match_operator:SI 7 "shiftable_operator"
11293 			  [(match_operand:SI 2 "s_register_operand" "r,r")
11294 			   (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
11295    (clobber (reg:CC CC_REGNUM))]
11298   /* If we have an operation where (op x 0) is the identity operation and
11299      the conditional operator is LT or GE and we are comparing against zero and
11300      everything is in registers then we can do this in two instructions */
11301   if (operands[5] == const0_rtx
11302       && GET_CODE (operands[7]) != AND
11303       && REG_P (operands[3])
11304       && REG_P (operands[1])
11305       && REGNO (operands[1]) == REGNO (operands[2])
11306       && REGNO (operands[2]) != REGNO (operands[0]))
11308       if (GET_CODE (operands[6]) == GE)
11309 	return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
11310       else if (GET_CODE (operands[6]) == LT)
11311 	return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
;; Fall back: CMN when the negated constant encodes, else CMP, then a MOV
;; on the true condition (untied alternative) and the ALU op on %D6.
11314   if (CONST_INT_P (operands[5])
11315       && !const_ok_for_arm (INTVAL (operands[5])))
11316     output_asm_insn (\"cmn\\t%4, #%n5\", operands);
11318     output_asm_insn (\"cmp\\t%4, %5\", operands);
11320   if (which_alternative != 0)
11321     output_asm_insn (\"mov%d6\\t%0, %1\", operands);
11322   return \"%I7%D6\\t%0, %2, %3\";
11324   [(set_attr "conds" "clob")
11325    (set_attr "length" "8,12")]
;; *if_move_arith: CC-register form of *ifcompare_move_arith -- predicated
;; ALU op on the false condition (%D4), MOV on the true condition for the
;; untied alternative; flags only read.  NOTE(review): lines 11330,
;; 11337-11338 are missing from this excerpt.
11328 (define_insn "*if_move_arith"
11329   [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11331 	 (match_operator 4 "arm_comparison_operator"
11332 	  [(match_operand 6 "cc_register" "") (const_int 0)])
11333 	 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
11334 	 (match_operator:SI 5 "shiftable_operator"
11335 	  [(match_operand:SI 2 "s_register_operand" "r,r")
11336 	   (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
11339    %I5%D4\\t%0, %2, %3
11340    %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
11341   [(set_attr "conds" "use")
11342    (set_attr "length" "4,8")
11343    (set_attr "type" "*,*")]
11346 (define_insn "*ifcompare_move_not"
11347 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11349 (match_operator 5 "arm_comparison_operator"
11350 [(match_operand:SI 3 "s_register_operand" "r,r")
11351 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11352 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11354 (match_operand:SI 2 "s_register_operand" "r,r"))))
11355 (clobber (reg:CC CC_REGNUM))]
11358 [(set_attr "conds" "clob")
11359 (set_attr "length" "8,12")]
11362 (define_insn "*if_move_not"
11363 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11365 (match_operator 4 "arm_comparison_operator"
11366 [(match_operand 3 "cc_register" "") (const_int 0)])
11367 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
11368 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
11372 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
11373 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
11374 [(set_attr "conds" "use")
11375 (set_attr "type" "mvn_reg")
11376 (set_attr "length" "4,8,8")]
11379 (define_insn "*ifcompare_not_move"
11380 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11382 (match_operator 5 "arm_comparison_operator"
11383 [(match_operand:SI 3 "s_register_operand" "r,r")
11384 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11386 (match_operand:SI 2 "s_register_operand" "r,r"))
11387 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
11388 (clobber (reg:CC CC_REGNUM))]
11391 [(set_attr "conds" "clob")
11392 (set_attr "length" "8,12")]
11395 (define_insn "*if_not_move"
11396 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11398 (match_operator 4 "arm_comparison_operator"
11399 [(match_operand 3 "cc_register" "") (const_int 0)])
11400 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
11401 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
11405 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
11406 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
11407 [(set_attr "conds" "use")
11408 (set_attr "type" "mvn_reg")
11409 (set_attr "length" "4,8,8")]
;; Conditional selection between a shifted register and a move, with the
;; compare still to be emitted (CC clobbered).
;; NOTE(review): some interior lines appear elided in this extract.
11412 (define_insn "*ifcompare_shift_move"
11413 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11415 (match_operator 6 "arm_comparison_operator"
11416 [(match_operand:SI 4 "s_register_operand" "r,r")
11417 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11418 (match_operator:SI 7 "shift_operator"
11419 [(match_operand:SI 2 "s_register_operand" "r,r")
11420 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
11421 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
11422 (clobber (reg:CC CC_REGNUM))]
11425 [(set_attr "conds" "clob")
11426 (set_attr "length" "8,12")]
;; CC-valid variant: conditional shifted mov (%S4 prints the shift of op3)
;; vs. mov/mvn of operand 1.  "type" depends on whether the shift amount is
;; a constant (mov_shift) or a register (mov_shift_reg).
11429 (define_insn "*if_shift_move"
11430 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11432 (match_operator 5 "arm_comparison_operator"
11433 [(match_operand 6 "cc_register" "") (const_int 0)])
11434 (match_operator:SI 4 "shift_operator"
11435 [(match_operand:SI 2 "s_register_operand" "r,r,r")
11436 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
11437 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
11441 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
11442 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
11443 [(set_attr "conds" "use")
11444 (set_attr "shift" "2")
11445 (set_attr "length" "4,8,8")
11446 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
11447 (const_string "mov_shift")
11448 (const_string "mov_shift_reg")))]
;; Mirror image: move in the "then" arm, shifted register in the "else" arm;
;; compare still to be emitted.
11451 (define_insn "*ifcompare_move_shift"
11452 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11454 (match_operator 6 "arm_comparison_operator"
11455 [(match_operand:SI 4 "s_register_operand" "r,r")
11456 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11457 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11458 (match_operator:SI 7 "shift_operator"
11459 [(match_operand:SI 2 "s_register_operand" "r,r")
11460 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
11461 (clobber (reg:CC CC_REGNUM))]
11464 [(set_attr "conds" "clob")
11465 (set_attr "length" "8,12")]
;; CC-valid mirror of *if_shift_move (condition senses swapped).
11468 (define_insn "*if_move_shift"
11469 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11471 (match_operator 5 "arm_comparison_operator"
11472 [(match_operand 6 "cc_register" "") (const_int 0)])
11473 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
11474 (match_operator:SI 4 "shift_operator"
11475 [(match_operand:SI 2 "s_register_operand" "r,r,r")
11476 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
11480 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
11481 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
11482 [(set_attr "conds" "use")
11483 (set_attr "shift" "2")
11484 (set_attr "length" "4,8,8")
11485 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
11486 (const_string "mov_shift")
11487 (const_string "mov_shift_reg")))]
;; Selection between two different shifted registers; compare still needed.
11490 (define_insn "*ifcompare_shift_shift"
11491 [(set (match_operand:SI 0 "s_register_operand" "=r")
11493 (match_operator 7 "arm_comparison_operator"
11494 [(match_operand:SI 5 "s_register_operand" "r")
11495 (match_operand:SI 6 "arm_add_operand" "rIL")])
11496 (match_operator:SI 8 "shift_operator"
11497 [(match_operand:SI 1 "s_register_operand" "r")
11498 (match_operand:SI 2 "arm_rhs_operand" "rM")])
11499 (match_operator:SI 9 "shift_operator"
11500 [(match_operand:SI 3 "s_register_operand" "r")
11501 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
11502 (clobber (reg:CC CC_REGNUM))]
11505 [(set_attr "conds" "clob")
11506 (set_attr "length" "12")]
;; CC-valid variant: a pair of conditional shifted movs, one per arm.
;; "type" is mov_shift only when both shift amounts are constants.
11509 (define_insn "*if_shift_shift"
11510 [(set (match_operand:SI 0 "s_register_operand" "=r")
11512 (match_operator 5 "arm_comparison_operator"
11513 [(match_operand 8 "cc_register" "") (const_int 0)])
11514 (match_operator:SI 6 "shift_operator"
11515 [(match_operand:SI 1 "s_register_operand" "r")
11516 (match_operand:SI 2 "arm_rhs_operand" "rM")])
11517 (match_operator:SI 7 "shift_operator"
11518 [(match_operand:SI 3 "s_register_operand" "r")
11519 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
11521 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
11522 [(set_attr "conds" "use")
11523 (set_attr "shift" "1")
11524 (set_attr "length" "8")
11525 (set (attr "type") (if_then_else
11526 (and (match_operand 2 "const_int_operand" "")
11527 (match_operand 4 "const_int_operand" ""))
11528 (const_string "mov_shift")
11529 (const_string "mov_shift_reg")))]
;; Selection between a bitwise NOT and a shiftable ALU operation, compare
;; still to be emitted (CC clobbered).
;; NOTE(review): some interior lines appear elided in this extract.
11532 (define_insn "*ifcompare_not_arith"
11533 [(set (match_operand:SI 0 "s_register_operand" "=r")
11535 (match_operator 6 "arm_comparison_operator"
11536 [(match_operand:SI 4 "s_register_operand" "r")
11537 (match_operand:SI 5 "arm_add_operand" "rIL")])
11538 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
11539 (match_operator:SI 7 "shiftable_operator"
11540 [(match_operand:SI 2 "s_register_operand" "r")
11541 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
11542 (clobber (reg:CC CC_REGNUM))]
11545 [(set_attr "conds" "clob")
11546 (set_attr "length" "12")]
;; CC-valid variant: conditional mvn for the NOT arm, conditional ALU op
;; (%I6 prints the operator mnemonic) for the other arm.
11549 (define_insn "*if_not_arith"
11550 [(set (match_operand:SI 0 "s_register_operand" "=r")
11552 (match_operator 5 "arm_comparison_operator"
11553 [(match_operand 4 "cc_register" "") (const_int 0)])
11554 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
11555 (match_operator:SI 6 "shiftable_operator"
11556 [(match_operand:SI 2 "s_register_operand" "r")
11557 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
11559 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
11560 [(set_attr "conds" "use")
11561 (set_attr "type" "mvn_reg")
11562 (set_attr "length" "8")]
;; Mirror image: ALU op in the "then" arm, NOT in the "else" arm.
11565 (define_insn "*ifcompare_arith_not"
11566 [(set (match_operand:SI 0 "s_register_operand" "=r")
11568 (match_operator 6 "arm_comparison_operator"
11569 [(match_operand:SI 4 "s_register_operand" "r")
11570 (match_operand:SI 5 "arm_add_operand" "rIL")])
11571 (match_operator:SI 7 "shiftable_operator"
11572 [(match_operand:SI 2 "s_register_operand" "r")
11573 (match_operand:SI 3 "arm_rhs_operand" "rI")])
11574 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
11575 (clobber (reg:CC CC_REGNUM))]
11578 [(set_attr "conds" "clob")
11579 (set_attr "length" "12")]
;; CC-valid mirror of *if_not_arith (condition senses swapped).
11582 (define_insn "*if_arith_not"
11583 [(set (match_operand:SI 0 "s_register_operand" "=r")
11585 (match_operator 5 "arm_comparison_operator"
11586 [(match_operand 4 "cc_register" "") (const_int 0)])
11587 (match_operator:SI 6 "shiftable_operator"
11588 [(match_operand:SI 2 "s_register_operand" "r")
11589 (match_operand:SI 3 "arm_rhs_operand" "rI")])
11590 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
11592 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
11593 [(set_attr "conds" "use")
11594 (set_attr "type" "mvn_reg")
11595 (set_attr "length" "8")]
;; Selection between a negation and a move, compare still to be emitted
;; (CC clobbered).
;; NOTE(review): some interior lines appear elided in this extract.
11598 (define_insn "*ifcompare_neg_move"
11599 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11601 (match_operator 5 "arm_comparison_operator"
11602 [(match_operand:SI 3 "s_register_operand" "r,r")
11603 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11604 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
11605 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
11606 (clobber (reg:CC CC_REGNUM))]
11609 [(set_attr "conds" "clob")
11610 (set_attr "length" "8,12")]
;; CC-valid variant: negation done with a conditional rsb from zero.
11613 (define_insn "*if_neg_move"
11614 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11616 (match_operator 4 "arm_comparison_operator"
11617 [(match_operand 3 "cc_register" "") (const_int 0)])
11618 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
11619 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
11622 rsb%d4\\t%0, %2, #0
11623 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
11624 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
11625 [(set_attr "conds" "use")
11626 (set_attr "length" "4,8,8")]
;; Mirror image: move in the "then" arm, negation in the "else" arm.
11629 (define_insn "*ifcompare_move_neg"
11630 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11632 (match_operator 5 "arm_comparison_operator"
11633 [(match_operand:SI 3 "s_register_operand" "r,r")
11634 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11635 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11636 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
11637 (clobber (reg:CC CC_REGNUM))]
11640 [(set_attr "conds" "clob")
11641 (set_attr "length" "8,12")]
;; CC-valid mirror of *if_neg_move (condition senses swapped).
11644 (define_insn "*if_move_neg"
11645 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11647 (match_operator 4 "arm_comparison_operator"
11648 [(match_operand 3 "cc_register" "") (const_int 0)])
11649 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
11650 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
11653 rsb%D4\\t%0, %2, #0
11654 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
11655 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
11656 [(set_attr "conds" "use")
11657 (set_attr "length" "4,8,8")]
;; ALU operation on two adjacent memory words: load both with a single
;; ldm (choosing ia/ib/da by the base offsets), then apply the operator.
;; Falls back to two ldr's when the offset cannot be reached by one add.
;; NOTE(review): some interior lines (e.g. local declarations, braces) appear
;; elided in this extract; code left untouched.
11660 (define_insn "*arith_adjacentmem"
11661 [(set (match_operand:SI 0 "s_register_operand" "=r")
11662 (match_operator:SI 1 "shiftable_operator"
11663 [(match_operand:SI 2 "memory_operand" "m")
11664 (match_operand:SI 3 "memory_operand" "m")]))
11665 (clobber (match_scratch:SI 4 "=r"))]
11666 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
11672 HOST_WIDE_INT val1 = 0, val2 = 0;
11674 if (REGNO (operands[0]) > REGNO (operands[4]))
11676 ldm[1] = operands[4];
11677 ldm[2] = operands[0];
11681 ldm[1] = operands[0];
11682 ldm[2] = operands[4];
11685 base_reg = XEXP (operands[2], 0);
11687 if (!REG_P (base_reg))
11689 val1 = INTVAL (XEXP (base_reg, 1));
11690 base_reg = XEXP (base_reg, 0);
11693 if (!REG_P (XEXP (operands[3], 0)))
11694 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
11696 arith[0] = operands[0];
11697 arith[3] = operands[1];
11711 if (val1 !=0 && val2 != 0)
11715 if (val1 == 4 || val2 == 4)
11716 /* Other val must be 8, since we know they are adjacent and neither
11718 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
11719 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
11721 ldm[0] = ops[0] = operands[4];
11723 ops[2] = GEN_INT (val1);
11724 output_add_immediate (ops);
11726 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
11728 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
11732 /* Offset is out of range for a single add, so use two ldr. */
11735 ops[2] = GEN_INT (val1);
11736 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
11738 ops[2] = GEN_INT (val2);
11739 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
11742 else if (val1 != 0)
11745 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
11747 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
11752 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
11754 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
11756 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
11759 [(set_attr "length" "12")
11760 (set_attr "predicable" "yes")
11761 (set_attr "type" "load1")]
;; Peephole: fold "mov rD, rS; cmp rS, #0" into a single parallel that sets
;; CC and does the move together.
;; NOTE(review): some interior lines (the peephole/split headers and condition
;; strings) appear elided in this extract.
11764 ; This pattern is never tried by combine, so do it as a peephole
11767 [(set (match_operand:SI 0 "arm_general_register_operand" "")
11768 (match_operand:SI 1 "arm_general_register_operand" ""))
11769 (set (reg:CC CC_REGNUM)
11770 (compare:CC (match_dup 1) (const_int 0)))]
11772 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
11773 (set (match_dup 0) (match_dup 1))])]
;; Split (x >= 0) & -(cmp) into a sign-mask (not of asr #31) in the scratch
;; register followed by an AND with the comparison result.
11778 [(set (match_operand:SI 0 "s_register_operand" "")
11779 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
11781 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
11782 [(match_operand:SI 3 "s_register_operand" "")
11783 (match_operand:SI 4 "arm_rhs_operand" "")]))))
11784 (clobber (match_operand:SI 5 "s_register_operand" ""))]
11786 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
11787 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
;; This split can be used because CC_Z mode implies that the following
;; branch will be an equality, or an unsigned inequality, so the sign
;; extension is not needed: compare the zero-extended byte against the
;; constant shifted down by 24 instead.
11797 [(set (reg:CC_Z CC_REGNUM)
11799 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
11801 (match_operand 1 "const_int_operand" "")))
11802 (clobber (match_scratch:SI 2 ""))]
11804 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
11805 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
11806 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
11807 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
11809 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
11812 ;; ??? Check the patterns above for Thumb-2 usefulness
;; Standard prologue expander: dispatches to the ARM or Thumb-1 expander.
;; NOTE(review): some interior lines (conditions, braces) appear elided in
;; this extract.
11814 (define_expand "prologue"
11815 [(clobber (const_int 0))]
11818 arm_expand_prologue ();
11820 thumb1_expand_prologue ();
;; Standard epilogue expander.  For eh_return, r2 (the stack-adjust value
;; set up by eh_epilogue below) must be kept live across the epilogue.
11825 (define_expand "epilogue"
11826 [(clobber (const_int 0))]
11829 if (crtl->calls_eh_return)
11830 emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
11833 thumb1_expand_epilogue ();
11834 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
11835 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
11837 else if (HAVE_return)
11839 /* HAVE_return is testing for USE_RETURN_INSN (FALSE). Hence,
11840 no need for explicit testing again. */
11841 emit_jump_insn (gen_return ());
11843 else if (TARGET_32BIT)
11845 arm_expand_epilogue (true);
;; Thumb-1 interworking prologue; assembly produced by a C helper.
11851 (define_insn "prologue_thumb1_interwork"
11852 [(unspec_volatile [(const_int 0)] VUNSPEC_THUMB1_INTERWORK)]
11854 "* return thumb1_output_interwork ();"
11855 [(set_attr "length" "8")]
11858 ;; Note - although unspec_volatile's USE all hard registers,
11859 ;; USEs are ignored after reload has completed. Thus we need
11860 ;; to add an unspec of the link register to ensure that flow
11861 ;; does not think that it is unused by the sibcall branch that
11862 ;; will replace the standard function epilogue.
11863 (define_expand "sibcall_epilogue"
11864 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
11865 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
11868 arm_expand_epilogue (false);
;; Unexpanded (Thumb-1) epilogue body.
11873 (define_insn "*epilogue_insns"
11874 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
11877 return thumb1_unexpanded_epilogue ();
11879 ; Length is absolute worst case
11880 [(set_attr "length" "44")
11881 (set_attr "type" "block")
11882 ;; We don't clobber the conditions, but the potential length of this
11883 ;; operation is sufficient to make conditionalizing the sequence
11884 ;; unlikely to be profitable.
11885 (set_attr "conds" "clob")]
;; Exception-handling epilogue: records the stack offset and forces the
;; return address into r2 if it is not already there.
11888 (define_expand "eh_epilogue"
11889 [(use (match_operand:SI 0 "register_operand" ""))
11890 (use (match_operand:SI 1 "register_operand" ""))
11891 (use (match_operand:SI 2 "register_operand" ""))]
11895 cfun->machine->eh_epilogue_sp_ofs = operands[1];
11896 if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
11898 rtx ra = gen_rtx_REG (Pmode, 2);
11900 emit_move_insn (ra, operands[2]);
11903 /* This is a hack -- we may have crystalized the function type too
11905 cfun->machine->func_type = 0;
11909 ;; This split is only used during output to reduce the number of patterns
11910 ;; that need assembler instructions adding to them. We allowed the setting
11911 ;; of the conditions to be implicit during rtl generation so that
11912 ;; the conditional compare patterns would work. However this conflicts to
11913 ;; some extent with the conditional data operations, so we have to split them
11916 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
11917 ;; conditional execution sufficient?
;; Split 1: "else" arm needs the insn; emit the compare, then a cond_exec
;; guarded by the REVERSED condition (hence reverse_condition below, with the
;; maybe-unordered form for FP compares).
;; NOTE(review): some interior lines appear elided in this extract.
11920 [(set (match_operand:SI 0 "s_register_operand" "")
11921 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11922 [(match_operand 2 "" "") (match_operand 3 "" "")])
11924 (match_operand 4 "" "")))
11925 (clobber (reg:CC CC_REGNUM))]
11926 "TARGET_ARM && reload_completed"
11927 [(set (match_dup 5) (match_dup 6))
11928 (cond_exec (match_dup 7)
11929 (set (match_dup 0) (match_dup 4)))]
11932 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11933 operands[2], operands[3]);
11934 enum rtx_code rc = GET_CODE (operands[1]);
11936 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
11937 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11938 if (mode == CCFPmode || mode == CCFPEmode)
11939 rc = reverse_condition_maybe_unordered (rc);
11941 rc = reverse_condition (rc);
11943 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
;; Split 2: "then" arm needs the insn; the original condition can be reused
;; directly as the cond_exec guard.
11948 [(set (match_operand:SI 0 "s_register_operand" "")
11949 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11950 [(match_operand 2 "" "") (match_operand 3 "" "")])
11951 (match_operand 4 "" "")
11953 (clobber (reg:CC CC_REGNUM))]
11954 "TARGET_ARM && reload_completed"
11955 [(set (match_dup 5) (match_dup 6))
11956 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
11957 (set (match_dup 0) (match_dup 4)))]
11960 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11961 operands[2], operands[3]);
11963 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
11964 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
;; Split 3: both arms need insns; compare once, then two cond_execs — the
;; "then" arm under the original condition, the "else" arm under its reverse.
11969 [(set (match_operand:SI 0 "s_register_operand" "")
11970 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11971 [(match_operand 2 "" "") (match_operand 3 "" "")])
11972 (match_operand 4 "" "")
11973 (match_operand 5 "" "")))
11974 (clobber (reg:CC CC_REGNUM))]
11975 "TARGET_ARM && reload_completed"
11976 [(set (match_dup 6) (match_dup 7))
11977 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
11978 (set (match_dup 0) (match_dup 4)))
11979 (cond_exec (match_dup 8)
11980 (set (match_dup 0) (match_dup 5)))]
11983 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11984 operands[2], operands[3]);
11985 enum rtx_code rc = GET_CODE (operands[1]);
11987 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
11988 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11989 if (mode == CCFPmode || mode == CCFPEmode)
11990 rc = reverse_condition_maybe_unordered (rc);
11992 rc = reverse_condition (rc);
11994 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
;; Split 4: like split 3 but the "else" arm is a bitwise NOT of a register.
11999 [(set (match_operand:SI 0 "s_register_operand" "")
12000 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
12001 [(match_operand:SI 2 "s_register_operand" "")
12002 (match_operand:SI 3 "arm_add_operand" "")])
12003 (match_operand:SI 4 "arm_rhs_operand" "")
12005 (match_operand:SI 5 "s_register_operand" ""))))
12006 (clobber (reg:CC CC_REGNUM))]
12007 "TARGET_ARM && reload_completed"
12008 [(set (match_dup 6) (match_dup 7))
12009 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
12010 (set (match_dup 0) (match_dup 4)))
12011 (cond_exec (match_dup 8)
12012 (set (match_dup 0) (not:SI (match_dup 5))))]
12015 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
12016 operands[2], operands[3]);
12017 enum rtx_code rc = GET_CODE (operands[1]);
12019 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
12020 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
12021 if (mode == CCFPmode || mode == CCFPEmode)
12022 rc = reverse_condition_maybe_unordered (rc);
12024 rc = reverse_condition (rc);
12026 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
;; Conditional move-or-NOT with CC already valid.
;; NOTE(review): some interior lines appear elided in this extract.
12030 (define_insn "*cond_move_not"
12031 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
12032 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
12033 [(match_operand 3 "cc_register" "") (const_int 0)])
12034 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
12036 (match_operand:SI 2 "s_register_operand" "r,r"))))]
12040 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
12041 [(set_attr "conds" "use")
12042 (set_attr "type" "mvn_reg")
12043 (set_attr "length" "4,8")]
12046 ;; The next two patterns occur when an AND operation is followed by a
12047 ;; scc insn sequence
;; Extract a single bit and sign-extend it to 0 / -1: ands with the bit mask,
;; then mvnne to produce all-ones when the bit was set.
12049 (define_insn "*sign_extract_onebit"
12050 [(set (match_operand:SI 0 "s_register_operand" "=r")
12051 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
12053 (match_operand:SI 2 "const_int_operand" "n")))
12054 (clobber (reg:CC CC_REGNUM))]
12057 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
12058 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
12059 return \"mvnne\\t%0, #0\";
12061 [(set_attr "conds" "clob")
12062 (set_attr "length" "8")]
;; Inverted form of the above: tst the bit, mvneq/movne to get -1 / 0 swapped.
12065 (define_insn "*not_signextract_onebit"
12066 [(set (match_operand:SI 0 "s_register_operand" "=r")
12068 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
12070 (match_operand:SI 2 "const_int_operand" "n"))))
12071 (clobber (reg:CC CC_REGNUM))]
12074 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
12075 output_asm_insn (\"tst\\t%1, %2\", operands);
12076 output_asm_insn (\"mvneq\\t%0, #0\", operands);
12077 return \"movne\\t%0, #0\";
12079 [(set_attr "conds" "clob")
12080 (set_attr "length" "12")]
12082 ;; ??? The above patterns need auditing for Thumb-2
12084 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
12085 ;; expressions. For simplicity, the first register is also in the unspec
12087 ;; To avoid the usage of GNU extension, the length attribute is computed
12088 ;; in a C function arm_attr_length_push_multi.
;; NOTE(review): some interior lines (buffer declaration, braces) appear
;; elided in this extract; code left untouched.
12089 (define_insn "*push_multi"
12090 [(match_parallel 2 "multi_register_push"
12091 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
12092 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
12093 UNSPEC_PUSH_MULT))])]
12097 int num_saves = XVECLEN (operands[2], 0);
12099 /* For the StrongARM at least it is faster to
12100 use STR to store only a single register.
12101 In Thumb mode always use push, and the assembler will pick
12102 something appropriate. */
12103 if (num_saves == 1 && TARGET_ARM)
12104 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
12111 strcpy (pattern, \"stm%(fd%)\\t%m0!, {%1\");
12112 else if (TARGET_THUMB2)
12113 strcpy (pattern, \"push%?\\t{%1\");
12115 strcpy (pattern, \"push\\t{%1\");
12117 for (i = 1; i < num_saves; i++)
12119 strcat (pattern, \", %|\");
12121 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
12124 strcat (pattern, \"}\");
12125 output_asm_insn (pattern, operands);
12130 [(set_attr "type" "store4")
12131 (set (attr "length")
12132 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
;; Scheduling barrier tying two stack-related registers together via a
;; blockage mem; emits no code (length 0).
;; NOTE(review): some interior lines appear elided in this extract.
12135 (define_insn "stack_tie"
12136 [(set (mem:BLK (scratch))
12137 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
12138 (match_operand:SI 1 "s_register_operand" "rk")]
12142 [(set_attr "length" "0")]
12145 ;; Pop (as used in epilogue RTL)
;; Load-multiple with base-register writeback; assembly produced by
;; arm_output_multireg_pop with return_pc=false.
12147 (define_insn "*load_multiple_with_writeback"
12148 [(match_parallel 0 "load_multiple_operation"
12149 [(set (match_operand:SI 1 "s_register_operand" "+rk")
12150 (plus:SI (match_dup 1)
12151 (match_operand:SI 2 "const_int_operand" "I")))
12152 (set (match_operand:SI 3 "s_register_operand" "=rk")
12153 (mem:SI (match_dup 1)))
12155 "TARGET_32BIT && (reload_in_progress || reload_completed)"
12158 arm_output_multireg_pop (operands, /*return_pc=*/false,
12159 /*cond=*/const_true_rtx,
12165 [(set_attr "type" "load4")
12166 (set_attr "predicable" "yes")]
12169 ;; Pop with return (as used in epilogue RTL)
12171 ;; This instruction is generated when the registers are popped at the end of
12172 ;; epilogue. Here, instead of popping the value into LR and then generating
12173 ;; jump to LR, value is popped into PC directly. Hence, the pattern is combined
12175 (define_insn "*pop_multiple_with_writeback_and_return"
12176 [(match_parallel 0 "pop_multiple_return"
12178 (set (match_operand:SI 1 "s_register_operand" "+rk")
12179 (plus:SI (match_dup 1)
12180 (match_operand:SI 2 "const_int_operand" "I")))
12181 (set (match_operand:SI 3 "s_register_operand" "=rk")
12182 (mem:SI (match_dup 1)))
12184 "TARGET_32BIT && (reload_in_progress || reload_completed)"
12187 arm_output_multireg_pop (operands, /*return_pc=*/true,
12188 /*cond=*/const_true_rtx,
12194 [(set_attr "type" "load4")
12195 (set_attr "predicable" "yes")]
;; Pop-and-return without base writeback.
12198 (define_insn "*pop_multiple_with_return"
12199 [(match_parallel 0 "pop_multiple_return"
12201 (set (match_operand:SI 2 "s_register_operand" "=rk")
12202 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
12204 "TARGET_32BIT && (reload_in_progress || reload_completed)"
12207 arm_output_multireg_pop (operands, /*return_pc=*/true,
12208 /*cond=*/const_true_rtx,
12214 [(set_attr "type" "load4")
12215 (set_attr "predicable" "yes")]
12218 ;; Load into PC and return
;; Single-register pop straight into PC with post-increment of the base.
12219 (define_insn "*ldr_with_return"
12221 (set (reg:SI PC_REGNUM)
12222 (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
12223 "TARGET_32BIT && (reload_in_progress || reload_completed)"
12224 "ldr%?\t%|pc, [%0], #4"
12225 [(set_attr "type" "load1")
12226 (set_attr "predicable" "yes")]
12228 ;; Pop for floating point registers (as used in epilogue RTL)
;; Builds an "fldmfdd base!, {dN[-dM]}" string from the parallel's first and
;; last DF destinations.  Unconditional by design.
;; NOTE(review): some interior lines (buffer declarations, braces) appear
;; elided in this extract; code left untouched.
12229 (define_insn "*vfp_pop_multiple_with_writeback"
12230 [(match_parallel 0 "pop_multiple_fp"
12231 [(set (match_operand:SI 1 "s_register_operand" "+rk")
12232 (plus:SI (match_dup 1)
12233 (match_operand:SI 2 "const_int_operand" "I")))
12234 (set (match_operand:DF 3 "arm_hard_register_operand" "")
12235 (mem:DF (match_dup 1)))])]
12236 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP"
12239 int num_regs = XVECLEN (operands[0], 0);
12242 strcpy (pattern, \"fldmfdd\\t\");
12243 strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
12244 strcat (pattern, \"!, {\");
12245 op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
12246 strcat (pattern, \"%P0\");
12247 if ((num_regs - 1) > 1)
12249 strcat (pattern, \"-%P1\");
12250 op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
12253 strcat (pattern, \"}\");
12254 output_asm_insn (pattern, op_list);
12258 [(set_attr "type" "load4")
12259 (set_attr "conds" "unconditional")
12260 (set_attr "predicable" "no")]
12263 ;; Special patterns for dealing with the constant pool
;; NOTE(review): some interior lines appear elided in this extract.
12265 (define_insn "align_4"
12266 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
12269 assemble_align (32);
;; 8-byte alignment marker for the constant pool.
12274 (define_insn "align_8"
12275 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
12278 assemble_align (64);
;; Marks the end of the pool; clears the making_const_table flag.
12283 (define_insn "consttable_end"
12284 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
12287 making_const_table = FALSE;
;; 1-byte pool entry, padded to 4 bytes.
12292 (define_insn "consttable_1"
12293 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
12296 making_const_table = TRUE;
12297 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
12298 assemble_zeros (3);
12301 [(set_attr "length" "4")]
;; 2-byte pool entry (integers only), padded to 4 bytes.
12304 (define_insn "consttable_2"
12305 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
12308 making_const_table = TRUE;
12309 gcc_assert (GET_MODE_CLASS (GET_MODE (operands[0])) != MODE_FLOAT);
12310 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
12311 assemble_zeros (2);
12314 [(set_attr "length" "4")]
;; 4-byte pool entry: handles HFmode, other floats, and integers (stripping
;; a stray HIGH — see the XXX comment below).
12317 (define_insn "consttable_4"
12318 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
12322 rtx x = operands[0];
12323 making_const_table = TRUE;
12324 switch (GET_MODE_CLASS (GET_MODE (x)))
12327 if (GET_MODE (x) == HFmode)
12328 arm_emit_fp16_const (x);
12332 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
12333 assemble_real (r, GET_MODE (x), BITS_PER_WORD);
12337 /* XXX: Sometimes gcc does something really dumb and ends up with
12338 a HIGH in a constant pool entry, usually because it's trying to
12339 load into a VFP register. We know this will always be used in
12340 combination with a LO_SUM which ignores the high bits, so just
12341 strip off the HIGH. */
12342 if (GET_CODE (x) == HIGH)
12344 assemble_integer (x, 4, BITS_PER_WORD, 1);
12345 mark_symbol_refs_as_used (x);
12350 [(set_attr "length" "4")]
;; 8-byte pool entry: float or integer.
12353 (define_insn "consttable_8"
12354 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
12358 making_const_table = TRUE;
12359 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
12364 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
12365 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
12369 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
12374 [(set_attr "length" "8")]
;; 16-byte pool entry: float or integer.
12377 (define_insn "consttable_16"
12378 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
12382 making_const_table = TRUE;
12383 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
12388 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
12389 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
12393 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
12398 [(set_attr "length" "16")]
12401 ;; Miscellaneous Thumb patterns
;; Jump-table dispatch: adds the (pc-relative) label address to the index so
;; the register jump lands in the table.
;; NOTE(review): some interior lines appear elided in this extract.
12403 (define_expand "tablejump"
12404 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
12405 (use (label_ref (match_operand 1 "" "")))])]
12410 /* Hopefully, CSE will eliminate this copy. */
12411 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
12412 rtx reg2 = gen_reg_rtx (SImode);
12414 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
12415 operands[0] = reg2;
12420 ;; NB never uses BX.
12421 (define_insn "*thumb1_tablejump"
12422 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
12423 (use (label_ref (match_operand 1 "" "")))]
12426 [(set_attr "length" "2")]
12429 ;; V5 Instructions,
;; Count leading zeros (ARMv5+ clz).
12431 (define_insn "clzsi2"
12432 [(set (match_operand:SI 0 "s_register_operand" "=r")
12433 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
12434 "TARGET_32BIT && arm_arch5"
12436 [(set_attr "predicable" "yes")
12437 (set_attr "type" "clz")])
;; Bit-reverse (Thumb-2 rbit), modeled as an unspec.
12439 (define_insn "rbitsi2"
12440 [(set (match_operand:SI 0 "s_register_operand" "=r")
12441 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
12442 "TARGET_32BIT && arm_arch_thumb2"
12444 [(set_attr "predicable" "yes")
12445 (set_attr "type" "clz")])
;; Count trailing zeros = clz(rbit(x)).
12447 (define_expand "ctzsi2"
12448 [(set (match_operand:SI 0 "s_register_operand" "")
12449 (ctz:SI (match_operand:SI 1 "s_register_operand" "")))]
12450 "TARGET_32BIT && arm_arch_thumb2"
12453 rtx tmp = gen_reg_rtx (SImode);
12454 emit_insn (gen_rbitsi2 (tmp, operands[1]));
12455 emit_insn (gen_clzsi2 (operands[0], tmp));
12461 ;; V5E instructions.
12463 (define_insn "prefetch"
12464 [(prefetch (match_operand:SI 0 "address_operand" "p")
12465 (match_operand:SI 1 "" "")
12466 (match_operand:SI 2 "" ""))]
12467 "TARGET_32BIT && arm_arch5e"
12470 ;; General predication pattern
;; NOTE(review): the header line of this construct is elided from this
;; extract.  From the surviving lines it matches an ARM comparison
;; against the condition-code register and marks the result
;; "predicated" — it appears to be the define_cond_exec template that
;; lets insns be conditionally executed; confirm against the full file.
12473 [(match_operator 0 "arm_comparison_operator"
12474 [(match_operand 1 "cc_register" "")
12478 [(set_attr "predicated" "yes")]
;; Emits no machine code (length 0).  The UNSPEC_REGISTER_USE merely
;; marks operand 0 as used, keeping earlier definitions of the register
;; alive through dataflow/DCE.
12481 (define_insn "force_register_use"
12482 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
12485 [(set_attr "length" "0")]
12489 ;; Patterns for exception handling
;; Standard named pattern: install the exception handler's return
;; address.  Dispatches to the ARM or Thumb variant below; the selection
;; condition and braces are elided from this extract.
12491 (define_expand "eh_return"
12492 [(use (match_operand 0 "general_operand" ""))]
12497 emit_insn (gen_arm_eh_return (operands[0]));
12499 emit_insn (gen_thumb_eh_return (operands[0]));
12504 ;; We can't expand this before we know where the link register is stored.
;; ARM-state EH return: kept as a volatile unspec until after reload,
;; then split into code that stores operand 0 as the return address via
;; arm_set_return_address, using operand 1 as a scratch register.
12505 (define_insn_and_split "arm_eh_return"
12506 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
12508 (clobber (match_scratch:SI 1 "=&r"))]
12511 "&& reload_completed"
12515 arm_set_return_address (operands[0], operands[1]);
;; Thumb-state counterpart of arm_eh_return: same deferred split after
;; reload, but restricted to low registers ("l") and using
;; thumb_set_return_address to store the new return address.
12520 (define_insn_and_split "thumb_eh_return"
12521 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
12523 (clobber (match_scratch:SI 1 "=&l"))]
12526 "&& reload_completed"
12530 thumb_set_return_address (operands[0], operands[1]);
;; Read the thread pointer directly from the CP15 thread-ID register
;; (c13, c0, 3) with an MRC — the "hard" TLS access used when the
;; coprocessor register is available.
12538 (define_insn "load_tp_hard"
12539 [(set (match_operand:SI 0 "register_operand" "=r")
12540 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
12542 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
12543 [(set_attr "predicable" "yes")]
12546 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
;; "Soft" TLS access: call the EABI helper __aeabi_read_tp, which
;; returns the thread pointer in r0 (hence the hard-wired reg:SI 0
;; destination).  The call clobbers LR, IP and the condition codes.
12547 (define_insn "load_tp_soft"
12548 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
12549 (clobber (reg:SI LR_REGNUM))
12550 (clobber (reg:SI IP_REGNUM))
12551 (clobber (reg:CC CC_REGNUM))]
12553 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
12554 [(set_attr "conds" "clob")]
12557 ;; tls descriptor call
;; TLS descriptor resolver call: takes the descriptor in r0 plus a
;; relocation operand, emits a local "LPIC<n>" label (n = operand 1)
;; and then "bl <sym>(tlscall)".  Result comes back in r0; r1, LR and
;; the condition codes are clobbered.
;; NOTE(review): the condition string and the line defining the result
;; register semantics beyond what is visible here are elided.
12558 (define_insn "tlscall"
12559 [(set (reg:SI R0_REGNUM)
12560 (unspec:SI [(reg:SI R0_REGNUM)
12561 (match_operand:SI 0 "" "X")
12562 (match_operand 1 "" "")] UNSPEC_TLS))
12563 (clobber (reg:SI R1_REGNUM))
12564 (clobber (reg:SI LR_REGNUM))
12565 (clobber (reg:SI CC_REGNUM))]
12568 targetm.asm_out.internal_label (asm_out_file, "LPIC",
12569 INTVAL (operands[1]));
12570 return "bl\\t%c0(tlscall)";
12572 [(set_attr "conds" "clob")
12573 (set_attr "length" "4")]
12576 ;; For thread pointer builtin
;; Expander for the __builtin_thread_pointer builtin: delegates to
;; arm_load_tp, which picks the hard (MRC) or soft (helper-call) form.
12577 (define_expand "get_thread_pointersi"
12578 [(match_operand:SI 0 "s_register_operand" "=r")]
12582 arm_load_tp (operands[0]);
12588 ;; We only care about the lower 16 bits of the constant
12589 ;; being inserted into the upper 16 bits of the register.
;; Writes an immediate into the top half of a register via a 16-bit-wide
;; zero_extract — presumably emitted as a MOVT; the extract-position and
;; output-template lines are elided from this extract.
12590 (define_insn "*arm_movtas_ze"
12591 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
12594 (match_operand:SI 1 "const_int_operand" ""))]
12597 [(set_attr "predicable" "yes")
12598 (set_attr "predicable_short_it" "no")
12599 (set_attr "length" "4")]
;; 32-bit byte swap (bswap) with three alternatives selected by the
;; "arch" attribute: Thumb-1 and Thumb-2 forms (2 bytes) and the
;; 32-bit ARM/Thumb-2 wide form (4 bytes).
12602 (define_insn "*arm_rev"
12603 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
12604 (bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
12610 [(set_attr "arch" "t1,t2,32")
12611 (set_attr "length" "2,2,4")]
;; Byte-swap a word on ARM cores without the REV instruction, using an
;; EOR/AND/rotate sequence over two scratch registers (operands 2, 3).
;; NOTE(review): several shift-count and intermediate lines are elided
;; from this extract; the surviving skeleton (rotatert, lshiftrt,
;; and with -65281 = ~0xFF00, final xor) matches the classic 4-insn
;; legacy byte-reverse idiom — confirm against the full source.
12614 (define_expand "arm_legacy_rev"
12615 [(set (match_operand:SI 2 "s_register_operand" "")
12616 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
12620 (lshiftrt:SI (match_dup 2)
12622 (set (match_operand:SI 3 "s_register_operand" "")
12623 (rotatert:SI (match_dup 1)
12626 (and:SI (match_dup 2)
12627 (const_int -65281)))
12628 (set (match_operand:SI 0 "s_register_operand" "")
12629 (xor:SI (match_dup 3)
12635 ;; Reuse temporaries to keep register pressure down.
;; Thumb-1 byte-swap of a word without REV: a longer shift/or/rotate
;; sequence over four temporaries (operands 2-5), since Thumb-1 lacks
;; the flexible shifted operands ARM state has.
;; NOTE(review): the shift-count lines are elided from this extract;
;; only the operation skeleton survives — confirm against the full
;; source before modifying.
12636 (define_expand "thumb_legacy_rev"
12637 [(set (match_operand:SI 2 "s_register_operand" "")
12638 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
12640 (set (match_operand:SI 3 "s_register_operand" "")
12641 (lshiftrt:SI (match_dup 1)
12644 (ior:SI (match_dup 3)
12646 (set (match_operand:SI 4 "s_register_operand" "")
12648 (set (match_operand:SI 5 "s_register_operand" "")
12649 (rotatert:SI (match_dup 1)
12652 (ashift:SI (match_dup 5)
12655 (lshiftrt:SI (match_dup 5)
12658 (ior:SI (match_dup 5)
12661 (rotatert:SI (match_dup 5)
12663 (set (match_operand:SI 0 "s_register_operand" "")
12664 (ior:SI (match_dup 5)
;; Standard named bswapsi2 pattern.  With ARMv6 the REV insn pattern is
;; used directly; otherwise the legacy multi-insn sequences above are
;; expanded (Thumb form needs four temporaries, ARM form two).  The
;; "|| !optimize_size" guard avoids the long legacy sequence at -Os.
12670 (define_expand "bswapsi2"
12671 [(set (match_operand:SI 0 "s_register_operand" "=r")
12672 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
12673 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
12677 rtx op2 = gen_reg_rtx (SImode);
12678 rtx op3 = gen_reg_rtx (SImode);
12682 rtx op4 = gen_reg_rtx (SImode);
12683 rtx op5 = gen_reg_rtx (SImode);
12685 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
12686 op2, op3, op4, op5));
12690 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
12699 ;; bswap16 patterns: use revsh and rev16 instructions for the signed
12700 ;; and unsigned variants, respectively. For rev16, expose
12701 ;; byte-swapping in the lower 16 bits only.
;; Signed 16-bit byte swap then sign-extend to SImode (REVSH), with
;; Thumb-1/Thumb-2/32-bit alternatives.
12702 (define_insn "*arm_revsh"
12703 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
12704 (sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
12710 [(set_attr "arch" "t1,t2,32")
12711 (set_attr "length" "2,2,4")]
;; Unsigned 16-bit byte swap in HImode (REV16 as described in the
;; comment above), with Thumb-1/Thumb-2/32-bit alternatives.
12714 (define_insn "*arm_rev16"
12715 [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
12716 (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
12722 [(set_attr "arch" "t1,t2,32")
12723 (set_attr "length" "2,2,4")]
;; Standard named bswaphi2 pattern, matched by *arm_rev16 above.
;; NOTE(review): the condition string is elided from this extract —
;; presumably it requires ARMv6; confirm against the full source.
12726 (define_expand "bswaphi2"
12727 [(set (match_operand:HI 0 "s_register_operand" "=r")
12728 (bswap:HI (match_operand:HI 1 "s_register_operand" "r")))]
12733 ;; Patterns for LDRD/STRD in Thumb2 mode
;; Fuse two adjacent word loads (base+imm and base+imm+4) into one LDRD.
;; Only after reload, and only on tunings that prefer LDRD/STRD; the
;; operands_ok_ldrd_strd check enforces register-pair/offset legality.
12735 (define_insn "*thumb2_ldrd"
12736 [(set (match_operand:SI 0 "s_register_operand" "=r")
12737 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
12738 (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
12739 (set (match_operand:SI 3 "s_register_operand" "=r")
12740 (mem:SI (plus:SI (match_dup 1)
12741 (match_operand:SI 4 "const_int_operand" ""))))]
12742 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12743 && current_tune->prefer_ldrd_strd
12744 && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
12745 && (operands_ok_ldrd_strd (operands[0], operands[3],
12746 operands[1], INTVAL (operands[2]),
12748 "ldrd%?\t%0, %3, [%1, %2]"
12749 [(set_attr "type" "load2")
12750 (set_attr "predicable" "yes")
12751 (set_attr "predicable_short_it" "no")])
;; LDRD special case: loads at [base] and [base, #4] (the second
;; address line with the +4 offset is elided in this extract) — emitted
;; as "ldrd %0, %2, [%1]".  Same post-reload/tuning gating as above.
12753 (define_insn "*thumb2_ldrd_base"
12754 [(set (match_operand:SI 0 "s_register_operand" "=r")
12755 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
12756 (set (match_operand:SI 2 "s_register_operand" "=r")
12757 (mem:SI (plus:SI (match_dup 1)
12759 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12760 && current_tune->prefer_ldrd_strd
12761 && (operands_ok_ldrd_strd (operands[0], operands[2],
12762 operands[1], 0, false, true))"
12763 "ldrd%?\t%0, %2, [%1]"
12764 [(set_attr "type" "load2")
12765 (set_attr "predicable" "yes")
12766 (set_attr "predicable_short_it" "no")])
;; LDRD special case: loads at [base, #-4] and [base] — emitted as
;; "ldrd %0, %2, [%1, #-4]" (the -4 offset line is elided in this
;; extract but is implied by the -4 passed to operands_ok_ldrd_strd).
12768 (define_insn "*thumb2_ldrd_base_neg"
12769 [(set (match_operand:SI 0 "s_register_operand" "=r")
12770 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
12772 (set (match_operand:SI 2 "s_register_operand" "=r")
12773 (mem:SI (match_dup 1)))]
12774 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12775 && current_tune->prefer_ldrd_strd
12776 && (operands_ok_ldrd_strd (operands[0], operands[2],
12777 operands[1], -4, false, true))"
12778 "ldrd%?\t%0, %2, [%1, #-4]"
12779 [(set_attr "type" "load2")
12780 (set_attr "predicable" "yes")
12781 (set_attr "predicable_short_it" "no")])
;; Store counterpart of *thumb2_ldrd: fuse two adjacent word stores
;; (base+imm and base+imm+4) into one STRD, with the same post-reload,
;; tuning, consecutive-offset and register-pair legality checks.
12783 (define_insn "*thumb2_strd"
12784 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
12785 (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
12786 (match_operand:SI 2 "s_register_operand" "r"))
12787 (set (mem:SI (plus:SI (match_dup 0)
12788 (match_operand:SI 3 "const_int_operand" "")))
12789 (match_operand:SI 4 "s_register_operand" "r"))]
12790 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12791 && current_tune->prefer_ldrd_strd
12792 && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
12793 && (operands_ok_ldrd_strd (operands[2], operands[4],
12794 operands[0], INTVAL (operands[1]),
12796 "strd%?\t%2, %4, [%0, %1]"
12797 [(set_attr "type" "store2")
12798 (set_attr "predicable" "yes")
12799 (set_attr "predicable_short_it" "no")])
;; STRD special case: stores at [base] and [base, #4] (the +4 offset
;; line is elided in this extract) — emitted as "strd %1, %2, [%0]".
12801 (define_insn "*thumb2_strd_base"
12802 [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
12803 (match_operand:SI 1 "s_register_operand" "r"))
12804 (set (mem:SI (plus:SI (match_dup 0)
12806 (match_operand:SI 2 "s_register_operand" "r"))]
12807 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12808 && current_tune->prefer_ldrd_strd
12809 && (operands_ok_ldrd_strd (operands[1], operands[2],
12810 operands[0], 0, false, false))"
12811 "strd%?\t%1, %2, [%0]"
12812 [(set_attr "type" "store2")
12813 (set_attr "predicable" "yes")
12814 (set_attr "predicable_short_it" "no")])
;; STRD special case: stores at [base, #-4] and [base] — emitted as
;; "strd %1, %2, [%0, #-4]" (the -4 offset line is elided in this
;; extract but is implied by the -4 passed to operands_ok_ldrd_strd).
12816 (define_insn "*thumb2_strd_base_neg"
12817 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
12819 (match_operand:SI 1 "s_register_operand" "r"))
12820 (set (mem:SI (match_dup 0))
12821 (match_operand:SI 2 "s_register_operand" "r"))]
12822 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12823 && current_tune->prefer_ldrd_strd
12824 && (operands_ok_ldrd_strd (operands[1], operands[2],
12825 operands[0], -4, false, false))"
12826 "strd%?\t%1, %2, [%0, #-4]"
12827 [(set_attr "type" "store2")
12828 (set_attr "predicable" "yes")
12829 (set_attr "predicable_short_it" "no")])
12832 ;; Load the load/store double peephole optimizations.
12833 (include "ldrdstrd.md")
12835 ;; Load the load/store multiple patterns
12836 (include "ldmstm.md")
12838 ;; Patterns in ldmstm.md don't cover more than 4 registers. This pattern covers
12839 ;; large lists without explicit writeback generated for APCS_FRAME epilogue.
;; Matches an arbitrary-length load-multiple parallel (predicate
;; "load_multiple_operation") and defers assembly output to
;; arm_output_multireg_pop with return_pc=false and an unconditional
;; condition.  Condition string and remaining output lines are elided
;; from this extract.
12840 (define_insn "*load_multiple"
12841 [(match_parallel 0 "load_multiple_operation"
12842 [(set (match_operand:SI 2 "s_register_operand" "=rk")
12843 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
12848 arm_output_multireg_pop (operands, /*return_pc=*/false,
12849 /*cond=*/const_true_rtx,
12855 [(set_attr "predicable" "yes")]
12858 ;; Vector bits common to IWMMXT and Neon
12859 (include "vec-common.md")
12860 ;; Load the Intel Wireless Multimedia Extension patterns
12861 (include "iwmmxt.md")
12862 ;; Load the VFP co-processor patterns
12864 ;; Thumb-2 patterns
12865 (include "thumb2.md")
12867 (include "neon.md")
12868 ;; Synchronization Primitives
12869 (include "sync.md")
12870 ;; Fixed-point patterns
12871 (include "arm-fixed.md")